ZTWHHH committed on
Commit
7461165
·
verified ·
1 Parent(s): 9b6ae76

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-310.pyc +0 -0
  2. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/connectivity.cpython-310.pyc +0 -0
  3. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-310.pyc +0 -0
  4. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/ramsey.cpython-310.pyc +0 -0
  5. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/kcomponents.py +369 -0
  6. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_approx_clust_coeff.cpython-310.pyc +0 -0
  7. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_clique.cpython-310.pyc +0 -0
  8. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_distance_measures.cpython-310.pyc +0 -0
  9. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_dominating_set.cpython-310.pyc +0 -0
  10. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_kcomponents.cpython-310.pyc +0 -0
  11. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_maxcut.cpython-310.pyc +0 -0
  12. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_steinertree.cpython-310.pyc +0 -0
  13. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_treewidth.cpython-310.pyc +0 -0
  14. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/test_clique.py +112 -0
  15. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/test_dominating_set.py +78 -0
  16. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/test_kcomponents.py +303 -0
  17. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/test_treewidth.py +280 -0
  18. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/test_vertex_cover.py +68 -0
  19. wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/traveling_salesman.py +1501 -0
  20. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/covering.py +57 -0
  21. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/matching.py +590 -0
  22. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/matrix.py +168 -0
  23. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_project.cpython-310.pyc +0 -0
  24. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_covering.py +33 -0
  25. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_matrix.py +84 -0
  26. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py +80 -0
  27. wemm/lib/python3.10/site-packages/networkx/algorithms/bridges.py +205 -0
  28. wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness_subset.cpython-310.pyc +0 -0
  29. wemm/lib/python3.10/site-packages/networkx/algorithms/community/label_propagation.py +338 -0
  30. wemm/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_centrality.cpython-310.pyc +0 -0
  31. wemm/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_lukes.cpython-310.pyc +0 -0
  32. wemm/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_quality.cpython-310.pyc +0 -0
  33. wemm/lib/python3.10/site-packages/networkx/algorithms/dominance.py +135 -0
  34. wemm/lib/python3.10/site-packages/networkx/algorithms/link_analysis/pagerank_alg.py +500 -0
  35. wemm/lib/python3.10/site-packages/networkx/algorithms/lowest_common_ancestors.py +269 -0
  36. wemm/lib/python3.10/site-packages/networkx/algorithms/planarity.py +1402 -0
  37. wemm/lib/python3.10/site-packages/networkx/algorithms/shortest_paths/__pycache__/dense.cpython-310.pyc +0 -0
  38. wemm/lib/python3.10/site-packages/networkx/algorithms/shortest_paths/__pycache__/unweighted.cpython-310.pyc +0 -0
  39. wemm/lib/python3.10/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/__init__.cpython-310.pyc +0 -0
  40. wemm/lib/python3.10/site-packages/networkx/algorithms/smallworld.py +404 -0
  41. wemm/lib/python3.10/site-packages/networkx/algorithms/summarization.py +564 -0
  42. wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_asteroidal.cpython-310.pyc +0 -0
  43. wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_bridges.cpython-310.pyc +0 -0
  44. wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_core.cpython-310.pyc +0 -0
  45. wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_d_separation.cpython-310.pyc +0 -0
  46. wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_graph_hashing.cpython-310.pyc +0 -0
  47. wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_mis.cpython-310.pyc +0 -0
  48. wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_moral.cpython-310.pyc +0 -0
  49. wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_polynomials.cpython-310.pyc +0 -0
  50. wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_richclub.cpython-310.pyc +0 -0
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.34 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/connectivity.cpython-310.pyc ADDED
Binary file (11.2 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-310.pyc ADDED
Binary file (1.43 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/ramsey.cpython-310.pyc ADDED
Binary file (1.56 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/kcomponents.py ADDED
@@ -0,0 +1,369 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Fast approximation for k-component structure"""
2
+
3
+ import itertools
4
+ from collections import defaultdict
5
+ from collections.abc import Mapping
6
+ from functools import cached_property
7
+
8
+ import networkx as nx
9
+ from networkx.algorithms.approximation import local_node_connectivity
10
+ from networkx.exception import NetworkXError
11
+ from networkx.utils import not_implemented_for
12
+
13
+ __all__ = ["k_components"]
14
+
15
+
16
@not_implemented_for("directed")
@nx._dispatchable(name="approximate_k_components")
def k_components(G, min_density=0.95):
    r"""Returns the approximate k-component structure of a graph G.

    A `k`-component is a maximal subgraph of a graph G that has, at least,
    node connectivity `k`: we need to remove at least `k` nodes to break it
    into more components. `k`-components have an inherent hierarchical
    structure because they are nested in terms of connectivity: a connected
    graph can contain several 2-components, each of which can contain
    one or more 3-components, and so forth.

    This implementation is based on the fast heuristics to approximate
    the `k`-component structure of a graph [1]_. Which, in turn, it is based on
    a fast approximation algorithm for finding good lower bounds of the number
    of node independent paths between two nodes [2]_.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    min_density : Float
        Density relaxation threshold. Default value 0.95

    Returns
    -------
    k_components : dict
        Dictionary with connectivity level `k` as key and a list of
        sets of nodes that form a k-component of level `k` as values.

    Raises
    ------
    NetworkXNotImplemented
        If G is directed.

    Examples
    --------
    >>> # Petersen graph has 10 nodes and it is triconnected, thus all
    >>> # nodes are in a single component on all three connectivity levels
    >>> from networkx.algorithms import approximation as apxa
    >>> G = nx.petersen_graph()
    >>> k_components = apxa.k_components(G)

    Notes
    -----
    The logic of the approximation algorithm for computing the `k`-component
    structure [1]_ is based on repeatedly applying simple and fast algorithms
    for `k`-cores and biconnected components in order to narrow down the
    number of pairs of nodes over which we have to compute White and Newman's
    approximation algorithm for finding node independent paths [2]_. More
    formally, this algorithm is based on Whitney's theorem, which states
    an inclusion relation among node connectivity, edge connectivity, and
    minimum degree for any graph G. This theorem implies that every
    `k`-component is nested inside a `k`-edge-component, which in turn,
    is contained in a `k`-core. Thus, this algorithm computes node independent
    paths among pairs of nodes in each biconnected part of each `k`-core,
    and repeats this procedure for each `k` from 3 to the maximal core number
    of a node in the input graph.

    Because, in practice, many nodes of the core of level `k` inside a
    bicomponent actually are part of a component of level k, the auxiliary
    graph needed for the algorithm is likely to be very dense. Thus, we use
    a complement graph data structure (see `AntiGraph`) to save memory.
    AntiGraph only stores information of the edges that are *not* present
    in the actual auxiliary graph. When applying algorithms to this
    complement graph data structure, it behaves as if it were the dense
    version.

    See also
    --------
    k_components

    References
    ----------
    .. [1] Torrents, J. and F. Ferraro (2015) Structural Cohesion:
        Visualization and Heuristics for Fast Computation.
        https://arxiv.org/pdf/1503.04476v1

    .. [2] White, Douglas R., and Mark Newman (2001) A Fast Algorithm for
        Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
        https://www.santafe.edu/research/results/working-papers/fast-approximation-algorithms-for-finding-node-ind

    .. [3] Moody, J. and D. White (2003). Social cohesion and embeddedness:
        A hierarchical conception of social groups.
        American Sociological Review 68(1), 103--28.
        https://doi.org/10.2307/3088904

    """
    # Dictionary with connectivity level (k) as keys and a list of
    # sets of nodes that form a k-component as values
    k_components = defaultdict(list)
    # make a few functions local for speed
    node_connectivity = local_node_connectivity
    k_core = nx.k_core
    core_number = nx.core_number
    biconnected_components = nx.biconnected_components
    combinations = itertools.combinations
    # Exact solution for k = {1,2}
    # There is a linear time algorithm for triconnectivity, if we had an
    # implementation available we could start from k = 4.
    for component in nx.connected_components(G):
        # isolated nodes have connectivity 0
        comp = set(component)
        if len(comp) > 1:
            k_components[1].append(comp)
    for bicomponent in nx.biconnected_components(G):
        # avoid considering dyads as bicomponents
        bicomp = set(bicomponent)
        if len(bicomp) > 2:
            k_components[2].append(bicomp)
    # There is no k-component of k > maximum core number
    # \kappa(G) <= \lambda(G) <= \delta(G)
    g_cnumber = core_number(G)
    max_core = max(g_cnumber.values())
    for k in range(3, max_core + 1):
        C = k_core(G, k, core_number=g_cnumber)
        for nodes in biconnected_components(C):
            # Build a subgraph SG induced by the nodes that are part of
            # each biconnected component of the k-core subgraph C.
            if len(nodes) < k:
                # A k-component needs at least k + 1 nodes; anything smaller
                # cannot contain one, so skip it early.
                continue
            SG = G.subgraph(nodes)
            # Build auxiliary graph
            # H is a complement-graph structure: `add_edge` records an edge
            # that is *absent* from the (dense) auxiliary graph, so the dense
            # graph H represents connects exactly the pairs with >= k
            # node-independent paths in SG.
            H = _AntiGraph()
            H.add_nodes_from(SG.nodes())
            for u, v in combinations(SG, 2):
                K = node_connectivity(SG, u, v, cutoff=k)
                if k > K:
                    H.add_edge(u, v)
            for h_nodes in biconnected_components(H):
                if len(h_nodes) <= k:
                    continue
                SH = H.subgraph(h_nodes)
                for Gc in _cliques_heuristic(SG, SH, k, min_density):
                    for k_nodes in biconnected_components(Gc):
                        # Re-core the candidate in the original subgraph to
                        # drop nodes that no longer meet the degree bound.
                        Gk = nx.k_core(SG.subgraph(k_nodes), k)
                        if len(Gk) <= k:
                            continue
                        k_components[k].append(set(Gk))
    return k_components
157
+
158
+
159
def _cliques_heuristic(G, H, k, min_density):
    """Yield dense candidate subgraphs of ``G`` for k-component detection.

    ``H`` is the auxiliary (Anti)graph built by ``k_components``.  Candidates
    are peeled from the cores of ``H``, highest core value first, and each is
    iteratively trimmed (removing minimum-degree nodes) until its core
    numbers are uniform and its density reaches ``min_density``.

    NOTE(review): this uses Python's ``while``/``else`` — ``SG`` is yielded
    only when the trimming loop exits normally (i.e. without the
    ``len(SH) <= k`` ``break``).
    """
    h_cnumber = nx.core_number(H)
    for i, c_value in enumerate(sorted(set(h_cnumber.values()), reverse=True)):
        cands = {n for n, c in h_cnumber.items() if c == c_value}
        # Skip checking for overlap for the highest core value
        if i == 0:
            overlap = False
        else:
            # Nodes outside `cands` adjacent (in the dense sense) to *every*
            # candidate node.
            overlap = set.intersection(
                *[{x for x in H[n] if x not in cands} for n in cands]
            )
        if overlap and len(overlap) < k:
            SH = H.subgraph(cands | overlap)
        else:
            SH = H.subgraph(cands)
        sh_cnumber = nx.core_number(SH)
        SG = nx.k_core(G.subgraph(SH), k)
        while not (_same(sh_cnumber) and nx.density(SH) >= min_density):
            # This subgraph must be writable => .copy()
            SH = H.subgraph(SG).copy()
            if len(SH) <= k:
                # Candidate shrank below a viable size; abandon it
                # (the `else` clause below will NOT run).
                break
            sh_cnumber = nx.core_number(SH)
            sh_deg = dict(SH.degree())
            min_deg = min(sh_deg.values())
            SH.remove_nodes_from(n for n, d in sh_deg.items() if d == min_deg)
            SG = nx.k_core(G.subgraph(SH), k)
        else:
            yield SG
188
+
189
+
190
+ def _same(measure, tol=0):
191
+ vals = set(measure.values())
192
+ if (max(vals) - min(vals)) <= tol:
193
+ return True
194
+ return False
195
+
196
+
197
class _AntiGraph(nx.Graph):
    """
    Class for complement graphs.

    The main goal is to be able to work with big and dense graphs with
    a low memory footprint.

    In this class you add the edges that *do not exist* in the dense graph,
    the report methods of the class return the neighbors, the edges and
    the degree as if it was the dense graph. Thus it's possible to use
    an instance of this class with some of NetworkX functions. In this
    case we only use k-core, connected_components, and biconnected_components.
    """

    # Single shared edge-data dict: every reported (dense) edge has weight 1.
    all_edge_dict = {"weight": 1}

    def single_edge_dict(self):
        # Factory used for edge attributes so all edges share one dict.
        return self.all_edge_dict

    edge_attr_dict_factory = single_edge_dict  # type: ignore[assignment]

    def __getitem__(self, n):
        """Returns a dict of neighbors of node n in the dense graph.

        Parameters
        ----------
        n : node
            A node in the graph.

        Returns
        -------
        adj_dict : dictionary
            The adjacency dictionary for nodes connected to n.

        """
        all_edge_dict = self.all_edge_dict
        # Dense neighbors = all nodes minus stored (missing-edge) neighbors
        # minus n itself.
        return {
            node: all_edge_dict for node in set(self._adj) - set(self._adj[n]) - {n}
        }

    def neighbors(self, n):
        """Returns an iterator over all neighbors of node n in the
        dense graph.
        """
        try:
            return iter(set(self._adj) - set(self._adj[n]) - {n})
        except KeyError as err:
            raise NetworkXError(f"The node {n} is not in the graph.") from err

    class AntiAtlasView(Mapping):
        """An adjacency inner dict for AntiGraph"""

        def __init__(self, graph, node):
            self._graph = graph
            self._atlas = graph._adj[node]
            self._node = node

        def __len__(self):
            # Dense degree: everyone except stored non-neighbors and self.
            return len(self._graph) - len(self._atlas) - 1

        def __iter__(self):
            return (n for n in self._graph if n not in self._atlas and n != self._node)

        def __getitem__(self, nbr):
            nbrs = set(self._graph._adj) - set(self._atlas) - {self._node}
            if nbr in nbrs:
                return self._graph.all_edge_dict
            raise KeyError(nbr)

    class AntiAdjacencyView(AntiAtlasView):
        """An adjacency outer dict for AntiGraph"""

        def __init__(self, graph):
            self._graph = graph
            self._atlas = graph._adj

        def __len__(self):
            return len(self._atlas)

        def __iter__(self):
            return iter(self._graph)

        def __getitem__(self, node):
            if node not in self._graph:
                raise KeyError(node)
            return self._graph.AntiAtlasView(self._graph, node)

    @cached_property
    def adj(self):
        # Complement-aware replacement for nx.Graph.adj.
        return self.AntiAdjacencyView(self)

    def subgraph(self, nodes):
        """This subgraph method returns a full AntiGraph. Not a View"""
        nodes = set(nodes)
        G = _AntiGraph()
        G.add_nodes_from(nodes)
        for n in G:
            Gnbrs = G.adjlist_inner_dict_factory()
            G._adj[n] = Gnbrs
            # Copy only the stored (missing) edges whose endpoints survive.
            for nbr, d in self._adj[n].items():
                if nbr in G._adj:
                    Gnbrs[nbr] = d
                    G._adj[nbr][n] = d
        G.graph = self.graph
        return G

    class AntiDegreeView(nx.reportviews.DegreeView):
        def __iter__(self):
            all_nodes = set(self._succ)
            for n in self._nodes:
                nbrs = all_nodes - set(self._succ[n]) - {n}
                yield (n, len(nbrs))

        def __getitem__(self, n):
            nbrs = set(self._succ) - set(self._succ[n]) - {n}
            # AntiGraph is a ThinGraph so all edges have weight 1
            return len(nbrs) + (n in nbrs)

    @cached_property
    def degree(self):
        """Returns an iterator for (node, degree) and degree for single node.

        The node degree is the number of edges adjacent to the node.

        Parameters
        ----------
        nbunch : iterable container, optional (default=all nodes)
            A container of nodes. The container will be iterated
            through once.

        weight : string or None, optional (default=None)
            The edge attribute that holds the numerical value used
            as a weight. If None, then each edge has weight 1.
            The degree is the sum of the edge weights adjacent to the node.

        Returns
        -------
        deg:
            Degree of the node, if a single node is passed as argument.
        nd_iter : an iterator
            The iterator returns two-tuples of (node, degree).

        See Also
        --------
        degree

        Examples
        --------
        >>> G = nx.path_graph(4)
        >>> G.degree(0)  # node 0 with degree 1
        1
        >>> list(G.degree([0, 1]))
        [(0, 1), (1, 2)]

        """
        return self.AntiDegreeView(self)

    def adjacency(self):
        """Returns an iterator of (node, adjacency set) tuples for all nodes
        in the dense graph.

        This is the fastest way to look at every edge.
        For directed graphs, only outgoing adjacencies are included.

        Returns
        -------
        adj_iter : iterator
            An iterator of (node, adjacency set) for all nodes in
            the graph.

        """
        for n in self._adj:
            yield (n, set(self._adj) - set(self._adj[n]) - {n})
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_approx_clust_coeff.cpython-310.pyc ADDED
Binary file (1.39 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_clique.cpython-310.pyc ADDED
Binary file (4.4 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_distance_measures.cpython-310.pyc ADDED
Binary file (3.09 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_dominating_set.cpython-310.pyc ADDED
Binary file (2.54 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_kcomponents.cpython-310.pyc ADDED
Binary file (9.62 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_maxcut.cpython-310.pyc ADDED
Binary file (3.02 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_steinertree.cpython-310.pyc ADDED
Binary file (8.18 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_treewidth.cpython-310.pyc ADDED
Binary file (7.16 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/test_clique.py ADDED
@@ -0,0 +1,112 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Unit tests for the :mod:`networkx.algorithms.approximation.clique` module."""
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms.approximation import (
5
+ clique_removal,
6
+ large_clique_size,
7
+ max_clique,
8
+ maximum_independent_set,
9
+ )
10
+
11
+
12
def is_independent_set(G, nodes):
    """Returns True if and only if `nodes` is an independent set in `G`.

    `G` is a NetworkX graph. `nodes` is an iterable of nodes in
    `G`.

    """
    # An independent set induces no edges at all.
    # (The original docstring said "clique" -- it was swapped with the
    # docstring of `is_clique` below; the code checks an independent set.)
    return G.subgraph(nodes).number_of_edges() == 0
20
+
21
+
22
def is_clique(G, nodes):
    """Returns True if and only if `nodes` is a clique in `G`.

    `G` is an undirected simple graph. `nodes` is an iterable of
    nodes in `G`.

    """
    # A clique on n nodes induces all n * (n - 1) / 2 possible edges.
    # (The original docstring said "independent set" -- it was swapped with
    # the docstring of `is_independent_set` above.)
    H = G.subgraph(nodes)
    n = len(H)
    return H.number_of_edges() == n * (n - 1) // 2
33
+
34
+
35
class TestCliqueRemoval:
    """Unit tests for the
    :func:`~networkx.algorithms.approximation.clique_removal` function.

    """

    def test_trivial_graph(self):
        G = nx.trivial_graph()
        ind_set, found_cliques = clique_removal(G)
        assert is_independent_set(G, ind_set)
        assert all(is_clique(G, c) for c in found_cliques)
        # The single-node graph can only yield 1-cliques (singletons).
        assert all(len(c) == 1 for c in found_cliques)

    def test_complete_graph(self):
        G = nx.complete_graph(10)
        ind_set, found_cliques = clique_removal(G)
        assert is_independent_set(G, ind_set)
        assert all(is_clique(G, c) for c in found_cliques)

    def test_barbell_graph(self):
        G = nx.barbell_graph(10, 5)
        ind_set, found_cliques = clique_removal(G)
        assert is_independent_set(G, ind_set)
        assert all(is_clique(G, c) for c in found_cliques)
60
+
61
+
62
class TestMaxClique:
    """Unit tests for the :func:`networkx.algorithms.approximation.max_clique`
    function.

    """

    def test_null_graph(self):
        # The empty graph has an empty maximum clique.
        assert len(max_clique(nx.null_graph())) == 0

    def test_complete_graph(self):
        # this should return the entire graph
        assert len(max_clique(nx.complete_graph(30))) == 30

    def test_maximal_by_cardinality(self):
        """Tests that the maximal clique is computed according to maximum
        cardinality of the sets.

        For more information, see pull request #1531.

        """
        G = nx.complete_graph(5)
        G.add_edge(4, 5)
        assert len(max_clique(G)) > 1

        G = nx.lollipop_graph(30, 2)
        assert len(max_clique(G)) > 2
93
+
94
+
95
def test_large_clique_size():
    """The clique found by the heuristic shrinks as the K9 is dismantled."""
    G = nx.complete_graph(9)
    nx.add_cycle(G, [9, 10, 11])
    G.add_edge(8, 9)
    G.add_edge(1, 12)
    G.add_node(13)

    mutations = [
        (None, 9),
        (lambda: G.remove_node(5), 8),
        (lambda: G.remove_edge(2, 3), 7),
    ]
    for mutate, expected in mutations:
        if mutate is not None:
            mutate()
        assert large_clique_size(G) == expected
107
+
108
+
109
def test_independent_set():
    # smoke test: the empty graph has an empty maximum independent set
    assert len(maximum_independent_set(nx.Graph())) == 0
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/test_dominating_set.py ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms.approximation import (
5
+ min_edge_dominating_set,
6
+ min_weighted_dominating_set,
7
+ )
8
+
9
+
10
class TestMinWeightDominatingSet:
    def test_min_weighted_dominating_set(self):
        graph = nx.Graph()
        graph.add_edge(1, 2)
        graph.add_edge(1, 5)
        graph.add_edge(2, 3)
        graph.add_edge(2, 5)
        graph.add_edge(3, 4)
        graph.add_edge(3, 6)
        graph.add_edge(5, 6)

        vertices = {1, 2, 3, 4, 5, 6}
        # due to ties, this might be hard to test tight bounds
        dom_set = min_weighted_dominating_set(graph)
        for vertex in vertices - dom_set:
            neighbors = set(graph.neighbors(vertex))
            assert len(neighbors & dom_set) > 0, "Non dominating set found!"

    def test_star_graph(self):
        """Tests that an approximate dominating set for the star graph,
        even when the center node does not have the smallest integer
        label, gives just the center node.

        For more information, see #1527.

        """
        # Create a star graph in which the center node has the highest
        # label instead of the lowest.
        G = nx.star_graph(10)
        G = nx.relabel_nodes(G, {0: 9, 9: 0})
        assert min_weighted_dominating_set(G) == {9}

    def test_null_graph(self):
        """Tests that the unique dominating set for the null graph is an empty set"""
        G = nx.Graph()
        assert min_weighted_dominating_set(G) == set()

    @staticmethod
    def _assert_edge_dominating(graph, dom_set):
        """Assert every edge of `graph` shares an endpoint with `dom_set`.

        Fix: the original check only tested whether some dominating edge
        touched endpoint ``u``; an edge (u, v) is dominated when *either*
        endpoint is covered, so both must be considered.
        """
        for edge in graph.edges():
            if edge in dom_set:
                continue
            u, v = edge
            assert any(
                u in dom_edge or v in dom_edge for dom_edge in dom_set
            ), "Non adjacent edge found!"

    def test_min_edge_dominating_set(self):
        # Deduplicated: the same validation ran twice, copy-pasted, for
        # the path graph and the complete graph.
        for graph in (nx.path_graph(5), nx.complete_graph(10)):
            dom_set = min_edge_dominating_set(graph)
            self._assert_edge_dominating(graph, dom_set)

        graph = nx.Graph()  # empty Networkx graph
        with pytest.raises(ValueError, match="Expected non-empty NetworkX graph!"):
            min_edge_dominating_set(graph)
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/test_kcomponents.py ADDED
@@ -0,0 +1,303 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Test for approximation to k-components algorithm
2
+ import pytest
3
+
4
+ import networkx as nx
5
+ from networkx.algorithms.approximation import k_components
6
+ from networkx.algorithms.approximation.kcomponents import _AntiGraph, _same
7
+
8
+
9
def build_k_number_dict(k_components):
    """Map each node to the highest connectivity level it belongs to.

    Levels are visited in ascending order, so a node appearing in several
    components keeps the largest `k` (later writes overwrite earlier ones).
    """
    k_number = {}
    for level, components in sorted(k_components.items()):
        for component in components:
            k_number.update((node, level) for node in component)
    return k_number
16
+
17
+
18
+ ##
19
+ # Some nice synthetic graphs
20
+ ##
21
+
22
+
23
def graph_example_1():
    """Build a synthetic graph with a known k-component structure.

    A 5x5 grid with, attached at each of the four corners: a Petersen graph
    (triconnected), a K5 (4-connected) linked to it by three edges, and a
    second K5 merged into the first by fusing one node.

    NOTE(review): the node arithmetic (``new_node + 1``, ``+ 10``, ``+ 17``,
    ...) depends on the exact relabeling produced by ``disjoint_union`` --
    do not reorder these statements.
    """
    G = nx.convert_node_labels_to_integers(
        nx.grid_graph([5, 5]), label_attribute="labels"
    )
    rlabels = nx.get_node_attributes(G, "labels")
    labels = {v: k for k, v in rlabels.items()}

    for nodes in [
        (labels[(0, 0)], labels[(1, 0)]),
        (labels[(0, 4)], labels[(1, 4)]),
        (labels[(3, 0)], labels[(4, 0)]),
        (labels[(3, 4)], labels[(4, 4)]),
    ]:
        new_node = G.order() + 1
        # Petersen graph is triconnected
        P = nx.petersen_graph()
        G = nx.disjoint_union(G, P)
        # Add two edges between the grid and P
        G.add_edge(new_node + 1, nodes[0])
        G.add_edge(new_node, nodes[1])
        # K5 is 4-connected
        K = nx.complete_graph(5)
        G = nx.disjoint_union(G, K)
        # Add three edges between P and K5
        G.add_edge(new_node + 2, new_node + 11)
        G.add_edge(new_node + 3, new_node + 12)
        G.add_edge(new_node + 4, new_node + 13)
        # Add another K5 sharing a node
        G = nx.disjoint_union(G, K)
        nbrs = G[new_node + 10]
        G.remove_node(new_node + 10)
        for nbr in nbrs:
            G.add_edge(new_node + 17, nbr)
        G.add_edge(new_node + 16, new_node + 5)
    return G
58
+
59
+
60
def torrents_and_ferraro_graph():
    """Build the synthetic test graph -- presumably the example from
    Torrents & Ferraro (2015); verify against the paper.

    Like ``graph_example_1`` but only two corners get the
    "K5 sharing one node" gadget; the other two corners get a
    "K5 sharing two nodes" gadget.

    NOTE(review): node arithmetic depends on ``disjoint_union`` relabeling;
    do not reorder statements.
    """
    G = nx.convert_node_labels_to_integers(
        nx.grid_graph([5, 5]), label_attribute="labels"
    )
    rlabels = nx.get_node_attributes(G, "labels")
    labels = {v: k for k, v in rlabels.items()}

    for nodes in [(labels[(0, 4)], labels[(1, 4)]), (labels[(3, 4)], labels[(4, 4)])]:
        new_node = G.order() + 1
        # Petersen graph is triconnected
        P = nx.petersen_graph()
        G = nx.disjoint_union(G, P)
        # Add two edges between the grid and P
        G.add_edge(new_node + 1, nodes[0])
        G.add_edge(new_node, nodes[1])
        # K5 is 4-connected
        K = nx.complete_graph(5)
        G = nx.disjoint_union(G, K)
        # Add three edges between P and K5
        G.add_edge(new_node + 2, new_node + 11)
        G.add_edge(new_node + 3, new_node + 12)
        G.add_edge(new_node + 4, new_node + 13)
        # Add another K5 sharing a node
        G = nx.disjoint_union(G, K)
        nbrs = G[new_node + 10]
        G.remove_node(new_node + 10)
        for nbr in nbrs:
            G.add_edge(new_node + 17, nbr)
        # Commenting this makes the graph not biconnected !!
        # This stupid mistake make one reviewer very angry :P
        G.add_edge(new_node + 16, new_node + 8)

    for nodes in [(labels[(0, 0)], labels[(1, 0)]), (labels[(3, 0)], labels[(4, 0)])]:
        new_node = G.order() + 1
        # Petersen graph is triconnected
        P = nx.petersen_graph()
        G = nx.disjoint_union(G, P)
        # Add two edges between the grid and P
        G.add_edge(new_node + 1, nodes[0])
        G.add_edge(new_node, nodes[1])
        # K5 is 4-connected
        K = nx.complete_graph(5)
        G = nx.disjoint_union(G, K)
        # Add three edges between P and K5
        G.add_edge(new_node + 2, new_node + 11)
        G.add_edge(new_node + 3, new_node + 12)
        G.add_edge(new_node + 4, new_node + 13)
        # Add another K5 sharing two nodes
        G = nx.disjoint_union(G, K)
        nbrs = G[new_node + 10]
        G.remove_node(new_node + 10)
        for nbr in nbrs:
            G.add_edge(new_node + 17, nbr)
        nbrs2 = G[new_node + 9]
        G.remove_node(new_node + 9)
        for nbr in nbrs2:
            G.add_edge(new_node + 18, nbr)
    return G
118
+
119
+
120
+ # Helper function
121
+
122
+
123
+ def _check_connectivity(G):
124
+ result = k_components(G)
125
+ for k, components in result.items():
126
+ if k < 3:
127
+ continue
128
+ for component in components:
129
+ C = G.subgraph(component)
130
+ K = nx.node_connectivity(C)
131
+ assert K >= k
132
+
133
+
134
+ def test_torrents_and_ferraro_graph():
135
+ G = torrents_and_ferraro_graph()
136
+ _check_connectivity(G)
137
+
138
+
139
+ def test_example_1():
140
+ G = graph_example_1()
141
+ _check_connectivity(G)
142
+
143
+
144
+ def test_karate_0():
145
+ G = nx.karate_club_graph()
146
+ _check_connectivity(G)
147
+
148
+
149
+ def test_karate_1():
150
+ karate_k_num = {
151
+ 0: 4,
152
+ 1: 4,
153
+ 2: 4,
154
+ 3: 4,
155
+ 4: 3,
156
+ 5: 3,
157
+ 6: 3,
158
+ 7: 4,
159
+ 8: 4,
160
+ 9: 2,
161
+ 10: 3,
162
+ 11: 1,
163
+ 12: 2,
164
+ 13: 4,
165
+ 14: 2,
166
+ 15: 2,
167
+ 16: 2,
168
+ 17: 2,
169
+ 18: 2,
170
+ 19: 3,
171
+ 20: 2,
172
+ 21: 2,
173
+ 22: 2,
174
+ 23: 3,
175
+ 24: 3,
176
+ 25: 3,
177
+ 26: 2,
178
+ 27: 3,
179
+ 28: 3,
180
+ 29: 3,
181
+ 30: 4,
182
+ 31: 3,
183
+ 32: 4,
184
+ 33: 4,
185
+ }
186
+ approx_karate_k_num = karate_k_num.copy()
187
+ approx_karate_k_num[24] = 2
188
+ approx_karate_k_num[25] = 2
189
+ G = nx.karate_club_graph()
190
+ k_comps = k_components(G)
191
+ k_num = build_k_number_dict(k_comps)
192
+ assert k_num in (karate_k_num, approx_karate_k_num)
193
+
194
+
195
+ def test_example_1_detail_3_and_4():
196
+ G = graph_example_1()
197
+ result = k_components(G)
198
+ # In this example graph there are 8 3-components, 4 with 15 nodes
199
+ # and 4 with 5 nodes.
200
+ assert len(result[3]) == 8
201
+ assert len([c for c in result[3] if len(c) == 15]) == 4
202
+ assert len([c for c in result[3] if len(c) == 5]) == 4
203
+ # There are also 8 4-components all with 5 nodes.
204
+ assert len(result[4]) == 8
205
+ assert all(len(c) == 5 for c in result[4])
206
+ # Finally check that the k-components detected have actually node
207
+ # connectivity >= k.
208
+ for k, components in result.items():
209
+ if k < 3:
210
+ continue
211
+ for component in components:
212
+ K = nx.node_connectivity(G.subgraph(component))
213
+ assert K >= k
214
+
215
+
216
+ def test_directed():
217
+ with pytest.raises(nx.NetworkXNotImplemented):
218
+ G = nx.gnp_random_graph(10, 0.4, directed=True)
219
+ kc = k_components(G)
220
+
221
+
222
+ def test_same():
223
+ equal = {"A": 2, "B": 2, "C": 2}
224
+ slightly_different = {"A": 2, "B": 1, "C": 2}
225
+ different = {"A": 2, "B": 8, "C": 18}
226
+ assert _same(equal)
227
+ assert not _same(slightly_different)
228
+ assert _same(slightly_different, tol=1)
229
+ assert not _same(different)
230
+ assert not _same(different, tol=4)
231
+
232
+
233
+ class TestAntiGraph:
234
+ @classmethod
235
+ def setup_class(cls):
236
+ cls.Gnp = nx.gnp_random_graph(20, 0.8, seed=42)
237
+ cls.Anp = _AntiGraph(nx.complement(cls.Gnp))
238
+ cls.Gd = nx.davis_southern_women_graph()
239
+ cls.Ad = _AntiGraph(nx.complement(cls.Gd))
240
+ cls.Gk = nx.karate_club_graph()
241
+ cls.Ak = _AntiGraph(nx.complement(cls.Gk))
242
+ cls.GA = [(cls.Gnp, cls.Anp), (cls.Gd, cls.Ad), (cls.Gk, cls.Ak)]
243
+
244
+ def test_size(self):
245
+ for G, A in self.GA:
246
+ n = G.order()
247
+ s = len(list(G.edges())) + len(list(A.edges()))
248
+ assert s == (n * (n - 1)) / 2
249
+
250
+ def test_degree(self):
251
+ for G, A in self.GA:
252
+ assert sorted(G.degree()) == sorted(A.degree())
253
+
254
+ def test_core_number(self):
255
+ for G, A in self.GA:
256
+ assert nx.core_number(G) == nx.core_number(A)
257
+
258
+ def test_connected_components(self):
259
+ # ccs are same unless isolated nodes or any node has degree=len(G)-1
260
+ # graphs in self.GA avoid this problem
261
+ for G, A in self.GA:
262
+ gc = [set(c) for c in nx.connected_components(G)]
263
+ ac = [set(c) for c in nx.connected_components(A)]
264
+ for comp in ac:
265
+ assert comp in gc
266
+
267
+ def test_adj(self):
268
+ for G, A in self.GA:
269
+ for n, nbrs in G.adj.items():
270
+ a_adj = sorted((n, sorted(ad)) for n, ad in A.adj.items())
271
+ g_adj = sorted((n, sorted(ad)) for n, ad in G.adj.items())
272
+ assert a_adj == g_adj
273
+
274
+ def test_adjacency(self):
275
+ for G, A in self.GA:
276
+ a_adj = list(A.adjacency())
277
+ for n, nbrs in G.adjacency():
278
+ assert (n, set(nbrs)) in a_adj
279
+
280
+ def test_neighbors(self):
281
+ for G, A in self.GA:
282
+ node = list(G.nodes())[0]
283
+ assert set(G.neighbors(node)) == set(A.neighbors(node))
284
+
285
+ def test_node_not_in_graph(self):
286
+ for G, A in self.GA:
287
+ node = "non_existent_node"
288
+ pytest.raises(nx.NetworkXError, A.neighbors, node)
289
+ pytest.raises(nx.NetworkXError, G.neighbors, node)
290
+
291
+ def test_degree_thingraph(self):
292
+ for G, A in self.GA:
293
+ node = list(G.nodes())[0]
294
+ nodes = list(G.nodes())[1:4]
295
+ assert G.degree(node) == A.degree(node)
296
+ assert sum(d for n, d in G.degree()) == sum(d for n, d in A.degree())
297
+ # AntiGraph is a ThinGraph, so all the weights are 1
298
+ assert sum(d for n, d in A.degree()) == sum(
299
+ d for n, d in A.degree(weight="weight")
300
+ )
301
+ assert sum(d for n, d in G.degree(nodes)) == sum(
302
+ d for n, d in A.degree(nodes)
303
+ )
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/test_treewidth.py ADDED
@@ -0,0 +1,280 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import itertools
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms.approximation import (
5
+ treewidth_min_degree,
6
+ treewidth_min_fill_in,
7
+ )
8
+ from networkx.algorithms.approximation.treewidth import (
9
+ MinDegreeHeuristic,
10
+ min_fill_in_heuristic,
11
+ )
12
+
13
+
14
+ def is_tree_decomp(graph, decomp):
15
+ """Check if the given tree decomposition is valid."""
16
+ for x in graph.nodes():
17
+ appear_once = False
18
+ for bag in decomp.nodes():
19
+ if x in bag:
20
+ appear_once = True
21
+ break
22
+ assert appear_once
23
+
24
+ # Check if each connected pair of nodes are at least once together in a bag
25
+ for x, y in graph.edges():
26
+ appear_together = False
27
+ for bag in decomp.nodes():
28
+ if x in bag and y in bag:
29
+ appear_together = True
30
+ break
31
+ assert appear_together
32
+
33
+ # Check if the nodes associated with vertex v form a connected subset of T
34
+ for v in graph.nodes():
35
+ subset = []
36
+ for bag in decomp.nodes():
37
+ if v in bag:
38
+ subset.append(bag)
39
+ sub_graph = decomp.subgraph(subset)
40
+ assert nx.is_connected(sub_graph)
41
+
42
+
43
+ class TestTreewidthMinDegree:
44
+ """Unit tests for the min_degree function"""
45
+
46
+ @classmethod
47
+ def setup_class(cls):
48
+ """Setup for different kinds of trees"""
49
+ cls.complete = nx.Graph()
50
+ cls.complete.add_edge(1, 2)
51
+ cls.complete.add_edge(2, 3)
52
+ cls.complete.add_edge(1, 3)
53
+
54
+ cls.small_tree = nx.Graph()
55
+ cls.small_tree.add_edge(1, 3)
56
+ cls.small_tree.add_edge(4, 3)
57
+ cls.small_tree.add_edge(2, 3)
58
+ cls.small_tree.add_edge(3, 5)
59
+ cls.small_tree.add_edge(5, 6)
60
+ cls.small_tree.add_edge(5, 7)
61
+ cls.small_tree.add_edge(6, 7)
62
+
63
+ cls.deterministic_graph = nx.Graph()
64
+ cls.deterministic_graph.add_edge(0, 1) # deg(0) = 1
65
+
66
+ cls.deterministic_graph.add_edge(1, 2) # deg(1) = 2
67
+
68
+ cls.deterministic_graph.add_edge(2, 3)
69
+ cls.deterministic_graph.add_edge(2, 4) # deg(2) = 3
70
+
71
+ cls.deterministic_graph.add_edge(3, 4)
72
+ cls.deterministic_graph.add_edge(3, 5)
73
+ cls.deterministic_graph.add_edge(3, 6) # deg(3) = 4
74
+
75
+ cls.deterministic_graph.add_edge(4, 5)
76
+ cls.deterministic_graph.add_edge(4, 6)
77
+ cls.deterministic_graph.add_edge(4, 7) # deg(4) = 5
78
+
79
+ cls.deterministic_graph.add_edge(5, 6)
80
+ cls.deterministic_graph.add_edge(5, 7)
81
+ cls.deterministic_graph.add_edge(5, 8)
82
+ cls.deterministic_graph.add_edge(5, 9) # deg(5) = 6
83
+
84
+ cls.deterministic_graph.add_edge(6, 7)
85
+ cls.deterministic_graph.add_edge(6, 8)
86
+ cls.deterministic_graph.add_edge(6, 9) # deg(6) = 6
87
+
88
+ cls.deterministic_graph.add_edge(7, 8)
89
+ cls.deterministic_graph.add_edge(7, 9) # deg(7) = 5
90
+
91
+ cls.deterministic_graph.add_edge(8, 9) # deg(8) = 4
92
+
93
+ def test_petersen_graph(self):
94
+ """Test Petersen graph tree decomposition result"""
95
+ G = nx.petersen_graph()
96
+ _, decomp = treewidth_min_degree(G)
97
+ is_tree_decomp(G, decomp)
98
+
99
+ def test_small_tree_treewidth(self):
100
+ """Test small tree
101
+
102
+ Test if the computed treewidth of the known self.small_tree is 2.
103
+ As we know which value we can expect from our heuristic, values other
104
+ than two are regressions
105
+ """
106
+ G = self.small_tree
107
+ # the order of removal should be [1,2,4]3[5,6,7]
108
+ # (with [] denoting any order of the containing nodes)
109
+ # resulting in treewidth 2 for the heuristic
110
+ treewidth, _ = treewidth_min_fill_in(G)
111
+ assert treewidth == 2
112
+
113
+ def test_heuristic_abort(self):
114
+ """Test heuristic abort condition for fully connected graph"""
115
+ graph = {}
116
+ for u in self.complete:
117
+ graph[u] = set()
118
+ for v in self.complete[u]:
119
+ if u != v: # ignore self-loop
120
+ graph[u].add(v)
121
+
122
+ deg_heuristic = MinDegreeHeuristic(graph)
123
+ node = deg_heuristic.best_node(graph)
124
+ if node is None:
125
+ pass
126
+ else:
127
+ assert False
128
+
129
+ def test_empty_graph(self):
130
+ """Test empty graph"""
131
+ G = nx.Graph()
132
+ _, _ = treewidth_min_degree(G)
133
+
134
+ def test_two_component_graph(self):
135
+ G = nx.Graph()
136
+ G.add_node(1)
137
+ G.add_node(2)
138
+ treewidth, _ = treewidth_min_degree(G)
139
+ assert treewidth == 0
140
+
141
+ def test_not_sortable_nodes(self):
142
+ G = nx.Graph([(0, "a")])
143
+ treewidth_min_degree(G)
144
+
145
+ def test_heuristic_first_steps(self):
146
+ """Test first steps of min_degree heuristic"""
147
+ graph = {
148
+ n: set(self.deterministic_graph[n]) - {n} for n in self.deterministic_graph
149
+ }
150
+ deg_heuristic = MinDegreeHeuristic(graph)
151
+ elim_node = deg_heuristic.best_node(graph)
152
+ print(f"Graph {graph}:")
153
+ steps = []
154
+
155
+ while elim_node is not None:
156
+ print(f"Removing {elim_node}:")
157
+ steps.append(elim_node)
158
+ nbrs = graph[elim_node]
159
+
160
+ for u, v in itertools.permutations(nbrs, 2):
161
+ if v not in graph[u]:
162
+ graph[u].add(v)
163
+
164
+ for u in graph:
165
+ if elim_node in graph[u]:
166
+ graph[u].remove(elim_node)
167
+
168
+ del graph[elim_node]
169
+ print(f"Graph {graph}:")
170
+ elim_node = deg_heuristic.best_node(graph)
171
+
172
+ # check only the first 5 elements for equality
173
+ assert steps[:5] == [0, 1, 2, 3, 4]
174
+
175
+
176
+ class TestTreewidthMinFillIn:
177
+ """Unit tests for the treewidth_min_fill_in function."""
178
+
179
+ @classmethod
180
+ def setup_class(cls):
181
+ """Setup for different kinds of trees"""
182
+ cls.complete = nx.Graph()
183
+ cls.complete.add_edge(1, 2)
184
+ cls.complete.add_edge(2, 3)
185
+ cls.complete.add_edge(1, 3)
186
+
187
+ cls.small_tree = nx.Graph()
188
+ cls.small_tree.add_edge(1, 2)
189
+ cls.small_tree.add_edge(2, 3)
190
+ cls.small_tree.add_edge(3, 4)
191
+ cls.small_tree.add_edge(1, 4)
192
+ cls.small_tree.add_edge(2, 4)
193
+ cls.small_tree.add_edge(4, 5)
194
+ cls.small_tree.add_edge(5, 6)
195
+ cls.small_tree.add_edge(5, 7)
196
+ cls.small_tree.add_edge(6, 7)
197
+
198
+ cls.deterministic_graph = nx.Graph()
199
+ cls.deterministic_graph.add_edge(1, 2)
200
+ cls.deterministic_graph.add_edge(1, 3)
201
+ cls.deterministic_graph.add_edge(3, 4)
202
+ cls.deterministic_graph.add_edge(2, 4)
203
+ cls.deterministic_graph.add_edge(3, 5)
204
+ cls.deterministic_graph.add_edge(4, 5)
205
+ cls.deterministic_graph.add_edge(3, 6)
206
+ cls.deterministic_graph.add_edge(5, 6)
207
+
208
+ def test_petersen_graph(self):
209
+ """Test Petersen graph tree decomposition result"""
210
+ G = nx.petersen_graph()
211
+ _, decomp = treewidth_min_fill_in(G)
212
+ is_tree_decomp(G, decomp)
213
+
214
+ def test_small_tree_treewidth(self):
215
+ """Test if the computed treewidth of the known self.small_tree is 2"""
216
+ G = self.small_tree
217
+ # the order of removal should be [1,2,4]3[5,6,7]
218
+ # (with [] denoting any order of the containing nodes)
219
+ # resulting in treewidth 2 for the heuristic
220
+ treewidth, _ = treewidth_min_fill_in(G)
221
+ assert treewidth == 2
222
+
223
+ def test_heuristic_abort(self):
224
+ """Test if min_fill_in returns None for fully connected graph"""
225
+ graph = {}
226
+ for u in self.complete:
227
+ graph[u] = set()
228
+ for v in self.complete[u]:
229
+ if u != v: # ignore self-loop
230
+ graph[u].add(v)
231
+ next_node = min_fill_in_heuristic(graph)
232
+ if next_node is None:
233
+ pass
234
+ else:
235
+ assert False
236
+
237
+ def test_empty_graph(self):
238
+ """Test empty graph"""
239
+ G = nx.Graph()
240
+ _, _ = treewidth_min_fill_in(G)
241
+
242
+ def test_two_component_graph(self):
243
+ G = nx.Graph()
244
+ G.add_node(1)
245
+ G.add_node(2)
246
+ treewidth, _ = treewidth_min_fill_in(G)
247
+ assert treewidth == 0
248
+
249
+ def test_not_sortable_nodes(self):
250
+ G = nx.Graph([(0, "a")])
251
+ treewidth_min_fill_in(G)
252
+
253
+ def test_heuristic_first_steps(self):
254
+ """Test first steps of min_fill_in heuristic"""
255
+ graph = {
256
+ n: set(self.deterministic_graph[n]) - {n} for n in self.deterministic_graph
257
+ }
258
+ print(f"Graph {graph}:")
259
+ elim_node = min_fill_in_heuristic(graph)
260
+ steps = []
261
+
262
+ while elim_node is not None:
263
+ print(f"Removing {elim_node}:")
264
+ steps.append(elim_node)
265
+ nbrs = graph[elim_node]
266
+
267
+ for u, v in itertools.permutations(nbrs, 2):
268
+ if v not in graph[u]:
269
+ graph[u].add(v)
270
+
271
+ for u in graph:
272
+ if elim_node in graph[u]:
273
+ graph[u].remove(elim_node)
274
+
275
+ del graph[elim_node]
276
+ print(f"Graph {graph}:")
277
+ elim_node = min_fill_in_heuristic(graph)
278
+
279
+ # check only the first 2 elements for equality
280
+ assert steps[:2] == [6, 5]
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/test_vertex_cover.py ADDED
@@ -0,0 +1,68 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import networkx as nx
2
+ from networkx.algorithms.approximation import min_weighted_vertex_cover
3
+
4
+
5
+ def is_cover(G, node_cover):
6
+ return all({u, v} & node_cover for u, v in G.edges())
7
+
8
+
9
+ class TestMWVC:
10
+ """Unit tests for the approximate minimum weighted vertex cover
11
+ function,
12
+ :func:`~networkx.algorithms.approximation.vertex_cover.min_weighted_vertex_cover`.
13
+
14
+ """
15
+
16
+ def test_unweighted_directed(self):
17
+ # Create a star graph in which half the nodes are directed in
18
+ # and half are directed out.
19
+ G = nx.DiGraph()
20
+ G.add_edges_from((0, v) for v in range(1, 26))
21
+ G.add_edges_from((v, 0) for v in range(26, 51))
22
+ cover = min_weighted_vertex_cover(G)
23
+ assert 1 == len(cover)
24
+ assert is_cover(G, cover)
25
+
26
+ def test_unweighted_undirected(self):
27
+ # create a simple star graph
28
+ size = 50
29
+ sg = nx.star_graph(size)
30
+ cover = min_weighted_vertex_cover(sg)
31
+ assert 1 == len(cover)
32
+ assert is_cover(sg, cover)
33
+
34
+ def test_weighted(self):
35
+ wg = nx.Graph()
36
+ wg.add_node(0, weight=10)
37
+ wg.add_node(1, weight=1)
38
+ wg.add_node(2, weight=1)
39
+ wg.add_node(3, weight=1)
40
+ wg.add_node(4, weight=1)
41
+
42
+ wg.add_edge(0, 1)
43
+ wg.add_edge(0, 2)
44
+ wg.add_edge(0, 3)
45
+ wg.add_edge(0, 4)
46
+
47
+ wg.add_edge(1, 2)
48
+ wg.add_edge(2, 3)
49
+ wg.add_edge(3, 4)
50
+ wg.add_edge(4, 1)
51
+
52
+ cover = min_weighted_vertex_cover(wg, weight="weight")
53
+ csum = sum(wg.nodes[node]["weight"] for node in cover)
54
+ assert 4 == csum
55
+ assert is_cover(wg, cover)
56
+
57
+ def test_unweighted_self_loop(self):
58
+ slg = nx.Graph()
59
+ slg.add_node(0)
60
+ slg.add_node(1)
61
+ slg.add_node(2)
62
+
63
+ slg.add_edge(0, 1)
64
+ slg.add_edge(2, 2)
65
+
66
+ cover = min_weighted_vertex_cover(slg)
67
+ assert 2 == len(cover)
68
+ assert is_cover(slg, cover)
wemm/lib/python3.10/site-packages/networkx/algorithms/approximation/traveling_salesman.py ADDED
@@ -0,0 +1,1501 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ =================================
3
+ Travelling Salesman Problem (TSP)
4
+ =================================
5
+
6
+ Implementation of approximate algorithms
7
+ for solving and approximating the TSP problem.
8
+
9
+ Categories of algorithms which are implemented:
10
+
11
+ - Christofides (provides a 3/2-approximation of TSP)
12
+ - Greedy
13
+ - Simulated Annealing (SA)
14
+ - Threshold Accepting (TA)
15
+ - Asadpour Asymmetric Traveling Salesman Algorithm
16
+
17
+ The Travelling Salesman Problem tries to find, given the weight
18
+ (distance) between all points where a salesman has to visit, the
19
+ route so that:
20
+
21
+ - The total distance (cost) which the salesman travels is minimized.
22
+ - The salesman returns to the starting point.
23
+ - Note that for a complete graph, the salesman visits each point once.
24
+
25
+ The function `travelling_salesman_problem` allows for incomplete
26
+ graphs by finding all-pairs shortest paths, effectively converting
27
+ the problem to a complete graph problem. It calls one of the
28
+ approximate methods on that problem and then converts the result
29
+ back to the original graph using the previously found shortest paths.
30
+
31
+ TSP is an NP-hard problem in combinatorial optimization,
32
+ important in operations research and theoretical computer science.
33
+
34
+ http://en.wikipedia.org/wiki/Travelling_salesman_problem
35
+ """
36
+
37
+ import math
38
+
39
+ import networkx as nx
40
+ from networkx.algorithms.tree.mst import random_spanning_tree
41
+ from networkx.utils import not_implemented_for, pairwise, py_random_state
42
+
43
+ __all__ = [
44
+ "traveling_salesman_problem",
45
+ "christofides",
46
+ "asadpour_atsp",
47
+ "greedy_tsp",
48
+ "simulated_annealing_tsp",
49
+ "threshold_accepting_tsp",
50
+ ]
51
+
52
+
53
+ def swap_two_nodes(soln, seed):
54
+ """Swap two nodes in `soln` to give a neighbor solution.
55
+
56
+ Parameters
57
+ ----------
58
+ soln : list of nodes
59
+ Current cycle of nodes
60
+
61
+ seed : integer, random_state, or None (default)
62
+ Indicator of random number generation state.
63
+ See :ref:`Randomness<randomness>`.
64
+
65
+ Returns
66
+ -------
67
+ list
68
+ The solution after move is applied. (A neighbor solution.)
69
+
70
+ Notes
71
+ -----
72
+ This function assumes that the incoming list `soln` is a cycle
73
+ (that the first and last element are the same) and also that
74
+ we don't want any move to change the first node in the list
75
+ (and thus not the last node either).
76
+
77
+ The input list is changed as well as returned. Make a copy if needed.
78
+
79
+ See Also
80
+ --------
81
+ move_one_node
82
+ """
83
+ a, b = seed.sample(range(1, len(soln) - 1), k=2)
84
+ soln[a], soln[b] = soln[b], soln[a]
85
+ return soln
86
+
87
+
88
+ def move_one_node(soln, seed):
89
+ """Move one node to another position to give a neighbor solution.
90
+
91
+ The node to move and the position to move to are chosen randomly.
92
+ The first and last nodes are left untouched as soln must be a cycle
93
+ starting at that node.
94
+
95
+ Parameters
96
+ ----------
97
+ soln : list of nodes
98
+ Current cycle of nodes
99
+
100
+ seed : integer, random_state, or None (default)
101
+ Indicator of random number generation state.
102
+ See :ref:`Randomness<randomness>`.
103
+
104
+ Returns
105
+ -------
106
+ list
107
+ The solution after move is applied. (A neighbor solution.)
108
+
109
+ Notes
110
+ -----
111
+ This function assumes that the incoming list `soln` is a cycle
112
+ (that the first and last element are the same) and also that
113
+ we don't want any move to change the first node in the list
114
+ (and thus not the last node either).
115
+
116
+ The input list is changed as well as returned. Make a copy if needed.
117
+
118
+ See Also
119
+ --------
120
+ swap_two_nodes
121
+ """
122
+ a, b = seed.sample(range(1, len(soln) - 1), k=2)
123
+ soln.insert(b, soln.pop(a))
124
+ return soln
125
+
126
+
127
+ @not_implemented_for("directed")
128
+ @nx._dispatchable(edge_attrs="weight")
129
+ def christofides(G, weight="weight", tree=None):
130
+ """Approximate a solution of the traveling salesman problem
131
+
132
+ Compute a 3/2-approximation of the traveling salesman problem
133
+ in a complete undirected graph using Christofides [1]_ algorithm.
134
+
135
+ Parameters
136
+ ----------
137
+ G : Graph
138
+ `G` should be a complete weighted undirected graph.
139
+ The distance between all pairs of nodes should be included.
140
+
141
+ weight : string, optional (default="weight")
142
+ Edge data key corresponding to the edge weight.
143
+ If any edge does not have this attribute the weight is set to 1.
144
+
145
+ tree : NetworkX graph or None (default: None)
146
+ A minimum spanning tree of G. Or, if None, the minimum spanning
147
+ tree is computed using :func:`networkx.minimum_spanning_tree`
148
+
149
+ Returns
150
+ -------
151
+ list
152
+ List of nodes in `G` along a cycle with a 3/2-approximation of
153
+ the minimal Hamiltonian cycle.
154
+
155
+ References
156
+ ----------
157
+ .. [1] Christofides, Nicos. "Worst-case analysis of a new heuristic for
158
+ the travelling salesman problem." No. RR-388. Carnegie-Mellon Univ
159
+ Pittsburgh Pa Management Sciences Research Group, 1976.
160
+ """
161
+ # Remove selfloops if necessary
162
+ loop_nodes = nx.nodes_with_selfloops(G)
163
+ try:
164
+ node = next(loop_nodes)
165
+ except StopIteration:
166
+ pass
167
+ else:
168
+ G = G.copy()
169
+ G.remove_edge(node, node)
170
+ G.remove_edges_from((n, n) for n in loop_nodes)
171
+ # Check that G is a complete graph
172
+ N = len(G) - 1
173
+ # This check ignores selfloops which is what we want here.
174
+ if any(len(nbrdict) != N for n, nbrdict in G.adj.items()):
175
+ raise nx.NetworkXError("G must be a complete graph.")
176
+
177
+ if tree is None:
178
+ tree = nx.minimum_spanning_tree(G, weight=weight)
179
+ L = G.copy()
180
+ L.remove_nodes_from([v for v, degree in tree.degree if not (degree % 2)])
181
+ MG = nx.MultiGraph()
182
+ MG.add_edges_from(tree.edges)
183
+ edges = nx.min_weight_matching(L, weight=weight)
184
+ MG.add_edges_from(edges)
185
+ return _shortcutting(nx.eulerian_circuit(MG))
186
+
187
+
188
+ def _shortcutting(circuit):
189
+ """Remove duplicate nodes in the path"""
190
+ nodes = []
191
+ for u, v in circuit:
192
+ if v in nodes:
193
+ continue
194
+ if not nodes:
195
+ nodes.append(u)
196
+ nodes.append(v)
197
+ nodes.append(nodes[0])
198
+ return nodes
199
+
200
+
201
+ @nx._dispatchable(edge_attrs="weight")
202
+ def traveling_salesman_problem(
203
+ G, weight="weight", nodes=None, cycle=True, method=None, **kwargs
204
+ ):
205
+ """Find the shortest path in `G` connecting specified nodes
206
+
207
+ This function allows approximate solution to the traveling salesman
208
+ problem on networks that are not complete graphs and/or where the
209
+ salesman does not need to visit all nodes.
210
+
211
+ This function proceeds in two steps. First, it creates a complete
212
+ graph using the all-pairs shortest_paths between nodes in `nodes`.
213
+ Edge weights in the new graph are the lengths of the paths
214
+ between each pair of nodes in the original graph.
215
+ Second, an algorithm (default: `christofides` for undirected and
216
+ `asadpour_atsp` for directed) is used to approximate the minimal Hamiltonian
217
+ cycle on this new graph. The available algorithms are:
218
+
219
+ - christofides
220
+ - greedy_tsp
221
+ - simulated_annealing_tsp
222
+ - threshold_accepting_tsp
223
+ - asadpour_atsp
224
+
225
+ Once the Hamiltonian Cycle is found, this function post-processes to
226
+ accommodate the structure of the original graph. If `cycle` is ``False``,
227
+ the biggest weight edge is removed to make a Hamiltonian path.
228
+ Then each edge on the new complete graph used for that analysis is
229
+ replaced by the shortest_path between those nodes on the original graph.
230
+ If the input graph `G` includes edges with weights that do not adhere to
231
+ the triangle inequality, such as when `G` is not a complete graph (i.e
232
+ length of non-existent edges is infinity), then the returned path may
233
+ contain some repeating nodes (other than the starting node).
234
+
235
+ Parameters
236
+ ----------
237
+ G : NetworkX graph
238
+ A possibly weighted graph
239
+
240
+ nodes : collection of nodes (default=G.nodes)
241
+ collection (list, set, etc.) of nodes to visit
242
+
243
+ weight : string, optional (default="weight")
244
+ Edge data key corresponding to the edge weight.
245
+ If any edge does not have this attribute the weight is set to 1.
246
+
247
+ cycle : bool (default: True)
248
+ Indicates whether a cycle should be returned, or a path.
249
+ Note: the cycle is the approximate minimal cycle.
250
+ The path simply removes the biggest edge in that cycle.
251
+
252
+ method : function (default: None)
253
+ A function that returns a cycle on all nodes and approximates
254
+ the solution to the traveling salesman problem on a complete
255
+ graph. The returned cycle is then used to find a corresponding
256
+ solution on `G`. `method` should be callable; take inputs
257
+ `G`, and `weight`; and return a list of nodes along the cycle.
258
+
259
+ Provided options include :func:`christofides`, :func:`greedy_tsp`,
260
+ :func:`simulated_annealing_tsp` and :func:`threshold_accepting_tsp`.
261
+
262
+ If `method is None`: use :func:`christofides` for undirected `G` and
263
+ :func:`asadpour_atsp` for directed `G`.
264
+
265
+ **kwargs : dict
266
+ Other keyword arguments to be passed to the `method` function passed in.
267
+
268
+ Returns
269
+ -------
270
+ list
271
+ List of nodes in `G` along a path with an approximation of the minimal
272
+ path through `nodes`.
273
+
274
+ Raises
275
+ ------
276
+ NetworkXError
277
+ If `G` is a directed graph it has to be strongly connected or the
278
+ complete version cannot be generated.
279
+
280
+ Examples
281
+ --------
282
+ >>> tsp = nx.approximation.traveling_salesman_problem
283
+ >>> G = nx.cycle_graph(9)
284
+ >>> G[4][5]["weight"] = 5 # all other weights are 1
285
+ >>> tsp(G, nodes=[3, 6])
286
+ [3, 2, 1, 0, 8, 7, 6, 7, 8, 0, 1, 2, 3]
287
+ >>> path = tsp(G, cycle=False)
288
+ >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4])
289
+ True
290
+
291
+ While no longer required, you can still build (curry) your own function
292
+ to provide parameter values to the methods.
293
+
294
+ >>> SA_tsp = nx.approximation.simulated_annealing_tsp
295
+ >>> method = lambda G, weight: SA_tsp(G, "greedy", weight=weight, temp=500)
296
+ >>> path = tsp(G, cycle=False, method=method)
297
+ >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4])
298
+ True
299
+
300
+ Otherwise, pass other keyword arguments directly into the tsp function.
301
+
302
+ >>> path = tsp(
303
+ ... G,
304
+ ... cycle=False,
305
+ ... method=nx.approximation.simulated_annealing_tsp,
306
+ ... init_cycle="greedy",
307
+ ... temp=500,
308
+ ... )
309
+ >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4])
310
+ True
311
+ """
312
+ if method is None:
313
+ if G.is_directed():
314
+ method = asadpour_atsp
315
+ else:
316
+ method = christofides
317
+ if nodes is None:
318
+ nodes = list(G.nodes)
319
+
320
+ dist = {}
321
+ path = {}
322
+ for n, (d, p) in nx.all_pairs_dijkstra(G, weight=weight):
323
+ dist[n] = d
324
+ path[n] = p
325
+
326
+ if G.is_directed():
327
+ # If the graph is not strongly connected, raise an exception
328
+ if not nx.is_strongly_connected(G):
329
+ raise nx.NetworkXError("G is not strongly connected")
330
+ GG = nx.DiGraph()
331
+ else:
332
+ GG = nx.Graph()
333
+ for u in nodes:
334
+ for v in nodes:
335
+ if u == v:
336
+ continue
337
+ GG.add_edge(u, v, weight=dist[u][v])
338
+
339
+ best_GG = method(GG, weight=weight, **kwargs)
340
+
341
+ if not cycle:
342
+ # find and remove the biggest edge
343
+ (u, v) = max(pairwise(best_GG), key=lambda x: dist[x[0]][x[1]])
344
+ pos = best_GG.index(u) + 1
345
+ while best_GG[pos] != v:
346
+ pos = best_GG[pos:].index(u) + 1
347
+ best_GG = best_GG[pos:-1] + best_GG[:pos]
348
+
349
+ best_path = []
350
+ for u, v in pairwise(best_GG):
351
+ best_path.extend(path[u][v][:-1])
352
+ best_path.append(v)
353
+ return best_path
354
+
355
+
356
@not_implemented_for("undirected")
@py_random_state(2)
@nx._dispatchable(edge_attrs="weight", mutates_input=True)
def asadpour_atsp(G, weight="weight", seed=None, source=None):
    """
    Returns an approximate solution to the traveling salesman problem.

    This approximate solution is one of the best known approximations for the
    asymmetric traveling salesman problem developed by Asadpour et al,
    [1]_. The algorithm first solves the Held-Karp relaxation to find a lower
    bound for the weight of the cycle. Next, it constructs an exponential
    distribution of undirected spanning trees where the probability of an
    edge being in the tree corresponds to the weight of that edge using a
    maximum entropy rounding scheme. Next we sample that distribution
    $2 \\lceil \\ln n \\rceil$ times and save the minimum sampled tree once the
    direction of the arcs is added back to the edges. Finally, we augment
    then short circuit that graph to find the approximate tour for the
    salesman.

    Parameters
    ----------
    G : nx.DiGraph
        The graph should be a complete weighted directed graph. The
        distance between all pairs of nodes should be included and the triangle
        inequality should hold. That is, the direct edge between any two nodes
        should be the path of least cost.

    weight : string, optional (default="weight")
        Edge data key corresponding to the edge weight.
        If any edge does not have this attribute the weight is set to 1.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    source : node label (default=`None`)
        If given, return the cycle starting and ending at the given node.

    Returns
    -------
    cycle : list of nodes
        Returns the cycle (list of nodes) that a salesman can follow to minimize
        the total weight of the trip.

    Raises
    ------
    NetworkXError
        If `G` is not complete or has less than two nodes, the algorithm raises
        an exception.

    NetworkXError
        If `source` is not `None` and is not a node in `G`, the algorithm raises
        an exception.

    NetworkXNotImplemented
        If `G` is an undirected graph.

    References
    ----------
    .. [1] A. Asadpour, M. X. Goemans, A. Madry, S. O. Gharan, and A. Saberi,
       An o(log n/log log n)-approximation algorithm for the asymmetric
       traveling salesman problem, Operations research, 65 (2017),
       pp. 1043–1061

    Examples
    --------
    >>> import networkx as nx
    >>> import networkx.algorithms.approximation as approx
    >>> G = nx.complete_graph(3, create_using=nx.DiGraph)
    >>> nx.set_edge_attributes(
    ...     G,
    ...     {(0, 1): 2, (1, 2): 2, (2, 0): 2, (0, 2): 1, (2, 1): 1, (1, 0): 1},
    ...     "weight",
    ... )
    >>> tour = approx.asadpour_atsp(G, source=0)
    >>> tour
    [0, 2, 1, 0]
    """
    from math import ceil, exp
    from math import log as ln

    # Check that G is a complete graph
    N = len(G) - 1
    if N < 2:
        raise nx.NetworkXError("G must have at least two nodes")
    # This check ignores selfloops which is what we want here.
    if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
        raise nx.NetworkXError("G is not a complete DiGraph")
    # Check that the source vertex, if given, is in the graph
    if source is not None and source not in G.nodes:
        raise nx.NetworkXError("Given source node not in G.")

    opt_hk, z_star = held_karp_ascent(G, weight)

    # Test to see if the ascent method found an integer solution or a fractional
    # solution. If it is integral then z_star is a nx.Graph, otherwise it is
    # a dict
    if not isinstance(z_star, dict):
        # Here we are using the shortcutting method to go from the list of edges
        # returned from eulerian_circuit to a list of nodes
        return _shortcutting(nx.eulerian_circuit(z_star, source=source))

    # Create the undirected support of z_star
    z_support = nx.MultiGraph()
    for u, v in z_star:
        if (u, v) not in z_support.edges:
            edge_weight = min(G[u][v][weight], G[v][u][weight])
            z_support.add_edge(u, v, **{weight: edge_weight})

    # Create the exponential distribution of spanning trees
    gamma = spanning_tree_distribution(z_support, z_star)

    # Write the lambda values to the edges of z_support
    z_support = nx.Graph(z_support)
    lambda_dict = {(u, v): exp(gamma[(u, v)]) for u, v in z_support.edges()}
    nx.set_edge_attributes(z_support, lambda_dict, "weight")
    del gamma, lambda_dict

    # Sample 2 * ceil( ln(n) ) spanning trees and record the minimum one
    minimum_sampled_tree = None
    minimum_sampled_tree_weight = math.inf
    for _ in range(2 * ceil(ln(G.number_of_nodes()))):
        sampled_tree = random_spanning_tree(z_support, "weight", seed=seed)
        sampled_tree_weight = sampled_tree.size(weight)
        if sampled_tree_weight < minimum_sampled_tree_weight:
            minimum_sampled_tree = sampled_tree.copy()
            minimum_sampled_tree_weight = sampled_tree_weight

    # Orient the edges in that tree to keep the cost of the tree the same.
    t_star = nx.MultiDiGraph()
    for u, v, d in minimum_sampled_tree.edges(data=weight):
        if d == G[u][v][weight]:
            t_star.add_edge(u, v, **{weight: d})
        else:
            t_star.add_edge(v, u, **{weight: d})

    # Find the node demands needed to neutralize the flow of t_star in G
    node_demands = {n: t_star.out_degree(n) - t_star.in_degree(n) for n in t_star}
    nx.set_node_attributes(G, node_demands, "demand")

    # Find the min_cost_flow
    flow_dict = nx.min_cost_flow(G, "demand")

    # Build the flow into t_star.
    # BUGFIX: the loop variable must not be named `source` -- reusing that
    # name clobbered the caller-supplied start node before it was passed to
    # `eulerian_circuit` below, so the returned tour could start at an
    # arbitrary node whenever this fractional branch was taken.
    for flow_src, values in flow_dict.items():
        for target in values:
            if (flow_src, target) not in t_star.edges and values[target] > 0:
                # If values[target] > 0 we have to add that many edges
                for _ in range(values[target]):
                    t_star.add_edge(flow_src, target)

    # Return the shortcut eulerian circuit
    circuit = nx.eulerian_circuit(t_star, source=source)
    return _shortcutting(circuit)
510
+
511
+
512
@nx._dispatchable(edge_attrs="weight", mutates_input=True, returns_graph=True)
def held_karp_ascent(G, weight="weight"):
    """
    Minimizes the Held-Karp relaxation of the TSP for `G`

    Solves the Held-Karp relaxation of the input complete digraph and scales
    the output solution for use in the Asadpour [1]_ ATSP algorithm.

    The Held-Karp relaxation defines the lower bound for solutions to the
    ATSP, although it does return a fractional solution. This is used in the
    Asadpour algorithm as an initial solution which is later rounded to a
    integral tree within the spanning tree polytopes. This function solves
    the relaxation with the branch and bound method in [2]_.

    Parameters
    ----------
    G : nx.DiGraph
        The graph should be a complete weighted directed graph.
        The distance between all pairs of nodes should be included.

    weight : string, optional (default="weight")
        Edge data key corresponding to the edge weight.
        If any edge does not have this attribute the weight is set to 1.

    Returns
    -------
    OPT : float
        The cost for the optimal solution to the Held-Karp relaxation
    z : dict or nx.Graph
        A symmetrized and scaled version of the optimal solution to the
        Held-Karp relaxation for use in the Asadpour algorithm.

        If an integral solution is found, then that is an optimal solution for
        the ATSP problem and that is returned instead.

    References
    ----------
    .. [1] A. Asadpour, M. X. Goemans, A. Madry, S. O. Gharan, and A. Saberi,
       An o(log n/log log n)-approximation algorithm for the asymmetric
       traveling salesman problem, Operations research, 65 (2017),
       pp. 1043–1061

    .. [2] M. Held, R. M. Karp, The traveling-salesman problem and minimum
       spanning trees, Operations Research, 1970-11-01, Vol. 18 (6),
       pp.1138-1162
    """
    import numpy as np
    from scipy import optimize

    def k_pi():
        """
        Find the set of minimum 1-Arborescences for G at point pi.

        Returns
        -------
        Set
            The set of minimum 1-Arborescences
        """
        # Create a copy of G without vertex 1.
        G_1 = G.copy()
        minimum_1_arborescences = set()
        minimum_1_arborescence_weight = math.inf

        # n is node '1' in the Held and Karp paper
        n = next(G.__iter__())
        G_1.remove_node(n)

        # Iterate over the spanning arborescences of the graph until we know
        # that we have found the minimum 1-arborescences. My proposed strategy
        # is to find the most expensive root to connect to from 'node 1' and
        # the least expensive one. We then iterate over arborescences until
        # the cost of the basic arborescence is the cost of the minimum one
        # plus the difference between the most and least expensive roots,
        # that way the cost of connecting 'node 1' will by definition not be
        # minimum.
        # NOTE: the value of the `weight` parameter (a string) is itself used
        # as a dict key alongside the literal "node" key here.
        min_root = {"node": None, weight: math.inf}
        max_root = {"node": None, weight: -math.inf}
        for u, v, d in G.edges(n, data=True):
            if d[weight] < min_root[weight]:
                min_root = {"node": v, weight: d[weight]}
            if d[weight] > max_root[weight]:
                max_root = {"node": v, weight: d[weight]}

        min_in_edge = min(G.in_edges(n, data=True), key=lambda x: x[2][weight])
        min_root[weight] = min_root[weight] + min_in_edge[2][weight]
        max_root[weight] = max_root[weight] + min_in_edge[2][weight]

        min_arb_weight = math.inf
        for arb in nx.ArborescenceIterator(G_1):
            arb_weight = arb.size(weight)
            if min_arb_weight == math.inf:
                min_arb_weight = arb_weight
            elif arb_weight > min_arb_weight + max_root[weight] - min_root[weight]:
                break
            # We have to pick the root node of the arborescence for the out
            # edge of the first vertex as that is the only node without an
            # edge directed into it.
            for N, deg in arb.in_degree:
                if deg == 0:
                    # root found
                    arb.add_edge(n, N, **{weight: G[n][N][weight]})
                    arb_weight += G[n][N][weight]
                    break

            # We can pick the minimum weight in-edge for the vertex with
            # a cycle. If there are multiple edges with the same, minimum
            # weight, we need to add all of them.
            #
            # Delete the edge (N, n) so that we cannot pick it.
            edge_data = G[N][n]
            G.remove_edge(N, n)
            min_weight = min(G.in_edges(n, data=weight), key=lambda x: x[2])[2]
            min_edges = [
                (u, v, d) for u, v, d in G.in_edges(n, data=weight) if d == min_weight
            ]
            for u, v, d in min_edges:
                new_arb = arb.copy()
                new_arb.add_edge(u, v, **{weight: d})
                new_arb_weight = arb_weight + d
                # Check to see the weight of the arborescence, if it is a
                # new minimum, clear all of the old potential minimum
                # 1-arborescences and add this as the only one. If its
                # weight is above the known minimum, do not add it.
                if new_arb_weight < minimum_1_arborescence_weight:
                    minimum_1_arborescences.clear()
                    minimum_1_arborescence_weight = new_arb_weight
                # We have a 1-arborescence, add it to the set
                if new_arb_weight == minimum_1_arborescence_weight:
                    minimum_1_arborescences.add(new_arb)
            # Restore the temporarily deleted edge so G is unchanged for the
            # next iteration.
            G.add_edge(N, n, **edge_data)

        return minimum_1_arborescences

    def direction_of_ascent():
        """
        Find the direction of ascent at point pi.

        See [1]_ for more information.

        Returns
        -------
        dict
            A mapping from the nodes of the graph which represents the direction
            of ascent.

        References
        ----------
        .. [1] M. Held, R. M. Karp, The traveling-salesman problem and minimum
           spanning trees, Operations Research, 1970-11-01, Vol. 18 (6),
           pp.1138-1162
        """
        # 1. Set d equal to the zero n-vector.
        d = {}
        for n in G:
            d[n] = 0
        del n
        # 2. Find a 1-Arborescence T^k such that k is in K(pi, d).
        minimum_1_arborescences = k_pi()
        while True:
            # Reduce K(pi) to K(pi, d)
            # Find the arborescence in K(pi) which increases the least in
            # direction d
            min_k_d_weight = math.inf
            min_k_d = None
            for arborescence in minimum_1_arborescences:
                weighted_cost = 0
                for n, deg in arborescence.degree:
                    weighted_cost += d[n] * (deg - 2)
                if weighted_cost < min_k_d_weight:
                    min_k_d_weight = weighted_cost
                    min_k_d = arborescence

            # 3. If sum of d_i * v_{i, k} is greater than zero, terminate
            if min_k_d_weight > 0:
                return d, min_k_d
            # 4. d_i = d_i + v_{i, k}
            for n, deg in min_k_d.degree:
                d[n] += deg - 2
            # Check that we do not need to terminate because the direction
            # of ascent does not exist. This is done with linear
            # programming.
            c = np.full(len(minimum_1_arborescences), -1, dtype=int)
            a_eq = np.empty((len(G) + 1, len(minimum_1_arborescences)), dtype=int)
            b_eq = np.zeros(len(G) + 1, dtype=int)
            b_eq[len(G)] = 1
            for arb_count, arborescence in enumerate(minimum_1_arborescences):
                n_count = len(G) - 1
                for n, deg in arborescence.degree:
                    a_eq[n_count][arb_count] = deg - 2
                    n_count -= 1
                a_eq[len(G)][arb_count] = 1
            program_result = optimize.linprog(
                c, A_eq=a_eq, b_eq=b_eq, method="highs-ipm"
            )
            # If the constants exist, then the direction of ascent doesn't
            if program_result.success:
                # There is no direction of ascent
                return None, minimum_1_arborescences

            # 5. GO TO 2

    def find_epsilon(k, d):
        """
        Given the direction of ascent at pi, find the maximum distance we can go
        in that direction.

        Parameters
        ----------
        k_xy : set
            The set of 1-arborescences which have the minimum rate of increase
            in the direction of ascent

        d : dict
            The direction of ascent

        Returns
        -------
        float
            The distance we can travel in direction `d`
        """
        min_epsilon = math.inf
        for e_u, e_v, e_w in G.edges(data=weight):
            if (e_u, e_v) in k.edges:
                continue
            # Now, I have found a condition which MUST be true for the edges to
            # be a valid substitute. The edge in the graph which is the
            # substitute is the one with the same terminated end. This can be
            # checked rather simply.
            #
            # Find the edge within k which is the substitute. Because k is a
            # 1-arborescence, we know that there is only one such edge
            # leading into every vertex.
            if len(k.in_edges(e_v, data=weight)) > 1:
                raise Exception
            sub_u, sub_v, sub_w = next(k.in_edges(e_v, data=weight).__iter__())
            k.add_edge(e_u, e_v, **{weight: e_w})
            k.remove_edge(sub_u, sub_v)
            if (
                max(d for n, d in k.in_degree()) <= 1
                and len(G) == k.number_of_edges()
                and nx.is_weakly_connected(k)
            ):
                # Ascent method calculation
                if d[sub_u] == d[e_u] or sub_w == e_w:
                    # Revert to the original graph
                    k.remove_edge(e_u, e_v)
                    k.add_edge(sub_u, sub_v, **{weight: sub_w})
                    continue
                epsilon = (sub_w - e_w) / (d[e_u] - d[sub_u])
                if 0 < epsilon < min_epsilon:
                    min_epsilon = epsilon
            # Revert to the original graph
            k.remove_edge(e_u, e_v)
            k.add_edge(sub_u, sub_v, **{weight: sub_w})

        return min_epsilon

    # I have to know that the elements in pi correspond to the correct elements
    # in the direction of ascent, even if the node labels are not integers.
    # Thus, I will use dictionaries to make that mapping.
    pi_dict = {}
    for n in G:
        pi_dict[n] = 0
    del n
    original_edge_weights = {}
    for u, v, d in G.edges(data=True):
        original_edge_weights[(u, v)] = d[weight]
    dir_ascent, k_d = direction_of_ascent()
    while dir_ascent is not None:
        max_distance = find_epsilon(k_d, dir_ascent)
        for n, v in dir_ascent.items():
            pi_dict[n] += max_distance * v
        for u, v, d in G.edges(data=True):
            d[weight] = original_edge_weights[(u, v)] + pi_dict[u]
        dir_ascent, k_d = direction_of_ascent()
    nx._clear_cache(G)
    # k_d is no longer an individual 1-arborescence but rather a set of
    # minimal 1-arborescences at the maximum point of the polytope and should
    # be reflected as such
    k_max = k_d

    # Search for a cycle within k_max. If a cycle exists, return it as the
    # solution
    for k in k_max:
        if len([n for n in k if k.degree(n) == 2]) == G.order():
            # Tour found
            # TODO: this branch does not restore original_edge_weights of G!
            return k.size(weight), k

    # Write the original edge weights back to G and every member of k_max at
    # the maximum point. Also average the number of times that edge appears in
    # the set of minimal 1-arborescences.
    x_star = {}
    size_k_max = len(k_max)
    for u, v, d in G.edges(data=True):
        edge_count = 0
        d[weight] = original_edge_weights[(u, v)]
        for k in k_max:
            if (u, v) in k.edges():
                edge_count += 1
                k[u][v][weight] = original_edge_weights[(u, v)]
        x_star[(u, v)] = edge_count / size_k_max
    # Now symmetrize the edges in x_star and scale them according to (5) in
    # reference [1]
    z_star = {}
    scale_factor = (G.order() - 1) / G.order()
    for u, v in x_star:
        frequency = x_star[(u, v)] + x_star[(v, u)]
        if frequency > 0:
            z_star[(u, v)] = scale_factor * frequency
    del x_star
    # Return the optimal weight and the z dict
    return next(k_max.__iter__()).size(weight), z_star
825
+
826
+
827
@nx._dispatchable
def spanning_tree_distribution(G, z):
    """
    Find the asadpour exponential distribution of spanning trees.

    Solves the Maximum Entropy Convex Program in the Asadpour algorithm [1]_
    using the approach in section 7 to build an exponential distribution of
    undirected spanning trees.

    This algorithm ensures that the probability of any edge in a spanning
    tree is proportional to the sum of the probabilities of the trees
    containing that edge over the sum of the probabilities of all spanning
    trees of the graph.

    Parameters
    ----------
    G : nx.MultiGraph
        The undirected support graph for the Held Karp relaxation

    z : dict
        The output of `held_karp_ascent()`, a scaled version of the Held-Karp
        solution.

    Returns
    -------
    gamma : dict
        The probability distribution which approximately preserves the marginal
        probabilities of `z`.
    """
    from math import exp
    from math import log as ln

    def q(e):
        """
        The value of q(e) is described in the Asadpour paper as "the
        probability that edge e will be included in a spanning tree T that is
        chosen with probability proportional to exp(gamma(T))" which
        basically means that it is the total probability of the edge appearing
        across the whole distribution.

        Parameters
        ----------
        e : tuple
            The `(u, v)` tuple describing the edge we are interested in

        Returns
        -------
        float
            The probability that a spanning tree chosen according to the
            current values of gamma will include edge `e`.
        """
        # Create the laplacian matrices
        for u, v, d in G.edges(data=True):
            d[lambda_key] = exp(gamma[(u, v)])
        G_Kirchhoff = nx.total_spanning_tree_weight(G, lambda_key)
        G_e = nx.contracted_edge(G, e, self_loops=False)
        G_e_Kirchhoff = nx.total_spanning_tree_weight(G_e, lambda_key)

        # Multiply by the weight of the contracted edge since it is not included
        # in the total weight of the contracted graph.
        return exp(gamma[(e[0], e[1])]) * G_e_Kirchhoff / G_Kirchhoff

    # initialize gamma to the zero dict
    gamma = {}
    for u, v, _ in G.edges:
        gamma[(u, v)] = 0

    # set epsilon
    EPSILON = 0.2

    # pick an edge attribute name that is unlikely to be in the graph
    lambda_key = "spanning_tree_distribution's secret attribute name for lambda"

    while True:
        # We need to know that no values of q_e are greater than
        # (1 + epsilon) * z_e, however changing one gamma value can increase the
        # value of a different q_e, so we have to complete the for loop without
        # changing anything for the condition to be met
        in_range_count = 0
        # Search for an edge with q_e > (1 + epsilon) * z_e
        for u, v in gamma:
            e = (u, v)
            q_e = q(e)
            z_e = z[e]
            if q_e > (1 + EPSILON) * z_e:
                # Shrink gamma[e] so that q_e drops to (1 + epsilon/2) * z_e;
                # the formula for delta comes from section 7 of the Asadpour
                # paper.
                delta = ln(
                    (q_e * (1 - (1 + EPSILON / 2) * z_e))
                    / ((1 - q_e) * (1 + EPSILON / 2) * z_e)
                )
                gamma[e] -= delta
                # Check that delta had the desired effect
                new_q_e = q(e)
                desired_q_e = (1 + EPSILON / 2) * z_e
                if round(new_q_e, 8) != round(desired_q_e, 8):
                    raise nx.NetworkXError(
                        f"Unable to modify probability for edge ({u}, {v})"
                    )
            else:
                in_range_count += 1
        # Check if the for loop terminated without changing any gamma
        if in_range_count == len(gamma):
            break

    # Remove the new edge attributes
    for _, _, d in G.edges(data=True):
        if lambda_key in d:
            del d[lambda_key]

    return gamma
936
+
937
+
938
@nx._dispatchable(edge_attrs="weight")
def greedy_tsp(G, weight="weight", source=None):
    """Return a low cost cycle starting at `source` and its cost.

    Approximates a solution to the traveling salesman problem with a
    simple greedy (nearest-neighbor) strategy: starting from `source`,
    repeatedly hop to the cheapest not-yet-visited node, then close the
    cycle by returning to the start.

    Parameters
    ----------
    G : Graph
        The Graph should be a complete weighted undirected graph.
        The distance between all pairs of nodes should be included.

    weight : string, optional (default="weight")
        Edge data key corresponding to the edge weight.
        If any edge does not have this attribute the weight is set to 1.

    source : node, optional (default: first node in list(G))
        Starting node. If None, defaults to ``next(iter(G))``

    Returns
    -------
    cycle : list of nodes
        Returns the cycle (list of nodes) that a salesman
        can follow to minimize total weight of the trip.

    Raises
    ------
    NetworkXError
        If `G` is not complete, the algorithm raises an exception.

    Examples
    --------
    >>> from networkx.algorithms import approximation as approx
    >>> G = nx.DiGraph()
    >>> G.add_weighted_edges_from(
    ...     {
    ...         ("A", "B", 3),
    ...         ("A", "C", 17),
    ...         ("A", "D", 14),
    ...         ("B", "A", 3),
    ...         ("B", "C", 12),
    ...         ("B", "D", 16),
    ...         ("C", "A", 13),
    ...         ("C", "B", 12),
    ...         ("C", "D", 4),
    ...         ("D", "A", 14),
    ...         ("D", "B", 15),
    ...         ("D", "C", 2),
    ...     }
    ... )
    >>> cycle = approx.greedy_tsp(G, source="D")
    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
    >>> cycle
    ['D', 'C', 'B', 'A', 'D']
    >>> cost
    31

    Notes
    -----
    Greedy construction does not always give the best solution, but the
    result can seed iterative-improvement methods such as Simulated
    Annealing or Threshold Accepting.

    Time complexity: It has a running time $O(|V|^2)$
    """
    # In a complete graph every node is adjacent to the other n - 1 nodes;
    # self-loops are excluded from the adjacency count on purpose.
    expected_degree = len(G) - 1
    if any(
        len(nbrs) - (node in nbrs) != expected_degree for node, nbrs in G.adj.items()
    ):
        raise nx.NetworkXError("G must be a complete graph.")

    if source is None:
        source = nx.utils.arbitrary_element(G)

    # Two nodes admit only the out-and-back tour.
    if G.number_of_nodes() == 2:
        other = next(G.neighbors(source))
        return [source, other, source]

    unvisited = set(G)
    unvisited.remove(source)
    tour = [source]
    current = source
    while unvisited:
        # Greedy step: move to the cheapest unvisited neighbor
        # (missing weight attributes count as 1).
        costs = G[current]
        current = min(unvisited, key=lambda cand: costs[cand].get(weight, 1))
        tour.append(current)
        unvisited.remove(current)
    tour.append(tour[0])
    return tour
1039
+
1040
+
1041
+ @py_random_state(9)
1042
+ @nx._dispatchable(edge_attrs="weight")
1043
+ def simulated_annealing_tsp(
1044
+ G,
1045
+ init_cycle,
1046
+ weight="weight",
1047
+ source=None,
1048
+ temp=100,
1049
+ move="1-1",
1050
+ max_iterations=10,
1051
+ N_inner=100,
1052
+ alpha=0.01,
1053
+ seed=None,
1054
+ ):
1055
+ """Returns an approximate solution to the traveling salesman problem.
1056
+
1057
+ This function uses simulated annealing to approximate the minimal cost
1058
+ cycle through the nodes. Starting from a suboptimal solution, simulated
1059
+ annealing perturbs that solution, occasionally accepting changes that make
1060
+ the solution worse to escape from a locally optimal solution. The chance
1061
+ of accepting such changes decreases over the iterations to encourage
1062
+ an optimal result. In summary, the function returns a cycle starting
1063
+ at `source` for which the total cost is minimized. It also returns the cost.
1064
+
1065
+ The chance of accepting a proposed change is related to a parameter called
1066
+ the temperature (annealing has a physical analogue of steel hardening
1067
+ as it cools). As the temperature is reduced, the chance of moves that
1068
+ increase cost goes down.
1069
+
1070
+ Parameters
1071
+ ----------
1072
+ G : Graph
1073
+ `G` should be a complete weighted graph.
1074
+ The distance between all pairs of nodes should be included.
1075
+
1076
+ init_cycle : list of all nodes or "greedy"
1077
+ The initial solution (a cycle through all nodes returning to the start).
1078
+ This argument has no default to make you think about it.
1079
+ If "greedy", use `greedy_tsp(G, weight)`.
1080
+ Other common starting cycles are `list(G) + [next(iter(G))]` or the final
1081
+ result of `simulated_annealing_tsp` when doing `threshold_accepting_tsp`.
1082
+
1083
+ weight : string, optional (default="weight")
1084
+ Edge data key corresponding to the edge weight.
1085
+ If any edge does not have this attribute the weight is set to 1.
1086
+
1087
+ source : node, optional (default: first node in list(G))
1088
+ Starting node. If None, defaults to ``next(iter(G))``
1089
+
1090
+ temp : int, optional (default=100)
1091
+ The algorithm's temperature parameter. It represents the initial
1092
+ value of temperature
1093
+
1094
+ move : "1-1" or "1-0" or function, optional (default="1-1")
1095
+ Indicator of what move to use when finding new trial solutions.
1096
+ Strings indicate two special built-in moves:
1097
+
1098
+ - "1-1": 1-1 exchange which transposes the position
1099
+ of two elements of the current solution.
1100
+ The function called is :func:`swap_two_nodes`.
1101
+ For example if we apply 1-1 exchange in the solution
1102
+ ``A = [3, 2, 1, 4, 3]``
1103
+ we can get the following by the transposition of 1 and 4 elements:
1104
+ ``A' = [3, 2, 4, 1, 3]``
1105
+ - "1-0": 1-0 exchange which moves an node in the solution
1106
+ to a new position.
1107
+ The function called is :func:`move_one_node`.
1108
+ For example if we apply 1-0 exchange in the solution
1109
+ ``A = [3, 2, 1, 4, 3]``
1110
+ we can transfer the fourth element to the second position:
1111
+ ``A' = [3, 4, 2, 1, 3]``
1112
+
1113
+ You may provide your own functions to enact a move from
1114
+ one solution to a neighbor solution. The function must take
1115
+ the solution as input along with a `seed` input to control
1116
+ random number generation (see the `seed` input here).
1117
+ Your function should maintain the solution as a cycle with
1118
+ equal first and last node and all others appearing once.
1119
+ Your function should return the new solution.
1120
+
1121
+ max_iterations : int, optional (default=10)
1122
+ Declared done when this number of consecutive iterations of
1123
+ the outer loop occurs without any change in the best cost solution.
1124
+
1125
+ N_inner : int, optional (default=100)
1126
+ The number of iterations of the inner loop.
1127
+
1128
+ alpha : float between (0, 1), optional (default=0.01)
1129
+ Percentage of temperature decrease in each iteration
1130
+ of outer loop
1131
+
1132
+ seed : integer, random_state, or None (default)
1133
+ Indicator of random number generation state.
1134
+ See :ref:`Randomness<randomness>`.
1135
+
1136
+ Returns
1137
+ -------
1138
+ cycle : list of nodes
1139
+ Returns the cycle (list of nodes) that a salesman
1140
+ can follow to minimize total weight of the trip.
1141
+
1142
+ Raises
1143
+ ------
1144
+ NetworkXError
1145
+ If `G` is not complete the algorithm raises an exception.
1146
+
1147
+ Examples
1148
+ --------
1149
+ >>> from networkx.algorithms import approximation as approx
1150
+ >>> G = nx.DiGraph()
1151
+ >>> G.add_weighted_edges_from(
1152
+ ... {
1153
+ ... ("A", "B", 3),
1154
+ ... ("A", "C", 17),
1155
+ ... ("A", "D", 14),
1156
+ ... ("B", "A", 3),
1157
+ ... ("B", "C", 12),
1158
+ ... ("B", "D", 16),
1159
+ ... ("C", "A", 13),
1160
+ ... ("C", "B", 12),
1161
+ ... ("C", "D", 4),
1162
+ ... ("D", "A", 14),
1163
+ ... ("D", "B", 15),
1164
+ ... ("D", "C", 2),
1165
+ ... }
1166
+ ... )
1167
+ >>> cycle = approx.simulated_annealing_tsp(G, "greedy", source="D")
1168
+ >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
1169
+ >>> cycle
1170
+ ['D', 'C', 'B', 'A', 'D']
1171
+ >>> cost
1172
+ 31
1173
+ >>> incycle = ["D", "B", "A", "C", "D"]
1174
+ >>> cycle = approx.simulated_annealing_tsp(G, incycle, source="D")
1175
+ >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
1176
+ >>> cycle
1177
+ ['D', 'C', 'B', 'A', 'D']
1178
+ >>> cost
1179
+ 31
1180
+
1181
+ Notes
1182
+ -----
1183
+ Simulated Annealing is a metaheuristic local search algorithm.
1184
+ The main characteristic of this algorithm is that it accepts
1185
+ even solutions which lead to the increase of the cost in order
1186
+ to escape from low quality local optimal solutions.
1187
+
1188
+ This algorithm needs an initial solution. If not provided, it is
1189
+ constructed by a simple greedy algorithm. At every iteration, the
1190
+ algorithm selects thoughtfully a neighbor solution.
1191
+ Consider $c(x)$ cost of current solution and $c(x')$ cost of a
1192
+ neighbor solution.
1193
+ If $c(x') - c(x) <= 0$ then the neighbor solution becomes the current
1194
+ solution for the next iteration. Otherwise, the algorithm accepts
1195
+ the neighbor solution with probability $p = exp - ([c(x') - c(x)] / temp)$.
1196
+ Otherwise the current solution is retained.
1197
+
1198
+ `temp` is a parameter of the algorithm and represents temperature.
1199
+
1200
+ Time complexity:
1201
+ For $N_i$ iterations of the inner loop and $N_o$ iterations of the
1202
+ outer loop, this algorithm has running time $O(N_i * N_o * |V|)$.
1203
+
1204
+ For more information and how the algorithm is inspired see:
1205
+ http://en.wikipedia.org/wiki/Simulated_annealing
1206
+ """
1207
+ if move == "1-1":
1208
+ move = swap_two_nodes
1209
+ elif move == "1-0":
1210
+ move = move_one_node
1211
+ if init_cycle == "greedy":
1212
+ # Construct an initial solution using a greedy algorithm.
1213
+ cycle = greedy_tsp(G, weight=weight, source=source)
1214
+ if G.number_of_nodes() == 2:
1215
+ return cycle
1216
+
1217
+ else:
1218
+ cycle = list(init_cycle)
1219
+ if source is None:
1220
+ source = cycle[0]
1221
+ elif source != cycle[0]:
1222
+ raise nx.NetworkXError("source must be first node in init_cycle")
1223
+ if cycle[0] != cycle[-1]:
1224
+ raise nx.NetworkXError("init_cycle must be a cycle. (return to start)")
1225
+
1226
+ if len(cycle) - 1 != len(G) or len(set(G.nbunch_iter(cycle))) != len(G):
1227
+ raise nx.NetworkXError("init_cycle should be a cycle over all nodes in G.")
1228
+
1229
+ # Check that G is a complete graph
1230
+ N = len(G) - 1
1231
+ # This check ignores selfloops which is what we want here.
1232
+ if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
1233
+ raise nx.NetworkXError("G must be a complete graph.")
1234
+
1235
+ if G.number_of_nodes() == 2:
1236
+ neighbor = next(G.neighbors(source))
1237
+ return [source, neighbor, source]
1238
+
1239
+ # Find the cost of initial solution
1240
+ cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(cycle))
1241
+
1242
+ count = 0
1243
+ best_cycle = cycle.copy()
1244
+ best_cost = cost
1245
+ while count <= max_iterations and temp > 0:
1246
+ count += 1
1247
+ for i in range(N_inner):
1248
+ adj_sol = move(cycle, seed)
1249
+ adj_cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(adj_sol))
1250
+ delta = adj_cost - cost
1251
+ if delta <= 0:
1252
+ # Set current solution the adjacent solution.
1253
+ cycle = adj_sol
1254
+ cost = adj_cost
1255
+
1256
+ if cost < best_cost:
1257
+ count = 0
1258
+ best_cycle = cycle.copy()
1259
+ best_cost = cost
1260
+ else:
1261
+ # Accept even a worse solution with probability p.
1262
+ p = math.exp(-delta / temp)
1263
+ if p >= seed.random():
1264
+ cycle = adj_sol
1265
+ cost = adj_cost
1266
+ temp -= temp * alpha
1267
+
1268
+ return best_cycle
1269
+
1270
+
1271
@py_random_state(9)
@nx._dispatchable(edge_attrs="weight")
def threshold_accepting_tsp(
    G,
    init_cycle,
    weight="weight",
    source=None,
    threshold=1,
    move="1-1",
    max_iterations=10,
    N_inner=100,
    alpha=0.1,
    seed=None,
):
    """Returns an approximate solution to the traveling salesman problem.

    This function uses threshold accepting methods to approximate the minimal cost
    cycle through the nodes. Starting from a suboptimal solution, threshold
    accepting methods perturb that solution, accepting any changes that make
    the solution no worse than increasing by a threshold amount. Improvements
    in cost are accepted, but so are changes leading to small increases in cost.
    This allows the solution to leave suboptimal local minima in solution space.
    The threshold is decreased slowly as iterations proceed helping to ensure
    an optimum. In summary, the function returns a cycle starting at `source`
    for which the total cost is minimized.

    Parameters
    ----------
    G : Graph
        `G` should be a complete weighted graph.
        The distance between all pairs of nodes should be included.

    init_cycle : list or "greedy"
        The initial solution (a cycle through all nodes returning to the start).
        This argument has no default to make you think about it.
        If "greedy", use `greedy_tsp(G, weight)`.
        Other common starting cycles are `list(G) + [next(iter(G))]` or the final
        result of `simulated_annealing_tsp` when doing `threshold_accepting_tsp`.

    weight : string, optional (default="weight")
        Edge data key corresponding to the edge weight.
        If any edge does not have this attribute the weight is set to 1.

    source : node, optional (default: first node in list(G))
        Starting node. If None, defaults to ``next(iter(G))``

    threshold : int, optional (default=1)
        The algorithm's threshold parameter. It represents the initial
        threshold's value

    move : "1-1" or "1-0" or function, optional (default="1-1")
        Indicator of what move to use when finding new trial solutions.
        Strings indicate two special built-in moves:

        - "1-1": 1-1 exchange which transposes the position
          of two elements of the current solution.
          The function called is :func:`swap_two_nodes`.
          For example if we apply 1-1 exchange in the solution
          ``A = [3, 2, 1, 4, 3]``
          we can get the following by the transposition of 1 and 4 elements:
          ``A' = [3, 2, 4, 1, 3]``
        - "1-0": 1-0 exchange which moves an node in the solution
          to a new position.
          The function called is :func:`move_one_node`.
          For example if we apply 1-0 exchange in the solution
          ``A = [3, 2, 1, 4, 3]``
          we can transfer the fourth element to the second position:
          ``A' = [3, 4, 2, 1, 3]``

        You may provide your own functions to enact a move from
        one solution to a neighbor solution. The function must take
        the solution as input along with a `seed` input to control
        random number generation (see the `seed` input here).
        Your function should maintain the solution as a cycle with
        equal first and last node and all others appearing once.
        Your function should return the new solution.

    max_iterations : int, optional (default=10)
        Declared done when this number of consecutive iterations of
        the outer loop occurs without any change in the best cost solution.

    N_inner : int, optional (default=100)
        The number of iterations of the inner loop.

    alpha : float between (0, 1), optional (default=0.1)
        Percentage of threshold decrease when there is at
        least one acceptance of a neighbor solution.
        If no inner loop moves are accepted the threshold remains unchanged.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    cycle : list of nodes
        Returns the cycle (list of nodes) that a salesman
        can follow to minimize total weight of the trip.

    Raises
    ------
    NetworkXError
        If `G` is not complete the algorithm raises an exception.

    Examples
    --------
    >>> from networkx.algorithms import approximation as approx
    >>> G = nx.DiGraph()
    >>> G.add_weighted_edges_from(
    ...     {
    ...         ("A", "B", 3),
    ...         ("A", "C", 17),
    ...         ("A", "D", 14),
    ...         ("B", "A", 3),
    ...         ("B", "C", 12),
    ...         ("B", "D", 16),
    ...         ("C", "A", 13),
    ...         ("C", "B", 12),
    ...         ("C", "D", 4),
    ...         ("D", "A", 14),
    ...         ("D", "B", 15),
    ...         ("D", "C", 2),
    ...     }
    ... )
    >>> cycle = approx.threshold_accepting_tsp(G, "greedy", source="D")
    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
    >>> cycle
    ['D', 'C', 'B', 'A', 'D']
    >>> cost
    31
    >>> incycle = ["D", "B", "A", "C", "D"]
    >>> cycle = approx.threshold_accepting_tsp(G, incycle, source="D")
    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
    >>> cycle
    ['D', 'C', 'B', 'A', 'D']
    >>> cost
    31

    Notes
    -----
    Threshold Accepting is a metaheuristic local search algorithm.
    The main characteristic of this algorithm is that it accepts
    even solutions which lead to the increase of the cost in order
    to escape from low quality local optimal solutions.

    This algorithm needs an initial solution. This solution can be
    constructed by a simple greedy algorithm. At every iteration, it
    selects thoughtfully a neighbor solution.
    Consider $c(x)$ cost of current solution and $c(x')$ cost of
    neighbor solution.
    If $c(x') - c(x) <= threshold$ then the neighbor solution becomes the current
    solution for the next iteration, where the threshold is named threshold.

    In comparison to the Simulated Annealing algorithm, the Threshold
    Accepting algorithm does not accept very low quality solutions
    (due to the presence of the threshold value). In the case of
    Simulated Annealing, even a very low quality solution can
    be accepted with probability $p$.

    Time complexity:
    It has a running time $O(m * n * |V|)$ where $m$ and $n$ are the number
    of times the outer and inner loop run respectively.

    For more information and how algorithm is inspired see:
    https://doi.org/10.1016/0021-9991(90)90201-B

    See Also
    --------
    simulated_annealing_tsp

    """
    # Resolve the two built-in move strategies; any other callable passes through.
    if move == "1-1":
        move = swap_two_nodes
    elif move == "1-0":
        move = move_one_node
    if init_cycle == "greedy":
        # Construct an initial solution using a greedy algorithm.
        cycle = greedy_tsp(G, weight=weight, source=source)
        if G.number_of_nodes() == 2:
            return cycle

    else:
        cycle = list(init_cycle)
        if source is None:
            source = cycle[0]
        elif source != cycle[0]:
            raise nx.NetworkXError("source must be first node in init_cycle")
        if cycle[0] != cycle[-1]:
            raise nx.NetworkXError("init_cycle must be a cycle. (return to start)")

        if len(cycle) - 1 != len(G) or len(set(G.nbunch_iter(cycle))) != len(G):
            raise nx.NetworkXError("init_cycle is not all and only nodes.")

        # Check that G is a complete graph
        N = len(G) - 1
        # This check ignores selfloops which is what we want here.
        if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
            raise nx.NetworkXError("G must be a complete graph.")

        if G.number_of_nodes() == 2:
            # Only one possible tour; mirror simulated_annealing_tsp's idiom
            # instead of materializing the full neighbor list.
            neighbor = next(G.neighbors(source))
            return [source, neighbor, source]

    # Find the cost of initial solution
    cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(cycle))

    count = 0
    best_cycle = cycle.copy()
    best_cost = cost
    while count <= max_iterations:
        count += 1
        accepted = False
        for i in range(N_inner):
            adj_sol = move(cycle, seed)
            adj_cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(adj_sol))
            delta = adj_cost - cost
            if delta <= threshold:
                accepted = True

                # Set current solution the adjacent solution.
                cycle = adj_sol
                cost = adj_cost

                if cost < best_cost:
                    # Reset the stagnation counter on any strict improvement.
                    count = 0
                    best_cycle = cycle.copy()
                    best_cost = cost
        if accepted:
            # Tighten the acceptance threshold only on productive passes.
            threshold -= threshold * alpha

    return best_cycle
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/covering.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions related to graph covers."""
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms.bipartite.matching import hopcroft_karp_matching
5
+ from networkx.algorithms.covering import min_edge_cover as _min_edge_cover
6
+ from networkx.utils import not_implemented_for
7
+
8
+ __all__ = ["min_edge_cover"]
9
+
10
+
11
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(name="bipartite_min_edge_cover")
def min_edge_cover(G, matching_algorithm=None):
    """Returns a set of edges which constitutes
    the minimum edge cover of the graph.

    A minimum edge cover is computed in polynomial time by first finding a
    maximum cardinality matching and then greedily extending it until every
    node is incident to some chosen edge.

    Parameters
    ----------
    G : NetworkX graph
        An undirected bipartite graph.

    matching_algorithm : function
        A function that returns a maximum cardinality matching in a
        given bipartite graph. The function must take one input, the
        graph ``G``, and return a dictionary mapping each node to its
        mate. If not specified,
        :func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching`
        will be used. Other possibilities include
        :func:`~networkx.algorithms.bipartite.matching.eppstein_matching`,

    Returns
    -------
    set
        A set of the edges in a minimum edge cover of the graph, given as
        pairs of nodes. It contains both the edges `(u, v)` and `(v, u)`
        for given nodes `u` and `v` among the edges of minimum edge cover.

    Notes
    -----
    An edge cover of a graph is a set of edges such that every node of
    the graph is incident to at least one edge of the set.
    A minimum edge cover is an edge covering of smallest cardinality.

    Due to its implementation, the worst-case running time of this algorithm
    is bounded by the worst-case running time of the function
    ``matching_algorithm``.
    """
    # An empty graph has an empty cover; bail out before doing any work.
    if G.order() == 0:
        return set()
    # Default to Hopcroft--Karp when the caller did not pick a matcher,
    # then delegate the actual cover construction to the generic routine.
    algorithm = (
        hopcroft_karp_matching if matching_algorithm is None else matching_algorithm
    )
    return _min_edge_cover(G, matching_algorithm=algorithm)
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/matching.py ADDED
@@ -0,0 +1,590 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This module uses material from the Wikipedia article Hopcroft--Karp algorithm
2
+ # <https://en.wikipedia.org/wiki/Hopcroft%E2%80%93Karp_algorithm>, accessed on
3
+ # January 3, 2015, which is released under the Creative Commons
4
+ # Attribution-Share-Alike License 3.0
5
+ # <http://creativecommons.org/licenses/by-sa/3.0/>. That article includes
6
+ # pseudocode, which has been translated into the corresponding Python code.
7
+ #
8
+ # Portions of this module use code from David Eppstein's Python Algorithms and
9
+ # Data Structures (PADS) library, which is dedicated to the public domain (for
10
+ # proof, see <http://www.ics.uci.edu/~eppstein/PADS/ABOUT-PADS.txt>).
11
+ """Provides functions for computing maximum cardinality matchings and minimum
12
+ weight full matchings in a bipartite graph.
13
+
14
+ If you don't care about the particular implementation of the maximum matching
15
+ algorithm, simply use the :func:`maximum_matching`. If you do care, you can
16
+ import one of the named maximum matching algorithms directly.
17
+
18
+ For example, to find a maximum matching in the complete bipartite graph with
19
+ two vertices on the left and three vertices on the right:
20
+
21
+ >>> G = nx.complete_bipartite_graph(2, 3)
22
+ >>> left, right = nx.bipartite.sets(G)
23
+ >>> list(left)
24
+ [0, 1]
25
+ >>> list(right)
26
+ [2, 3, 4]
27
+ >>> nx.bipartite.maximum_matching(G)
28
+ {0: 2, 1: 3, 2: 0, 3: 1}
29
+
30
+ The dictionary returned by :func:`maximum_matching` includes a mapping for
31
+ vertices in both the left and right vertex sets.
32
+
33
+ Similarly, :func:`minimum_weight_full_matching` produces, for a complete
34
+ weighted bipartite graph, a matching whose cardinality is the cardinality of
35
+ the smaller of the two partitions, and for which the sum of the weights of the
36
+ edges included in the matching is minimal.
37
+
38
+ """
39
+
40
+ import collections
41
+ import itertools
42
+
43
+ import networkx as nx
44
+ from networkx.algorithms.bipartite import sets as bipartite_sets
45
+ from networkx.algorithms.bipartite.matrix import biadjacency_matrix
46
+
47
+ __all__ = [
48
+ "maximum_matching",
49
+ "hopcroft_karp_matching",
50
+ "eppstein_matching",
51
+ "to_vertex_cover",
52
+ "minimum_weight_full_matching",
53
+ ]
54
+
55
+ INFINITY = float("inf")
56
+
57
+
58
@nx._dispatchable
def hopcroft_karp_matching(G, top_nodes=None):
    """Returns the maximum cardinality matching of the bipartite graph `G`.

    A matching is a set of edges that do not share any nodes. A maximum
    cardinality matching is a matching with the most edges possible. It
    is not always unique. Finding a matching in a bipartite graph can be
    treated as a networkx flow problem.

    The functions ``hopcroft_karp_matching`` and ``maximum_matching``
    are aliases of the same function.

    Parameters
    ----------
    G : NetworkX graph

      Undirected bipartite graph

    top_nodes : container of nodes

      Container with all nodes in one bipartite node set. If not supplied
      it will be computed. But if more than one solution exists an exception
      will be raised.

    Returns
    -------
    matches : dictionary

      The matching is returned as a dictionary, `matches`, such that
      ``matches[v] == w`` if node `v` is matched to node `w`. Unmatched
      nodes do not occur as a key in `matches`.

    Raises
    ------
    AmbiguousSolution
      Raised if the input bipartite graph is disconnected and no container
      with all nodes in one bipartite set is provided. When determining
      the nodes in each bipartite set more than one valid solution is
      possible if the input graph is disconnected.

    Notes
    -----
    This function is implemented with the `Hopcroft--Karp matching algorithm
    <https://en.wikipedia.org/wiki/Hopcroft%E2%80%93Karp_algorithm>`_ for
    bipartite graphs.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    maximum_matching
    hopcroft_karp_matching
    eppstein_matching

    References
    ----------
    .. [1] John E. Hopcroft and Richard M. Karp. "An n^{5 / 2} Algorithm for
       Maximum Matchings in Bipartite Graphs" In: **SIAM Journal of Computing**
       2.4 (1973), pp. 225--231. <https://doi.org/10.1137/0202019>.

    """

    # First we define some auxiliary search functions.
    #
    # If you are a human reading these auxiliary search functions, the "global"
    # variables `leftmatches`, `rightmatches`, `distances`, etc. are defined
    # below the functions, so that they are initialized close to the initial
    # invocation of the search functions.
    def breadth_first_search():
        # Build the layered structure of shortest augmenting paths: free left
        # vertices start at distance 0; `distances[None]` records the length of
        # the shortest augmenting path found (INFINITY if none exists).
        for v in left:
            if leftmatches[v] is None:
                distances[v] = 0
                queue.append(v)
            else:
                distances[v] = INFINITY
        distances[None] = INFINITY
        while queue:
            v = queue.popleft()
            if distances[v] < distances[None]:
                for u in G[v]:
                    if distances[rightmatches[u]] is INFINITY:
                        distances[rightmatches[u]] = distances[v] + 1
                        queue.append(rightmatches[u])
        return distances[None] is not INFINITY

    def depth_first_search(v):
        # Follow the BFS layering to augment along a shortest path, flipping
        # matched/unmatched edges as the recursion unwinds.
        if v is not None:
            for u in G[v]:
                if distances[rightmatches[u]] == distances[v] + 1:
                    if depth_first_search(rightmatches[u]):
                        rightmatches[u] = v
                        leftmatches[v] = u
                        return True
            # Dead end: exclude this vertex from the current phase.
            distances[v] = INFINITY
            return False
        return True

    # Initialize the "global" variables that maintain state during the search.
    left, right = bipartite_sets(G, top_nodes)
    leftmatches = {v: None for v in left}
    rightmatches = {v: None for v in right}
    distances = {}
    queue = collections.deque()

    # Each BFS phase finds the set of shortest augmenting paths; DFS then
    # augments along a maximal vertex-disjoint subset of them.
    while breadth_first_search():
        for v in left:
            if leftmatches[v] is None:
                depth_first_search(v)

    # Strip the entries matched to `None`.
    leftmatches = {k: v for k, v in leftmatches.items() if v is not None}
    rightmatches = {k: v for k, v in rightmatches.items() if v is not None}

    # At this point, the left matches and the right matches are inverses of one
    # another. In other words,
    #
    #     leftmatches == {v, k for k, v in rightmatches.items()}
    #
    # Finally, we combine both the left matches and right matches.
    return dict(itertools.chain(leftmatches.items(), rightmatches.items()))
183
+
184
+
185
@nx._dispatchable
def eppstein_matching(G, top_nodes=None):
    """Returns the maximum cardinality matching of the bipartite graph `G`.

    Parameters
    ----------
    G : NetworkX graph

      Undirected bipartite graph

    top_nodes : container

      Container with all nodes in one bipartite node set. If not supplied
      it will be computed. But if more than one solution exists an exception
      will be raised.

    Returns
    -------
    matches : dictionary

      The matching is returned as a dictionary, `matching`, such that
      ``matching[v] == w`` if node `v` is matched to node `w`. Unmatched
      nodes do not occur as a key in `matching`.

    Raises
    ------
    AmbiguousSolution
      Raised if the input bipartite graph is disconnected and no container
      with all nodes in one bipartite set is provided. When determining
      the nodes in each bipartite set more than one valid solution is
      possible if the input graph is disconnected.

    Notes
    -----
    This function is implemented with David Eppstein's version of the algorithm
    Hopcroft--Karp algorithm (see :func:`hopcroft_karp_matching`), which
    originally appeared in the `Python Algorithms and Data Structures library
    (PADS) <http://www.ics.uci.edu/~eppstein/PADS/ABOUT-PADS.txt>`_.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------

    hopcroft_karp_matching

    """
    # Due to its original implementation, a directed graph is needed
    # so that the two sets of bipartite nodes can be distinguished
    left, right = bipartite_sets(G, top_nodes)
    G = nx.DiGraph(G.edges(left))
    # initialize greedy matching (redundant, but faster than full search)
    # `matching` maps right-set vertices to their left-set mates.
    matching = {}
    for u in G:
        for v in G[u]:
            if v not in matching:
                matching[v] = u
                break
    while True:
        # structure residual graph into layers
        # pred[u] gives the neighbor in the previous layer for u in U
        # preds[v] gives a list of neighbors in the previous layer for v in V
        # unmatched gives a list of unmatched vertices in final layer of V,
        # and is also used as a flag value for pred[u] when u is in the first
        # layer
        preds = {}
        unmatched = []
        pred = {u: unmatched for u in G}
        for v in matching:
            del pred[matching[v]]
        layer = list(pred)

        # repeatedly extend layering structure by another pair of layers
        while layer and not unmatched:
            newLayer = {}
            for u in layer:
                for v in G[u]:
                    if v not in preds:
                        newLayer.setdefault(v, []).append(u)
            layer = []
            for v in newLayer:
                preds[v] = newLayer[v]
                if v in matching:
                    layer.append(matching[v])
                    pred[matching[v]] = v
                else:
                    unmatched.append(v)

        # did we finish layering without finding any alternating paths?
        if not unmatched:
            # TODO - The lines between --- were unused and were thus commented
            # out. This whole commented chunk should be reviewed to determine
            # whether it should be built upon or completely removed.
            # ---
            # unlayered = {}
            # for u in G:
            #     # TODO Why is extra inner loop necessary?
            #     for v in G[u]:
            #         if v not in preds:
            #             unlayered[v] = None
            # ---
            # TODO Originally, this function returned a three-tuple:
            #
            #     return (matching, list(pred), list(unlayered))
            #
            # For some reason, the documentation for this function
            # indicated that the second and third elements of the returned
            # three-tuple would be the vertices in the left and right vertex
            # sets, respectively, that are also in the maximum independent set.
            # However, what I think the author meant was that the second
            # element is the list of vertices that were unmatched and the third
            # element was the list of vertices that were matched. Since that
            # seems to be the case, they don't really need to be returned,
            # since that information can be inferred from the matching
            # dictionary.

            # All the matched nodes must be a key in the dictionary
            # (add the inverse direction so both endpoints map to each other).
            for key in matching.copy():
                matching[matching[key]] = key
            return matching

        # recursively search backward through layers to find alternating paths
        # recursion returns true if found path, false otherwise
        def recurse(v):
            if v in preds:
                L = preds.pop(v)
                for u in L:
                    if u in pred:
                        pu = pred.pop(u)
                        # NOTE: `pu is unmatched` is an identity check against
                        # the `unmatched` list object, used as the flag marking
                        # first-layer vertices (see comment above).
                        if pu is unmatched or recurse(pu):
                            matching[v] = u
                            return True
            return False

        for v in unmatched:
            recurse(v)
322
+
323
+
324
+ def _is_connected_by_alternating_path(G, v, matched_edges, unmatched_edges, targets):
325
+ """Returns True if and only if the vertex `v` is connected to one of
326
+ the target vertices by an alternating path in `G`.
327
+
328
+ An *alternating path* is a path in which every other edge is in the
329
+ specified maximum matching (and the remaining edges in the path are not in
330
+ the matching). An alternating path may have matched edges in the even
331
+ positions or in the odd positions, as long as the edges alternate between
332
+ 'matched' and 'unmatched'.
333
+
334
+ `G` is an undirected bipartite NetworkX graph.
335
+
336
+ `v` is a vertex in `G`.
337
+
338
+ `matched_edges` is a set of edges present in a maximum matching in `G`.
339
+
340
+ `unmatched_edges` is a set of edges not present in a maximum
341
+ matching in `G`.
342
+
343
+ `targets` is a set of vertices.
344
+
345
+ """
346
+
347
+ def _alternating_dfs(u, along_matched=True):
348
+ """Returns True if and only if `u` is connected to one of the
349
+ targets by an alternating path.
350
+
351
+ `u` is a vertex in the graph `G`.
352
+
353
+ If `along_matched` is True, this step of the depth-first search
354
+ will continue only through edges in the given matching. Otherwise, it
355
+ will continue only through edges *not* in the given matching.
356
+
357
+ """
358
+ visited = set()
359
+ # Follow matched edges when depth is even,
360
+ # and follow unmatched edges when depth is odd.
361
+ initial_depth = 0 if along_matched else 1
362
+ stack = [(u, iter(G[u]), initial_depth)]
363
+ while stack:
364
+ parent, children, depth = stack[-1]
365
+ valid_edges = matched_edges if depth % 2 else unmatched_edges
366
+ try:
367
+ child = next(children)
368
+ if child not in visited:
369
+ if (parent, child) in valid_edges or (child, parent) in valid_edges:
370
+ if child in targets:
371
+ return True
372
+ visited.add(child)
373
+ stack.append((child, iter(G[child]), depth + 1))
374
+ except StopIteration:
375
+ stack.pop()
376
+ return False
377
+
378
+ # Check for alternating paths starting with edges in the matching, then
379
+ # check for alternating paths starting with edges not in the
380
+ # matching.
381
+ return _alternating_dfs(v, along_matched=True) or _alternating_dfs(
382
+ v, along_matched=False
383
+ )
384
+
385
+
386
def _connected_by_alternating_paths(G, matching, targets):
    """Returns the set of vertices that are connected to one of the target
    vertices by an alternating path in `G` or are themselves a target.

    An *alternating path* is a path in which every other edge is in the
    specified maximum matching (and the remaining edges in the path are not in
    the matching). An alternating path may have matched edges in the even
    positions or in the odd positions, as long as the edges alternate between
    'matched' and 'unmatched'.

    `G` is an undirected bipartite NetworkX graph.

    `matching` is a dictionary representing a maximum matching in `G`, as
    returned by, for example, :func:`maximum_matching`.

    `targets` is a set of vertices.

    """
    # Partition the edges into matched and unmatched, keeping a single
    # orientation of each undirected edge. Frozensets are used as the
    # canonical form so that nodes need not be orderable.
    matched_pairs = {frozenset((u, v)) for u, v in matching.items()}
    matched_edges = {tuple(pair) for pair in matched_pairs}
    unmatched_edges = set()
    for u, v in G.edges():
        if frozenset((u, v)) not in matched_pairs:
            unmatched_edges.add((u, v))

    # A vertex belongs to the result if it is a target itself or reaches a
    # target along some alternating path.
    reachable = set()
    for node in G:
        if node in targets or _is_connected_by_alternating_path(
            G, node, matched_edges, unmatched_edges, targets
        ):
            reachable.add(node)
    return reachable
422
+
423
+
424
@nx._dispatchable
def to_vertex_cover(G, matching, top_nodes=None):
    """Returns the minimum vertex cover corresponding to the given maximum
    matching of the bipartite graph `G`.

    Parameters
    ----------
    G : NetworkX graph

      Undirected bipartite graph

    matching : dictionary

      A dictionary whose keys are vertices in `G` and whose values are the
      distinct neighbors comprising the maximum matching for `G`, as returned
      by, for example, :func:`maximum_matching`. The dictionary *must*
      represent the maximum matching.

    top_nodes : container

      Container with all nodes in one bipartite node set. If not supplied
      it will be computed. But if more than one solution exists an exception
      will be raised.

    Returns
    -------
    vertex_cover : :class:`set`

      The minimum vertex cover in `G`.

    Raises
    ------
    AmbiguousSolution
      Raised if the input bipartite graph is disconnected and no container
      with all nodes in one bipartite set is provided. When determining
      the nodes in each bipartite set more than one valid solution is
      possible if the input graph is disconnected.

    Notes
    -----
    This function is implemented using the procedure guaranteed by `Konig's
    theorem
    <https://en.wikipedia.org/wiki/K%C3%B6nig%27s_theorem_%28graph_theory%29>`_,
    which proves an equivalence between a maximum matching and a minimum vertex
    cover in bipartite graphs.

    Since a minimum vertex cover is the complement of a maximum independent set
    for any graph, one can compute the maximum independent set of a bipartite
    graph this way:

    >>> G = nx.complete_bipartite_graph(2, 3)
    >>> matching = nx.bipartite.maximum_matching(G)
    >>> vertex_cover = nx.bipartite.to_vertex_cover(G, matching)
    >>> independent_set = set(G) - vertex_cover
    >>> print(list(independent_set))
    [2, 3, 4]

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    """
    # Constructive proof of Konig's theorem, as described at
    # <https://en.wikipedia.org/wiki/K%C3%B6nig%27s_theorem_%28graph_theory%29#Proof>.
    left, right = bipartite_sets(G, top_nodes)
    # Exposed (unmatched) vertices of the left partition.
    exposed_left = {v for v in left if v not in matching}
    # All vertices reachable from an exposed left vertex along an
    # alternating path (the exposed vertices themselves included).
    reachable = _connected_by_alternating_paths(G, matching, exposed_left)
    # Every edge has either its right endpoint in `reachable` or its left
    # endpoint outside it, so this set covers all edges.
    return (left - reachable) | (right & reachable)
497
+
498
+
499
#: Returns the maximum cardinality matching in the given bipartite graph.
#:
#: This function is simply an alias for :func:`hopcroft_karp_matching`.
# Module-level alias kept so callers can use the generic name without
# committing to a particular matching algorithm.
maximum_matching = hopcroft_karp_matching
503
+
504
+
505
@nx._dispatchable(edge_attrs="weight")
def minimum_weight_full_matching(G, top_nodes=None, weight="weight"):
    r"""Returns a minimum weight full matching of the bipartite graph `G`.

    Let :math:`G = ((U, V), E)` be a weighted bipartite graph with real weights
    :math:`w : E \to \mathbb{R}`. This function then produces a matching
    :math:`M \subseteq E` with cardinality

    .. math::
       \lvert M \rvert = \min(\lvert U \rvert, \lvert V \rvert),

    which minimizes the sum of the weights of the edges included in the
    matching, :math:`\sum_{e \in M} w(e)`, or raises an error if no such
    matching exists.

    When :math:`\lvert U \rvert = \lvert V \rvert`, this is commonly
    referred to as a perfect matching; here, since we allow
    :math:`\lvert U \rvert` and :math:`\lvert V \rvert` to differ, we
    follow Karp [1]_ and refer to the matching as *full*.

    Parameters
    ----------
    G : NetworkX graph

      Undirected bipartite graph

    top_nodes : container

      Container with all nodes in one bipartite node set. If not supplied
      it will be computed.

    weight : string, optional (default='weight')

       The edge data key used to provide each value in the matrix.
       If None, then each edge has weight 1.

    Returns
    -------
    matches : dictionary

      The matching is returned as a dictionary, `matches`, such that
      ``matches[v] == w`` if node `v` is matched to node `w`. Unmatched
      nodes do not occur as a key in `matches`.

    Raises
    ------
    ValueError
      Raised if no full matching exists.

    ImportError
      Raised if SciPy is not available.

    Notes
    -----
    The problem of determining a minimum weight full matching is also known as
    the rectangular linear assignment problem. This implementation defers the
    calculation of the assignment to SciPy.

    References
    ----------
    .. [1] Richard Manning Karp:
       An algorithm to Solve the m x n Assignment Problem in Expected Time
       O(mn log n).
       Networks, 10(2):143–152, 1980.

    """
    import numpy as np
    import scipy as sp

    row_part, col_part = nx.bipartite.sets(G, top_nodes)
    row_nodes = list(row_part)
    col_nodes = list(col_part)
    # Build a dense cost matrix in which missing edges are marked with
    # infinity (a plain ``toarray`` would mark them with zero, which would
    # wrongly make absent edges the cheapest choice).
    sparse_costs = biadjacency_matrix(
        G, row_order=row_nodes, column_order=col_nodes, weight=weight, format="coo"
    )
    costs = np.full(sparse_costs.shape, np.inf)
    costs[sparse_costs.row, sparse_costs.col] = sparse_costs.data
    # SciPy solves the rectangular linear assignment problem for us.
    row_ind, col_ind = sp.optimize.linear_sum_assignment(costs)
    matches = {row_nodes[r]: col_nodes[c] for r, c in zip(row_ind, col_ind)}
    # The assignment maps left nodes to right nodes; mirror it so the
    # matching can also be looked up from the right side.
    matches.update({w: v for v, w in matches.items()})
    return matches
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/matrix.py ADDED
@@ -0,0 +1,168 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ ====================
3
+ Biadjacency matrices
4
+ ====================
5
+ """
6
+
7
+ import itertools
8
+
9
+ import networkx as nx
10
+ from networkx.convert_matrix import _generate_weighted_edges
11
+
12
+ __all__ = ["biadjacency_matrix", "from_biadjacency_matrix"]
13
+
14
+
15
@nx._dispatchable(edge_attrs="weight")
def biadjacency_matrix(
    G, row_order, column_order=None, dtype=None, weight="weight", format="csr"
):
    r"""Returns the biadjacency matrix of the bipartite graph G.

    Let `G = (U, V, E)` be a bipartite graph with node sets
    `U = u_{1},...,u_{r}` and `V = v_{1},...,v_{s}`. The biadjacency
    matrix [1]_ is the `r` x `s` matrix `B` in which `b_{i,j} = 1`
    if, and only if, `(u_i, v_j) \in E`. If the parameter `weight` is
    not `None` and matches the name of an edge attribute, its value is
    used instead of 1.

    Parameters
    ----------
    G : graph
       A NetworkX graph

    row_order : list of nodes
       The rows of the matrix are ordered according to the list of nodes.

    column_order : list, optional
       The columns of the matrix are ordered according to the list of nodes.
       If column_order is None, then the ordering of columns is arbitrary.

    dtype : NumPy data-type, optional
        A valid NumPy dtype used to initialize the array. If None, then the
        NumPy default is used.

    weight : string or None, optional (default='weight')
       The edge data key used to provide each value in the matrix.
       If None, then each edge has weight 1.

    format : str in {'bsr', 'csr', 'csc', 'coo', 'lil', 'dia', 'dok'}
        The type of the matrix to be returned (default 'csr'). For
        some algorithms different implementations of sparse matrices
        can perform better. See [2]_ for details.

    Returns
    -------
    M : SciPy sparse array
        Biadjacency matrix representation of the bipartite graph G.

    Notes
    -----
    No attempt is made to check that the input graph is bipartite.

    For directed bipartite graphs only successors are considered as neighbors.
    To obtain an adjacency matrix with ones (or weight values) for both
    predecessors and successors you have to generate two biadjacency matrices
    where the rows of one of them are the columns of the other, and then add
    one to the transpose of the other.

    See Also
    --------
    adjacency_matrix
    from_biadjacency_matrix

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph
    .. [2] Scipy Dev. References, "Sparse Matrices",
       https://docs.scipy.org/doc/scipy/reference/sparse.html
    """
    import scipy as sp

    nlen = len(row_order)
    if nlen == 0:
        raise nx.NetworkXError("row_order is empty list")
    if nlen != len(set(row_order)):
        msg = "Ambiguous ordering: `row_order` contained duplicates."
        raise nx.NetworkXError(msg)
    if column_order is None:
        column_order = list(set(G) - set(row_order))
    mlen = len(column_order)
    if mlen != len(set(column_order)):
        msg = "Ambiguous ordering: `column_order` contained duplicates."
        raise nx.NetworkXError(msg)

    row_index = {node: i for i, node in enumerate(row_order)}
    col_index = {node: j for j, node in enumerate(column_order)}

    # Materialize the (row, col, value) triples before unpacking.  Checking
    # ``G.number_of_edges() == 0`` alone is not enough: a graph may have
    # edges, none of which fall inside the selected row/column sets, and
    # ``zip(*<empty>)`` would then raise ValueError on unpacking.
    triples = [
        (row_index[u], col_index[v], d.get(weight, 1))
        for u, v, d in G.edges(row_order, data=True)
        if u in row_index and v in col_index
    ]
    if triples:
        row, col, data = zip(*triples)
    else:
        row, col, data = [], [], []
    A = sp.sparse.coo_array((data, (row, col)), shape=(nlen, mlen), dtype=dtype)
    try:
        return A.asformat(format)
    except ValueError as err:
        raise nx.NetworkXError(f"Unknown sparse array format: {format}") from err
112
+
113
+
114
@nx._dispatchable(graphs=None, returns_graph=True)
def from_biadjacency_matrix(A, create_using=None, edge_attribute="weight"):
    r"""Creates a new bipartite graph from a biadjacency matrix given as a
    SciPy sparse array.

    Parameters
    ----------
    A: scipy sparse array
      A biadjacency matrix representation of a graph

    create_using: NetworkX graph
       Use specified graph for result.  The default is Graph()

    edge_attribute: string
       Name of edge attribute to store matrix numeric value. The data will
       have the same type as the matrix entry (int, float, (real,imag)).

    Notes
    -----
    The nodes are labeled with the attribute `bipartite` set to an integer
    0 or 1 representing membership in part 0 or part 1 of the bipartite graph.

    If `create_using` is an instance of :class:`networkx.MultiGraph` or
    :class:`networkx.MultiDiGraph` and the entries of `A` are of
    type :class:`int`, then this function returns a multigraph (of the same
    type as `create_using`) with parallel edges. In this case, `edge_attribute`
    will be ignored.

    See Also
    --------
    biadjacency_matrix
    from_numpy_array

    References
    ----------
    [1] https://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph
    """
    G = nx.empty_graph(0, create_using)
    nrows, ncols = A.shape
    # Register every node up front so isolated vertices are not lost; rows
    # become part 0 and columns become part 1, shifted past the row labels.
    G.add_nodes_from(range(nrows), bipartite=0)
    G.add_nodes_from(range(nrows, nrows + ncols), bipartite=1)
    # Lazily produce (u, v, w) triples with the column index shifted into
    # the part-1 label range.
    weighted = ((u, nrows + v, w) for u, v, w in _generate_weighted_edges(A))
    if A.dtype.kind in ("i", "u") and G.is_multigraph():
        # Integer entries on a multigraph are interpreted as edge
        # multiplicities: emit `w` parallel edges of weight 1 instead of a
        # single edge of weight `w`.
        weighted = itertools.chain.from_iterable(
            ((u, v, 1) for _ in range(w)) for (u, v, w) in weighted
        )
    G.add_weighted_edges_from(weighted, weight=edge_attribute)
    return G
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_project.cpython-310.pyc ADDED
Binary file (11.7 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_covering.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import networkx as nx
2
+ from networkx.algorithms import bipartite
3
+
4
+
5
class TestMinEdgeCover:
    """Tests for :func:`networkx.algorithms.bipartite.min_edge_cover`"""

    @staticmethod
    def _example_graph():
        # Small bipartite graph shared by the two cover tests below.
        G = nx.Graph()
        G.add_nodes_from([1, 2, 3, 4], bipartite=0)
        G.add_nodes_from(["a", "b", "c"], bipartite=1)
        G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
        return G

    def test_empty_graph(self):
        # An empty graph needs no edges to cover it.
        assert bipartite.min_edge_cover(nx.Graph()) == set()

    def test_graph_single_edge(self):
        # A single edge is reported in both orientations.
        G = nx.Graph([(0, 1)])
        assert bipartite.min_edge_cover(G) == {(0, 1), (1, 0)}

    def test_bipartite_default(self):
        G = self._example_graph()
        min_cover = bipartite.min_edge_cover(G)
        assert nx.is_edge_cover(G, min_cover)
        assert len(min_cover) == 8

    def test_bipartite_explicit(self):
        # Same graph, but with the matching algorithm supplied explicitly.
        G = self._example_graph()
        min_cover = bipartite.min_edge_cover(G, bipartite.eppstein_matching)
        assert nx.is_edge_cover(G, min_cover)
        assert len(min_cover) == 8
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_matrix.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ np = pytest.importorskip("numpy")
4
+ sp = pytest.importorskip("scipy")
5
+ sparse = pytest.importorskip("scipy.sparse")
6
+
7
+
8
+ import networkx as nx
9
+ from networkx.algorithms import bipartite
10
+ from networkx.utils import edges_equal
11
+
12
+
13
class TestBiadjacencyMatrix:
    """Tests for :func:`bipartite.biadjacency_matrix` and
    :func:`bipartite.from_biadjacency_matrix`."""

    def test_biadjacency_matrix_weight(self):
        # Matrix entries should come from the requested edge-data key.
        G = nx.path_graph(5)
        G.add_edge(0, 1, weight=2, other=4)
        X = [1, 3]
        Y = [0, 2, 4]
        M = bipartite.biadjacency_matrix(G, X, weight="weight")
        assert M[0, 0] == 2
        M = bipartite.biadjacency_matrix(G, X, weight="other")
        assert M[0, 0] == 4

    def test_biadjacency_matrix(self):
        # Matrix shape must match the sizes of the two node sets.
        tops = [2, 5, 10]
        bots = [5, 10, 15]
        for i in range(len(tops)):
            G = bipartite.random_graph(tops[i], bots[i], 0.2)
            top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0]
            M = bipartite.biadjacency_matrix(G, top)
            assert M.shape[0] == tops[i]
            assert M.shape[1] == bots[i]

    def test_biadjacency_matrix_order(self):
        # Rows/columns must follow the supplied orderings, not node order.
        G = nx.path_graph(5)
        G.add_edge(0, 1, weight=2)
        X = [3, 1]
        Y = [4, 2, 0]
        M = bipartite.biadjacency_matrix(G, X, Y, weight="weight")
        assert M[1, 2] == 2

    def test_biadjacency_matrix_empty_graph(self):
        # A graph with no edges yields an all-zero matrix.
        G = nx.empty_graph(2)
        M = nx.bipartite.biadjacency_matrix(G, [0])
        assert np.array_equal(M.toarray(), np.array([[0]]))

    def test_null_graph(self):
        # An empty row_order is rejected.
        with pytest.raises(nx.NetworkXError):
            bipartite.biadjacency_matrix(nx.Graph(), [])

    def test_empty_graph(self):
        with pytest.raises(nx.NetworkXError):
            bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [])

    def test_duplicate_row(self):
        # Duplicate entries make the ordering ambiguous.
        with pytest.raises(nx.NetworkXError):
            bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [1, 1])

    def test_duplicate_col(self):
        with pytest.raises(nx.NetworkXError):
            bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], [1, 1])

    def test_format_keyword(self):
        # Unknown sparse formats are reported as NetworkXError.
        with pytest.raises(nx.NetworkXError):
            bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], format="foo")

    def test_from_biadjacency_roundtrip(self):
        # graph -> matrix -> graph should preserve structure.
        B1 = nx.path_graph(5)
        M = bipartite.biadjacency_matrix(B1, [0, 2, 4])
        B2 = bipartite.from_biadjacency_matrix(M)
        assert nx.is_isomorphic(B1, B2)

    def test_from_biadjacency_weight(self):
        # Matrix values become edge weights under the requested attribute.
        M = sparse.csc_matrix([[1, 2], [0, 3]])
        B = bipartite.from_biadjacency_matrix(M)
        assert edges_equal(B.edges(), [(0, 2), (0, 3), (1, 3)])
        B = bipartite.from_biadjacency_matrix(M, edge_attribute="weight")
        e = [(0, 2, {"weight": 1}), (0, 3, {"weight": 2}), (1, 3, {"weight": 3})]
        assert edges_equal(B.edges(data=True), e)

    def test_from_biadjacency_multigraph(self):
        # Integer entries on a multigraph become parallel edges.
        M = sparse.csc_matrix([[1, 2], [0, 3]])
        B = bipartite.from_biadjacency_matrix(M, create_using=nx.MultiGraph())
        assert edges_equal(B.edges(), [(0, 2), (0, 3), (0, 3), (1, 3), (1, 3), (1, 3)])
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py ADDED
@@ -0,0 +1,80 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ pytest.importorskip("scipy")
4
+
5
+ import networkx as nx
6
+ from networkx.algorithms.bipartite import spectral_bipartivity as sb
7
+
8
+ # Examples from Figure 1
9
+ # E. Estrada and J. A. Rodríguez-Velázquez, "Spectral measures of
10
+ # bipartivity in complex networks", PhysRev E 72, 046105 (2005)
11
+
12
+
13
class TestSpectralBipartivity:
    """Tests for :func:`spectral_bipartivity`.

    Expected values are taken from Figure 1 of Estrada &
    Rodríguez-Velázquez (2005), cited at the top of this module.
    """

    def test_star_like(self):
        # star-like graphs: adding a chord to a star lowers bipartivity,
        # while growing the star raises it back toward 1.

        G = nx.star_graph(2)
        G.add_edge(1, 2)
        assert sb(G) == pytest.approx(0.843, abs=1e-3)

        G = nx.star_graph(3)
        G.add_edge(1, 2)
        assert sb(G) == pytest.approx(0.871, abs=1e-3)

        G = nx.star_graph(4)
        G.add_edge(1, 2)
        assert sb(G) == pytest.approx(0.890, abs=1e-3)

    def test_k23_like(self):
        # K2,3-like graphs: each extra intra-partition edge reduces the
        # spectral bipartivity measure.
        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(0, 1)
        assert sb(G) == pytest.approx(0.769, abs=1e-3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        assert sb(G) == pytest.approx(0.829, abs=1e-3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        G.add_edge(3, 4)
        assert sb(G) == pytest.approx(0.731, abs=1e-3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(0, 1)
        G.add_edge(2, 4)
        assert sb(G) == pytest.approx(0.692, abs=1e-3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        G.add_edge(3, 4)
        G.add_edge(0, 1)
        assert sb(G) == pytest.approx(0.645, abs=1e-3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        G.add_edge(3, 4)
        G.add_edge(2, 3)
        assert sb(G) == pytest.approx(0.645, abs=1e-3)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        G.add_edge(3, 4)
        G.add_edge(2, 3)
        G.add_edge(0, 1)
        assert sb(G) == pytest.approx(0.597, abs=1e-3)

    def test_single_nodes(self):
        # Per-node contributions via the `nodes` keyword.
        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(2, 4)
        sbn = sb(G, nodes=[1, 2])
        assert sbn[1] == pytest.approx(0.85, abs=1e-2)
        assert sbn[2] == pytest.approx(0.77, abs=1e-2)

        G = nx.complete_bipartite_graph(2, 3)
        G.add_edge(0, 1)
        sbn = sb(G, nodes=[1, 2])
        assert sbn[1] == pytest.approx(0.73, abs=1e-2)
        assert sbn[2] == pytest.approx(0.82, abs=1e-2)
wemm/lib/python3.10/site-packages/networkx/algorithms/bridges.py ADDED
@@ -0,0 +1,205 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Bridge-finding algorithms."""
2
+
3
+ from itertools import chain
4
+
5
+ import networkx as nx
6
+ from networkx.utils import not_implemented_for
7
+
8
+ __all__ = ["bridges", "has_bridges", "local_bridges"]
9
+
10
+
11
@not_implemented_for("directed")
@nx._dispatchable
def bridges(G, root=None):
    """Generate all bridges in a graph.

    A *bridge* in a graph is an edge whose removal causes the number of
    connected components of the graph to increase.  Equivalently, a bridge is an
    edge that does not belong to any cycle. Bridges are also known as cut-edges,
    isthmuses, or cut arcs.

    Parameters
    ----------
    G : undirected graph

    root : node (optional)
       A node in the graph `G`. If specified, only the bridges in the
       connected component containing this node will be returned.

    Yields
    ------
    e : edge
       An edge in the graph whose removal disconnects the graph (or
       causes the number of connected components to increase).

    Raises
    ------
    NodeNotFound
       If `root` is not in the graph `G`.

    NetworkXNotImplemented
        If `G` is a directed graph.

    Examples
    --------
    The barbell graph with parameter zero has a single bridge:

    >>> G = nx.barbell_graph(10, 0)
    >>> list(nx.bridges(G))
    [(9, 10)]

    Notes
    -----
    This is an implementation of the algorithm described in [1]_.  An edge is a
    bridge if and only if it is not contained in any chain. Chains are found
    using the :func:`networkx.chain_decomposition` function.

    The algorithm described in [1]_ requires a simple graph. If the provided
    graph is a multigraph, we convert it to a simple graph and verify that any
    bridges discovered by the chain decomposition algorithm are not multi-edges.

    Ignoring polylogarithmic factors, the worst-case time complexity is the
    same as the :func:`networkx.chain_decomposition` function,
    $O(m + n)$, where $n$ is the number of nodes in the graph and $m$ is
    the number of edges.

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Bridge_%28graph_theory%29#Bridge-Finding_with_Chain_Decompositions
    """
    multigraph = G.is_multigraph()
    # The chain-decomposition algorithm needs a simple graph.
    H = nx.Graph(G) if multigraph else G
    chain_edges = {e for c in nx.chain_decomposition(H, root=root) for e in c}
    if root is not None:
        # Restrict the scan to root's connected component.
        H = H.subgraph(nx.node_connected_component(H, root)).copy()
    for u, v in H.edges():
        if (u, v) in chain_edges or (v, u) in chain_edges:
            continue
        # A pair of parallel edges in the original multigraph can never be
        # a bridge, even though the simple graph's chain decomposition may
        # report the collapsed edge as one.
        if multigraph and len(G[u][v]) > 1:
            continue
        yield u, v
81
+
82
+
83
@not_implemented_for("directed")
@nx._dispatchable
def has_bridges(G, root=None):
    """Decide whether a graph has any bridges.

    A *bridge* in a graph is an edge whose removal causes the number of
    connected components of the graph to increase.

    Parameters
    ----------
    G : undirected graph

    root : node (optional)
       A node in the graph `G`. If specified, only the bridges in the
       connected component containing this node will be considered.

    Returns
    -------
    bool
       Whether the graph (or the connected component containing `root`)
       has any bridges.

    Raises
    ------
    NodeNotFound
       If `root` is not in the graph `G`.

    NetworkXNotImplemented
        If `G` is a directed graph.

    Examples
    --------
    The barbell graph with parameter zero has a single bridge::

        >>> G = nx.barbell_graph(10, 0)
        >>> nx.has_bridges(G)
        True

    On the other hand, the cycle graph has no bridges::

        >>> G = nx.cycle_graph(5)
        >>> nx.has_bridges(G)
        False

    Notes
    -----
    This implementation uses the :func:`networkx.bridges` function, so
    it shares its worst-case time complexity, $O(m + n)$, ignoring
    polylogarithmic factors, where $n$ is the number of nodes in the
    graph and $m$ is the number of edges.

    """
    # Pull at most one bridge from the generator: finding any is enough.
    for _ in bridges(G, root=root):
        return True
    return False
141
+
142
+
143
@not_implemented_for("multigraph")
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def local_bridges(G, with_span=True, weight=None):
    """Iterate over local bridges of `G` optionally computing the span

    A *local bridge* is an edge whose endpoints have no common neighbors.
    That is, the edge is not part of a triangle in the graph.

    The *span* of a *local bridge* is the shortest path length between
    the endpoints if the local bridge is removed.

    Parameters
    ----------
    G : undirected graph

    with_span : bool
        If True, yield a 3-tuple `(u, v, span)`

    weight : function, string or None (default: None)
        If function, used to compute edge weights for the span.
        If string, the edge data attribute used in calculating span.
        If None, all edges have weight 1.

    Yields
    ------
    e : edge
        The local bridges as an edge 2-tuple of nodes `(u, v)` or
        as a 3-tuple `(u, v, span)` when `with_span is True`.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a directed graph or multigraph.

    Examples
    --------
    A cycle graph has every edge a local bridge with span N-1.

       >>> G = nx.cycle_graph(9)
       >>> (0, 8, 8) in set(nx.local_bridges(G))
       True
    """
    if with_span is not True:
        # Fast path: only detect edges whose endpoints share no neighbor.
        for u, v in G.edges:
            if not (set(G[u]) & set(G[v])):
                yield u, v
    else:
        wt = nx.weighted._weight_function(G, weight)
        for u, v in G.edges:
            if not (set(G[u]) & set(G[v])):
                enodes = {u, v}

                def hide_edge(n, nbr, d):
                    # Weight function that "removes" the edge (u, v) itself:
                    # returning None tells shortest_path_length to ignore
                    # that edge; all other edges keep their normal weight.
                    if n not in enodes or nbr not in enodes:
                        return wt(n, nbr, d)
                    return None

                try:
                    span = nx.shortest_path_length(G, u, v, weight=hide_edge)
                    yield u, v, span
                except nx.NetworkXNoPath:
                    # Removing the edge disconnects u from v: infinite span.
                    yield u, v, float("inf")
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness_subset.cpython-310.pyc ADDED
Binary file (7.95 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/community/label_propagation.py ADDED
@@ -0,0 +1,338 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Label propagation community detection algorithms.
3
+ """
4
+
5
+ from collections import Counter, defaultdict, deque
6
+
7
+ import networkx as nx
8
+ from networkx.utils import groups, not_implemented_for, py_random_state
9
+
10
+ __all__ = [
11
+ "label_propagation_communities",
12
+ "asyn_lpa_communities",
13
+ "fast_label_propagation_communities",
14
+ ]
15
+
16
+
17
@py_random_state("seed")
@nx._dispatchable(edge_attrs="weight")
def fast_label_propagation_communities(G, *, weight=None, seed=None):
    """Returns communities in `G` as detected by fast label propagation.

    The fast label propagation algorithm is described in [1]_. The algorithm is
    probabilistic and the found communities may vary in different executions.

    The algorithm operates as follows. First, the community label of each node is
    set to a unique label. The algorithm then repeatedly updates the labels of
    the nodes to the most frequent label in their neighborhood. In case of ties,
    a random label is chosen from the most frequent labels.

    The algorithm maintains a queue of nodes that still need to be processed.
    Initially, all nodes are added to the queue in a random order. Then the nodes
    are removed from the queue one by one and processed. If a node updates its label,
    all its neighbors that have a different label are added to the queue (if not
    already in the queue). The algorithm stops when the queue is empty.

    Parameters
    ----------
    G : Graph, DiGraph, MultiGraph, or MultiDiGraph
        Any NetworkX graph.

    weight : string, or None (default)
        The edge attribute representing a non-negative weight of an edge. If None,
        each edge is assumed to have weight one. The weight of an edge is used in
        determining the frequency with which a label appears among the neighbors of
        a node (edge with weight `w` is equivalent to `w` unweighted edges).

    seed : integer, random_state, or None (default)
        Indicator of random number generation state. See :ref:`Randomness<randomness>`.

    Returns
    -------
    communities : iterable
        Iterable of communities given as sets of nodes.

    Notes
    -----
    Edge directions are ignored for directed graphs.
    Edge weights must be non-negative numbers.

    References
    ----------
    .. [1] Vincent A. Traag & Lovro Šubelj. "Large network community detection by
       fast label propagation." Scientific Reports 13 (2023): 2701.
       https://doi.org/10.1038/s41598-023-29610-z
    """

    # Queue of nodes to be processed.
    # (random.shuffle works on a deque since it supports indexed assignment.)
    nodes_queue = deque(G)
    seed.shuffle(nodes_queue)

    # Set of nodes in the queue, kept in sync with `nodes_queue` so that
    # queue membership can be tested in O(1).
    nodes_set = set(G)

    # Assign unique label to each node.
    comms = {node: i for i, node in enumerate(G)}

    while nodes_queue:
        # Remove next node from the queue to process.
        node = nodes_queue.popleft()
        nodes_set.remove(node)

        # Isolated nodes retain their initial label.
        if G.degree(node) > 0:
            # Compute frequency of labels in node's neighborhood.
            label_freqs = _fast_label_count(G, comms, node, weight)
            max_freq = max(label_freqs.values())

            # Always sample new label from most frequent labels.
            comm = seed.choice(
                [comm for comm in label_freqs if label_freqs[comm] == max_freq]
            )

            if comms[node] != comm:
                comms[node] = comm

                # Add neighbors that have different label to the queue.
                # (Re-processing them may propagate the new label further.)
                for nbr in nx.all_neighbors(G, node):
                    if comms[nbr] != comm and nbr not in nodes_set:
                        nodes_queue.append(nbr)
                        nodes_set.add(nbr)

    yield from groups(comms).values()
103
+
104
+
105
def _fast_label_count(G, comms, node, weight=None):
    """Return a mapping from community label to the (possibly weighted)
    frequency of that label among the neighbors of `node`.

    For directed graphs both successors and predecessors count as
    neighbors; for multigraphs every parallel edge contributes.
    """
    directed = G.is_directed()

    if weight is None:
        if not G.is_multigraph():
            # Simple (di)graph: each neighbor contributes exactly one count.
            return Counter(comms[nbr] for nbr in nx.all_neighbors(G, node))

        # Unweighted multigraph: parallel edges each count once.
        counts = defaultdict(int)
        for nbr, keydict in G[node].items():
            counts[comms[nbr]] += len(keydict)
        if directed:
            for nbr, keydict in G.pred[node].items():
                counts[comms[nbr]] += len(keydict)
        return counts

    # Weighted graph or multigraph: accumulate edge weights per label,
    # treating missing weight attributes as 1.
    counts = defaultdict(float)
    for _, nbr, w in G.edges(node, data=weight, default=1):
        counts[comms[nbr]] += w
    if directed:
        for nbr, _, w in G.in_edges(node, data=weight, default=1):
            counts[comms[nbr]] += w
    return counts
138
+
139
+
140
@py_random_state(2)
@nx._dispatchable(edge_attrs="weight")
def asyn_lpa_communities(G, weight=None, seed=None):
    """Returns communities in `G` as detected by asynchronous label
    propagation.

    The asynchronous label propagation algorithm is described in
    [1]_. The algorithm is probabilistic and the found communities may
    vary on different executions.

    The algorithm proceeds as follows. After initializing each node with
    a unique label, the algorithm repeatedly sets the label of a node to
    be the label that appears most frequently among that node's
    neighbors. The algorithm halts when each node has the label that
    appears most frequently among its neighbors. The algorithm is
    asynchronous because each node is updated without waiting for
    updates on the remaining nodes.

    This generalized version of the algorithm in [1]_ accepts edge
    weights.

    Parameters
    ----------
    G : Graph

    weight : string
        The edge attribute representing the weight of an edge.
        If None, each edge is assumed to have weight one. In this
        algorithm, the weight of an edge is used in determining the
        frequency with which a label appears among the neighbors of a
        node: a higher weight means the label appears more often.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    communities : iterable
        Iterable of communities given as sets of nodes.

    Notes
    -----
    Edge weight attributes must be numerical.

    References
    ----------
    .. [1] Raghavan, Usha Nandini, Réka Albert, and Soundar Kumara. "Near
       linear time algorithm to detect community structures in large-scale
       networks." Physical Review E 76.3 (2007): 036106.
    """

    # Each node starts in its own singleton community.
    labels = {n: i for i, n in enumerate(G)}
    cont = True

    while cont:
        cont = False
        nodes = list(G)
        # Random visiting order; combined with in-place label updates this
        # makes each sweep asynchronous.
        seed.shuffle(nodes)

        for node in nodes:
            if not G[node]:
                continue

            # Get label frequencies among adjacent nodes.
            # Depending on the order they are processed in,
            # some nodes will be in iteration t and others in t-1,
            # making the algorithm asynchronous.
            if weight is None:
                # initialising a Counter from an iterator of labels is
                # faster for getting unweighted label frequencies
                label_freq = Counter(map(labels.get, G[node]))
            else:
                # updating a defaultdict is substantially faster
                # for getting weighted label frequencies
                label_freq = defaultdict(float)
                for _, v, wt in G.edges(node, data=weight, default=1):
                    label_freq[labels[v]] += wt

            # Get the labels that appear with maximum frequency.
            max_freq = max(label_freq.values())
            best_labels = [
                label for label, freq in label_freq.items() if freq == max_freq
            ]

            # If the node does not have one of the maximum frequency labels,
            # randomly choose one of them and update the node's label.
            # Continue the iteration as long as at least one node
            # doesn't have a maximum frequency label.
            if labels[node] not in best_labels:
                labels[node] = seed.choice(best_labels)
                cont = True

    yield from groups(labels).values()
234
+
235
+
236
@not_implemented_for("directed")
@nx._dispatchable
def label_propagation_communities(G):
    """Find communities in `G` by semi-synchronous label propagation.

    Implements the method of Cordasco and Gargano [1]_, which combines
    the advantages of the synchronous and asynchronous label propagation
    models by updating one greedy color class of nodes at a time.
    Not implemented for directed graphs.

    Parameters
    ----------
    G : graph
        An undirected NetworkX graph.

    Returns
    -------
    communities : iterable
        A dict_values object that contains a set of nodes for each community.

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed

    References
    ----------
    .. [1] Cordasco, G., & Gargano, L. (2010, December). Community detection
       via semi-synchronous label propagation algorithms. In Business
       Applications of Social Network Analysis (BASNA), 2010 IEEE International
       Workshop on (pp. 1-8). IEEE.
    """
    coloring = _color_network(G)
    # Every node begins with its own unique label.
    labeling = {node: index for index, node in enumerate(G)}

    # Sweep the color classes until each node carries one of the most
    # frequent labels found among its neighbors.
    while not _labeling_complete(labeling, G):
        for same_color_nodes in coloring.values():
            for n in same_color_nodes:
                _update_label(n, labeling, G)

    communities = defaultdict(set)
    for node, label in labeling.items():
        communities[label].add(node)
    return communities.values()
280
+
281
+
282
def _color_network(G):
    """Greedily color `G` so that neighboring nodes get distinct colors.

    Returns a dict keyed by color to the set of nodes with that color.
    """
    coloring = {}  # color => set(node)
    for node, color in nx.coloring.greedy_color(G).items():
        coloring.setdefault(color, set()).add(node)
    return coloring
295
+
296
+
297
def _labeling_complete(labeling, G):
    """Return True when label propagation has converged.

    Convergence means every node with at least one neighbor carries a
    label of maximum frequency among its neighbors.  Isolated nodes are
    trivially complete.
    """
    for v in G:
        if len(G[v]) > 0 and labeling[v] not in _most_frequent_labels(v, labeling, G):
            return False
    return True
308
+
309
+
310
+ def _most_frequent_labels(node, labeling, G):
311
+ """Returns a set of all labels with maximum frequency in `labeling`.
312
+
313
+ Input `labeling` should be a dict keyed by node to labels.
314
+ """
315
+ if not G[node]:
316
+ # Nodes with no neighbors are themselves a community and are labeled
317
+ # accordingly, hence the immediate if statement.
318
+ return {labeling[node]}
319
+
320
+ # Compute the frequencies of all neighbors of node
321
+ freqs = Counter(labeling[q] for q in G[node])
322
+ max_freq = max(freqs.values())
323
+ return {label for label, freq in freqs.items() if freq == max_freq}
324
+
325
+
326
def _update_label(node, labeling, G):
    """Relabel `node` using the Prec-Max tie breaking algorithm.

    The algorithm is explained in: 'Community Detection via Semi-Synchronous
    Label Propagation Algorithms' Cordasco and Gargano, 2011.  A unique
    most-frequent neighbor label is adopted outright; among ties the node
    keeps its current label when possible, otherwise the maximum tied
    label wins.
    """
    candidates = _most_frequent_labels(node, labeling, G)
    if len(candidates) == 1:
        (labeling[node],) = candidates
    elif labeling[node] not in candidates:
        # Prec-Max: break ties deterministically with the largest label.
        labeling[node] = max(candidates)
wemm/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_centrality.cpython-310.pyc ADDED
Binary file (3.36 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_lukes.cpython-310.pyc ADDED
Binary file (3.54 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/community/tests/__pycache__/test_quality.cpython-310.pyc ADDED
Binary file (4.65 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/dominance.py ADDED
@@ -0,0 +1,135 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Dominance algorithms.
3
+ """
4
+
5
+ from functools import reduce
6
+
7
+ import networkx as nx
8
+ from networkx.utils import not_implemented_for
9
+
10
+ __all__ = ["immediate_dominators", "dominance_frontiers"]
11
+
12
+
13
+ @not_implemented_for("undirected")
14
+ @nx._dispatchable
15
+ def immediate_dominators(G, start):
16
+ """Returns the immediate dominators of all nodes of a directed graph.
17
+
18
+ Parameters
19
+ ----------
20
+ G : a DiGraph or MultiDiGraph
21
+ The graph where dominance is to be computed.
22
+
23
+ start : node
24
+ The start node of dominance computation.
25
+
26
+ Returns
27
+ -------
28
+ idom : dict keyed by nodes
29
+ A dict containing the immediate dominators of each node reachable from
30
+ `start`.
31
+
32
+ Raises
33
+ ------
34
+ NetworkXNotImplemented
35
+ If `G` is undirected.
36
+
37
+ NetworkXError
38
+ If `start` is not in `G`.
39
+
40
+ Notes
41
+ -----
42
+ Except for `start`, the immediate dominators are the parents of their
43
+ corresponding nodes in the dominator tree.
44
+
45
+ Examples
46
+ --------
47
+ >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 5), (3, 4), (4, 5)])
48
+ >>> sorted(nx.immediate_dominators(G, 1).items())
49
+ [(1, 1), (2, 1), (3, 1), (4, 3), (5, 1)]
50
+
51
+ References
52
+ ----------
53
+ .. [1] Cooper, Keith D., Harvey, Timothy J. and Kennedy, Ken.
54
+ "A simple, fast dominance algorithm." (2006).
55
+ https://hdl.handle.net/1911/96345
56
+ """
57
+ if start not in G:
58
+ raise nx.NetworkXError("start is not in G")
59
+
60
+ idom = {start: start}
61
+
62
+ order = list(nx.dfs_postorder_nodes(G, start))
63
+ dfn = {u: i for i, u in enumerate(order)}
64
+ order.pop()
65
+ order.reverse()
66
+
67
+ def intersect(u, v):
68
+ while u != v:
69
+ while dfn[u] < dfn[v]:
70
+ u = idom[u]
71
+ while dfn[u] > dfn[v]:
72
+ v = idom[v]
73
+ return u
74
+
75
+ changed = True
76
+ while changed:
77
+ changed = False
78
+ for u in order:
79
+ new_idom = reduce(intersect, (v for v in G.pred[u] if v in idom))
80
+ if u not in idom or idom[u] != new_idom:
81
+ idom[u] = new_idom
82
+ changed = True
83
+
84
+ return idom
85
+
86
+
87
+ @nx._dispatchable
88
+ def dominance_frontiers(G, start):
89
+ """Returns the dominance frontiers of all nodes of a directed graph.
90
+
91
+ Parameters
92
+ ----------
93
+ G : a DiGraph or MultiDiGraph
94
+ The graph where dominance is to be computed.
95
+
96
+ start : node
97
+ The start node of dominance computation.
98
+
99
+ Returns
100
+ -------
101
+ df : dict keyed by nodes
102
+ A dict containing the dominance frontiers of each node reachable from
103
+ `start` as lists.
104
+
105
+ Raises
106
+ ------
107
+ NetworkXNotImplemented
108
+ If `G` is undirected.
109
+
110
+ NetworkXError
111
+ If `start` is not in `G`.
112
+
113
+ Examples
114
+ --------
115
+ >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 5), (3, 4), (4, 5)])
116
+ >>> sorted((u, sorted(df)) for u, df in nx.dominance_frontiers(G, 1).items())
117
+ [(1, []), (2, [5]), (3, [5]), (4, [5]), (5, [])]
118
+
119
+ References
120
+ ----------
121
+ .. [1] Cooper, Keith D., Harvey, Timothy J. and Kennedy, Ken.
122
+ "A simple, fast dominance algorithm." (2006).
123
+ https://hdl.handle.net/1911/96345
124
+ """
125
+ idom = nx.immediate_dominators(G, start)
126
+
127
+ df = {u: set() for u in idom}
128
+ for u in idom:
129
+ if len(G.pred[u]) >= 2:
130
+ for v in G.pred[u]:
131
+ if v in idom:
132
+ while v != idom[u]:
133
+ df[v].add(u)
134
+ v = idom[v]
135
+ return df
wemm/lib/python3.10/site-packages/networkx/algorithms/link_analysis/pagerank_alg.py ADDED
@@ -0,0 +1,500 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """PageRank analysis of graph structure."""
2
+
3
+ from warnings import warn
4
+
5
+ import networkx as nx
6
+
7
+ __all__ = ["pagerank", "google_matrix"]
8
+
9
+
10
+ @nx._dispatchable(edge_attrs="weight")
11
+ def pagerank(
12
+ G,
13
+ alpha=0.85,
14
+ personalization=None,
15
+ max_iter=100,
16
+ tol=1.0e-6,
17
+ nstart=None,
18
+ weight="weight",
19
+ dangling=None,
20
+ ):
21
+ """Returns the PageRank of the nodes in the graph.
22
+
23
+ PageRank computes a ranking of the nodes in the graph G based on
24
+ the structure of the incoming links. It was originally designed as
25
+ an algorithm to rank web pages.
26
+
27
+ Parameters
28
+ ----------
29
+ G : graph
30
+ A NetworkX graph. Undirected graphs will be converted to a directed
31
+ graph with two directed edges for each undirected edge.
32
+
33
+ alpha : float, optional
34
+ Damping parameter for PageRank, default=0.85.
35
+
36
+ personalization: dict, optional
37
+ The "personalization vector" consisting of a dictionary with a
38
+ key some subset of graph nodes and personalization value each of those.
39
+ At least one personalization value must be non-zero.
40
+ If not specified, a nodes personalization value will be zero.
41
+ By default, a uniform distribution is used.
42
+
43
+ max_iter : integer, optional
44
+ Maximum number of iterations in power method eigenvalue solver.
45
+
46
+ tol : float, optional
47
+ Error tolerance used to check convergence in power method solver.
48
+ The iteration will stop after a tolerance of ``len(G) * tol`` is reached.
49
+
50
+ nstart : dictionary, optional
51
+ Starting value of PageRank iteration for each node.
52
+
53
+ weight : key, optional
54
+ Edge data key to use as weight. If None weights are set to 1.
55
+
56
+ dangling: dict, optional
57
+ The outedges to be assigned to any "dangling" nodes, i.e., nodes without
58
+ any outedges. The dict key is the node the outedge points to and the dict
59
+ value is the weight of that outedge. By default, dangling nodes are given
60
+ outedges according to the personalization vector (uniform if not
61
+ specified). This must be selected to result in an irreducible transition
62
+ matrix (see notes under google_matrix). It may be common to have the
63
+ dangling dict to be the same as the personalization dict.
64
+
65
+
66
+ Returns
67
+ -------
68
+ pagerank : dictionary
69
+ Dictionary of nodes with PageRank as value
70
+
71
+ Examples
72
+ --------
73
+ >>> G = nx.DiGraph(nx.path_graph(4))
74
+ >>> pr = nx.pagerank(G, alpha=0.9)
75
+
76
+ Notes
77
+ -----
78
+ The eigenvector calculation is done by the power iteration method
79
+ and has no guarantee of convergence. The iteration will stop after
80
+ an error tolerance of ``len(G) * tol`` has been reached. If the
81
+ number of iterations exceed `max_iter`, a
82
+ :exc:`networkx.exception.PowerIterationFailedConvergence` exception
83
+ is raised.
84
+
85
+ The PageRank algorithm was designed for directed graphs but this
86
+ algorithm does not check if the input graph is directed and will
87
+ execute on undirected graphs by converting each edge in the
88
+ directed graph to two edges.
89
+
90
+ See Also
91
+ --------
92
+ google_matrix
93
+
94
+ Raises
95
+ ------
96
+ PowerIterationFailedConvergence
97
+ If the algorithm fails to converge to the specified tolerance
98
+ within the specified number of iterations of the power iteration
99
+ method.
100
+
101
+ References
102
+ ----------
103
+ .. [1] A. Langville and C. Meyer,
104
+ "A survey of eigenvector methods of web information retrieval."
105
+ http://citeseer.ist.psu.edu/713792.html
106
+ .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry,
107
+ The PageRank citation ranking: Bringing order to the Web. 1999
108
+ http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf
109
+
110
+ """
111
+ return _pagerank_scipy(
112
+ G, alpha, personalization, max_iter, tol, nstart, weight, dangling
113
+ )
114
+
115
+
116
+ def _pagerank_python(
117
+ G,
118
+ alpha=0.85,
119
+ personalization=None,
120
+ max_iter=100,
121
+ tol=1.0e-6,
122
+ nstart=None,
123
+ weight="weight",
124
+ dangling=None,
125
+ ):
126
+ if len(G) == 0:
127
+ return {}
128
+
129
+ D = G.to_directed()
130
+
131
+ # Create a copy in (right) stochastic form
132
+ W = nx.stochastic_graph(D, weight=weight)
133
+ N = W.number_of_nodes()
134
+
135
+ # Choose fixed starting vector if not given
136
+ if nstart is None:
137
+ x = dict.fromkeys(W, 1.0 / N)
138
+ else:
139
+ # Normalized nstart vector
140
+ s = sum(nstart.values())
141
+ x = {k: v / s for k, v in nstart.items()}
142
+
143
+ if personalization is None:
144
+ # Assign uniform personalization vector if not given
145
+ p = dict.fromkeys(W, 1.0 / N)
146
+ else:
147
+ s = sum(personalization.values())
148
+ p = {k: v / s for k, v in personalization.items()}
149
+
150
+ if dangling is None:
151
+ # Use personalization vector if dangling vector not specified
152
+ dangling_weights = p
153
+ else:
154
+ s = sum(dangling.values())
155
+ dangling_weights = {k: v / s for k, v in dangling.items()}
156
+ dangling_nodes = [n for n in W if W.out_degree(n, weight=weight) == 0.0]
157
+
158
+ # power iteration: make up to max_iter iterations
159
+ for _ in range(max_iter):
160
+ xlast = x
161
+ x = dict.fromkeys(xlast.keys(), 0)
162
+ danglesum = alpha * sum(xlast[n] for n in dangling_nodes)
163
+ for n in x:
164
+ # this matrix multiply looks odd because it is
165
+ # doing a left multiply x^T=xlast^T*W
166
+ for _, nbr, wt in W.edges(n, data=weight):
167
+ x[nbr] += alpha * xlast[n] * wt
168
+ x[n] += danglesum * dangling_weights.get(n, 0) + (1.0 - alpha) * p.get(n, 0)
169
+ # check convergence, l1 norm
170
+ err = sum(abs(x[n] - xlast[n]) for n in x)
171
+ if err < N * tol:
172
+ return x
173
+ raise nx.PowerIterationFailedConvergence(max_iter)
174
+
175
+
176
@nx._dispatchable(edge_attrs="weight")
def google_matrix(
    G, alpha=0.85, personalization=None, nodelist=None, weight="weight", dangling=None
):
    """Returns the Google matrix of the graph.

    The Google matrix is the dense transition matrix of the PageRank
    Markov chain: the convex combination ``alpha * S + (1 - alpha) * p``
    of the row-stochastic adjacency matrix ``S`` and the teleport
    distribution ``p``.

    Parameters
    ----------
    G : graph
        A NetworkX graph.  Undirected graphs will be converted to a
        directed graph with two directed edges for each undirected edge.
    alpha : float
        The damping factor.
    personalization : dict, optional
        The "personalization vector": a dict keyed by some subset of the
        nodes to teleport probabilities; at least one value must be
        non-zero.  Unspecified nodes get zero.  Defaults to the uniform
        distribution.
    nodelist : list, optional
        Row/column ordering of the returned matrix.  If None, the
        ordering produced by ``G.nodes()`` is used.
    weight : key, optional
        Edge data key to use as weight.  If None weights are set to 1.
    dangling : dict, optional
        The out-edge distribution assigned to dangling nodes (nodes
        without out-edges), keyed by target node.  Defaults to the
        personalization vector.  It must be chosen so the transition
        matrix is irreducible, otherwise "rank sinks" are possible.

    Returns
    -------
    A : 2D NumPy ndarray
        Google matrix of the graph

    Notes
    -----
    For PageRank to converge to a unique stationary distribution the
    returned transition matrix must be irreducible.  This implementation
    works with Multi(Di)Graphs; for multigraphs the weight between two
    nodes is the sum of all edge weights between those nodes.

    See Also
    --------
    pagerank
    """
    import numpy as np

    if nodelist is None:
        nodelist = list(G)

    A = nx.to_numpy_array(G, nodelist=nodelist, weight=weight)
    N = len(G)
    if N == 0:
        # Empty graph: nothing to normalize, return the empty array.
        return A

    # Teleport distribution in nodelist order.
    if personalization is None:
        p = np.full(N, 1.0 / N)
    else:
        p = np.array([personalization.get(n, 0) for n in nodelist], dtype=float)
        if p.sum() == 0:
            raise ZeroDivisionError
        p /= p.sum()

    # Distribution substituted for the all-zero rows of dangling nodes.
    if dangling is None:
        dangling_weights = p
    else:
        dangling_weights = np.array([dangling.get(n, 0) for n in nodelist], dtype=float)
        dangling_weights /= dangling_weights.sum()
    dangling_nodes = np.where(A.sum(axis=1) == 0)[0]
    A[dangling_nodes] = dangling_weights

    A /= A.sum(axis=1)[:, np.newaxis]  # make every row sum to 1

    return alpha * A + (1 - alpha) * p
270
+
271
+
272
+ def _pagerank_numpy(
273
+ G, alpha=0.85, personalization=None, weight="weight", dangling=None
274
+ ):
275
+ """Returns the PageRank of the nodes in the graph.
276
+
277
+ PageRank computes a ranking of the nodes in the graph G based on
278
+ the structure of the incoming links. It was originally designed as
279
+ an algorithm to rank web pages.
280
+
281
+ Parameters
282
+ ----------
283
+ G : graph
284
+ A NetworkX graph. Undirected graphs will be converted to a directed
285
+ graph with two directed edges for each undirected edge.
286
+
287
+ alpha : float, optional
288
+ Damping parameter for PageRank, default=0.85.
289
+
290
+ personalization: dict, optional
291
+ The "personalization vector" consisting of a dictionary with a
292
+ key some subset of graph nodes and personalization value each of those.
293
+ At least one personalization value must be non-zero.
294
+ If not specified, a nodes personalization value will be zero.
295
+ By default, a uniform distribution is used.
296
+
297
+ weight : key, optional
298
+ Edge data key to use as weight. If None weights are set to 1.
299
+
300
+ dangling: dict, optional
301
+ The outedges to be assigned to any "dangling" nodes, i.e., nodes without
302
+ any outedges. The dict key is the node the outedge points to and the dict
303
+ value is the weight of that outedge. By default, dangling nodes are given
304
+ outedges according to the personalization vector (uniform if not
305
+ specified) This must be selected to result in an irreducible transition
306
+ matrix (see notes under google_matrix). It may be common to have the
307
+ dangling dict to be the same as the personalization dict.
308
+
309
+ Returns
310
+ -------
311
+ pagerank : dictionary
312
+ Dictionary of nodes with PageRank as value.
313
+
314
+ Examples
315
+ --------
316
+ >>> from networkx.algorithms.link_analysis.pagerank_alg import _pagerank_numpy
317
+ >>> G = nx.DiGraph(nx.path_graph(4))
318
+ >>> pr = _pagerank_numpy(G, alpha=0.9)
319
+
320
+ Notes
321
+ -----
322
+ The eigenvector calculation uses NumPy's interface to the LAPACK
323
+ eigenvalue solvers. This will be the fastest and most accurate
324
+ for small graphs.
325
+
326
+ This implementation works with Multi(Di)Graphs. For multigraphs the
327
+ weight between two nodes is set to be the sum of all edge weights
328
+ between those nodes.
329
+
330
+ See Also
331
+ --------
332
+ pagerank, google_matrix
333
+
334
+ References
335
+ ----------
336
+ .. [1] A. Langville and C. Meyer,
337
+ "A survey of eigenvector methods of web information retrieval."
338
+ http://citeseer.ist.psu.edu/713792.html
339
+ .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry,
340
+ The PageRank citation ranking: Bringing order to the Web. 1999
341
+ http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf
342
+ """
343
+ import numpy as np
344
+
345
+ if len(G) == 0:
346
+ return {}
347
+ M = google_matrix(
348
+ G, alpha, personalization=personalization, weight=weight, dangling=dangling
349
+ )
350
+ # use numpy LAPACK solver
351
+ eigenvalues, eigenvectors = np.linalg.eig(M.T)
352
+ ind = np.argmax(eigenvalues)
353
+ # eigenvector of largest eigenvalue is at ind, normalized
354
+ largest = np.array(eigenvectors[:, ind]).flatten().real
355
+ norm = largest.sum()
356
+ return dict(zip(G, map(float, largest / norm)))
357
+
358
+
359
+ def _pagerank_scipy(
360
+ G,
361
+ alpha=0.85,
362
+ personalization=None,
363
+ max_iter=100,
364
+ tol=1.0e-6,
365
+ nstart=None,
366
+ weight="weight",
367
+ dangling=None,
368
+ ):
369
+ """Returns the PageRank of the nodes in the graph.
370
+
371
+ PageRank computes a ranking of the nodes in the graph G based on
372
+ the structure of the incoming links. It was originally designed as
373
+ an algorithm to rank web pages.
374
+
375
+ Parameters
376
+ ----------
377
+ G : graph
378
+ A NetworkX graph. Undirected graphs will be converted to a directed
379
+ graph with two directed edges for each undirected edge.
380
+
381
+ alpha : float, optional
382
+ Damping parameter for PageRank, default=0.85.
383
+
384
+ personalization: dict, optional
385
+ The "personalization vector" consisting of a dictionary with a
386
+ key some subset of graph nodes and personalization value each of those.
387
+ At least one personalization value must be non-zero.
388
+ If not specified, a nodes personalization value will be zero.
389
+ By default, a uniform distribution is used.
390
+
391
+ max_iter : integer, optional
392
+ Maximum number of iterations in power method eigenvalue solver.
393
+
394
+ tol : float, optional
395
+ Error tolerance used to check convergence in power method solver.
396
+ The iteration will stop after a tolerance of ``len(G) * tol`` is reached.
397
+
398
+ nstart : dictionary, optional
399
+ Starting value of PageRank iteration for each node.
400
+
401
+ weight : key, optional
402
+ Edge data key to use as weight. If None weights are set to 1.
403
+
404
+ dangling: dict, optional
405
+ The outedges to be assigned to any "dangling" nodes, i.e., nodes without
406
+ any outedges. The dict key is the node the outedge points to and the dict
407
+ value is the weight of that outedge. By default, dangling nodes are given
408
+ outedges according to the personalization vector (uniform if not
409
+ specified) This must be selected to result in an irreducible transition
410
+ matrix (see notes under google_matrix). It may be common to have the
411
+ dangling dict to be the same as the personalization dict.
412
+
413
+ Returns
414
+ -------
415
+ pagerank : dictionary
416
+ Dictionary of nodes with PageRank as value
417
+
418
+ Examples
419
+ --------
420
+ >>> from networkx.algorithms.link_analysis.pagerank_alg import _pagerank_scipy
421
+ >>> G = nx.DiGraph(nx.path_graph(4))
422
+ >>> pr = _pagerank_scipy(G, alpha=0.9)
423
+
424
+ Notes
425
+ -----
426
+ The eigenvector calculation uses power iteration with a SciPy
427
+ sparse matrix representation.
428
+
429
+ This implementation works with Multi(Di)Graphs. For multigraphs the
430
+ weight between two nodes is set to be the sum of all edge weights
431
+ between those nodes.
432
+
433
+ See Also
434
+ --------
435
+ pagerank
436
+
437
+ Raises
438
+ ------
439
+ PowerIterationFailedConvergence
440
+ If the algorithm fails to converge to the specified tolerance
441
+ within the specified number of iterations of the power iteration
442
+ method.
443
+
444
+ References
445
+ ----------
446
+ .. [1] A. Langville and C. Meyer,
447
+ "A survey of eigenvector methods of web information retrieval."
448
+ http://citeseer.ist.psu.edu/713792.html
449
+ .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry,
450
+ The PageRank citation ranking: Bringing order to the Web. 1999
451
+ http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf
452
+ """
453
+ import numpy as np
454
+ import scipy as sp
455
+
456
+ N = len(G)
457
+ if N == 0:
458
+ return {}
459
+
460
+ nodelist = list(G)
461
+ A = nx.to_scipy_sparse_array(G, nodelist=nodelist, weight=weight, dtype=float)
462
+ S = A.sum(axis=1)
463
+ S[S != 0] = 1.0 / S[S != 0]
464
+ # TODO: csr_array
465
+ Q = sp.sparse.csr_array(sp.sparse.spdiags(S.T, 0, *A.shape))
466
+ A = Q @ A
467
+
468
+ # initial vector
469
+ if nstart is None:
470
+ x = np.repeat(1.0 / N, N)
471
+ else:
472
+ x = np.array([nstart.get(n, 0) for n in nodelist], dtype=float)
473
+ x /= x.sum()
474
+
475
+ # Personalization vector
476
+ if personalization is None:
477
+ p = np.repeat(1.0 / N, N)
478
+ else:
479
+ p = np.array([personalization.get(n, 0) for n in nodelist], dtype=float)
480
+ if p.sum() == 0:
481
+ raise ZeroDivisionError
482
+ p /= p.sum()
483
+ # Dangling nodes
484
+ if dangling is None:
485
+ dangling_weights = p
486
+ else:
487
+ # Convert the dangling dictionary into an array in nodelist order
488
+ dangling_weights = np.array([dangling.get(n, 0) for n in nodelist], dtype=float)
489
+ dangling_weights /= dangling_weights.sum()
490
+ is_dangling = np.where(S == 0)[0]
491
+
492
+ # power iteration: make up to max_iter iterations
493
+ for _ in range(max_iter):
494
+ xlast = x
495
+ x = alpha * (x @ A + sum(x[is_dangling]) * dangling_weights) + (1 - alpha) * p
496
+ # check convergence, l1 norm
497
+ err = np.absolute(x - xlast).sum()
498
+ if err < N * tol:
499
+ return dict(zip(nodelist, map(float, x)))
500
+ raise nx.PowerIterationFailedConvergence(max_iter)
wemm/lib/python3.10/site-packages/networkx/algorithms/lowest_common_ancestors.py ADDED
@@ -0,0 +1,269 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Algorithms for finding the lowest common ancestor of trees and DAGs."""
2
+
3
+ from collections import defaultdict
4
+ from collections.abc import Mapping, Set
5
+ from itertools import combinations_with_replacement
6
+
7
+ import networkx as nx
8
+ from networkx.utils import UnionFind, arbitrary_element, not_implemented_for
9
+
10
+ __all__ = [
11
+ "all_pairs_lowest_common_ancestor",
12
+ "tree_all_pairs_lowest_common_ancestor",
13
+ "lowest_common_ancestor",
14
+ ]
15
+
16
+
17
@not_implemented_for("undirected")
@nx._dispatchable
def all_pairs_lowest_common_ancestor(G, pairs=None):
    """Return the lowest common ancestor of all pairs or the provided pairs

    Parameters
    ----------
    G : NetworkX directed graph

    pairs : iterable of pairs of nodes, optional (default: all pairs)
        The pairs of nodes of interest.
        If None, will find the LCA of all pairs of nodes.

    Yields
    ------
    ((node1, node2), lca) : 2-tuple
        Where lca is least common ancestor of node1 and node2.
        Note that for the default case, the order of the node pair is not considered,
        e.g. you will not get both ``(a, b)`` and ``(b, a)``

    Raises
    ------
    NetworkXPointlessConcept
        If `G` is null.
    NetworkXError
        If `G` is not a DAG.

    Examples
    --------
    The default behavior is to yield the lowest common ancestor for all
    possible combinations of nodes in `G`, including self-pairings:

    >>> G = nx.DiGraph([(0, 1), (0, 3), (1, 2)])
    >>> dict(nx.all_pairs_lowest_common_ancestor(G))
    {(0, 0): 0, (0, 1): 0, (0, 3): 0, (0, 2): 0, (1, 1): 1, (1, 3): 0, (1, 2): 1, (3, 3): 3, (3, 2): 0, (2, 2): 2}

    The pairs argument can be used to limit the output to only the
    specified node pairings:

    >>> dict(nx.all_pairs_lowest_common_ancestor(G, pairs=[(1, 2), (2, 3)]))
    {(1, 2): 1, (2, 3): 0}

    Notes
    -----
    Only defined on non-null directed acyclic graphs.

    See Also
    --------
    lowest_common_ancestor
    """
    if not nx.is_directed_acyclic_graph(G):
        raise nx.NetworkXError("LCA only defined on directed acyclic graphs.")
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("LCA meaningless on null graphs.")

    if pairs is None:
        # Default: every unordered pair, including (v, v) self-pairings.
        pairs = combinations_with_replacement(G, 2)
    else:
        # Convert iterator to iterable, if necessary. Trim duplicates.
        # dict.fromkeys preserves first-seen order while deduplicating.
        pairs = dict.fromkeys(pairs)
        # Verify that each of the nodes in the provided pairs is in G
        nodeset = set(G)
        for pair in pairs:
            if set(pair) - nodeset:
                raise nx.NodeNotFound(
                    f"Node(s) {set(pair) - nodeset} from pair {pair} not in G."
                )

    # Once input validation is done, construct the generator
    # (validation above runs eagerly; the LCA search itself is lazy).
    def generate_lca_from_pairs(G, pairs):
        # Maps node -> set of its ancestors (including the node itself),
        # memoized across pairs so each ancestor set is computed once.
        ancestor_cache = {}

        for v, w in pairs:
            if v not in ancestor_cache:
                ancestor_cache[v] = nx.ancestors(G, v)
                ancestor_cache[v].add(v)
            if w not in ancestor_cache:
                ancestor_cache[w] = nx.ancestors(G, w)
                ancestor_cache[w].add(w)

            common_ancestors = ancestor_cache[v] & ancestor_cache[w]

            if common_ancestors:
                # Start from an arbitrary common ancestor and repeatedly step
                # to a successor that is still a common ancestor; when no such
                # successor exists we have reached a *lowest* common ancestor.
                common_ancestor = next(iter(common_ancestors))
                while True:
                    successor = None
                    for lower_ancestor in G.successors(common_ancestor):
                        if lower_ancestor in common_ancestors:
                            successor = lower_ancestor
                            break
                    if successor is None:
                        break
                    common_ancestor = successor
                yield ((v, w), common_ancestor)
                # NOTE: pairs with no common ancestor yield nothing.

    return generate_lca_from_pairs(G, pairs)
113
+
114
+
115
@not_implemented_for("undirected")
@nx._dispatchable
def lowest_common_ancestor(G, node1, node2, default=None):
    """Compute the lowest common ancestor of the given pair of nodes.

    Parameters
    ----------
    G : NetworkX directed graph

    node1, node2 : nodes in the graph.

    default : object
        Returned if no common ancestor between `node1` and `node2`

    Returns
    -------
    The lowest common ancestor of node1 and node2,
    or default if they have no common ancestors.

    Examples
    --------
    >>> G = nx.DiGraph()
    >>> nx.add_path(G, (0, 1, 2, 3))
    >>> nx.add_path(G, (0, 4, 3))
    >>> nx.lowest_common_ancestor(G, 2, 4)
    0

    See Also
    --------
    all_pairs_lowest_common_ancestor"""
    # Delegate to the all-pairs routine with a single requested pair.
    results = list(all_pairs_lowest_common_ancestor(G, pairs=[(node1, node2)]))
    if not results:
        # The pair has no common ancestor.
        return default
    assert len(results) == 1
    _pair, lca = results[0]
    return lca
151
+
152
+
153
@not_implemented_for("undirected")
@nx._dispatchable
def tree_all_pairs_lowest_common_ancestor(G, root=None, pairs=None):
    r"""Yield the lowest common ancestor for sets of pairs in a tree.

    Parameters
    ----------
    G : NetworkX directed graph (must be a tree)

    root : node, optional (default: None)
        The root of the subtree to operate on.
        If None, assume the entire graph has exactly one source and use that.

    pairs : iterable or iterator of pairs of nodes, optional (default: None)
        The pairs of interest. If None, Defaults to all pairs of nodes
        under `root` that have a lowest common ancestor.

    Returns
    -------
    lcas : generator of tuples `((u, v), lca)` where `u` and `v` are nodes
        in `pairs` and `lca` is their lowest common ancestor.

    Examples
    --------
    >>> import pprint
    >>> G = nx.DiGraph([(1, 3), (2, 4), (1, 2)])
    >>> pprint.pprint(dict(nx.tree_all_pairs_lowest_common_ancestor(G)))
    {(1, 1): 1,
     (2, 1): 1,
     (2, 2): 2,
     (3, 1): 1,
     (3, 2): 1,
     (3, 3): 3,
     (3, 4): 1,
     (4, 1): 1,
     (4, 2): 2,
     (4, 4): 4}

    We can also use `pairs` argument to specify the pairs of nodes for which we
    want to compute lowest common ancestors. Here is an example:

    >>> dict(nx.tree_all_pairs_lowest_common_ancestor(G, pairs=[(1, 4), (2, 3)]))
    {(2, 3): 1, (1, 4): 1}

    Notes
    -----
    Only defined on non-null trees represented with directed edges from
    parents to children. Uses Tarjan's off-line lowest-common-ancestors
    algorithm. Runs in time $O(4 \times (V + E + P))$ time, where 4 is the largest
    value of the inverse Ackermann function likely to ever come up in actual
    use, and $P$ is the number of pairs requested (or $V^2$ if all are needed).

    Tarjan, R. E. (1979), "Applications of path compression on balanced trees",
    Journal of the ACM 26 (4): 690-715, doi:10.1145/322154.322161.

    See Also
    --------
    all_pairs_lowest_common_ancestor: similar routine for general DAGs
    lowest_common_ancestor: just a single pair for general DAGs
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("LCA meaningless on null graphs.")

    # Index pairs of interest for efficient lookup from either side.
    if pairs is not None:
        pair_dict = defaultdict(set)
        # See note on all_pairs_lowest_common_ancestor.
        if not isinstance(pairs, Mapping | Set):
            pairs = set(pairs)
        for u, v in pairs:
            for n in (u, v):
                if n not in G:
                    msg = f"The node {str(n)} is not in the digraph."
                    raise nx.NodeNotFound(msg)
            # Record each pair under both endpoints so the DFS below can look
            # up partners of the current node in O(1).
            pair_dict[u].add(v)
            pair_dict[v].add(u)

    # If root is not specified, find the exactly one node with in degree 0 and
    # use it. Raise an error if none are found, or more than one is. Also check
    # for any nodes with in degree larger than 1, which would imply G is not a
    # tree.
    if root is None:
        for n, deg in G.in_degree:
            if deg == 0:
                if root is not None:
                    msg = "No root specified and tree has multiple sources."
                    raise nx.NetworkXError(msg)
                root = n
            # checking deg>1 is not sufficient for MultiDiGraphs
            elif deg > 1 and len(G.pred[n]) > 1:
                msg = "Tree LCA only defined on trees; use DAG routine."
                raise nx.NetworkXError(msg)
        if root is None:
            # Every node has an incoming edge, so there is no source node.
            raise nx.NetworkXError("Graph contains a cycle.")

    # Iterative implementation of Tarjan's offline lca algorithm
    # as described in CLRS on page 521 (2nd edition)/page 584 (3rd edition)
    uf = UnionFind()
    # ancestors maps a union-find representative to the node that is the
    # current LCA candidate for everything merged into that set.
    ancestors = {}
    for node in G:
        ancestors[node] = uf[node]

    # colors[n] becomes True once n (and its whole subtree) has been visited.
    colors = defaultdict(bool)
    for node in nx.dfs_postorder_nodes(G, root):
        colors[node] = True
        for v in pair_dict[node] if pairs is not None else G:
            if colors[v]:
                # If the user requested both directions of a pair, give it.
                # Otherwise, just give one.
                if pairs is not None and (node, v) in pairs:
                    yield (node, v), ancestors[uf[v]]
                if pairs is None or (v, node) in pairs:
                    yield (v, node), ancestors[uf[v]]
        if node != root:
            # Fold the finished subtree into its parent's set; the parent
            # becomes the LCA candidate for that merged set.
            parent = arbitrary_element(G.pred[node])
            uf.union(parent, node)
            ancestors[uf[parent]] = parent
wemm/lib/python3.10/site-packages/networkx/algorithms/planarity.py ADDED
@@ -0,0 +1,1402 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from collections import defaultdict
2
+
3
+ import networkx as nx
4
+
5
+ __all__ = ["check_planarity", "is_planar", "PlanarEmbedding"]
6
+
7
+
8
@nx._dispatchable
def is_planar(G):
    """Returns True if and only if `G` is planar.

    A graph is *planar* iff it can be drawn in a plane without
    any edge intersections.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    bool
        Whether the graph is planar.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2)])
    >>> nx.is_planar(G)
    True
    >>> nx.is_planar(nx.complete_graph(5))
    False

    See Also
    --------
    check_planarity :
        Check if graph is planar *and* return a `PlanarEmbedding` instance if True.
    """
    # Only the boolean is needed, so skip counterexample construction.
    planar, _certificate = check_planarity(G, counterexample=False)
    return planar
39
+
40
+
41
@nx._dispatchable(returns_graph=True)
def check_planarity(G, counterexample=False):
    """Check if a graph is planar and return a counterexample or an embedding.

    A graph is planar iff it can be drawn in a plane without
    any edge intersections.

    Parameters
    ----------
    G : NetworkX graph
    counterexample : bool
        A Kuratowski subgraph (to proof non planarity) is only returned if set
        to true.

    Returns
    -------
    (is_planar, certificate) : (bool, NetworkX graph) tuple
        is_planar is true if the graph is planar.
        If the graph is planar `certificate` is a PlanarEmbedding
        otherwise it is a Kuratowski subgraph.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2)])
    >>> is_planar, P = nx.check_planarity(G)
    >>> print(is_planar)
    True

    When `G` is planar, a `PlanarEmbedding` instance is returned:

    >>> P.get_data()
    {0: [1, 2], 1: [0], 2: [0]}

    Notes
    -----
    A (combinatorial) embedding consists of cyclic orderings of the incident
    edges at each vertex. Given such an embedding there are multiple approaches
    discussed in literature to drawing the graph (subject to various
    constraints, e.g. integer coordinates), see e.g. [2].

    The planarity check algorithm and extraction of the combinatorial embedding
    is based on the Left-Right Planarity Test [1].

    A counterexample is only generated if the corresponding parameter is set,
    because the complexity of the counterexample generation is higher.

    See also
    --------
    is_planar :
        Check for planarity without creating a `PlanarEmbedding` or counterexample.

    References
    ----------
    .. [1] Ulrik Brandes:
        The Left-Right Planarity Test
        2009
        http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.217.9208
    .. [2] Takao Nishizeki, Md Saidur Rahman:
        Planar graph drawing
        Lecture Notes Series on Computing: Volume 12
        2004
    """
    embedding = LRPlanarity(G).lr_planarity()
    if embedding is not None:
        # graph is planar
        return True, embedding
    # graph is not planar; building a Kuratowski subgraph is expensive,
    # so only do it on request
    if counterexample:
        return False, get_counterexample(G)
    return False, None
115
+
116
+
117
@nx._dispatchable(returns_graph=True)
def check_planarity_recursive(G, counterexample=False):
    """Recursive version of :meth:`check_planarity`."""
    embedding = LRPlanarity(G).lr_planarity_recursive()
    if embedding is not None:
        # graph is planar
        return True, embedding
    # graph is not planar; counterexample construction is optional
    if counterexample:
        return False, get_counterexample_recursive(G)
    return False, None
131
+
132
+
133
@nx._dispatchable(returns_graph=True)
def get_counterexample(G):
    """Obtains a Kuratowski subgraph.

    Raises nx.NetworkXException if G is planar.

    The function removes edges such that the graph is still not planar.
    At some point the removal of any edge would make the graph planar.
    This subgraph must be a Kuratowski subgraph.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    subgraph : NetworkX graph
        A Kuratowski subgraph that proves that G is not planar.

    """
    # Work on a mutable copy so the caller's graph is untouched.
    G = nx.Graph(G)

    if check_planarity(G)[0]:
        raise nx.NetworkXException("G is planar - no counter example.")

    # find Kuratowski subgraph: greedily drop edges that keep G non-planar;
    # an edge whose removal restores planarity is essential and is kept.
    subgraph = nx.Graph()
    for u in G:
        for v in list(G[u]):
            G.remove_edge(u, v)
            if check_planarity(G)[0]:
                # (u, v) is needed for non-planarity: restore and record it
                G.add_edge(u, v)
                subgraph.add_edge(u, v)

    return subgraph
170
+
171
+
172
@nx._dispatchable(returns_graph=True)
def get_counterexample_recursive(G):
    """Recursive version of :meth:`get_counterexample`."""

    # Work on a mutable copy so the caller's graph is untouched.
    G = nx.Graph(G)

    if check_planarity_recursive(G)[0]:
        raise nx.NetworkXException("G is planar - no counter example.")

    # find Kuratowski subgraph: greedily drop edges that keep G non-planar;
    # an edge whose removal restores planarity is essential and is kept.
    subgraph = nx.Graph()
    for u in G:
        for v in list(G[u]):
            G.remove_edge(u, v)
            if check_planarity_recursive(G)[0]:
                # (u, v) is needed for non-planarity: restore and record it
                G.add_edge(u, v)
                subgraph.add_edge(u, v)

    return subgraph
193
+
194
+
195
class Interval:
    """Represents a set of return edges.

    All return edges in an interval induce a same constraint on the contained
    edges, which means that all edges must either have a left orientation or
    all edges must have a right orientation.
    """

    def __init__(self, low=None, high=None):
        self.low = low
        self.high = high

    def empty(self):
        """Check if the interval is empty"""
        return self.low is None and self.high is None

    def copy(self):
        """Returns a copy of this interval"""
        return Interval(self.low, self.high)

    def conflicting(self, b, planarity_state):
        """Returns True if interval I conflicts with edge b"""
        if self.empty():
            return False
        return planarity_state.lowpt[self.high] > planarity_state.lowpt[b]
221
+
222
+
223
class ConflictPair:
    """Represents a different constraint between two intervals.

    The edges in the left interval must have a different orientation than
    the one in the right interval.
    """

    def __init__(self, left=None, right=None):
        # BUG FIX: the previous signature used ``left=Interval(),
        # right=Interval()``. Default arguments are evaluated once at class
        # definition time, so every ``ConflictPair()`` shared the *same* two
        # Interval objects; ``LRPlanarity.add_constraints`` mutates
        # ``P.right.low`` / ``P.left.low`` in place, which would corrupt the
        # shared defaults for all later instances. Use None sentinels and
        # create fresh Intervals per instance instead.
        self.left = left if left is not None else Interval()
        self.right = right if right is not None else Interval()

    def swap(self):
        """Swap left and right intervals"""
        self.left, self.right = self.right, self.left

    def lowest(self, planarity_state):
        """Returns the lowest lowpoint of a conflict pair"""
        if self.left.empty():
            return planarity_state.lowpt[self.right.low]
        if self.right.empty():
            return planarity_state.lowpt[self.left.low]
        return min(
            planarity_state.lowpt[self.left.low], planarity_state.lowpt[self.right.low]
        )
249
+
250
+
251
def top_of_stack(l):
    """Return the element on top of stack ``l``, or None if it is empty."""
    return l[-1] if l else None
256
+
257
+
258
+ class LRPlanarity:
259
+ """A class to maintain the state during planarity check."""
260
+
261
+ __slots__ = [
262
+ "G",
263
+ "roots",
264
+ "height",
265
+ "lowpt",
266
+ "lowpt2",
267
+ "nesting_depth",
268
+ "parent_edge",
269
+ "DG",
270
+ "adjs",
271
+ "ordered_adjs",
272
+ "ref",
273
+ "side",
274
+ "S",
275
+ "stack_bottom",
276
+ "lowpt_edge",
277
+ "left_ref",
278
+ "right_ref",
279
+ "embedding",
280
+ ]
281
+
282
    def __init__(self, G):
        """Set up all bookkeeping structures for the LR test on `G`."""
        # copy G without adding self-loops
        self.G = nx.Graph()
        self.G.add_nodes_from(G.nodes)
        for e in G.edges:
            if e[0] != e[1]:
                self.G.add_edge(e[0], e[1])

        # roots of the DFS forest, one per component reached in lr_planarity
        self.roots = []

        # distance from tree root
        self.height = defaultdict(lambda: None)

        self.lowpt = {}  # height of lowest return point of an edge
        self.lowpt2 = {}  # height of second lowest return point
        self.nesting_depth = {}  # for nesting order

        # None -> missing edge
        self.parent_edge = defaultdict(lambda: None)

        # oriented DFS graph
        self.DG = nx.DiGraph()
        self.DG.add_nodes_from(G.nodes)

        # adjacency lists (adjs) and their nesting-depth ordering (ordered_adjs)
        self.adjs = {}
        self.ordered_adjs = {}

        self.ref = defaultdict(lambda: None)
        self.side = defaultdict(lambda: 1)

        # stack of conflict pairs
        self.S = []
        self.stack_bottom = {}
        self.lowpt_edge = {}

        # per-node references used while completing the embedding
        self.left_ref = {}
        self.right_ref = {}

        self.embedding = PlanarEmbedding()
321
+
322
    def lr_planarity(self):
        """Execute the LR planarity test.

        Returns
        -------
        embedding : dict
            If the graph is planar an embedding is returned. Otherwise None.
        """
        # Quick reject via the Euler bound: a planar graph has |E| <= 3|V| - 6.
        if self.G.order() > 2 and self.G.size() > 3 * self.G.order() - 6:
            # graph is not planar
            return None

        # make adjacency lists for dfs
        for v in self.G:
            self.adjs[v] = list(self.G[v])

        # orientation of the graph by depth first search traversal
        for v in self.G:
            if self.height[v] is None:
                self.height[v] = 0
                self.roots.append(v)
                self.dfs_orientation(v)

        # Free no longer used variables
        self.G = None
        self.lowpt2 = None
        self.adjs = None

        # testing
        for v in self.DG:  # sort the adjacency lists by nesting depth
            # note: this sorting leads to non linear time
            self.ordered_adjs[v] = sorted(
                self.DG[v], key=lambda x: self.nesting_depth[(v, x)]
            )
        for v in self.roots:
            if not self.dfs_testing(v):
                # an LR partition does not exist -> not planar
                return None

        # Free no longer used variables
        self.height = None
        self.lowpt = None
        self.S = None
        self.stack_bottom = None
        self.lowpt_edge = None

        # apply the computed side (+1/-1) to the nesting depths
        for e in self.DG.edges:
            self.nesting_depth[e] = self.sign(e) * self.nesting_depth[e]

        self.embedding.add_nodes_from(self.DG.nodes)
        for v in self.DG:
            # sort the adjacency lists again
            self.ordered_adjs[v] = sorted(
                self.DG[v], key=lambda x: self.nesting_depth[(v, x)]
            )
            # initialize the embedding
            previous_node = None
            for w in self.ordered_adjs[v]:
                self.embedding.add_half_edge(v, w, ccw=previous_node)
                previous_node = w

        # Free no longer used variables
        self.DG = None
        self.nesting_depth = None
        self.ref = None

        # compute the complete embedding
        for v in self.roots:
            self.dfs_embedding(v)

        # Free no longer used variables
        self.roots = None
        self.parent_edge = None
        self.ordered_adjs = None
        self.left_ref = None
        self.right_ref = None
        self.side = None

        return self.embedding
400
+
401
    def lr_planarity_recursive(self):
        """Recursive version of :meth:`lr_planarity`."""
        # Quick reject via the Euler bound: a planar graph has |E| <= 3|V| - 6.
        if self.G.order() > 2 and self.G.size() > 3 * self.G.order() - 6:
            # graph is not planar
            return None

        # orientation of the graph by depth first search traversal
        for v in self.G:
            if self.height[v] is None:
                self.height[v] = 0
                self.roots.append(v)
                self.dfs_orientation_recursive(v)

        # Free no longer used variable
        self.G = None

        # testing
        for v in self.DG:  # sort the adjacency lists by nesting depth
            # note: this sorting leads to non linear time
            self.ordered_adjs[v] = sorted(
                self.DG[v], key=lambda x: self.nesting_depth[(v, x)]
            )
        for v in self.roots:
            if not self.dfs_testing_recursive(v):
                # an LR partition does not exist -> not planar
                return None

        # apply the computed side (+1/-1) to the nesting depths
        for e in self.DG.edges:
            self.nesting_depth[e] = self.sign_recursive(e) * self.nesting_depth[e]

        self.embedding.add_nodes_from(self.DG.nodes)
        for v in self.DG:
            # sort the adjacency lists again
            self.ordered_adjs[v] = sorted(
                self.DG[v], key=lambda x: self.nesting_depth[(v, x)]
            )
            # initialize the embedding
            previous_node = None
            for w in self.ordered_adjs[v]:
                self.embedding.add_half_edge(v, w, ccw=previous_node)
                previous_node = w

        # compute the complete embedding
        for v in self.roots:
            self.dfs_embedding_recursive(v)

        return self.embedding
447
+
448
    def dfs_orientation(self, v):
        """Orient the graph by DFS, compute lowpoints and nesting order.

        Iterative emulation of :meth:`dfs_orientation_recursive`; ``ind`` and
        ``skip_init`` record per-node/per-edge progress across re-visits.
        """
        # the recursion stack
        dfs_stack = [v]
        # index of next edge to handle in adjacency list of each node
        ind = defaultdict(lambda: 0)
        # boolean to indicate whether to skip the initial work for an edge
        skip_init = defaultdict(lambda: False)

        while dfs_stack:
            v = dfs_stack.pop()
            e = self.parent_edge[v]

            for w in self.adjs[v][ind[v] :]:
                vw = (v, w)

                if not skip_init[vw]:
                    if (v, w) in self.DG.edges or (w, v) in self.DG.edges:
                        ind[v] += 1
                        continue  # the edge was already oriented

                    self.DG.add_edge(v, w)  # orient the edge

                    self.lowpt[vw] = self.height[v]
                    self.lowpt2[vw] = self.height[v]
                    if self.height[w] is None:  # (v, w) is a tree edge
                        self.parent_edge[w] = vw
                        self.height[w] = self.height[v] + 1

                        dfs_stack.append(v)  # revisit v after finishing w
                        dfs_stack.append(w)  # visit w next
                        skip_init[vw] = True  # don't redo this block
                        break  # handle next node in dfs_stack (i.e. w)
                    else:  # (v, w) is a back edge
                        self.lowpt[vw] = self.height[w]

                # determine nesting graph
                self.nesting_depth[vw] = 2 * self.lowpt[vw]
                if self.lowpt2[vw] < self.height[v]:  # chordal
                    self.nesting_depth[vw] += 1

                # update lowpoints of parent edge e
                if e is not None:
                    if self.lowpt[vw] < self.lowpt[e]:
                        self.lowpt2[e] = min(self.lowpt[e], self.lowpt2[vw])
                        self.lowpt[e] = self.lowpt[vw]
                    elif self.lowpt[vw] > self.lowpt[e]:
                        self.lowpt2[e] = min(self.lowpt2[e], self.lowpt[vw])
                    else:
                        self.lowpt2[e] = min(self.lowpt2[e], self.lowpt2[vw])

                ind[v] += 1
500
+
501
    def dfs_orientation_recursive(self, v):
        """Recursive version of :meth:`dfs_orientation`."""
        e = self.parent_edge[v]
        for w in self.G[v]:
            if (v, w) in self.DG.edges or (w, v) in self.DG.edges:
                continue  # the edge was already oriented
            vw = (v, w)
            self.DG.add_edge(v, w)  # orient the edge

            # lowpoints start at the current height and are lowered below
            self.lowpt[vw] = self.height[v]
            self.lowpt2[vw] = self.height[v]
            if self.height[w] is None:  # (v, w) is a tree edge
                self.parent_edge[w] = vw
                self.height[w] = self.height[v] + 1
                self.dfs_orientation_recursive(w)
            else:  # (v, w) is a back edge
                self.lowpt[vw] = self.height[w]

            # determine nesting graph
            self.nesting_depth[vw] = 2 * self.lowpt[vw]
            if self.lowpt2[vw] < self.height[v]:  # chordal
                self.nesting_depth[vw] += 1

            # update lowpoints of parent edge e
            if e is not None:
                if self.lowpt[vw] < self.lowpt[e]:
                    self.lowpt2[e] = min(self.lowpt[e], self.lowpt2[vw])
                    self.lowpt[e] = self.lowpt[vw]
                elif self.lowpt[vw] > self.lowpt[e]:
                    self.lowpt2[e] = min(self.lowpt2[e], self.lowpt[vw])
                else:
                    self.lowpt2[e] = min(self.lowpt2[e], self.lowpt2[vw])
533
+
534
    def dfs_testing(self, v):
        """Test for LR partition.

        Iterative emulation of :meth:`dfs_testing_recursive`; returns False as
        soon as a constraint conflict proves the graph non-planar.
        """
        # the recursion stack
        dfs_stack = [v]
        # index of next edge to handle in adjacency list of each node
        ind = defaultdict(lambda: 0)
        # boolean to indicate whether to skip the initial work for an edge
        skip_init = defaultdict(lambda: False)

        while dfs_stack:
            v = dfs_stack.pop()
            e = self.parent_edge[v]
            # to indicate whether to skip the final block after the for loop
            skip_final = False

            for w in self.ordered_adjs[v][ind[v] :]:
                ei = (v, w)

                if not skip_init[ei]:
                    self.stack_bottom[ei] = top_of_stack(self.S)

                    if ei == self.parent_edge[w]:  # tree edge
                        dfs_stack.append(v)  # revisit v after finishing w
                        dfs_stack.append(w)  # visit w next
                        skip_init[ei] = True  # don't redo this block
                        skip_final = True  # skip final work after breaking
                        break  # handle next node in dfs_stack (i.e. w)
                    else:  # back edge
                        self.lowpt_edge[ei] = ei
                        self.S.append(ConflictPair(right=Interval(ei, ei)))

                # integrate new return edges
                if self.lowpt[ei] < self.height[v]:
                    if w == self.ordered_adjs[v][0]:  # e_i has return edge
                        self.lowpt_edge[e] = self.lowpt_edge[ei]
                    else:  # add constraints of e_i
                        if not self.add_constraints(ei, e):
                            # graph is not planar
                            return False

                ind[v] += 1

            if not skip_final:
                # remove back edges returning to parent
                if e is not None:  # v isn't root
                    self.remove_back_edges(e)

        return True
582
+
583
    def dfs_testing_recursive(self, v):
        """Recursive version of :meth:`dfs_testing`.

        Returns False as soon as a constraint conflict proves non-planarity.
        """
        e = self.parent_edge[v]
        for w in self.ordered_adjs[v]:
            ei = (v, w)
            self.stack_bottom[ei] = top_of_stack(self.S)
            if ei == self.parent_edge[w]:  # tree edge
                if not self.dfs_testing_recursive(w):
                    return False
            else:  # back edge
                self.lowpt_edge[ei] = ei
                self.S.append(ConflictPair(right=Interval(ei, ei)))

            # integrate new return edges
            if self.lowpt[ei] < self.height[v]:
                if w == self.ordered_adjs[v][0]:  # e_i has return edge
                    self.lowpt_edge[e] = self.lowpt_edge[ei]
                else:  # add constraints of e_i
                    if not self.add_constraints(ei, e):
                        # graph is not planar
                        return False

        # remove back edges returning to parent
        if e is not None:  # v isn't root
            self.remove_back_edges(e)
        return True
609
+
610
    def add_constraints(self, ei, e):
        """Merge the constraints of edge `ei` into the conflict-pair stack.

        Returns False if a conflict is found that proves non-planarity,
        True otherwise.
        """
        P = ConflictPair()
        # merge return edges of e_i into P.right
        while True:
            Q = self.S.pop()
            if not Q.left.empty():
                Q.swap()
            if not Q.left.empty():  # not planar
                return False
            if self.lowpt[Q.right.low] > self.lowpt[e]:
                # merge intervals
                if P.right.empty():  # topmost interval
                    P.right = Q.right.copy()
                else:
                    self.ref[P.right.low] = Q.right.high
                P.right.low = Q.right.low
            else:  # align
                self.ref[Q.right.low] = self.lowpt_edge[e]
            if top_of_stack(self.S) == self.stack_bottom[ei]:
                # reached the part of the stack that belongs to earlier edges
                break
        # merge conflicting return edges of e_1,...,e_i-1 into P.L
        while top_of_stack(self.S).left.conflicting(ei, self) or top_of_stack(
            self.S
        ).right.conflicting(ei, self):
            Q = self.S.pop()
            if Q.right.conflicting(ei, self):
                Q.swap()
            if Q.right.conflicting(ei, self):  # not planar
                return False
            # merge interval below lowpt(e_i) into P.R
            self.ref[P.right.low] = Q.right.high
            if Q.right.low is not None:
                P.right.low = Q.right.low

            if P.left.empty():  # topmost interval
                P.left = Q.left.copy()
            else:
                self.ref[P.left.low] = Q.left.high
            P.left.low = Q.left.low

        if not (P.left.empty() and P.right.empty()):
            self.S.append(P)
        return True
653
+
654
    def remove_back_edges(self, e):
        """Drop/trim conflict pairs for back edges that end at the parent of `e`."""
        u = e[0]
        # trim back edges ending at parent u
        # drop entire conflict pairs
        while self.S and top_of_stack(self.S).lowest(self) == self.height[u]:
            P = self.S.pop()
            if P.left.low is not None:
                self.side[P.left.low] = -1

        if self.S:  # one more conflict pair to consider
            P = self.S.pop()
            # trim left interval
            while P.left.high is not None and P.left.high[1] == u:
                P.left.high = self.ref[P.left.high]
            if P.left.high is None and P.left.low is not None:
                # just emptied
                self.ref[P.left.low] = P.right.low
                self.side[P.left.low] = -1
                P.left.low = None
            # trim right interval
            while P.right.high is not None and P.right.high[1] == u:
                P.right.high = self.ref[P.right.high]
            if P.right.high is None and P.right.low is not None:
                # just emptied
                self.ref[P.right.low] = P.left.low
                self.side[P.right.low] = -1
                P.right.low = None
            self.S.append(P)

        # side of e is side of a highest return edge
        if self.lowpt[e] < self.height[u]:  # e has return edge
            hl = top_of_stack(self.S).left.high
            hr = top_of_stack(self.S).right.high

            if hl is not None and (hr is None or self.lowpt[hl] > self.lowpt[hr]):
                self.ref[e] = hl
            else:
                self.ref[e] = hr
692
+
693
    def dfs_embedding(self, v):
        """Completes the embedding.

        Iterative DFS from ``v``: tree edges are inserted as the new
        leftmost half-edge of the child, back edges are inserted on the
        side determined during the testing phase.
        """
        # the recursion stack
        dfs_stack = [v]
        # index of next edge to handle in adjacency list of each node
        ind = defaultdict(lambda: 0)

        while dfs_stack:
            v = dfs_stack.pop()

            # resume v's adjacency scan where it was interrupted
            for w in self.ordered_adjs[v][ind[v] :]:
                ind[v] += 1
                ei = (v, w)

                if ei == self.parent_edge[w]:  # tree edge
                    self.embedding.add_half_edge_first(w, v)
                    self.left_ref[v] = w
                    self.right_ref[v] = w

                    dfs_stack.append(v)  # revisit v after finishing w
                    dfs_stack.append(w)  # visit w next
                    break  # handle next node in dfs_stack (i.e. w)
                else:  # back edge
                    if self.side[ei] == 1:
                        self.embedding.add_half_edge(w, v, ccw=self.right_ref[w])
                    else:
                        self.embedding.add_half_edge(w, v, cw=self.left_ref[w])
                        self.left_ref[w] = v
722
    def dfs_embedding_recursive(self, v):
        """Recursive version of :meth:`dfs_embedding`."""
        for w in self.ordered_adjs[v]:
            ei = (v, w)
            if ei == self.parent_edge[w]:  # tree edge
                # the tree edge becomes w's new leftmost half-edge
                self.embedding.add_half_edge_first(w, v)
                self.left_ref[v] = w
                self.right_ref[v] = w
                self.dfs_embedding_recursive(w)
            else:  # back edge
                if self.side[ei] == 1:
                    # place v directly after right_ref[w] in embed. list of w
                    self.embedding.add_half_edge(w, v, ccw=self.right_ref[w])
                else:
                    # place v directly before left_ref[w] in embed. list of w
                    self.embedding.add_half_edge(w, v, cw=self.left_ref[w])
                    self.left_ref[w] = v
739
+
740
    def sign(self, e):
        """Resolve the relative side of an edge to the absolute side.

        Iterative replacement for :meth:`sign_recursive`: follows the
        ``ref`` chain of ``e``, multiplying the relative sides along it.
        """
        # the recursion stack
        dfs_stack = [e]
        # dict to remember reference edges
        old_ref = defaultdict(lambda: None)

        while dfs_stack:
            e = dfs_stack.pop()

            if self.ref[e] is not None:
                dfs_stack.append(e)  # revisit e after finishing self.ref[e]
                dfs_stack.append(self.ref[e])  # visit self.ref[e] next
                old_ref[e] = self.ref[e]  # remember value of self.ref[e]
                self.ref[e] = None
            else:
                # ref already resolved (or absent): fold its sign into e's
                self.side[e] *= self.side[old_ref[e]]

        return self.side[e]
759
+
760
+ def sign_recursive(self, e):
761
+ """Recursive version of :meth:`sign`."""
762
+ if self.ref[e] is not None:
763
+ self.side[e] = self.side[e] * self.sign_recursive(self.ref[e])
764
+ self.ref[e] = None
765
+ return self.side[e]
766
+
767
+
768
+ class PlanarEmbedding(nx.DiGraph):
769
+ """Represents a planar graph with its planar embedding.
770
+
771
+ The planar embedding is given by a `combinatorial embedding
772
+ <https://en.wikipedia.org/wiki/Graph_embedding#Combinatorial_embedding>`_.
773
+
774
+ .. note:: `check_planarity` is the preferred way to check if a graph is planar.
775
+
776
+ **Neighbor ordering:**
777
+
778
+ In comparison to a usual graph structure, the embedding also stores the
779
+ order of all neighbors for every vertex.
780
+ The order of the neighbors can be given in clockwise (cw) direction or
781
+ counterclockwise (ccw) direction. This order is stored as edge attributes
782
+ in the underlying directed graph. For the edge (u, v) the edge attribute
783
+ 'cw' is set to the neighbor of u that follows immediately after v in
784
+ clockwise direction.
785
+
786
+ In order for a PlanarEmbedding to be valid it must fulfill multiple
787
+ conditions. It is possible to check if these conditions are fulfilled with
788
+ the method :meth:`check_structure`.
789
+ The conditions are:
790
+
791
+ * Edges must go in both directions (because the edge attributes differ)
792
+ * Every edge must have a 'cw' and 'ccw' attribute which corresponds to a
793
+ correct planar embedding.
794
+
795
+ As long as a PlanarEmbedding is invalid only the following methods should
796
+ be called:
797
+
798
+ * :meth:`add_half_edge`
799
+ * :meth:`connect_components`
800
+
801
+ Even though the graph is a subclass of nx.DiGraph, it can still be used
802
+ for algorithms that require undirected graphs, because the method
803
+ :meth:`is_directed` is overridden. This is possible, because a valid
804
+ PlanarGraph must have edges in both directions.
805
+
806
+ **Half edges:**
807
+
808
+ In methods like `add_half_edge` the term "half-edge" is used, which is
809
+ a term that is used in `doubly connected edge lists
810
+ <https://en.wikipedia.org/wiki/Doubly_connected_edge_list>`_. It is used
811
+ to emphasize that the edge is only in one direction and there exists
812
+ another half-edge in the opposite direction.
813
+ While conventional edges always have two faces (including outer face) next
814
+ to them, it is possible to assign each half-edge *exactly one* face.
815
+ For a half-edge (u, v) that is oriented such that u is below v then the
816
+ face that belongs to (u, v) is to the right of this half-edge.
817
+
818
+ See Also
819
+ --------
820
+ is_planar :
821
+ Preferred way to check if an existing graph is planar.
822
+
823
+ check_planarity :
824
+ A convenient way to create a `PlanarEmbedding`. If not planar,
825
+ it returns a subgraph that shows this.
826
+
827
+ Examples
828
+ --------
829
+
830
+ Create an embedding of a star graph (compare `nx.star_graph(3)`):
831
+
832
+ >>> G = nx.PlanarEmbedding()
833
+ >>> G.add_half_edge(0, 1)
834
+ >>> G.add_half_edge(0, 2, ccw=1)
835
+ >>> G.add_half_edge(0, 3, ccw=2)
836
+ >>> G.add_half_edge(1, 0)
837
+ >>> G.add_half_edge(2, 0)
838
+ >>> G.add_half_edge(3, 0)
839
+
840
+ Alternatively the same embedding can also be defined in counterclockwise
841
+ orientation. The following results in exactly the same PlanarEmbedding:
842
+
843
+ >>> G = nx.PlanarEmbedding()
844
+ >>> G.add_half_edge(0, 1)
845
+ >>> G.add_half_edge(0, 3, cw=1)
846
+ >>> G.add_half_edge(0, 2, cw=3)
847
+ >>> G.add_half_edge(1, 0)
848
+ >>> G.add_half_edge(2, 0)
849
+ >>> G.add_half_edge(3, 0)
850
+
851
+ After creating a graph, it is possible to validate that the PlanarEmbedding
852
+ object is correct:
853
+
854
+ >>> G.check_structure()
855
+
856
+ """
857
+
858
    def __init__(self, incoming_graph_data=None, **attr):
        """Initialize the embedding and disable plain edge-addition methods."""
        super().__init__(incoming_graph_data=incoming_graph_data, **attr)
        # Shadow the inherited edge-addition API on the *instance* so edges
        # can only enter via add_half_edge, which maintains the cw/ccw
        # ordering attributes.
        self.add_edge = self.__forbidden
        self.add_edges_from = self.__forbidden
        self.add_weighted_edges_from = self.__forbidden
863
+
864
+ def __forbidden(self, *args, **kwargs):
865
+ """Forbidden operation
866
+
867
+ Any edge additions to a PlanarEmbedding should be done using
868
+ method `add_half_edge`.
869
+ """
870
+ raise NotImplementedError(
871
+ "Use `add_half_edge` method to add edges to a PlanarEmbedding."
872
+ )
873
+
874
+ def get_data(self):
875
+ """Converts the adjacency structure into a better readable structure.
876
+
877
+ Returns
878
+ -------
879
+ embedding : dict
880
+ A dict mapping all nodes to a list of neighbors sorted in
881
+ clockwise order.
882
+
883
+ See Also
884
+ --------
885
+ set_data
886
+
887
+ """
888
+ embedding = {}
889
+ for v in self:
890
+ embedding[v] = list(self.neighbors_cw_order(v))
891
+ return embedding
892
+
893
+ def set_data(self, data):
894
+ """Inserts edges according to given sorted neighbor list.
895
+
896
+ The input format is the same as the output format of get_data().
897
+
898
+ Parameters
899
+ ----------
900
+ data : dict
901
+ A dict mapping all nodes to a list of neighbors sorted in
902
+ clockwise order.
903
+
904
+ See Also
905
+ --------
906
+ get_data
907
+
908
+ """
909
+ for v in data:
910
+ ref = None
911
+ for w in reversed(data[v]):
912
+ self.add_half_edge(v, w, cw=ref)
913
+ ref = w
914
+
915
    def remove_node(self, n):
        """Remove node n.

        Removes the node n and all adjacent edges, updating the
        PlanarEmbedding to account for any resulting edge removal.
        Attempting to remove a non-existent node will raise an exception.

        Parameters
        ----------
        n : node
            A node in the graph

        Raises
        ------
        NetworkXError
            If n is not in the graph.

        See Also
        --------
        remove_nodes_from

        """
        try:
            for u in self._pred[n]:
                succs_u = self._succ[u]
                un_cw = succs_u[n]["cw"]
                un_ccw = succs_u[n]["ccw"]
                del succs_u[n]
                del self._pred[u][n]
                if n != un_cw:
                    # n was not u's only neighbor: splice the circular
                    # cw/ccw order around u closed over the removed entry
                    succs_u[un_cw]["ccw"] = un_ccw
                    succs_u[un_ccw]["cw"] = un_cw
            del self._node[n]
            del self._succ[n]
            del self._pred[n]
        except KeyError as err:  # NetworkXError if n not in self
            raise nx.NetworkXError(
                f"The node {n} is not in the planar embedding."
            ) from err
        nx._clear_cache(self)
955
+
956
+ def remove_nodes_from(self, nodes):
957
+ """Remove multiple nodes.
958
+
959
+ Parameters
960
+ ----------
961
+ nodes : iterable container
962
+ A container of nodes (list, dict, set, etc.). If a node
963
+ in the container is not in the graph it is silently ignored.
964
+
965
+ See Also
966
+ --------
967
+ remove_node
968
+
969
+ Notes
970
+ -----
971
+ When removing nodes from an iterator over the graph you are changing,
972
+ a `RuntimeError` will be raised with message:
973
+ `RuntimeError: dictionary changed size during iteration`. This
974
+ happens when the graph's underlying dictionary is modified during
975
+ iteration. To avoid this error, evaluate the iterator into a separate
976
+ object, e.g. by using `list(iterator_of_nodes)`, and pass this
977
+ object to `G.remove_nodes_from`.
978
+
979
+ """
980
+ for n in nodes:
981
+ if n in self._node:
982
+ self.remove_node(n)
983
+ # silently skip non-existing nodes
984
+
985
+ def neighbors_cw_order(self, v):
986
+ """Generator for the neighbors of v in clockwise order.
987
+
988
+ Parameters
989
+ ----------
990
+ v : node
991
+
992
+ Yields
993
+ ------
994
+ node
995
+
996
+ """
997
+ succs = self._succ[v]
998
+ if not succs:
999
+ # v has no neighbors
1000
+ return
1001
+ start_node = next(reversed(succs))
1002
+ yield start_node
1003
+ current_node = succs[start_node]["cw"]
1004
+ while start_node != current_node:
1005
+ yield current_node
1006
+ current_node = succs[current_node]["cw"]
1007
+
1008
    def add_half_edge(self, start_node, end_node, *, cw=None, ccw=None):
        """Adds a half-edge from `start_node` to `end_node`.

        If the half-edge is not the first one out of `start_node`, a reference
        node must be provided either in the clockwise (parameter `cw`) or in
        the counterclockwise (parameter `ccw`) direction. Only one of `cw`/`ccw`
        can be specified (or neither in the case of the first edge).
        Note that specifying a reference in the clockwise (`cw`) direction means
        inserting the new edge in the first counterclockwise position with
        respect to the reference (and vice-versa).

        Parameters
        ----------
        start_node : node
            Start node of inserted edge.
        end_node : node
            End node of inserted edge.
        cw, ccw: node
            End node of reference edge.
            Omit or pass `None` if adding the first out-half-edge of `start_node`.


        Raises
        ------
        NetworkXException
            If the `cw` or `ccw` node is not a successor of `start_node`.
            If `start_node` has successors, but neither `cw` or `ccw` is provided.
            If both `cw` and `ccw` are specified.

        See Also
        --------
        connect_components
        """

        succs = self._succ.get(start_node)
        if succs:
            # there is already some edge out of start_node
            leftmost_nbr = next(reversed(self._succ[start_node]))
            if cw is not None:
                if cw not in succs:
                    raise nx.NetworkXError("Invalid clockwise reference node.")
                if ccw is not None:
                    raise nx.NetworkXError("Only one of cw/ccw can be specified.")
                # splice end_node between cw and its former ccw neighbor
                ref_ccw = succs[cw]["ccw"]
                super().add_edge(start_node, end_node, cw=cw, ccw=ref_ccw)
                succs[ref_ccw]["cw"] = end_node
                succs[cw]["ccw"] = end_node
                # when (cw == leftmost_nbr), the newly added neighbor is
                # already at the end of dict self._succ[start_node] and
                # takes the place of the former leftmost_nbr
                move_leftmost_nbr_to_end = cw != leftmost_nbr
            elif ccw is not None:
                if ccw not in succs:
                    raise nx.NetworkXError("Invalid counterclockwise reference node.")
                # splice end_node between ccw and its former cw neighbor
                ref_cw = succs[ccw]["cw"]
                super().add_edge(start_node, end_node, cw=ref_cw, ccw=ccw)
                succs[ref_cw]["ccw"] = end_node
                succs[ccw]["cw"] = end_node
                move_leftmost_nbr_to_end = True
            else:
                raise nx.NetworkXError(
                    "Node already has out-half-edge(s), either cw or ccw reference node required."
                )
            if move_leftmost_nbr_to_end:
                # LRPlanarity (via self.add_half_edge_first()) requires that
                # we keep track of the leftmost neighbor, which we accomplish
                # by keeping it as the last key in dict self._succ[start_node]
                succs[leftmost_nbr] = succs.pop(leftmost_nbr)

        else:
            if cw is not None or ccw is not None:
                raise nx.NetworkXError("Invalid reference node.")
            # adding the first edge out of start_node; it is its own cw and
            # ccw neighbor, forming a one-element circular order
            super().add_edge(start_node, end_node, ccw=end_node, cw=end_node)
1082
+
1083
    def check_structure(self):
        """Runs without exceptions if this object is valid.

        Checks that the following properties are fulfilled:

        * Edges go in both directions (because the edge attributes differ).
        * Every edge has a 'cw' and 'ccw' attribute which corresponds to a
          correct planar embedding.

        Running this method verifies that the underlying Graph must be planar.

        Raises
        ------
        NetworkXException
            This exception is raised with a short explanation if the
            PlanarEmbedding is invalid.
        """
        # Check fundamental structure
        for v in self:
            try:
                sorted_nbrs = set(self.neighbors_cw_order(v))
            except KeyError as err:
                msg = f"Bad embedding. Missing orientation for a neighbor of {v}"
                raise nx.NetworkXException(msg) from err

            # the cw-order walk must visit exactly the successor set
            unsorted_nbrs = set(self[v])
            if sorted_nbrs != unsorted_nbrs:
                msg = "Bad embedding. Edge orientations not set correctly."
                raise nx.NetworkXException(msg)
            for w in self[v]:
                # Check if opposite half-edge exists
                if not self.has_edge(w, v):
                    msg = "Bad embedding. Opposite half-edge is missing."
                    raise nx.NetworkXException(msg)

        # Check planarity via Euler's formula, one connected component at a
        # time; faces are counted by traversing each face exactly once.
        counted_half_edges = set()
        for component in nx.connected_components(self):
            if len(component) == 1:
                # Don't need to check single node component
                continue
            num_nodes = len(component)
            num_half_edges = 0
            num_faces = 0
            for v in component:
                for w in self.neighbors_cw_order(v):
                    num_half_edges += 1
                    if (v, w) not in counted_half_edges:
                        # We encountered a new face
                        num_faces += 1
                        # Mark all half-edges belonging to this face
                        self.traverse_face(v, w, counted_half_edges)
            num_edges = num_half_edges // 2  # num_half_edges is even
            if num_nodes - num_edges + num_faces != 2:
                # The result does not match Euler's formula
                msg = "Bad embedding. The graph does not match Euler's formula"
                raise nx.NetworkXException(msg)
1140
+
1141
+ def add_half_edge_ccw(self, start_node, end_node, reference_neighbor):
1142
+ """Adds a half-edge from start_node to end_node.
1143
+
1144
+ The half-edge is added counter clockwise next to the existing half-edge
1145
+ (start_node, reference_neighbor).
1146
+
1147
+ Parameters
1148
+ ----------
1149
+ start_node : node
1150
+ Start node of inserted edge.
1151
+ end_node : node
1152
+ End node of inserted edge.
1153
+ reference_neighbor: node
1154
+ End node of reference edge.
1155
+
1156
+ Raises
1157
+ ------
1158
+ NetworkXException
1159
+ If the reference_neighbor does not exist.
1160
+
1161
+ See Also
1162
+ --------
1163
+ add_half_edge
1164
+ add_half_edge_cw
1165
+ connect_components
1166
+
1167
+ """
1168
+ self.add_half_edge(start_node, end_node, cw=reference_neighbor)
1169
+
1170
+ def add_half_edge_cw(self, start_node, end_node, reference_neighbor):
1171
+ """Adds a half-edge from start_node to end_node.
1172
+
1173
+ The half-edge is added clockwise next to the existing half-edge
1174
+ (start_node, reference_neighbor).
1175
+
1176
+ Parameters
1177
+ ----------
1178
+ start_node : node
1179
+ Start node of inserted edge.
1180
+ end_node : node
1181
+ End node of inserted edge.
1182
+ reference_neighbor: node
1183
+ End node of reference edge.
1184
+
1185
+ Raises
1186
+ ------
1187
+ NetworkXException
1188
+ If the reference_neighbor does not exist.
1189
+
1190
+ See Also
1191
+ --------
1192
+ add_half_edge
1193
+ add_half_edge_ccw
1194
+ connect_components
1195
+ """
1196
+ self.add_half_edge(start_node, end_node, ccw=reference_neighbor)
1197
+
1198
    def remove_edge(self, u, v):
        """Remove the edge between u and v.

        Parameters
        ----------
        u, v : nodes
            Remove the half-edges (u, v) and (v, u) and update the
            edge ordering around the removed edge.

        Raises
        ------
        NetworkXError
            If there is not an edge between u and v.

        See Also
        --------
        remove_edges_from : remove a collection of edges
        """
        try:
            succs_u = self._succ[u]
            succs_v = self._succ[v]
            # remember the cw/ccw neighbors before deleting the entries
            uv_cw = succs_u[v]["cw"]
            uv_ccw = succs_u[v]["ccw"]
            vu_cw = succs_v[u]["cw"]
            vu_ccw = succs_v[u]["ccw"]
            del succs_u[v]
            del self._pred[v][u]
            del succs_v[u]
            del self._pred[u][v]
            if v != uv_cw:
                # v was not u's only neighbor: close the circular order
                succs_u[uv_cw]["ccw"] = uv_ccw
                succs_u[uv_ccw]["cw"] = uv_cw
            if u != vu_cw:
                # u was not v's only neighbor: close the circular order
                succs_v[vu_cw]["ccw"] = vu_ccw
                succs_v[vu_ccw]["cw"] = vu_cw
        except KeyError as err:
            raise nx.NetworkXError(
                f"The edge {u}-{v} is not in the planar embedding."
            ) from err
        nx._clear_cache(self)
1238
+
1239
+ def remove_edges_from(self, ebunch):
1240
+ """Remove all edges specified in ebunch.
1241
+
1242
+ Parameters
1243
+ ----------
1244
+ ebunch: list or container of edge tuples
1245
+ Each pair of half-edges between the nodes given in the tuples
1246
+ will be removed from the graph. The nodes can be passed as:
1247
+
1248
+ - 2-tuples (u, v) half-edges (u, v) and (v, u).
1249
+ - 3-tuples (u, v, k) where k is ignored.
1250
+
1251
+ See Also
1252
+ --------
1253
+ remove_edge : remove a single edge
1254
+
1255
+ Notes
1256
+ -----
1257
+ Will fail silently if an edge in ebunch is not in the graph.
1258
+
1259
+ Examples
1260
+ --------
1261
+ >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc
1262
+ >>> ebunch = [(1, 2), (2, 3)]
1263
+ >>> G.remove_edges_from(ebunch)
1264
+ """
1265
+ for e in ebunch:
1266
+ u, v = e[:2] # ignore edge data
1267
+ # assuming that the PlanarEmbedding is valid, if the half_edge
1268
+ # (u, v) is in the graph, then so is half_edge (v, u)
1269
+ if u in self._succ and v in self._succ[u]:
1270
+ self.remove_edge(u, v)
1271
+
1272
+ def connect_components(self, v, w):
1273
+ """Adds half-edges for (v, w) and (w, v) at some position.
1274
+
1275
+ This method should only be called if v and w are in different
1276
+ components, or it might break the embedding.
1277
+ This especially means that if `connect_components(v, w)`
1278
+ is called it is not allowed to call `connect_components(w, v)`
1279
+ afterwards. The neighbor orientations in both directions are
1280
+ all set correctly after the first call.
1281
+
1282
+ Parameters
1283
+ ----------
1284
+ v : node
1285
+ w : node
1286
+
1287
+ See Also
1288
+ --------
1289
+ add_half_edge
1290
+ """
1291
+ if v in self._succ and self._succ[v]:
1292
+ ref = next(reversed(self._succ[v]))
1293
+ else:
1294
+ ref = None
1295
+ self.add_half_edge(v, w, cw=ref)
1296
+ if w in self._succ and self._succ[w]:
1297
+ ref = next(reversed(self._succ[w]))
1298
+ else:
1299
+ ref = None
1300
+ self.add_half_edge(w, v, cw=ref)
1301
+
1302
+ def add_half_edge_first(self, start_node, end_node):
1303
+ """Add a half-edge and set end_node as start_node's leftmost neighbor.
1304
+
1305
+ The new edge is inserted counterclockwise with respect to the current
1306
+ leftmost neighbor, if there is one.
1307
+
1308
+ Parameters
1309
+ ----------
1310
+ start_node : node
1311
+ end_node : node
1312
+
1313
+ See Also
1314
+ --------
1315
+ add_half_edge
1316
+ connect_components
1317
+ """
1318
+ succs = self._succ.get(start_node)
1319
+ # the leftmost neighbor is the last entry in the
1320
+ # self._succ[start_node] dict
1321
+ leftmost_nbr = next(reversed(succs)) if succs else None
1322
+ self.add_half_edge(start_node, end_node, cw=leftmost_nbr)
1323
+
1324
+ def next_face_half_edge(self, v, w):
1325
+ """Returns the following half-edge left of a face.
1326
+
1327
+ Parameters
1328
+ ----------
1329
+ v : node
1330
+ w : node
1331
+
1332
+ Returns
1333
+ -------
1334
+ half-edge : tuple
1335
+ """
1336
+ new_node = self[w][v]["ccw"]
1337
+ return w, new_node
1338
+
1339
    def traverse_face(self, v, w, mark_half_edges=None):
        """Returns nodes on the face that belong to the half-edge (v, w).

        The face that is traversed lies to the right of the half-edge (in an
        orientation where v is below w).

        Optionally it is possible to pass a set to which all encountered half
        edges are added. Before calling this method, this set must not include
        any half-edges that belong to the face.

        Parameters
        ----------
        v : node
            Start node of half-edge.
        w : node
            End node of half-edge.
        mark_half_edges: set, optional
            Set to which all encountered half-edges are added.

        Returns
        -------
        face : list
            A list of nodes that lie on this face.
        """
        if mark_half_edges is None:
            mark_half_edges = set()

        face_nodes = [v]
        mark_half_edges.add((v, w))
        prev_node = v
        cur_node = w
        # Last half-edge is (incoming_node, v)
        incoming_node = self[v][w]["cw"]

        # Comparing cur_node to v alone is not enough to stop: v can occur
        # several times on the same face, so the incoming edge is checked too.
        while cur_node != v or prev_node != incoming_node:
            face_nodes.append(cur_node)
            prev_node, cur_node = self.next_face_half_edge(prev_node, cur_node)
            if (prev_node, cur_node) in mark_half_edges:
                raise nx.NetworkXException("Bad planar embedding. Impossible face.")
            mark_half_edges.add((prev_node, cur_node))

        return face_nodes
1381
+
1382
+ def is_directed(self):
1383
+ """A valid PlanarEmbedding is undirected.
1384
+
1385
+ All reverse edges are contained, i.e. for every existing
1386
+ half-edge (v, w) the half-edge in the opposite direction (w, v) is also
1387
+ contained.
1388
+ """
1389
+ return False
1390
+
1391
+ def copy(self, as_view=False):
1392
+ if as_view is True:
1393
+ return nx.graphviews.generic_graph_view(self)
1394
+ G = self.__class__()
1395
+ G.graph.update(self.graph)
1396
+ G.add_nodes_from((n, d.copy()) for n, d in self._node.items())
1397
+ super(self.__class__, G).add_edges_from(
1398
+ (u, v, datadict.copy())
1399
+ for u, nbrs in self._adj.items()
1400
+ for v, datadict in nbrs.items()
1401
+ )
1402
+ return G
wemm/lib/python3.10/site-packages/networkx/algorithms/shortest_paths/__pycache__/dense.cpython-310.pyc ADDED
Binary file (7.75 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/shortest_paths/__pycache__/unweighted.cpython-310.pyc ADDED
Binary file (13 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (190 Bytes). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/smallworld.py ADDED
@@ -0,0 +1,404 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for estimating the small-world-ness of graphs.
2
+
3
+ A small world network is characterized by a small average shortest path length,
4
+ and a large clustering coefficient.
5
+
6
+ Small-worldness is commonly measured with the coefficient sigma or omega.
7
+
8
+ Both coefficients compare the average clustering coefficient and shortest path
9
+ length of a given graph against the same quantities for an equivalent random
10
+ or lattice graph.
11
+
12
+ For more information, see the Wikipedia article on small-world network [1]_.
13
+
14
+ .. [1] Small-world network: https://en.wikipedia.org/wiki/Small-world_network
15
+
16
+ """
17
+
18
+ import networkx as nx
19
+ from networkx.utils import not_implemented_for, py_random_state
20
+
21
+ __all__ = ["random_reference", "lattice_reference", "sigma", "omega"]
22
+
23
+
24
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@py_random_state(3)
@nx._dispatchable(returns_graph=True)
def random_reference(G, niter=1, connectivity=True, seed=None):
    """Compute a random graph by swapping edges of a given graph.

    Parameters
    ----------
    G : graph
        An undirected graph with 4 or more nodes.

    niter : integer (optional, default=1)
        An edge is rewired approximately `niter` times.

    connectivity : boolean (optional, default=True)
        When True, ensure connectivity for the randomized graph.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    G : graph
        The randomized graph.

    Raises
    ------
    NetworkXError
        If there are fewer than 4 nodes or 2 edges in `G`

    Notes
    -----
    The implementation is adapted from the algorithm by Maslov and Sneppen
    (2002) [1]_.

    References
    ----------
    .. [1] Maslov, Sergei, and Kim Sneppen.
           "Specificity and stability in topology of protein networks."
           Science 296.5569 (2002): 910-913.
    """
    if len(G) < 4:
        raise nx.NetworkXError("Graph has fewer than four nodes.")
    if len(G.edges) < 2:
        # typo fixed: "fewer that" -> "fewer than"
        raise nx.NetworkXError("Graph has fewer than 2 edges")

    from networkx.utils import cumulative_distribution, discrete_sequence

    local_conn = nx.connectivity.local_edge_connectivity

    G = G.copy()
    keys, degrees = zip(*G.degree())  # keys, degree
    cdf = cumulative_distribution(degrees)  # cdf of degree
    nnodes = len(G)
    nedges = nx.number_of_edges(G)
    niter = niter * nedges
    # maximal number of rewiring attempts per iteration
    ntries = int(nnodes * nedges / (nnodes * (nnodes - 1) / 2))
    swapcount = 0

    for _ in range(niter):
        n = 0
        while n < ntries:
            # pick two random edges without creating edge list
            # choose source node indices from discrete distribution
            (ai, ci) = discrete_sequence(2, cdistribution=cdf, seed=seed)
            if ai == ci:
                continue  # same source, skip
            a = keys[ai]  # convert index to label
            c = keys[ci]
            # choose target uniformly from neighbors
            b = seed.choice(list(G.neighbors(a)))
            d = seed.choice(list(G.neighbors(c)))
            if b in [a, c, d] or d in [a, b, c]:
                continue  # all vertices should be different

            # don't create parallel edges
            if (d not in G[a]) and (b not in G[c]):
                G.add_edge(a, d)
                G.add_edge(c, b)
                G.remove_edge(a, b)
                G.remove_edge(c, d)

                # Check if the graph is still connected
                if connectivity and local_conn(G, a, b) == 0:
                    # Not connected, revert the swap
                    G.remove_edge(a, d)
                    G.remove_edge(c, b)
                    G.add_edge(a, b)
                    G.add_edge(c, d)
                else:
                    swapcount += 1
                    break
            n += 1
    return G
120
+
121
+
122
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@py_random_state(4)
@nx._dispatchable(returns_graph=True)
def lattice_reference(G, niter=5, D=None, connectivity=True, seed=None):
    """Latticize the given graph by swapping edges.

    Parameters
    ----------
    G : graph
        An undirected graph.

    niter : integer (optional, default=1)
        An edge is rewired approximately niter times.

    D : numpy.array (optional, default=None)
        Distance to the diagonal matrix.

    connectivity : boolean (optional, default=True)
        Ensure connectivity for the latticized graph when set to True.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    G : graph
        The latticized graph.

    Raises
    ------
    NetworkXError
        If there are fewer than 4 nodes or 2 edges in `G`

    Notes
    -----
    The implementation is adapted from the algorithm by Sporns et al. [1]_.
    which is inspired from the original work by Maslov and Sneppen(2002) [2]_.

    References
    ----------
    .. [1] Sporns, Olaf, and Jonathan D. Zwi.
       "The small world of the cerebral cortex."
       Neuroinformatics 2.2 (2004): 145-162.
    .. [2] Maslov, Sergei, and Kim Sneppen.
       "Specificity and stability in topology of protein networks."
       Science 296.5569 (2002): 910-913.
    """
    import numpy as np

    from networkx.utils import cumulative_distribution, discrete_sequence

    local_conn = nx.connectivity.local_edge_connectivity

    if len(G) < 4:
        raise nx.NetworkXError("Graph has fewer than four nodes.")
    if len(G.edges) < 2:
        # typo fixed: "fewer that" -> "fewer than"
        raise nx.NetworkXError("Graph has fewer than 2 edges")
    # Instead of choosing uniformly at random from a generated edge list,
    # this algorithm chooses nonuniformly from the set of nodes with
    # probability weighted by degree.
    G = G.copy()
    keys, degrees = zip(*G.degree())  # keys, degree
    cdf = cumulative_distribution(degrees)  # cdf of degree
    # O(1) label -> index lookup; replaces repeated O(n) keys.index() calls
    # in the rewiring loop (node labels are unique, so this is equivalent).
    node_index = {key: i for i, key in enumerate(keys)}

    nnodes = len(G)
    nedges = nx.number_of_edges(G)
    if D is None:
        D = np.zeros((nnodes, nnodes))
        un = np.arange(1, nnodes)
        um = np.arange(nnodes - 1, 0, -1)
        u = np.append((0,), np.where(un < um, un, um))

        for v in range(int(np.ceil(nnodes / 2))):
            D[nnodes - v - 1, :] = np.append(u[v + 1 :], u[: v + 1])
            D[v, :] = D[nnodes - v - 1, :][::-1]

    niter = niter * nedges
    # maximal number of rewiring attempts per 'niter'
    max_attempts = int(nnodes * nedges / (nnodes * (nnodes - 1) / 2))

    for _ in range(niter):
        n = 0
        while n < max_attempts:
            # pick two random edges without creating edge list
            # choose source node indices from discrete distribution
            (ai, ci) = discrete_sequence(2, cdistribution=cdf, seed=seed)
            if ai == ci:
                continue  # same source, skip
            a = keys[ai]  # convert index to label
            c = keys[ci]
            # choose target uniformly from neighbors
            b = seed.choice(list(G.neighbors(a)))
            d = seed.choice(list(G.neighbors(c)))

            if b in [a, c, d] or d in [a, b, c]:
                continue  # all vertices should be different
            bi = node_index[b]
            di = node_index[d]

            # don't create parallel edges
            if (d not in G[a]) and (b not in G[c]):
                if D[ai, bi] + D[ci, di] >= D[ai, ci] + D[bi, di]:
                    # only swap if we get closer to the diagonal
                    G.add_edge(a, d)
                    G.add_edge(c, b)
                    G.remove_edge(a, b)
                    G.remove_edge(c, d)

                    # Check if the graph is still connected
                    if connectivity and local_conn(G, a, b) == 0:
                        # Not connected, revert the swap
                        G.remove_edge(a, d)
                        G.remove_edge(c, b)
                        G.add_edge(a, b)
                        G.add_edge(c, d)
                    else:
                        break
            n += 1

    return G
244
+
245
+
246
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@py_random_state(3)
@nx._dispatchable
def sigma(G, niter=100, nrand=10, seed=None):
    """Returns the small-world coefficient (sigma) of the given graph.

    The small-world coefficient is defined as:
    sigma = C/Cr / L/Lr
    where C and L are respectively the average clustering coefficient and
    average shortest path length of G. Cr and Lr are respectively the average
    clustering coefficient and average shortest path length of an equivalent
    random graph.

    A graph is commonly classified as small-world if sigma>1.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.
    niter : integer (optional, default=100)
        Approximate number of rewiring per edge to compute the equivalent
        random graph.
    nrand : integer (optional, default=10)
        Number of random graphs generated to compute the average clustering
        coefficient (Cr) and average shortest path length (Lr).
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    sigma : float
        The small-world coefficient of G.

    Notes
    -----
    The implementation is adapted from Humphries et al. [1]_ [2]_.

    References
    ----------
    .. [1] The brainstem reticular formation is a small-world, not scale-free,
       network M. D. Humphries, K. Gurney and T. J. Prescott,
       Proc. Roy. Soc. B 2006 273, 503-511, doi:10.1098/rspb.2005.3354.
    .. [2] Humphries and Gurney (2008).
       "Network 'Small-World-Ness': A Quantitative Method for Determining
       Canonical Network Equivalence".
       PLoS One. 3 (4). PMID 18446219. doi:10.1371/journal.pone.0002051.
    """
    import numpy as np

    # Sample `nrand` degree-preserving random rewirings of G and record the
    # transitivity and mean shortest-path length of each sample.
    rand_clusterings = []
    rand_path_lengths = []
    for _ in range(nrand):
        rewired = random_reference(G, niter=niter, seed=seed)
        rand_clusterings.append(nx.transitivity(rewired))
        rand_path_lengths.append(nx.average_shortest_path_length(rewired))

    clustering = nx.transitivity(G)
    path_length = nx.average_shortest_path_length(G)

    # sigma = (C / Cr) / (L / Lr)
    ratio = (clustering / np.mean(rand_clusterings)) / (
        path_length / np.mean(rand_path_lengths)
    )

    return float(ratio)
315
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@py_random_state(3)
@nx._dispatchable
def omega(G, niter=5, nrand=10, seed=None):
    """Returns the small-world coefficient (omega) of a graph

    The small-world coefficient of a graph G is:

    omega = Lr/L - C/Cl

    where C and L are respectively the average clustering coefficient and
    average shortest path length of G. Lr is the average shortest path length
    of an equivalent random graph and Cl is the average clustering coefficient
    of an equivalent lattice graph.

    The small-world coefficient (omega) measures how much G is like a lattice
    or a random graph. Negative values mean G is similar to a lattice whereas
    positive values mean G is a random graph.
    Values close to 0 mean that G has small-world characteristics.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    niter: integer (optional, default=5)
        Approximate number of rewiring per edge to compute the equivalent
        random graph.

    nrand: integer (optional, default=10)
        Number of random graphs generated to compute the maximal clustering
        coefficient (Cr) and average shortest path length (Lr).

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    omega : float
        The small-world coefficient (omega)

    Notes
    -----
    The implementation is adapted from the algorithm by Telesford et al. [1]_.

    References
    ----------
    .. [1] Telesford, Joyce, Hayasaka, Burdette, and Laurienti (2011).
       "The Ubiquity of Small-World Networks".
       Brain Connectivity. 1 (0038): 367-75. PMC 3604768. PMID 22432451.
       doi:10.1089/brain.2011.0038.
    """
    import numpy as np

    # Clustering of G itself seeds the "best lattice clustering" search:
    # generated lattice references only replace it when they score higher.
    best_lattice_clustering = nx.average_clustering(G)

    lattice_niter = niter
    random_niter = niter * 2

    # Path lengths of the degree-preserving random references.
    random_path_lengths = []

    for _ in range(nrand):
        # Random reference -> contributes an average shortest path length.
        rand_graph = random_reference(G, niter=random_niter, seed=seed)
        random_path_lengths.append(nx.average_shortest_path_length(rand_graph))

        # Lattice reference -> keep the highest clustering seen so far.
        lattice_graph = lattice_reference(G, niter=lattice_niter, seed=seed)
        best_lattice_clustering = max(
            best_lattice_clustering, nx.average_clustering(lattice_graph)
        )

    clustering = nx.average_clustering(G)
    path_length = nx.average_shortest_path_length(G)
    rand_path_length = np.mean(random_path_lengths)

    # omega = Lr/L - C/Cl
    result = (rand_path_length / path_length) - (
        clustering / best_lattice_clustering
    )

    return float(result)
wemm/lib/python3.10/site-packages/networkx/algorithms/summarization.py ADDED
@@ -0,0 +1,564 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Graph summarization finds smaller representations of graphs resulting in faster
3
+ runtime of algorithms, reduced storage needs, and noise reduction.
4
+ Summarization has applications in areas such as visualization, pattern mining,
5
+ clustering and community detection, and more. Core graph summarization
6
+ techniques are grouping/aggregation, bit-compression,
7
+ simplification/sparsification, and influence based. Graph summarization
8
+ algorithms often produce either summary graphs in the form of supergraphs or
9
+ sparsified graphs, or a list of independent structures. Supergraphs are the
10
+ most common product, which consist of supernodes and original nodes and are
11
+ connected by edges and superedges, which represent aggregate edges between
12
+ nodes and supernodes.
13
+
14
+ Grouping/aggregation based techniques compress graphs by representing
15
+ close/connected nodes and edges in a graph by a single node/edge in a
16
+ supergraph. Nodes can be grouped together into supernodes based on their
17
+ structural similarities or proximity within a graph to reduce the total number
18
+ of nodes in a graph. Edge-grouping techniques group edges into lossy/lossless
19
+ nodes called compressor or virtual nodes to reduce the total number of edges in
20
+ a graph. Edge-grouping techniques can be lossless, meaning that they can be
21
+ used to re-create the original graph, or techniques can be lossy, requiring
22
+ less space to store the summary graph, but at the expense of lower
23
+ reconstruction accuracy of the original graph.
24
+
25
+ Bit-compression techniques minimize the amount of information needed to
26
+ describe the original graph, while revealing structural patterns in the
27
+ original graph. The two-part minimum description length (MDL) is often used to
28
+ represent the model and the original graph in terms of the model. A key
29
+ difference between graph compression and graph summarization is that graph
30
+ summarization focuses on finding structural patterns within the original graph,
31
+ whereas graph compression focuses on compressing the original graph to be as
32
+ small as possible. **NOTE**: Some bit-compression methods exist solely to
33
+ compress a graph without creating a summary graph or finding comprehensible
34
+ structural patterns.
35
+
36
+ Simplification/Sparsification techniques attempt to create a sparse
37
+ representation of a graph by removing unimportant nodes and edges from the
38
+ graph. Sparsified graphs differ from supergraphs created by
39
+ grouping/aggregation by only containing a subset of the original nodes and
40
+ edges of the original graph.
41
+
42
+ Influence based techniques aim to find a high-level description of influence
43
+ propagation in a large graph. These methods are scarce and have been mostly
44
+ applied to social graphs.
45
+
46
+ *dedensification* is a grouping/aggregation based technique to compress the
47
+ neighborhoods around high-degree nodes in unweighted graphs by adding
48
+ compressor nodes that summarize multiple edges of the same type to
49
+ high-degree nodes (nodes with a degree greater than a given threshold).
50
+ Dedensification was developed for the purpose of increasing performance of
51
+ query processing around high-degree nodes in graph databases and enables direct
52
+ operations on the compressed graph. The structural patterns surrounding
53
+ high-degree nodes in the original graph are preserved while using fewer edges and
54
+ adding a small number of compressor nodes. The degree of nodes present in the
55
+ original graph is also preserved. The current implementation of dedensification
56
+ supports graphs with one edge type.
57
+
58
+ For more information on graph summarization, see `Graph Summarization Methods
59
+ and Applications: A Survey <https://dl.acm.org/doi/abs/10.1145/3186727>`_
60
+ """
61
+
62
+ from collections import Counter, defaultdict
63
+
64
+ import networkx as nx
65
+
66
+ __all__ = ["dedensify", "snap_aggregation"]
67
+
68
+
69
@nx._dispatchable(mutates_input={"not copy": 3}, returns_graph=True)
def dedensify(G, threshold, prefix=None, copy=True):
    """Compresses neighborhoods around high-degree nodes

    Reduces the number of edges to high-degree nodes by adding compressor nodes
    that summarize multiple edges of the same type to high-degree nodes (nodes
    with a degree greater than a given threshold).  Dedensification also has
    the added benefit of reducing the number of edges around high-degree nodes.
    The implementation currently supports graphs with a single edge type.

    Parameters
    ----------
    G: graph
       A networkx graph
    threshold: int
       Minimum degree threshold of a node to be considered a high degree node.
       The threshold must be greater than or equal to 2.
    prefix: str or None, optional (default: None)
       An optional prefix for denoting compressor nodes
    copy: bool, optional (default: True)
       Indicates if dedensification should be done inplace

    Returns
    -------
    dedensified networkx graph : (graph, set)
        2-tuple of the dedensified graph and set of compressor nodes

    Notes
    -----
    According to the algorithm in [1]_, removes edges in a graph by
    compressing/decompressing the neighborhoods around high degree nodes by
    adding compressor nodes that summarize multiple edges of the same type
    to high-degree nodes.  Dedensification will only add a compressor node when
    doing so will reduce the total number of edges in the given graph. This
    implementation currently supports graphs with a single edge type.

    Examples
    --------
    Dedensification will only add compressor nodes when doing so would result
    in fewer edges::

        >>> original_graph = nx.DiGraph()
        >>> original_graph.add_nodes_from(
        ...     ["1", "2", "3", "4", "5", "6", "A", "B", "C"]
        ... )
        >>> original_graph.add_edges_from(
        ...     [
        ...         ("1", "C"), ("1", "B"),
        ...         ("2", "C"), ("2", "B"), ("2", "A"),
        ...         ("3", "B"), ("3", "A"), ("3", "6"),
        ...         ("4", "C"), ("4", "B"), ("4", "A"),
        ...         ("5", "B"), ("5", "A"),
        ...         ("6", "5"),
        ...         ("A", "6")
        ...     ]
        ... )
        >>> c_graph, c_nodes = nx.dedensify(original_graph, threshold=2)
        >>> original_graph.number_of_edges()
        15
        >>> c_graph.number_of_edges()
        14

    A dedensified, directed graph can be "densified" to reconstruct the
    original graph::

        >>> original_graph = nx.DiGraph()
        >>> original_graph.add_nodes_from(
        ...     ["1", "2", "3", "4", "5", "6", "A", "B", "C"]
        ... )
        >>> original_graph.add_edges_from(
        ...     [
        ...         ("1", "C"), ("1", "B"),
        ...         ("2", "C"), ("2", "B"), ("2", "A"),
        ...         ("3", "B"), ("3", "A"), ("3", "6"),
        ...         ("4", "C"), ("4", "B"), ("4", "A"),
        ...         ("5", "B"), ("5", "A"),
        ...         ("6", "5"),
        ...         ("A", "6")
        ...     ]
        ... )
        >>> c_graph, c_nodes = nx.dedensify(original_graph, threshold=2)
        >>> # re-densifies the compressed graph into the original graph
        >>> for c_node in c_nodes:
        ...     all_neighbors = set(nx.all_neighbors(c_graph, c_node))
        ...     out_neighbors = set(c_graph.neighbors(c_node))
        ...     for out_neighbor in out_neighbors:
        ...         c_graph.remove_edge(c_node, out_neighbor)
        ...     in_neighbors = all_neighbors - out_neighbors
        ...     for in_neighbor in in_neighbors:
        ...         c_graph.remove_edge(in_neighbor, c_node)
        ...         for out_neighbor in out_neighbors:
        ...             c_graph.add_edge(in_neighbor, out_neighbor)
        ...     c_graph.remove_node(c_node)
        ...
        >>> nx.is_isomorphic(original_graph, c_graph)
        True

    References
    ----------
    .. [1] Maccioni, A., & Abadi, D. J. (2016, August).
       Scalable pattern matching over compressed graphs via dedensification.
       In Proceedings of the 22nd ACM SIGKDD International Conference on
       Knowledge Discovery and Data Mining (pp. 1755-1764).
       http://www.cs.umd.edu/~abadi/papers/graph-dedense.pdf
    """
    if threshold < 2:
        raise nx.NetworkXError("The degree threshold must be >= 2")

    degrees = G.in_degree if G.is_directed() else G.degree
    # Nodes whose (in-)degree exceeds the threshold are the targets whose
    # incoming neighborhoods may be compressed.
    high_degree_nodes = {n for n, d in degrees if d > threshold}
    # FIX: the original also computed `low_degree_nodes = G.nodes() -
    # high_degree_nodes` here, which was never read; removed.

    # auxiliary maps each distinct frozenset of high-degree neighbors to the
    # set of nodes that point at exactly those high-degree nodes.
    auxiliary = {}
    for node in G:
        high_degree_nbrs = frozenset(high_degree_nodes & set(G[node]))
        if high_degree_nbrs:
            if high_degree_nbrs in auxiliary:
                auxiliary[high_degree_nbrs].add(node)
            else:
                auxiliary[high_degree_nbrs] = {node}

    if copy:
        G = G.copy()

    compressor_nodes = set()
    # FIX: the original loop used `enumerate` with an unused `index` and loop
    # variables named high_degree_nodes/low_degree_nodes, shadowing the sets
    # above; renamed for clarity -- behavior is unchanged.
    for high_nbrs, sharing_nodes in auxiliary.items():
        low_degree_node_count = len(sharing_nodes)
        high_degree_node_count = len(high_nbrs)
        # A compressor node only pays off when it strictly reduces the number
        # of edges: replace |high| * |low| direct edges with |high| + |low|.
        old_edges = high_degree_node_count * low_degree_node_count
        new_edges = high_degree_node_count + low_degree_node_count
        if old_edges <= new_edges:
            continue
        compression_node = "".join(str(node) for node in high_nbrs)
        if prefix:
            compression_node = str(prefix) + compression_node
        for node in sharing_nodes:
            for high_node in high_nbrs:
                if G.has_edge(node, high_node):
                    G.remove_edge(node, high_node)

            G.add_edge(node, compression_node)
        for node in high_nbrs:
            G.add_edge(compression_node, node)
        compressor_nodes.add(compression_node)
    return G, compressor_nodes
+
216
+
217
def _snap_build_graph(
    G,
    groups,
    node_attributes,
    edge_attributes,
    neighbor_info,
    edge_types,
    prefix,
    supernode_attribute,
    superedge_attribute,
):
    """
    Build the summary graph from the data structures produced in the SNAP aggregation algorithm

    Used in the SNAP aggregation algorithm to build the output summary graph and supernode
    lookup dictionary.  This process uses the original graph and the data structures to
    create the supernodes with the correct node attributes, and the superedges with the correct
    edge attributes

    Parameters
    ----------
    G: networkx.Graph
        the original graph to be summarized
    groups: dict
        A dictionary of unique group IDs and their corresponding node groups
    node_attributes: iterable
        An iterable of the node attributes considered in the summarization process
    edge_attributes: iterable
        An iterable of the edge attributes considered in the summarization process
    neighbor_info: dict
        A data structure indicating the number of edges a node has with the
        groups in the current summarization of each edge type
    edge_types: dict
        dictionary of edges in the graph and their corresponding attributes recognized
        in the summarization
    prefix: string
        The prefix to be added to all supernodes
    supernode_attribute: str
        The node attribute for recording the supernode groupings of nodes
    superedge_attribute: str
        The edge attribute for recording the edge types represented by superedges

    Returns
    -------
    summary graph: Networkx graph
    """
    # Summary graph has the same class (directedness/multiplicity) as G.
    output = G.__class__()
    node_label_lookup = {}
    # Pass 1: one supernode per group.  Node attributes are copied from an
    # arbitrary member -- every member of a group shares them by construction.
    for index, group_id in enumerate(groups):
        group_set = groups[group_id]
        supernode = f"{prefix}{index}"
        node_label_lookup[group_id] = supernode
        supernode_attributes = {
            attr: G.nodes[next(iter(group_set))][attr] for attr in node_attributes
        }
        supernode_attributes[supernode_attribute] = group_set
        output.add_node(supernode, **supernode_attributes)

    # Pass 2: superedges.  A representative member suffices because groups are
    # homogeneous in their edges to other groups when this function is called.
    for group_id in groups:
        group_set = groups[group_id]
        source_supernode = node_label_lookup[group_id]
        for other_group, group_edge_types in neighbor_info[
            next(iter(group_set))
        ].items():
            if group_edge_types:
                target_supernode = node_label_lookup[other_group]
                summary_graph_edge = (source_supernode, target_supernode)

                # NOTE(review): this rebinds the `edge_types` parameter to a
                # per-edge list of attribute dicts; the incoming argument is
                # never read inside this function -- confirm before relying
                # on the parameter's value here.
                edge_types = [
                    dict(zip(edge_attributes, edge_type))
                    for edge_type in group_edge_types
                ]

                has_edge = output.has_edge(*summary_graph_edge)
                if output.is_multigraph():
                    if not has_edge:
                        # First encounter: add one parallel edge per edge type.
                        for edge_type in edge_types:
                            output.add_edge(*summary_graph_edge, **edge_type)
                    elif not output.is_directed():
                        # Undirected multigraph revisits each superedge from
                        # the other endpoint; only add edge types not already
                        # present among the existing parallel edges.
                        existing_edge_data = output.get_edge_data(*summary_graph_edge)
                        for edge_type in edge_types:
                            if edge_type not in existing_edge_data.values():
                                output.add_edge(*summary_graph_edge, **edge_type)
                else:
                    # Simple graphs record all edge types in a single
                    # superedge attribute (re-adding overwrites harmlessly).
                    superedge_attributes = {superedge_attribute: edge_types}
                    output.add_edge(*summary_graph_edge, **superedge_attributes)

    return output
+
306
+
307
def _snap_eligible_group(G, groups, group_lookup, edge_types):
    """
    Determines if a group is eligible to be split.

    A group is eligible to be split if all nodes in the group have edges of the same type(s)
    with the same other groups.

    Parameters
    ----------
    G: graph
        graph to be summarized
    groups: dict
        A dictionary of unique group IDs and their corresponding node groups
    group_lookup: dict
        dictionary of nodes and their current corresponding group ID
    edge_types: dict
        dictionary of edges in the graph and their corresponding attributes recognized
        in the summarization

    Returns
    -------
    tuple: group ID to split, and neighbor-groups participation_counts data structure
    """
    # Pre-initialize empty counters for every node so that nbr_info is safe to
    # index even when we return early, before later groups are processed.
    nbr_info = {node: {gid: Counter() for gid in groups} for node in group_lookup}
    for group_id in groups:
        current_group = groups[group_id]

        # build nbr_info for nodes in group
        for node in current_group:
            # Re-initialize against the *current* set of group IDs (groups may
            # have been split since nbr_info was first allocated above).
            nbr_info[node] = {group_id: Counter() for group_id in groups}
            # Multigraph edges carry a key so they map into edge_types.
            edges = G.edges(node, keys=True) if G.is_multigraph() else G.edges(node)
            for edge in edges:
                neighbor = edge[1]
                edge_type = edge_types[edge]
                neighbor_group_id = group_lookup[neighbor]
                nbr_info[node][neighbor_group_id][edge_type] += 1

        # check if group_id is eligible to be split
        group_size = len(current_group)
        for other_group_id in groups:
            # Count, per edge type, how many group members have at least one
            # such edge to other_group_id; homogeneity requires all or none.
            edge_counts = Counter()
            for node in current_group:
                edge_counts.update(nbr_info[node][other_group_id].keys())

            if not all(count == group_size for count in edge_counts.values()):
                # only the nbr_info of the returned group_id is required for handling group splits
                return group_id, nbr_info

    # if no eligible groups, complete nbr_info is calculated
    return None, nbr_info
+
358
+
359
+ def _snap_split(groups, neighbor_info, group_lookup, group_id):
360
+ """
361
+ Splits a group based on edge types and updates the groups accordingly
362
+
363
+ Splits the group with the given group_id based on the edge types
364
+ of the nodes so that each new grouping will all have the same
365
+ edges with other nodes.
366
+
367
+ Parameters
368
+ ----------
369
+ groups: dict
370
+ A dictionary of unique group IDs and their corresponding node groups
371
+ neighbor_info: dict
372
+ A data structure indicating the number of edges a node has with the
373
+ groups in the current summarization of each edge type
374
+ edge_types: dict
375
+ dictionary of edges in the graph and their corresponding attributes recognized
376
+ in the summarization
377
+ group_lookup: dict
378
+ dictionary of nodes and their current corresponding group ID
379
+ group_id: object
380
+ ID of group to be split
381
+
382
+ Returns
383
+ -------
384
+ dict
385
+ The updated groups based on the split
386
+ """
387
+ new_group_mappings = defaultdict(set)
388
+ for node in groups[group_id]:
389
+ signature = tuple(
390
+ frozenset(edge_types) for edge_types in neighbor_info[node].values()
391
+ )
392
+ new_group_mappings[signature].add(node)
393
+
394
+ # leave the biggest new_group as the original group
395
+ new_groups = sorted(new_group_mappings.values(), key=len)
396
+ for new_group in new_groups[:-1]:
397
+ # Assign unused integer as the new_group_id
398
+ # ids are tuples, so will not interact with the original group_ids
399
+ new_group_id = len(groups)
400
+ groups[new_group_id] = new_group
401
+ groups[group_id] -= new_group
402
+ for node in new_group:
403
+ group_lookup[node] = new_group_id
404
+
405
+ return groups
406
+
407
+
408
@nx._dispatchable(
    node_attrs="[node_attributes]", edge_attrs="[edge_attributes]", returns_graph=True
)
def snap_aggregation(
    G,
    node_attributes,
    edge_attributes=(),
    prefix="Supernode-",
    supernode_attribute="group",
    superedge_attribute="types",
):
    """Creates a summary graph based on attributes and connectivity.

    This function uses the Summarization by Grouping Nodes on Attributes
    and Pairwise edges (SNAP) algorithm for summarizing a given
    graph by grouping nodes by node attributes and their edge attributes
    into supernodes in a summary graph.  This name SNAP should not be
    confused with the Stanford Network Analysis Project (SNAP).

    Here is a high-level view of how this algorithm works:

    1) Group nodes by node attribute values.

    2) Iteratively split groups until all nodes in each group have edges
    to nodes in the same groups. That is, until all the groups are homogeneous
    in their member nodes' edges to other groups.  For example,
    if all the nodes in group A only have edge to nodes in group B, then the
    group is homogeneous and does not need to be split. If all nodes in group B
    have edges with nodes in groups {A, C}, but some also have edges with other
    nodes in B, then group B is not homogeneous and needs to be split into
    groups have edges with {A, C} and a group of nodes having
    edges with {A, B, C}.  This way, viewers of the summary graph can
    assume that all nodes in the group have the exact same node attributes and
    the exact same edges.

    3) Build the output summary graph, where the groups are represented by
    super-nodes. Edges represent the edges shared between all the nodes in each
    respective groups.

    A SNAP summary graph can be used to visualize graphs that are too large to display
    or visually analyze, or to efficiently identify sets of similar nodes with similar connectivity
    patterns to other sets of similar nodes based on specified node and/or edge attributes in a graph.

    Parameters
    ----------
    G: graph
        Networkx Graph to be summarized
    node_attributes: iterable, required
        An iterable of the node attributes used to group nodes in the summarization process. Nodes
        with the same values for these attributes will be grouped together in the summary graph.
    edge_attributes: iterable, optional
        An iterable of the edge attributes considered in the summarization process.  If provided, unique
        combinations of the attribute values found in the graph are used to
        determine the edge types in the graph.  If not provided, all edges
        are considered to be of the same type.
    prefix: str
        The prefix used to denote supernodes in the summary graph. Defaults to 'Supernode-'.
    supernode_attribute: str
        The node attribute for recording the supernode groupings of nodes. Defaults to 'group'.
    superedge_attribute: str
        The edge attribute for recording the edge types of multiple edges. Defaults to 'types'.

    Returns
    -------
    networkx.Graph: summary graph

    Examples
    --------
    SNAP aggregation takes a graph and summarizes it in the context of user-provided
    node and edge attributes such that a viewer can more easily extract and
    analyze the information represented by the graph

    >>> nodes = {
    ...     "A": dict(color="Red"),
    ...     "B": dict(color="Red"),
    ...     "C": dict(color="Red"),
    ...     "D": dict(color="Red"),
    ...     "E": dict(color="Blue"),
    ...     "F": dict(color="Blue"),
    ... }
    >>> edges = [
    ...     ("A", "E", "Strong"),
    ...     ("B", "F", "Strong"),
    ...     ("C", "E", "Weak"),
    ...     ("D", "F", "Weak"),
    ... ]
    >>> G = nx.Graph()
    >>> for node in nodes:
    ...     attributes = nodes[node]
    ...     G.add_node(node, **attributes)
    >>> for source, target, type in edges:
    ...     G.add_edge(source, target, type=type)
    >>> node_attributes = ("color",)
    >>> edge_attributes = ("type",)
    >>> summary_graph = nx.snap_aggregation(
    ...     G, node_attributes=node_attributes, edge_attributes=edge_attributes
    ... )

    Notes
    -----
    The summary graph produced is called a maximum Attribute-edge
    compatible (AR-compatible) grouping.  According to [1]_, an
    AR-compatible grouping means that all nodes in each group have the same
    exact node attribute values and the same exact edges and
    edge types to one or more nodes in the same groups.  The maximal
    AR-compatible grouping is the grouping with the minimal cardinality.

    The AR-compatible grouping is the most detailed grouping provided by
    any of the SNAP algorithms.

    References
    ----------
    .. [1] Y. Tian, R. A. Hankins, and J. M. Patel. Efficient aggregation
       for graph summarization. In Proc. 2008 ACM-SIGMOD Int. Conf.
       Management of Data (SIGMOD’08), pages 567–580, Vancouver, Canada,
       June 2008.
    """
    # Each edge's "type" is the tuple of its values for edge_attributes;
    # missing attributes become None via .get().
    edge_types = {
        edge: tuple(attrs.get(attr) for attr in edge_attributes)
        for edge, attrs in G.edges.items()
    }
    # For undirected graphs, register both orientations of every edge so
    # lookups succeed regardless of traversal direction.
    if not G.is_directed():
        if G.is_multigraph():
            # list is needed to avoid mutating while iterating
            edges = [((v, u, k), etype) for (u, v, k), etype in edge_types.items()]
        else:
            # list is needed to avoid mutating while iterating
            edges = [((v, u), etype) for (u, v), etype in edge_types.items()]
        edge_types.update(edges)

    # Initial grouping: nodes keyed by their tuple of node-attribute values.
    group_lookup = {
        node: tuple(attrs[attr] for attr in node_attributes)
        for node, attrs in G.nodes.items()
    }
    groups = defaultdict(set)
    for node, node_type in group_lookup.items():
        groups[node_type].add(node)

    # Fixed point: split groups until every group is homogeneous in its
    # edges (by type) to other groups.
    eligible_group_id, nbr_info = _snap_eligible_group(
        G, groups, group_lookup, edge_types
    )
    # NOTE(review): loop stops when _snap_eligible_group returns None, but a
    # falsy-yet-valid group id (the empty tuple when node_attributes is
    # empty) would also stop it early -- confirm intended.
    while eligible_group_id:
        groups = _snap_split(groups, nbr_info, group_lookup, eligible_group_id)
        eligible_group_id, nbr_info = _snap_eligible_group(
            G, groups, group_lookup, edge_types
        )
    return _snap_build_graph(
        G,
        groups,
        node_attributes,
        edge_attributes,
        nbr_info,
        edge_types,
        prefix,
        supernode_attribute,
        superedge_attribute,
    )
wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_asteroidal.cpython-310.pyc ADDED
Binary file (642 Bytes). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_bridges.cpython-310.pyc ADDED
Binary file (4.88 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_core.cpython-310.pyc ADDED
Binary file (9.59 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_d_separation.cpython-310.pyc ADDED
Binary file (9.22 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_graph_hashing.cpython-310.pyc ADDED
Binary file (21.2 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_mis.cpython-310.pyc ADDED
Binary file (2.61 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_moral.cpython-310.pyc ADDED
Binary file (750 Bytes). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_polynomials.cpython-310.pyc ADDED
Binary file (2.03 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/tests/__pycache__/test_richclub.cpython-310.pyc ADDED
Binary file (3.8 kB). View file