ZTWHHH committed on
Commit
98fbbcd
·
verified ·
1 Parent(s): 89cc6aa

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__init__.py +87 -0
  2. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/__init__.cpython-310.pyc +0 -0
  3. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/basic.cpython-310.pyc +0 -0
  4. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/centrality.cpython-310.pyc +0 -0
  5. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/cluster.cpython-310.pyc +0 -0
  6. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/covering.cpython-310.pyc +0 -0
  7. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/edgelist.cpython-310.pyc +0 -0
  8. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/extendability.cpython-310.pyc +0 -0
  9. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/generators.cpython-310.pyc +0 -0
  10. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/matching.cpython-310.pyc +0 -0
  11. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/matrix.cpython-310.pyc +0 -0
  12. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/projection.cpython-310.pyc +0 -0
  13. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/redundancy.cpython-310.pyc +0 -0
  14. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/spectral.cpython-310.pyc +0 -0
  15. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/basic.py +322 -0
  16. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/centrality.py +290 -0
  17. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/cluster.py +278 -0
  18. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/covering.py +57 -0
  19. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/edgelist.py +360 -0
  20. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/extendability.py +105 -0
  21. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/generators.py +604 -0
  22. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/matching.py +590 -0
  23. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/matrix.py +168 -0
  24. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/projection.py +526 -0
  25. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/redundancy.py +112 -0
  26. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/spectral.py +69 -0
  27. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__init__.py +0 -0
  28. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/__init__.cpython-310.pyc +0 -0
  29. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_basic.cpython-310.pyc +0 -0
  30. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_centrality.cpython-310.pyc +0 -0
  31. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_cluster.cpython-310.pyc +0 -0
  32. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_covering.cpython-310.pyc +0 -0
  33. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_edgelist.cpython-310.pyc +0 -0
  34. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_extendability.cpython-310.pyc +0 -0
  35. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_generators.cpython-310.pyc +0 -0
  36. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matching.cpython-310.pyc +0 -0
  37. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matrix.cpython-310.pyc +0 -0
  38. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_project.cpython-310.pyc +0 -0
  39. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_redundancy.cpython-310.pyc +0 -0
  40. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_spectral_bipartivity.cpython-310.pyc +0 -0
  41. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_basic.py +125 -0
  42. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_centrality.py +192 -0
  43. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_cluster.py +84 -0
  44. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_covering.py +33 -0
  45. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_edgelist.py +240 -0
  46. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_extendability.py +334 -0
  47. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_generators.py +409 -0
  48. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_matching.py +327 -0
  49. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_matrix.py +84 -0
  50. deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_project.py +407 -0
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__init__.py ADDED
@@ -0,0 +1,87 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ r"""This module provides functions and operations for bipartite
2
+ graphs. Bipartite graphs `B = (U, V, E)` have two node sets `U,V` and edges in
3
+ `E` that only connect nodes from opposite sets. It is common in the literature
4
+ to use an spatial analogy referring to the two node sets as top and bottom nodes.
5
+
6
+ The bipartite algorithms are not imported into the networkx namespace
7
+ at the top level so the easiest way to use them is with:
8
+
9
+ >>> from networkx.algorithms import bipartite
10
+
11
+ NetworkX does not have a custom bipartite graph class but the Graph()
12
+ or DiGraph() classes can be used to represent bipartite graphs. However,
13
+ you have to keep track of which set each node belongs to, and make
14
+ sure that there is no edge between nodes of the same set. The convention used
15
+ in NetworkX is to use a node attribute named `bipartite` with values 0 or 1 to
16
+ identify the sets each node belongs to. This convention is not enforced in
17
+ the source code of bipartite functions, it's only a recommendation.
18
+
19
+ For example:
20
+
21
+ >>> B = nx.Graph()
22
+ >>> # Add nodes with the node attribute "bipartite"
23
+ >>> B.add_nodes_from([1, 2, 3, 4], bipartite=0)
24
+ >>> B.add_nodes_from(["a", "b", "c"], bipartite=1)
25
+ >>> # Add edges only between nodes of opposite node sets
26
+ >>> B.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
27
+
28
+ Many algorithms of the bipartite module of NetworkX require, as an argument, a
29
+ container with all the nodes that belong to one set, in addition to the bipartite
30
+ graph `B`. The functions in the bipartite package do not check that the node set
31
+ is actually correct nor that the input graph is actually bipartite.
32
+ If `B` is connected, you can find the two node sets using a two-coloring
33
+ algorithm:
34
+
35
+ >>> nx.is_connected(B)
36
+ True
37
+ >>> bottom_nodes, top_nodes = bipartite.sets(B)
38
+
39
+ However, if the input graph is not connected, there are more than one possible
40
+ colorations. This is the reason why we require the user to pass a container
41
+ with all nodes of one bipartite node set as an argument to most bipartite
42
+ functions. In the face of ambiguity, we refuse the temptation to guess and
43
+ raise an :exc:`AmbiguousSolution <networkx.AmbiguousSolution>`
44
+ Exception if the input graph for
45
+ :func:`bipartite.sets <networkx.algorithms.bipartite.basic.sets>`
46
+ is disconnected.
47
+
48
+ Using the `bipartite` node attribute, you can easily get the two node sets:
49
+
50
+ >>> top_nodes = {n for n, d in B.nodes(data=True) if d["bipartite"] == 0}
51
+ >>> bottom_nodes = set(B) - top_nodes
52
+
53
+ So you can easily use the bipartite algorithms that require, as an argument, a
54
+ container with all nodes that belong to one node set:
55
+
56
+ >>> print(round(bipartite.density(B, bottom_nodes), 2))
57
+ 0.5
58
+ >>> G = bipartite.projected_graph(B, top_nodes)
59
+
60
+ All bipartite graph generators in NetworkX build bipartite graphs with the
61
+ `bipartite` node attribute. Thus, you can use the same approach:
62
+
63
+ >>> RB = bipartite.random_graph(5, 7, 0.2)
64
+ >>> RB_top = {n for n, d in RB.nodes(data=True) if d["bipartite"] == 0}
65
+ >>> RB_bottom = set(RB) - RB_top
66
+ >>> list(RB_top)
67
+ [0, 1, 2, 3, 4]
68
+ >>> list(RB_bottom)
69
+ [5, 6, 7, 8, 9, 10, 11]
70
+
71
+ For other bipartite graph generators see
72
+ :mod:`Generators <networkx.algorithms.bipartite.generators>`.
73
+
74
+ """
75
+
76
+ from networkx.algorithms.bipartite.basic import *
77
+ from networkx.algorithms.bipartite.centrality import *
78
+ from networkx.algorithms.bipartite.cluster import *
79
+ from networkx.algorithms.bipartite.covering import *
80
+ from networkx.algorithms.bipartite.edgelist import *
81
+ from networkx.algorithms.bipartite.matching import *
82
+ from networkx.algorithms.bipartite.matrix import *
83
+ from networkx.algorithms.bipartite.projection import *
84
+ from networkx.algorithms.bipartite.redundancy import *
85
+ from networkx.algorithms.bipartite.spectral import *
86
+ from networkx.algorithms.bipartite.generators import *
87
+ from networkx.algorithms.bipartite.extendability import *
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (3.99 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/basic.cpython-310.pyc ADDED
Binary file (8.47 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/centrality.cpython-310.pyc ADDED
Binary file (9.14 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/cluster.cpython-310.pyc ADDED
Binary file (7.49 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/covering.cpython-310.pyc ADDED
Binary file (2.27 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/edgelist.cpython-310.pyc ADDED
Binary file (10.8 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/extendability.cpython-310.pyc ADDED
Binary file (4.06 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/generators.cpython-310.pyc ADDED
Binary file (18.9 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/matching.cpython-310.pyc ADDED
Binary file (16.2 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/matrix.cpython-310.pyc ADDED
Binary file (6.04 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/projection.cpython-310.pyc ADDED
Binary file (18 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/redundancy.cpython-310.pyc ADDED
Binary file (4.03 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/spectral.cpython-310.pyc ADDED
Binary file (1.92 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/basic.py ADDED
@@ -0,0 +1,322 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ ==========================
3
+ Bipartite Graph Algorithms
4
+ ==========================
5
+ """
6
+
7
+ import networkx as nx
8
+ from networkx.algorithms.components import connected_components
9
+ from networkx.exception import AmbiguousSolution
10
+
11
+ __all__ = [
12
+ "is_bipartite",
13
+ "is_bipartite_node_set",
14
+ "color",
15
+ "sets",
16
+ "density",
17
+ "degrees",
18
+ ]
19
+
20
+
21
@nx._dispatchable
def color(G):
    """Returns a two-coloring of the graph.

    Raises an exception if the graph is not bipartite.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    color : dictionary
        A dictionary keyed by node with a 1 or 0 as data for each node color.

    Raises
    ------
    NetworkXError
        If the graph is not two-colorable.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> c = bipartite.color(G)
    >>> print(c)
    {0: 1, 1: 0, 2: 1, 3: 0}

    You can use this to set a node attribute indicating the bipartite set:

    >>> nx.set_node_attributes(G, c, "bipartite")
    >>> print(G.nodes[0]["bipartite"])
    1
    >>> print(G.nodes[1]["bipartite"])
    0
    """
    if G.is_directed():
        import itertools

        # Directed graphs are colored as if undirected: a node's "neighbors"
        # are both its predecessors and its successors.
        def neighbors(v):
            return itertools.chain.from_iterable([G.predecessors(v), G.successors(v)])

    else:
        neighbors = G.neighbors

    color = {}
    for n in G:  # handle disconnected graphs
        if n in color or len(G[n]) == 0:  # skip isolates
            continue
        # Traverse this component from n, alternating colors level by level.
        # `queue` is used as a stack (list.pop pops the tail).
        queue = [n]
        color[n] = 1  # nodes seen with color (1 or 0)
        while queue:
            v = queue.pop()
            c = 1 - color[v]  # opposite color of node v
            for w in neighbors(v):
                if w in color:
                    if color[w] == color[v]:
                        # Two adjacent nodes share a color: no 2-coloring exists.
                        raise nx.NetworkXError("Graph is not bipartite.")
                else:
                    color[w] = c
                    queue.append(w)
    # color isolates with 0
    color.update(dict.fromkeys(nx.isolates(G), 0))
    return color
85
+
86
+
87
@nx._dispatchable
def is_bipartite(G):
    """Returns True if graph G is bipartite, False if not.

    Parameters
    ----------
    G : NetworkX graph

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> print(bipartite.is_bipartite(G))
    True

    See Also
    --------
    color, is_bipartite_node_set
    """
    # A graph is bipartite exactly when a two-coloring exists; color()
    # raises NetworkXError when it does not.
    try:
        color(G)
    except nx.NetworkXError:
        return False
    return True
111
+
112
+
113
@nx._dispatchable
def is_bipartite_node_set(G, nodes):
    """Returns True if nodes and G/nodes are a bipartition of G.

    Parameters
    ----------
    G : NetworkX graph

    nodes: list or container
        Check if nodes are one side of a bipartition of G.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> X = set([1, 3])
    >>> bipartite.is_bipartite_node_set(G, X)
    True

    Notes
    -----
    An exception is raised if the input nodes are not distinct, because in this
    case some bipartite algorithms will yield incorrect results.
    For connected graphs the bipartite sets are unique. This function handles
    disconnected graphs.
    """
    S = set(nodes)

    # Duplicate entries would silently skew degree/density computations
    # downstream, so reject them outright.
    if len(S) < len(nodes):
        raise AmbiguousSolution(
            "The input node set contains duplicates.\n"
            "This may lead to incorrect results when using it in bipartite algorithms.\n"
            "Consider using set(nodes) as the input"
        )

    # Check each connected component independently: within a component the
    # bipartition is unique, and S must contain exactly one of its two sides.
    for component in connected_components(G):
        X, Y = sets(G.subgraph(component).copy())
        x_side = X.issubset(S) and Y.isdisjoint(S)
        y_side = Y.issubset(S) and X.isdisjoint(S)
        if not (x_side or y_side):
            return False
    return True
156
+
157
+
158
@nx._dispatchable
def sets(G, top_nodes=None):
    """Returns bipartite node sets of graph G.

    Raises an exception if the graph is not bipartite or if the input
    graph is disconnected and thus more than one valid solution exists.
    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    Parameters
    ----------
    G : NetworkX graph

    top_nodes : container, optional
        Container with all nodes in one bipartite node set. If not supplied
        it will be computed. But if more than one solution exists an exception
        will be raised.

    Returns
    -------
    X : set
        Nodes from one side of the bipartite graph.
    Y : set
        Nodes from the other side.

    Raises
    ------
    AmbiguousSolution
        Raised if the input bipartite graph is disconnected and no container
        with all nodes in one bipartite set is provided. When determining
        the nodes in each bipartite set more than one valid solution is
        possible if the input graph is disconnected.
    NetworkXError
        Raised if the input graph is not bipartite.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> X, Y = bipartite.sets(G)
    >>> list(X)
    [0, 2]
    >>> list(Y)
    [1, 3]

    See Also
    --------
    color

    """
    # When the caller already knows one side, trust it: the other side is
    # simply every remaining node.
    if top_nodes is not None:
        X = set(top_nodes)
        return (X, set(G) - X)

    # Otherwise the bipartition is only well defined on a connected graph
    # (weakly connected for directed graphs).
    is_connected = nx.is_weakly_connected if G.is_directed() else nx.is_connected
    if not is_connected(G):
        msg = "Disconnected graph: Ambiguous solution for bipartite sets."
        raise nx.AmbiguousSolution(msg)

    node_colors = color(G)
    X = {n for n, is_top in node_colors.items() if is_top}
    Y = set(node_colors) - X
    return (X, Y)
223
+
224
+
225
+ @nx._dispatchable(graphs="B")
226
+ def density(B, nodes):
227
+ """Returns density of bipartite graph B.
228
+
229
+ Parameters
230
+ ----------
231
+ B : NetworkX graph
232
+
233
+ nodes: list or container
234
+ Nodes in one node set of the bipartite graph.
235
+
236
+ Returns
237
+ -------
238
+ d : float
239
+ The bipartite density
240
+
241
+ Examples
242
+ --------
243
+ >>> from networkx.algorithms import bipartite
244
+ >>> G = nx.complete_bipartite_graph(3, 2)
245
+ >>> X = set([0, 1, 2])
246
+ >>> bipartite.density(G, X)
247
+ 1.0
248
+ >>> Y = set([3, 4])
249
+ >>> bipartite.density(G, Y)
250
+ 1.0
251
+
252
+ Notes
253
+ -----
254
+ The container of nodes passed as argument must contain all nodes
255
+ in one of the two bipartite node sets to avoid ambiguity in the
256
+ case of disconnected graphs.
257
+ See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
258
+ for further details on how bipartite graphs are handled in NetworkX.
259
+
260
+ See Also
261
+ --------
262
+ color
263
+ """
264
+ n = len(B)
265
+ m = nx.number_of_edges(B)
266
+ nb = len(nodes)
267
+ nt = n - nb
268
+ if m == 0: # includes cases n==0 and n==1
269
+ d = 0.0
270
+ else:
271
+ if B.is_directed():
272
+ d = m / (2 * nb * nt)
273
+ else:
274
+ d = m / (nb * nt)
275
+ return d
276
+
277
+
278
+ @nx._dispatchable(graphs="B", edge_attrs="weight")
279
+ def degrees(B, nodes, weight=None):
280
+ """Returns the degrees of the two node sets in the bipartite graph B.
281
+
282
+ Parameters
283
+ ----------
284
+ B : NetworkX graph
285
+
286
+ nodes: list or container
287
+ Nodes in one node set of the bipartite graph.
288
+
289
+ weight : string or None, optional (default=None)
290
+ The edge attribute that holds the numerical value used as a weight.
291
+ If None, then each edge has weight 1.
292
+ The degree is the sum of the edge weights adjacent to the node.
293
+
294
+ Returns
295
+ -------
296
+ (degX,degY) : tuple of dictionaries
297
+ The degrees of the two bipartite sets as dictionaries keyed by node.
298
+
299
+ Examples
300
+ --------
301
+ >>> from networkx.algorithms import bipartite
302
+ >>> G = nx.complete_bipartite_graph(3, 2)
303
+ >>> Y = set([3, 4])
304
+ >>> degX, degY = bipartite.degrees(G, Y)
305
+ >>> dict(degX)
306
+ {0: 2, 1: 2, 2: 2}
307
+
308
+ Notes
309
+ -----
310
+ The container of nodes passed as argument must contain all nodes
311
+ in one of the two bipartite node sets to avoid ambiguity in the
312
+ case of disconnected graphs.
313
+ See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
314
+ for further details on how bipartite graphs are handled in NetworkX.
315
+
316
+ See Also
317
+ --------
318
+ color, density
319
+ """
320
+ bottom = set(nodes)
321
+ top = set(B) - bottom
322
+ return (B.degree(top, weight), B.degree(bottom, weight))
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/centrality.py ADDED
@@ -0,0 +1,290 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import networkx as nx
2
+
3
+ __all__ = ["degree_centrality", "betweenness_centrality", "closeness_centrality"]
4
+
5
+
6
+ @nx._dispatchable(name="bipartite_degree_centrality")
7
+ def degree_centrality(G, nodes):
8
+ r"""Compute the degree centrality for nodes in a bipartite network.
9
+
10
+ The degree centrality for a node `v` is the fraction of nodes
11
+ connected to it.
12
+
13
+ Parameters
14
+ ----------
15
+ G : graph
16
+ A bipartite network
17
+
18
+ nodes : list or container
19
+ Container with all nodes in one bipartite node set.
20
+
21
+ Returns
22
+ -------
23
+ centrality : dictionary
24
+ Dictionary keyed by node with bipartite degree centrality as the value.
25
+
26
+ Examples
27
+ --------
28
+ >>> G = nx.wheel_graph(5)
29
+ >>> top_nodes = {0, 1, 2}
30
+ >>> nx.bipartite.degree_centrality(G, nodes=top_nodes)
31
+ {0: 2.0, 1: 1.5, 2: 1.5, 3: 1.0, 4: 1.0}
32
+
33
+ See Also
34
+ --------
35
+ betweenness_centrality
36
+ closeness_centrality
37
+ :func:`~networkx.algorithms.bipartite.basic.sets`
38
+ :func:`~networkx.algorithms.bipartite.basic.is_bipartite`
39
+
40
+ Notes
41
+ -----
42
+ The nodes input parameter must contain all nodes in one bipartite node set,
43
+ but the dictionary returned contains all nodes from both bipartite node
44
+ sets. See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
45
+ for further details on how bipartite graphs are handled in NetworkX.
46
+
47
+ For unipartite networks, the degree centrality values are
48
+ normalized by dividing by the maximum possible degree (which is
49
+ `n-1` where `n` is the number of nodes in G).
50
+
51
+ In the bipartite case, the maximum possible degree of a node in a
52
+ bipartite node set is the number of nodes in the opposite node set
53
+ [1]_. The degree centrality for a node `v` in the bipartite
54
+ sets `U` with `n` nodes and `V` with `m` nodes is
55
+
56
+ .. math::
57
+
58
+ d_{v} = \frac{deg(v)}{m}, \mbox{for} v \in U ,
59
+
60
+ d_{v} = \frac{deg(v)}{n}, \mbox{for} v \in V ,
61
+
62
+
63
+ where `deg(v)` is the degree of node `v`.
64
+
65
+ References
66
+ ----------
67
+ .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
68
+ Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
69
+ of Social Network Analysis. Sage Publications.
70
+ https://dx.doi.org/10.4135/9781446294413.n28
71
+ """
72
+ top = set(nodes)
73
+ bottom = set(G) - top
74
+ s = 1.0 / len(bottom)
75
+ centrality = {n: d * s for n, d in G.degree(top)}
76
+ s = 1.0 / len(top)
77
+ centrality.update({n: d * s for n, d in G.degree(bottom)})
78
+ return centrality
79
+
80
+
81
+ @nx._dispatchable(name="bipartite_betweenness_centrality")
82
+ def betweenness_centrality(G, nodes):
83
+ r"""Compute betweenness centrality for nodes in a bipartite network.
84
+
85
+ Betweenness centrality of a node `v` is the sum of the
86
+ fraction of all-pairs shortest paths that pass through `v`.
87
+
88
+ Values of betweenness are normalized by the maximum possible
89
+ value which for bipartite graphs is limited by the relative size
90
+ of the two node sets [1]_.
91
+
92
+ Let `n` be the number of nodes in the node set `U` and
93
+ `m` be the number of nodes in the node set `V`, then
94
+ nodes in `U` are normalized by dividing by
95
+
96
+ .. math::
97
+
98
+ \frac{1}{2} [m^2 (s + 1)^2 + m (s + 1)(2t - s - 1) - t (2s - t + 3)] ,
99
+
100
+ where
101
+
102
+ .. math::
103
+
104
+ s = (n - 1) \div m , t = (n - 1) \mod m ,
105
+
106
+ and nodes in `V` are normalized by dividing by
107
+
108
+ .. math::
109
+
110
+ \frac{1}{2} [n^2 (p + 1)^2 + n (p + 1)(2r - p - 1) - r (2p - r + 3)] ,
111
+
112
+ where,
113
+
114
+ .. math::
115
+
116
+ p = (m - 1) \div n , r = (m - 1) \mod n .
117
+
118
+ Parameters
119
+ ----------
120
+ G : graph
121
+ A bipartite graph
122
+
123
+ nodes : list or container
124
+ Container with all nodes in one bipartite node set.
125
+
126
+ Returns
127
+ -------
128
+ betweenness : dictionary
129
+ Dictionary keyed by node with bipartite betweenness centrality
130
+ as the value.
131
+
132
+ Examples
133
+ --------
134
+ >>> G = nx.cycle_graph(4)
135
+ >>> top_nodes = {1, 2}
136
+ >>> nx.bipartite.betweenness_centrality(G, nodes=top_nodes)
137
+ {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25}
138
+
139
+ See Also
140
+ --------
141
+ degree_centrality
142
+ closeness_centrality
143
+ :func:`~networkx.algorithms.bipartite.basic.sets`
144
+ :func:`~networkx.algorithms.bipartite.basic.is_bipartite`
145
+
146
+ Notes
147
+ -----
148
+ The nodes input parameter must contain all nodes in one bipartite node set,
149
+ but the dictionary returned contains all nodes from both node sets.
150
+ See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
151
+ for further details on how bipartite graphs are handled in NetworkX.
152
+
153
+
154
+ References
155
+ ----------
156
+ .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
157
+ Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
158
+ of Social Network Analysis. Sage Publications.
159
+ https://dx.doi.org/10.4135/9781446294413.n28
160
+ """
161
+ top = set(nodes)
162
+ bottom = set(G) - top
163
+ n = len(top)
164
+ m = len(bottom)
165
+ s, t = divmod(n - 1, m)
166
+ bet_max_top = (
167
+ ((m**2) * ((s + 1) ** 2))
168
+ + (m * (s + 1) * (2 * t - s - 1))
169
+ - (t * ((2 * s) - t + 3))
170
+ ) / 2.0
171
+ p, r = divmod(m - 1, n)
172
+ bet_max_bot = (
173
+ ((n**2) * ((p + 1) ** 2))
174
+ + (n * (p + 1) * (2 * r - p - 1))
175
+ - (r * ((2 * p) - r + 3))
176
+ ) / 2.0
177
+ betweenness = nx.betweenness_centrality(G, normalized=False, weight=None)
178
+ for node in top:
179
+ betweenness[node] /= bet_max_top
180
+ for node in bottom:
181
+ betweenness[node] /= bet_max_bot
182
+ return betweenness
183
+
184
+
185
+ @nx._dispatchable(name="bipartite_closeness_centrality")
186
+ def closeness_centrality(G, nodes, normalized=True):
187
+ r"""Compute the closeness centrality for nodes in a bipartite network.
188
+
189
+ The closeness of a node is the distance to all other nodes in the
190
+ graph or in the case that the graph is not connected to all other nodes
191
+ in the connected component containing that node.
192
+
193
+ Parameters
194
+ ----------
195
+ G : graph
196
+ A bipartite network
197
+
198
+ nodes : list or container
199
+ Container with all nodes in one bipartite node set.
200
+
201
+ normalized : bool, optional
202
+ If True (default) normalize by connected component size.
203
+
204
+ Returns
205
+ -------
206
+ closeness : dictionary
207
+ Dictionary keyed by node with bipartite closeness centrality
208
+ as the value.
209
+
210
+ Examples
211
+ --------
212
+ >>> G = nx.wheel_graph(5)
213
+ >>> top_nodes = {0, 1, 2}
214
+ >>> nx.bipartite.closeness_centrality(G, nodes=top_nodes)
215
+ {0: 1.5, 1: 1.2, 2: 1.2, 3: 1.0, 4: 1.0}
216
+
217
+ See Also
218
+ --------
219
+ betweenness_centrality
220
+ degree_centrality
221
+ :func:`~networkx.algorithms.bipartite.basic.sets`
222
+ :func:`~networkx.algorithms.bipartite.basic.is_bipartite`
223
+
224
+ Notes
225
+ -----
226
+ The nodes input parameter must contain all nodes in one bipartite node set,
227
+ but the dictionary returned contains all nodes from both node sets.
228
+ See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
229
+ for further details on how bipartite graphs are handled in NetworkX.
230
+
231
+
232
+ Closeness centrality is normalized by the minimum distance possible.
233
+ In the bipartite case the minimum distance for a node in one bipartite
234
+ node set is 1 from all nodes in the other node set and 2 from all
235
+ other nodes in its own set [1]_. Thus the closeness centrality
236
+ for node `v` in the two bipartite sets `U` with
237
+ `n` nodes and `V` with `m` nodes is
238
+
239
+ .. math::
240
+
241
+ c_{v} = \frac{m + 2(n - 1)}{d}, \mbox{for} v \in U,
242
+
243
+ c_{v} = \frac{n + 2(m - 1)}{d}, \mbox{for} v \in V,
244
+
245
+ where `d` is the sum of the distances from `v` to all
246
+ other nodes.
247
+
248
+ Higher values of closeness indicate higher centrality.
249
+
250
+ As in the unipartite case, setting normalized=True causes the
251
+ values to normalized further to n-1 / size(G)-1 where n is the
252
+ number of nodes in the connected part of graph containing the
253
+ node. If the graph is not completely connected, this algorithm
254
+ computes the closeness centrality for each connected part
255
+ separately.
256
+
257
+ References
258
+ ----------
259
+ .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
260
+ Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
261
+ of Social Network Analysis. Sage Publications.
262
+ https://dx.doi.org/10.4135/9781446294413.n28
263
+ """
264
+ closeness = {}
265
+ path_length = nx.single_source_shortest_path_length
266
+ top = set(nodes)
267
+ bottom = set(G) - top
268
+ n = len(top)
269
+ m = len(bottom)
270
+ for node in top:
271
+ sp = dict(path_length(G, node))
272
+ totsp = sum(sp.values())
273
+ if totsp > 0.0 and len(G) > 1:
274
+ closeness[node] = (m + 2 * (n - 1)) / totsp
275
+ if normalized:
276
+ s = (len(sp) - 1) / (len(G) - 1)
277
+ closeness[node] *= s
278
+ else:
279
+ closeness[node] = 0.0
280
+ for node in bottom:
281
+ sp = dict(path_length(G, node))
282
+ totsp = sum(sp.values())
283
+ if totsp > 0.0 and len(G) > 1:
284
+ closeness[node] = (n + 2 * (m - 1)) / totsp
285
+ if normalized:
286
+ s = (len(sp) - 1) / (len(G) - 1)
287
+ closeness[node] *= s
288
+ else:
289
+ closeness[node] = 0.0
290
+ return closeness
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/cluster.py ADDED
@@ -0,0 +1,278 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for computing clustering of pairs"""
2
+
3
+ import itertools
4
+
5
+ import networkx as nx
6
+
7
+ __all__ = [
8
+ "clustering",
9
+ "average_clustering",
10
+ "latapy_clustering",
11
+ "robins_alexander_clustering",
12
+ ]
13
+
14
+
15
+ def cc_dot(nu, nv):
16
+ return len(nu & nv) / len(nu | nv)
17
+
18
+
19
+ def cc_max(nu, nv):
20
+ return len(nu & nv) / max(len(nu), len(nv))
21
+
22
+
23
+ def cc_min(nu, nv):
24
+ return len(nu & nv) / min(len(nu), len(nv))
25
+
26
+
27
+ modes = {"dot": cc_dot, "min": cc_min, "max": cc_max}
28
+
29
+
30
+ @nx._dispatchable
31
+ def latapy_clustering(G, nodes=None, mode="dot"):
32
+ r"""Compute a bipartite clustering coefficient for nodes.
33
+
34
+ The bipartite clustering coefficient is a measure of local density
35
+ of connections defined as [1]_:
36
+
37
+ .. math::
38
+
39
+ c_u = \frac{\sum_{v \in N(N(u))} c_{uv} }{|N(N(u))|}
40
+
41
+ where `N(N(u))` are the second order neighbors of `u` in `G` excluding `u`,
42
+ and `c_{uv}` is the pairwise clustering coefficient between nodes
43
+ `u` and `v`.
44
+
45
+ The mode selects the function for `c_{uv}` which can be:
46
+
47
+ `dot`:
48
+
49
+ .. math::
50
+
51
+ c_{uv}=\frac{|N(u)\cap N(v)|}{|N(u) \cup N(v)|}
52
+
53
+ `min`:
54
+
55
+ .. math::
56
+
57
+ c_{uv}=\frac{|N(u)\cap N(v)|}{min(|N(u)|,|N(v)|)}
58
+
59
+ `max`:
60
+
61
+ .. math::
62
+
63
+ c_{uv}=\frac{|N(u)\cap N(v)|}{max(|N(u)|,|N(v)|)}
64
+
65
+
66
+ Parameters
67
+ ----------
68
+ G : graph
69
+ A bipartite graph
70
+
71
+ nodes : list or iterable (optional)
72
+ Compute bipartite clustering for these nodes. The default
73
+ is all nodes in G.
74
+
75
+ mode : string
76
+ The pairwise bipartite clustering method to be used in the computation.
77
+ It must be "dot", "max", or "min".
78
+
79
+ Returns
80
+ -------
81
+ clustering : dictionary
82
+ A dictionary keyed by node with the clustering coefficient value.
83
+
84
+
85
+ Examples
86
+ --------
87
+ >>> from networkx.algorithms import bipartite
88
+ >>> G = nx.path_graph(4) # path graphs are bipartite
89
+ >>> c = bipartite.clustering(G)
90
+ >>> c[0]
91
+ 0.5
92
+ >>> c = bipartite.clustering(G, mode="min")
93
+ >>> c[0]
94
+ 1.0
95
+
96
+ See Also
97
+ --------
98
+ robins_alexander_clustering
99
+ average_clustering
100
+ networkx.algorithms.cluster.square_clustering
101
+
102
+ References
103
+ ----------
104
+ .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
105
+ Basic notions for the analysis of large two-mode networks.
106
+ Social Networks 30(1), 31--48.
107
+ """
108
+ if not nx.algorithms.bipartite.is_bipartite(G):
109
+ raise nx.NetworkXError("Graph is not bipartite")
110
+
111
+ try:
112
+ cc_func = modes[mode]
113
+ except KeyError as err:
114
+ raise nx.NetworkXError(
115
+ "Mode for bipartite clustering must be: dot, min or max"
116
+ ) from err
117
+
118
+ if nodes is None:
119
+ nodes = G
120
+ ccs = {}
121
+ for v in nodes:
122
+ cc = 0.0
123
+ nbrs2 = {u for nbr in G[v] for u in G[nbr]} - {v}
124
+ for u in nbrs2:
125
+ cc += cc_func(set(G[u]), set(G[v]))
126
+ if cc > 0.0: # len(nbrs2)>0
127
+ cc /= len(nbrs2)
128
+ ccs[v] = cc
129
+ return ccs
130
+
131
+
132
+ clustering = latapy_clustering
133
+
134
+
135
+ @nx._dispatchable(name="bipartite_average_clustering")
136
+ def average_clustering(G, nodes=None, mode="dot"):
137
+ r"""Compute the average bipartite clustering coefficient.
138
+
139
+ A clustering coefficient for the whole graph is the average,
140
+
141
+ .. math::
142
+
143
+ C = \frac{1}{n}\sum_{v \in G} c_v,
144
+
145
+ where `n` is the number of nodes in `G`.
146
+
147
+ Similar measures for the two bipartite sets can be defined [1]_
148
+
149
+ .. math::
150
+
151
+ C_X = \frac{1}{|X|}\sum_{v \in X} c_v,
152
+
153
+ where `X` is a bipartite set of `G`.
154
+
155
+ Parameters
156
+ ----------
157
+ G : graph
158
+ a bipartite graph
159
+
160
+ nodes : list or iterable, optional
161
+ A container of nodes to use in computing the average.
162
+ The nodes should be either the entire graph (the default) or one of the
163
+ bipartite sets.
164
+
165
+ mode : string
166
+ The pairwise bipartite clustering method.
167
+ It must be "dot", "max", or "min"
168
+
169
+ Returns
170
+ -------
171
+ clustering : float
172
+ The average bipartite clustering for the given set of nodes or the
173
+ entire graph if no nodes are specified.
174
+
175
+ Examples
176
+ --------
177
+ >>> from networkx.algorithms import bipartite
178
+ >>> G = nx.star_graph(3) # star graphs are bipartite
179
+ >>> bipartite.average_clustering(G)
180
+ 0.75
181
+ >>> X, Y = bipartite.sets(G)
182
+ >>> bipartite.average_clustering(G, X)
183
+ 0.0
184
+ >>> bipartite.average_clustering(G, Y)
185
+ 1.0
186
+
187
+ See Also
188
+ --------
189
+ clustering
190
+
191
+ Notes
192
+ -----
193
+ The container of nodes passed to this function must contain all of the nodes
194
+ in one of the bipartite sets ("top" or "bottom") in order to compute
195
+ the correct average bipartite clustering coefficients.
196
+ See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
197
+ for further details on how bipartite graphs are handled in NetworkX.
198
+
199
+
200
+ References
201
+ ----------
202
+ .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
203
+ Basic notions for the analysis of large two-mode networks.
204
+ Social Networks 30(1), 31--48.
205
+ """
206
+ if nodes is None:
207
+ nodes = G
208
+ ccs = latapy_clustering(G, nodes=nodes, mode=mode)
209
+ return sum(ccs[v] for v in nodes) / len(nodes)
210
+
211
+
212
+ @nx._dispatchable
213
+ def robins_alexander_clustering(G):
214
+ r"""Compute the bipartite clustering of G.
215
+
216
+ Robins and Alexander [1]_ defined bipartite clustering coefficient as
217
+ four times the number of four cycles `C_4` divided by the number of
218
+ three paths `L_3` in a bipartite graph:
219
+
220
+ .. math::
221
+
222
+ CC_4 = \frac{4 * C_4}{L_3}
223
+
224
+ Parameters
225
+ ----------
226
+ G : graph
227
+ a bipartite graph
228
+
229
+ Returns
230
+ -------
231
+ clustering : float
232
+ The Robins and Alexander bipartite clustering for the input graph.
233
+
234
+ Examples
235
+ --------
236
+ >>> from networkx.algorithms import bipartite
237
+ >>> G = nx.davis_southern_women_graph()
238
+ >>> print(round(bipartite.robins_alexander_clustering(G), 3))
239
+ 0.468
240
+
241
+ See Also
242
+ --------
243
+ latapy_clustering
244
+ networkx.algorithms.cluster.square_clustering
245
+
246
+ References
247
+ ----------
248
+ .. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking
249
+ directors: Network structure and distance in bipartite graphs.
250
+ Computational & Mathematical Organization Theory 10(1), 69–94.
251
+
252
+ """
253
+ if G.order() < 4 or G.size() < 3:
254
+ return 0
255
+ L_3 = _threepaths(G)
256
+ if L_3 == 0:
257
+ return 0
258
+ C_4 = _four_cycles(G)
259
+ return (4.0 * C_4) / L_3
260
+
261
+
262
+ def _four_cycles(G):
263
+ cycles = 0
264
+ for v in G:
265
+ for u, w in itertools.combinations(G[v], 2):
266
+ cycles += len((set(G[u]) & set(G[w])) - {v})
267
+ return cycles / 4
268
+
269
+
270
+ def _threepaths(G):
271
+ paths = 0
272
+ for v in G:
273
+ for u in G[v]:
274
+ for w in set(G[u]) - {v}:
275
+ paths += len(set(G[w]) - {v, u})
276
+ # Divide by two because we count each three path twice
277
+ # one for each possible starting point
278
+ return paths / 2
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/covering.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions related to graph covers."""
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms.bipartite.matching import hopcroft_karp_matching
5
+ from networkx.algorithms.covering import min_edge_cover as _min_edge_cover
6
+ from networkx.utils import not_implemented_for
7
+
8
+ __all__ = ["min_edge_cover"]
9
+
10
+
11
+ @not_implemented_for("directed")
12
+ @not_implemented_for("multigraph")
13
+ @nx._dispatchable(name="bipartite_min_edge_cover")
14
+ def min_edge_cover(G, matching_algorithm=None):
15
+ """Returns a set of edges which constitutes
16
+ the minimum edge cover of the graph.
17
+
18
+ The smallest edge cover can be found in polynomial time by finding
19
+ a maximum matching and extending it greedily so that all nodes
20
+ are covered.
21
+
22
+ Parameters
23
+ ----------
24
+ G : NetworkX graph
25
+ An undirected bipartite graph.
26
+
27
+ matching_algorithm : function
28
+ A function that returns a maximum cardinality matching in a
29
+ given bipartite graph. The function must take one input, the
30
+ graph ``G``, and return a dictionary mapping each node to its
31
+ mate. If not specified,
32
+ :func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching`
33
+ will be used. Other possibilities include
34
+ :func:`~networkx.algorithms.bipartite.matching.eppstein_matching`,
35
+
36
+ Returns
37
+ -------
38
+ set
39
+ A set of the edges in a minimum edge cover of the graph, given as
40
+ pairs of nodes. It contains both the edges `(u, v)` and `(v, u)`
41
+ for given nodes `u` and `v` among the edges of minimum edge cover.
42
+
43
+ Notes
44
+ -----
45
+ An edge cover of a graph is a set of edges such that every node of
46
+ the graph is incident to at least one edge of the set.
47
+ A minimum edge cover is an edge covering of smallest cardinality.
48
+
49
+ Due to its implementation, the worst-case running time of this algorithm
50
+ is bounded by the worst-case running time of the function
51
+ ``matching_algorithm``.
52
+ """
53
+ if G.order() == 0: # Special case for the empty graph
54
+ return set()
55
+ if matching_algorithm is None:
56
+ matching_algorithm = hopcroft_karp_matching
57
+ return _min_edge_cover(G, matching_algorithm=matching_algorithm)
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/edgelist.py ADDED
@@ -0,0 +1,360 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ ********************
3
+ Bipartite Edge Lists
4
+ ********************
5
+ Read and write NetworkX graphs as bipartite edge lists.
6
+
7
+ Format
8
+ ------
9
+ You can read or write three formats of edge lists with these functions.
10
+
11
+ Node pairs with no data::
12
+
13
+ 1 2
14
+
15
+ Python dictionary as data::
16
+
17
+ 1 2 {'weight':7, 'color':'green'}
18
+
19
+ Arbitrary data::
20
+
21
+ 1 2 7 green
22
+
23
+ For each edge (u, v) the node u is assigned to part 0 and the node v to part 1.
24
+ """
25
+
26
+ __all__ = ["generate_edgelist", "write_edgelist", "parse_edgelist", "read_edgelist"]
27
+
28
+ import networkx as nx
29
+ from networkx.utils import not_implemented_for, open_file
30
+
31
+
32
+ @open_file(1, mode="wb")
33
+ def write_edgelist(G, path, comments="#", delimiter=" ", data=True, encoding="utf-8"):
34
+ """Write a bipartite graph as a list of edges.
35
+
36
+ Parameters
37
+ ----------
38
+ G : Graph
39
+ A NetworkX bipartite graph
40
+ path : file or string
41
+ File or filename to write. If a file is provided, it must be
42
+ opened in 'wb' mode. Filenames ending in .gz or .bz2 will be compressed.
43
+ comments : string, optional
44
+ The character used to indicate the start of a comment
45
+ delimiter : string, optional
46
+ The string used to separate values. The default is whitespace.
47
+ data : bool or list, optional
48
+ If False write no edge data.
49
+ If True write a string representation of the edge data dictionary..
50
+ If a list (or other iterable) is provided, write the keys specified
51
+ in the list.
52
+ encoding: string, optional
53
+ Specify which encoding to use when writing file.
54
+
55
+ Examples
56
+ --------
57
+ >>> G = nx.path_graph(4)
58
+ >>> G.add_nodes_from([0, 2], bipartite=0)
59
+ >>> G.add_nodes_from([1, 3], bipartite=1)
60
+ >>> nx.write_edgelist(G, "test.edgelist")
61
+ >>> fh = open("test.edgelist", "wb")
62
+ >>> nx.write_edgelist(G, fh)
63
+ >>> nx.write_edgelist(G, "test.edgelist.gz")
64
+ >>> nx.write_edgelist(G, "test.edgelist.gz", data=False)
65
+
66
+ >>> G = nx.Graph()
67
+ >>> G.add_edge(1, 2, weight=7, color="red")
68
+ >>> nx.write_edgelist(G, "test.edgelist", data=False)
69
+ >>> nx.write_edgelist(G, "test.edgelist", data=["color"])
70
+ >>> nx.write_edgelist(G, "test.edgelist", data=["color", "weight"])
71
+
72
+ See Also
73
+ --------
74
+ write_edgelist
75
+ generate_edgelist
76
+ """
77
+ for line in generate_edgelist(G, delimiter, data):
78
+ line += "\n"
79
+ path.write(line.encode(encoding))
80
+
81
+
82
+ @not_implemented_for("directed")
83
+ def generate_edgelist(G, delimiter=" ", data=True):
84
+ """Generate a single line of the bipartite graph G in edge list format.
85
+
86
+ Parameters
87
+ ----------
88
+ G : NetworkX graph
89
+ The graph is assumed to have node attribute `part` set to 0,1 representing
90
+ the two graph parts
91
+
92
+ delimiter : string, optional
93
+ Separator for node labels
94
+
95
+ data : bool or list of keys
96
+ If False generate no edge data. If True use a dictionary
97
+ representation of edge data. If a list of keys use a list of data
98
+ values corresponding to the keys.
99
+
100
+ Returns
101
+ -------
102
+ lines : string
103
+ Lines of data in adjlist format.
104
+
105
+ Examples
106
+ --------
107
+ >>> from networkx.algorithms import bipartite
108
+ >>> G = nx.path_graph(4)
109
+ >>> G.add_nodes_from([0, 2], bipartite=0)
110
+ >>> G.add_nodes_from([1, 3], bipartite=1)
111
+ >>> G[1][2]["weight"] = 3
112
+ >>> G[2][3]["capacity"] = 12
113
+ >>> for line in bipartite.generate_edgelist(G, data=False):
114
+ ... print(line)
115
+ 0 1
116
+ 2 1
117
+ 2 3
118
+
119
+ >>> for line in bipartite.generate_edgelist(G):
120
+ ... print(line)
121
+ 0 1 {}
122
+ 2 1 {'weight': 3}
123
+ 2 3 {'capacity': 12}
124
+
125
+ >>> for line in bipartite.generate_edgelist(G, data=["weight"]):
126
+ ... print(line)
127
+ 0 1
128
+ 2 1 3
129
+ 2 3
130
+ """
131
+ try:
132
+ part0 = [n for n, d in G.nodes.items() if d["bipartite"] == 0]
133
+ except BaseException as err:
134
+ raise AttributeError("Missing node attribute `bipartite`") from err
135
+ if data is True or data is False:
136
+ for n in part0:
137
+ for edge in G.edges(n, data=data):
138
+ yield delimiter.join(map(str, edge))
139
+ else:
140
+ for n in part0:
141
+ for u, v, d in G.edges(n, data=True):
142
+ edge = [u, v]
143
+ try:
144
+ edge.extend(d[k] for k in data)
145
+ except KeyError:
146
+ pass # missing data for this edge, should warn?
147
+ yield delimiter.join(map(str, edge))
148
+
149
+
150
+ @nx._dispatchable(name="bipartite_parse_edgelist", graphs=None, returns_graph=True)
151
+ def parse_edgelist(
152
+ lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True
153
+ ):
154
+ """Parse lines of an edge list representation of a bipartite graph.
155
+
156
+ Parameters
157
+ ----------
158
+ lines : list or iterator of strings
159
+ Input data in edgelist format
160
+ comments : string, optional
161
+ Marker for comment lines
162
+ delimiter : string, optional
163
+ Separator for node labels
164
+ create_using: NetworkX graph container, optional
165
+ Use given NetworkX graph for holding nodes or edges.
166
+ nodetype : Python type, optional
167
+ Convert nodes to this type.
168
+ data : bool or list of (label,type) tuples
169
+ If False generate no edge data or if True use a dictionary
170
+ representation of edge data or a list tuples specifying dictionary
171
+ key names and types for edge data.
172
+
173
+ Returns
174
+ -------
175
+ G: NetworkX Graph
176
+ The bipartite graph corresponding to lines
177
+
178
+ Examples
179
+ --------
180
+ Edgelist with no data:
181
+
182
+ >>> from networkx.algorithms import bipartite
183
+ >>> lines = ["1 2", "2 3", "3 4"]
184
+ >>> G = bipartite.parse_edgelist(lines, nodetype=int)
185
+ >>> sorted(G.nodes())
186
+ [1, 2, 3, 4]
187
+ >>> sorted(G.nodes(data=True))
188
+ [(1, {'bipartite': 0}), (2, {'bipartite': 0}), (3, {'bipartite': 0}), (4, {'bipartite': 1})]
189
+ >>> sorted(G.edges())
190
+ [(1, 2), (2, 3), (3, 4)]
191
+
192
+ Edgelist with data in Python dictionary representation:
193
+
194
+ >>> lines = ["1 2 {'weight':3}", "2 3 {'weight':27}", "3 4 {'weight':3.0}"]
195
+ >>> G = bipartite.parse_edgelist(lines, nodetype=int)
196
+ >>> sorted(G.nodes())
197
+ [1, 2, 3, 4]
198
+ >>> sorted(G.edges(data=True))
199
+ [(1, 2, {'weight': 3}), (2, 3, {'weight': 27}), (3, 4, {'weight': 3.0})]
200
+
201
+ Edgelist with data in a list:
202
+
203
+ >>> lines = ["1 2 3", "2 3 27", "3 4 3.0"]
204
+ >>> G = bipartite.parse_edgelist(lines, nodetype=int, data=(("weight", float),))
205
+ >>> sorted(G.nodes())
206
+ [1, 2, 3, 4]
207
+ >>> sorted(G.edges(data=True))
208
+ [(1, 2, {'weight': 3.0}), (2, 3, {'weight': 27.0}), (3, 4, {'weight': 3.0})]
209
+
210
+ See Also
211
+ --------
212
+ """
213
+ from ast import literal_eval
214
+
215
+ G = nx.empty_graph(0, create_using)
216
+ for line in lines:
217
+ p = line.find(comments)
218
+ if p >= 0:
219
+ line = line[:p]
220
+ if not len(line):
221
+ continue
222
+ # split line, should have 2 or more
223
+ s = line.rstrip("\n").split(delimiter)
224
+ if len(s) < 2:
225
+ continue
226
+ u = s.pop(0)
227
+ v = s.pop(0)
228
+ d = s
229
+ if nodetype is not None:
230
+ try:
231
+ u = nodetype(u)
232
+ v = nodetype(v)
233
+ except BaseException as err:
234
+ raise TypeError(
235
+ f"Failed to convert nodes {u},{v} to type {nodetype}."
236
+ ) from err
237
+
238
+ if len(d) == 0 or data is False:
239
+ # no data or data type specified
240
+ edgedata = {}
241
+ elif data is True:
242
+ # no edge types specified
243
+ try: # try to evaluate as dictionary
244
+ edgedata = dict(literal_eval(" ".join(d)))
245
+ except BaseException as err:
246
+ raise TypeError(
247
+ f"Failed to convert edge data ({d}) to dictionary."
248
+ ) from err
249
+ else:
250
+ # convert edge data to dictionary with specified keys and type
251
+ if len(d) != len(data):
252
+ raise IndexError(
253
+ f"Edge data {d} and data_keys {data} are not the same length"
254
+ )
255
+ edgedata = {}
256
+ for (edge_key, edge_type), edge_value in zip(data, d):
257
+ try:
258
+ edge_value = edge_type(edge_value)
259
+ except BaseException as err:
260
+ raise TypeError(
261
+ f"Failed to convert {edge_key} data "
262
+ f"{edge_value} to type {edge_type}."
263
+ ) from err
264
+ edgedata.update({edge_key: edge_value})
265
+ G.add_node(u, bipartite=0)
266
+ G.add_node(v, bipartite=1)
267
+ G.add_edge(u, v, **edgedata)
268
+ return G
269
+
270
+
271
+ @open_file(0, mode="rb")
272
+ @nx._dispatchable(name="bipartite_read_edgelist", graphs=None, returns_graph=True)
273
+ def read_edgelist(
274
+ path,
275
+ comments="#",
276
+ delimiter=None,
277
+ create_using=None,
278
+ nodetype=None,
279
+ data=True,
280
+ edgetype=None,
281
+ encoding="utf-8",
282
+ ):
283
+ """Read a bipartite graph from a list of edges.
284
+
285
+ Parameters
286
+ ----------
287
+ path : file or string
288
+ File or filename to read. If a file is provided, it must be
289
+ opened in 'rb' mode.
290
+ Filenames ending in .gz or .bz2 will be uncompressed.
291
+ comments : string, optional
292
+ The character used to indicate the start of a comment.
293
+ delimiter : string, optional
294
+ The string used to separate values. The default is whitespace.
295
+ create_using : Graph container, optional,
296
+ Use specified container to build graph. The default is networkx.Graph,
297
+ an undirected graph.
298
+ nodetype : int, float, str, Python type, optional
299
+ Convert node data from strings to specified type
300
+ data : bool or list of (label,type) tuples
301
+ Tuples specifying dictionary key names and types for edge data
302
+ edgetype : int, float, str, Python type, optional OBSOLETE
303
+ Convert edge data from strings to specified type and use as 'weight'
304
+ encoding: string, optional
305
+ Specify which encoding to use when reading file.
306
+
307
+ Returns
308
+ -------
309
+ G : graph
310
+ A networkx Graph or other type specified with create_using
311
+
312
+ Examples
313
+ --------
314
+ >>> from networkx.algorithms import bipartite
315
+ >>> G = nx.path_graph(4)
316
+ >>> G.add_nodes_from([0, 2], bipartite=0)
317
+ >>> G.add_nodes_from([1, 3], bipartite=1)
318
+ >>> bipartite.write_edgelist(G, "test.edgelist")
319
+ >>> G = bipartite.read_edgelist("test.edgelist")
320
+
321
+ >>> fh = open("test.edgelist", "rb")
322
+ >>> G = bipartite.read_edgelist(fh)
323
+ >>> fh.close()
324
+
325
+ >>> G = bipartite.read_edgelist("test.edgelist", nodetype=int)
326
+
327
+ Edgelist with data in a list:
328
+
329
+ >>> textline = "1 2 3"
330
+ >>> fh = open("test.edgelist", "w")
331
+ >>> d = fh.write(textline)
332
+ >>> fh.close()
333
+ >>> G = bipartite.read_edgelist(
334
+ ... "test.edgelist", nodetype=int, data=(("weight", float),)
335
+ ... )
336
+ >>> list(G)
337
+ [1, 2]
338
+ >>> list(G.edges(data=True))
339
+ [(1, 2, {'weight': 3.0})]
340
+
341
+ See parse_edgelist() for more examples of formatting.
342
+
343
+ See Also
344
+ --------
345
+ parse_edgelist
346
+
347
+ Notes
348
+ -----
349
+ Since nodes must be hashable, the function nodetype must return hashable
350
+ types (e.g. int, float, str, frozenset - or tuples of those, etc.)
351
+ """
352
+ lines = (line.decode(encoding) for line in path)
353
+ return parse_edgelist(
354
+ lines,
355
+ comments=comments,
356
+ delimiter=delimiter,
357
+ create_using=create_using,
358
+ nodetype=nodetype,
359
+ data=data,
360
+ )
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/extendability.py ADDED
@@ -0,0 +1,105 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Provides a function for computing the extendability of a graph which is
2
+ undirected, simple, connected and bipartite and contains at least one perfect matching."""
3
+
4
+ import networkx as nx
5
+ from networkx.utils import not_implemented_for
6
+
7
+ __all__ = ["maximal_extendability"]
8
+
9
+
10
+ @not_implemented_for("directed")
11
+ @not_implemented_for("multigraph")
12
+ @nx._dispatchable
13
+ def maximal_extendability(G):
14
+ """Computes the extendability of a graph.
15
+
16
+ The extendability of a graph is defined as the maximum $k$ for which `G`
17
+ is $k$-extendable. Graph `G` is $k$-extendable if and only if `G` has a
18
+ perfect matching and every set of $k$ independent edges can be extended
19
+ to a perfect matching in `G`.
20
+
21
+ Parameters
22
+ ----------
23
+ G : NetworkX Graph
24
+ A fully-connected bipartite graph without self-loops
25
+
26
+ Returns
27
+ -------
28
+ extendability : int
29
+
30
+ Raises
31
+ ------
32
+ NetworkXError
33
+ If the graph `G` is disconnected.
34
+ If the graph `G` is not bipartite.
35
+ If the graph `G` does not contain a perfect matching.
36
+ If the residual graph of `G` is not strongly connected.
37
+
38
+ Notes
39
+ -----
40
+ Definition:
41
+ Let `G` be a simple, connected, undirected and bipartite graph with a perfect
42
+ matching M and bipartition (U,V). The residual graph of `G`, denoted by $G_M$,
43
+ is the graph obtained from G by directing the edges of M from V to U and the
44
+ edges that do not belong to M from U to V.
45
+
46
+ Lemma [1]_ :
47
+ Let M be a perfect matching of `G`. `G` is $k$-extendable if and only if its residual
48
+ graph $G_M$ is strongly connected and there are $k$ vertex-disjoint directed
49
+ paths between every vertex of U and every vertex of V.
50
+
51
+ Assuming that input graph `G` is undirected, simple, connected, bipartite and contains
52
+ a perfect matching M, this function constructs the residual graph $G_M$ of G and
53
+ returns the minimum value among the maximum vertex-disjoint directed paths between
54
+ every vertex of U and every vertex of V in $G_M$. By combining the definitions
55
+ and the lemma, this value represents the extendability of the graph `G`.
56
+
57
+ Time complexity O($n^3$ $m^2$)) where $n$ is the number of vertices
58
+ and $m$ is the number of edges.
59
+
60
+ References
61
+ ----------
62
+ .. [1] "A polynomial algorithm for the extendability problem in bipartite graphs",
63
+ J. Lakhal, L. Litzler, Information Processing Letters, 1998.
64
+ .. [2] "On n-extendible graphs", M. D. Plummer, Discrete Mathematics, 31:201–210, 1980
65
+ https://doi.org/10.1016/0012-365X(80)90037-0
66
+
67
+ """
68
+ if not nx.is_connected(G):
69
+ raise nx.NetworkXError("Graph G is not connected")
70
+
71
+ if not nx.bipartite.is_bipartite(G):
72
+ raise nx.NetworkXError("Graph G is not bipartite")
73
+
74
+ U, V = nx.bipartite.sets(G)
75
+
76
+ maximum_matching = nx.bipartite.hopcroft_karp_matching(G)
77
+
78
+ if not nx.is_perfect_matching(G, maximum_matching):
79
+ raise nx.NetworkXError("Graph G does not contain a perfect matching")
80
+
81
+ # list of edges in perfect matching, directed from V to U
82
+ pm = [(node, maximum_matching[node]) for node in V & maximum_matching.keys()]
83
+
84
+ # Direct all the edges of G, from V to U if in matching, else from U to V
85
+ directed_edges = [
86
+ (x, y) if (x in V and (x, y) in pm) or (x in U and (y, x) not in pm) else (y, x)
87
+ for x, y in G.edges
88
+ ]
89
+
90
+ # Construct the residual graph of G
91
+ residual_G = nx.DiGraph()
92
+ residual_G.add_nodes_from(G)
93
+ residual_G.add_edges_from(directed_edges)
94
+
95
+ if not nx.is_strongly_connected(residual_G):
96
+ raise nx.NetworkXError("The residual graph of G is not strongly connected")
97
+
98
+ # For node-pairs between V & U, keep min of max number of node-disjoint paths
99
+ # Variable $k$ stands for the extendability of graph G
100
+ k = float("inf")
101
+ for u in U:
102
+ for v in V:
103
+ num_paths = sum(1 for _ in nx.node_disjoint_paths(residual_G, u, v))
104
+ k = k if k < num_paths else num_paths
105
+ return k
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/generators.py ADDED
@@ -0,0 +1,604 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Generators and functions for bipartite graphs.
3
+ """
4
+
5
+ import math
6
+ import numbers
7
+ from functools import reduce
8
+
9
+ import networkx as nx
10
+ from networkx.utils import nodes_or_number, py_random_state
11
+
12
+ __all__ = [
13
+ "configuration_model",
14
+ "havel_hakimi_graph",
15
+ "reverse_havel_hakimi_graph",
16
+ "alternating_havel_hakimi_graph",
17
+ "preferential_attachment_graph",
18
+ "random_graph",
19
+ "gnmk_random_graph",
20
+ "complete_bipartite_graph",
21
+ ]
22
+
23
+
24
+ @nx._dispatchable(graphs=None, returns_graph=True)
25
+ @nodes_or_number([0, 1])
26
+ def complete_bipartite_graph(n1, n2, create_using=None):
27
+ """Returns the complete bipartite graph `K_{n_1,n_2}`.
28
+
29
+ The graph is composed of two partitions with nodes 0 to (n1 - 1)
30
+ in the first and nodes n1 to (n1 + n2 - 1) in the second.
31
+ Each node in the first is connected to each node in the second.
32
+
33
+ Parameters
34
+ ----------
35
+ n1, n2 : integer or iterable container of nodes
36
+ If integers, nodes are from `range(n1)` and `range(n1, n1 + n2)`.
37
+ If a container, the elements are the nodes.
38
+ create_using : NetworkX graph instance, (default: nx.Graph)
39
+ Return graph of this type.
40
+
41
+ Notes
42
+ -----
43
+ Nodes are the integers 0 to `n1 + n2 - 1` unless either n1 or n2 are
44
+ containers of nodes. If only one of n1 or n2 are integers, that
45
+ integer is replaced by `range` of that integer.
46
+
47
+ The nodes are assigned the attribute 'bipartite' with the value 0 or 1
48
+ to indicate which bipartite set the node belongs to.
49
+
50
+ This function is not imported in the main namespace.
51
+ To use it use nx.bipartite.complete_bipartite_graph
52
+ """
53
+ G = nx.empty_graph(0, create_using)
54
+ if G.is_directed():
55
+ raise nx.NetworkXError("Directed Graph not supported")
56
+
57
+ n1, top = n1
58
+ n2, bottom = n2
59
+ if isinstance(n1, numbers.Integral) and isinstance(n2, numbers.Integral):
60
+ bottom = [n1 + i for i in bottom]
61
+ G.add_nodes_from(top, bipartite=0)
62
+ G.add_nodes_from(bottom, bipartite=1)
63
+ if len(G) != len(top) + len(bottom):
64
+ raise nx.NetworkXError("Inputs n1 and n2 must contain distinct nodes")
65
+ G.add_edges_from((u, v) for u in top for v in bottom)
66
+ G.graph["name"] = f"complete_bipartite_graph({len(top)}, {len(bottom)})"
67
+ return G
68
+
69
+
70
@py_random_state(3)
@nx._dispatchable(name="bipartite_configuration_model", graphs=None, returns_graph=True)
def configuration_model(aseq, bseq, create_using=None, seed=None):
    """Returns a random bipartite graph from two given degree sequences.

    The graph has two partitions: set A holds nodes 0 .. len(aseq)-1 and
    set B holds nodes len(aseq) .. len(aseq)+len(bseq)-1.  Edges are
    created by pairing free stubs chosen uniformly at random, one from
    each set.

    Parameters
    ----------
    aseq : list
       Degree sequence for node set A.
    bseq : list
       Degree sequence for node set B.
    create_using : NetworkX graph instance, optional
       Return graph of this type.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Notes
    -----
    The two sequences must have equal sums: sum(aseq) == sum(bseq).
    With the default MultiGraph, parallel edges may appear; pass
    create_using=Graph() to forbid them, at the cost of the realized
    degrees possibly differing from the requested sequences.

    Each node carries a 'bipartite' attribute (0 for set A, 1 for set B).

    This function is not imported in the main namespace.
    To use it use nx.bipartite.configuration_model
    """
    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    # Sequence lengths and degree totals.
    lena, lenb = len(aseq), len(bseq)
    suma, sumb = sum(aseq), sum(bseq)

    if suma != sumb:
        raise nx.NetworkXError(
            f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
        )

    G = _add_nodes_with_bipartite_label(G, lena, lenb)

    # Nothing to wire up when set A contributes no stubs.
    if len(aseq) == 0 or max(aseq) == 0:
        return G

    # One list entry per free stub: node v appears aseq[v] times
    # (respectively bseq[...] times for the bottom set).
    astubs = [v for v in range(lena) for _ in range(aseq[v])]
    bstubs = [w for w in range(lena, lena + lenb) for _ in range(bseq[w - lena])]

    # Pair the stubs uniformly at random.
    seed.shuffle(astubs)
    seed.shuffle(bstubs)
    G.add_edges_from(zip(astubs, bstubs))

    G.name = "bipartite_configuration_model"
    return G
140
+
141
+
142
@nx._dispatchable(name="bipartite_havel_hakimi_graph", graphs=None, returns_graph=True)
def havel_hakimi_graph(aseq, bseq, create_using=None):
    """Returns a bipartite graph from two given degree sequences using a
    Havel-Hakimi style construction.

    The graph has two partitions: set A holds nodes 0 .. len(aseq)-1 and
    set B holds nodes len(aseq) onward.  The highest-degree node of set A
    is repeatedly connected to the currently highest-degree nodes of set B
    until every stub is used.

    Parameters
    ----------
    aseq : list
       Degree sequence for node set A.
    bseq : list
       Degree sequence for node set B.
    create_using : NetworkX graph instance, optional
       Return graph of this type.

    Notes
    -----
    The two sequences must have equal sums: sum(aseq) == sum(bseq).
    With the default MultiGraph, parallel edges may appear; pass
    create_using=Graph() to forbid them, at the cost of the realized
    degrees possibly differing from the requested sequences.

    Each node carries a 'bipartite' attribute (0 for set A, 1 for set B).

    This function is not imported in the main namespace.
    To use it use nx.bipartite.havel_hakimi_graph
    """
    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    # Sequence lengths and degree totals.
    naseq, nbseq = len(aseq), len(bseq)
    suma, sumb = sum(aseq), sum(bseq)

    if suma != sumb:
        raise nx.NetworkXError(
            f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
        )

    G = _add_nodes_with_bipartite_label(G, naseq, nbseq)

    if len(aseq) == 0 or max(aseq) == 0:
        return G  # no stubs, nothing to connect

    # Mutable [remaining_degree, node] pairs so degrees can be decremented
    # in place while the lists are re-sorted.
    astubs = [[aseq[v], v] for v in range(naseq)]
    bstubs = [[bseq[v - naseq], v] for v in range(naseq, naseq + nbseq)]
    astubs.sort()
    while astubs:
        deg, a_node = astubs.pop()  # largest remaining degree in set A
        if deg == 0:
            break  # everything left is zero
        # Connect a_node to the `deg` highest-degree nodes of set B.
        bstubs.sort()
        for tgt in bstubs[-deg:]:
            G.add_edge(a_node, tgt[1])
            tgt[0] -= 1  # in-place decrement, visible through bstubs
            if tgt[0] == 0:
                bstubs.remove(tgt)

    G.name = "bipartite_havel_hakimi_graph"
    return G
215
+
216
+
217
@nx._dispatchable(graphs=None, returns_graph=True)
def reverse_havel_hakimi_graph(aseq, bseq, create_using=None):
    """Returns a bipartite graph from two given degree sequences using a
    Havel-Hakimi style construction.

    The graph has two partitions: set A holds nodes 0 .. len(aseq)-1 and
    set B holds nodes len(aseq) onward.  The highest-degree node of set A
    is repeatedly connected to the lowest-degree nodes of set B until every
    stub is used.

    Parameters
    ----------
    aseq : list
       Degree sequence for node set A.
    bseq : list
       Degree sequence for node set B.
    create_using : NetworkX graph instance, optional
       Return graph of this type.

    Notes
    -----
    The two sequences must have equal sums: sum(aseq) == sum(bseq).
    With the default MultiGraph, parallel edges may appear; pass
    create_using=Graph() to forbid them, at the cost of the realized
    degrees possibly differing from the requested sequences.

    Each node carries a 'bipartite' attribute (0 for set A, 1 for set B).

    This function is not imported in the main namespace.
    To use it use nx.bipartite.reverse_havel_hakimi_graph
    """
    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    # Sequence lengths and degree totals.
    lena, lenb = len(aseq), len(bseq)
    suma, sumb = sum(aseq), sum(bseq)

    if suma != sumb:
        raise nx.NetworkXError(
            f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
        )

    G = _add_nodes_with_bipartite_label(G, lena, lenb)

    if len(aseq) == 0 or max(aseq) == 0:
        return G  # no stubs, nothing to connect

    # Mutable [remaining_degree, node] pairs; both lists are sorted once
    # up front (set B is deliberately NOT re-sorted inside the loop).
    astubs = [[aseq[v], v] for v in range(lena)]
    bstubs = [[bseq[v - lena], v] for v in range(lena, lena + lenb)]
    astubs.sort()
    bstubs.sort()
    while astubs:
        deg, a_node = astubs.pop()  # largest remaining degree in set A
        if deg == 0:
            break  # everything left is zero
        # Connect a_node to the `deg` smallest-degree nodes of set B.
        for tgt in bstubs[0:deg]:
            G.add_edge(a_node, tgt[1])
            tgt[0] -= 1  # in-place decrement, visible through bstubs
            if tgt[0] == 0:
                bstubs.remove(tgt)

    G.name = "bipartite_reverse_havel_hakimi_graph"
    return G
289
+
290
+
291
@nx._dispatchable(graphs=None, returns_graph=True)
def alternating_havel_hakimi_graph(aseq, bseq, create_using=None):
    """Returns a bipartite graph from two given degree sequences using
    an alternating Havel-Hakimi style construction.

    The graph is composed of two partitions. Set A has nodes 0 to
    (len(aseq) - 1) and set B has nodes len(aseq) to (len(bseq) - 1).
    Nodes from the set A are connected to nodes in the set B by
    connecting the highest degree nodes in set A to alternatively the
    highest and the lowest degree nodes in set B until all stubs are
    connected.

    Parameters
    ----------
    aseq : list
       Degree sequence for node set A.
    bseq : list
       Degree sequence for node set B.
    create_using : NetworkX graph instance, optional
       Return graph of this type.

    Notes
    -----
    The sum of the two sequences must be equal: sum(aseq)=sum(bseq)
    If no graph type is specified use MultiGraph with parallel edges.
    If you want a graph with no parallel edges use create_using=Graph()
    but then the resulting degree sequences might not be exact.

    The nodes are assigned the attribute 'bipartite' with the value 0 or 1
    to indicate which bipartite set the node belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.alternating_havel_hakimi_graph
    """
    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    # length of the each sequence
    naseq = len(aseq)
    nbseq = len(bseq)
    suma = sum(aseq)
    sumb = sum(bseq)

    # The construction consumes one A-stub per B-stub, so the totals must match.
    if not suma == sumb:
        raise nx.NetworkXError(
            f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
        )

    G = _add_nodes_with_bipartite_label(G, naseq, nbseq)

    if len(aseq) == 0 or max(aseq) == 0:
        return G  # done if no edges
    # build list of degree-repeated vertex numbers
    # Entries are mutable [remaining_degree, node] pairs so the degree can be
    # decremented in place.
    astubs = [[aseq[v], v] for v in range(naseq)]
    bstubs = [[bseq[v - naseq], v] for v in range(naseq, naseq + nbseq)]
    while astubs:
        astubs.sort()
        (degree, u) = astubs.pop()  # take of largest degree node in the a set
        if degree == 0:
            break  # done, all are zero
        bstubs.sort()
        # Split the targets: degree//2 lowest-degree nodes plus the
        # remaining degree - degree//2 highest-degree nodes of set B.
        small = bstubs[0 : degree // 2]  # add these low degree targets
        large = bstubs[(-degree + degree // 2) :]  # now high degree targets
        # Interleave high/low targets; zip stops at the shorter list, so when
        # degree is odd the one leftover high-degree target is appended below.
        stubs = [x for z in zip(large, small) for x in z]  # combine, sorry
        if len(stubs) < len(small) + len(large):  # check for zip truncation
            stubs.append(large.pop())
        for target in stubs:
            v = target[1]
            G.add_edge(u, v)
            target[0] -= 1  # note this updates bstubs too.
            if target[0] == 0:
                bstubs.remove(target)

    G.name = "bipartite_alternating_havel_hakimi_graph"
    return G
367
+
368
+
369
@py_random_state(3)
@nx._dispatchable(graphs=None, returns_graph=True)
def preferential_attachment_graph(aseq, p, create_using=None, seed=None):
    """Create a bipartite graph with a preferential attachment model from
    a given single degree sequence.

    The graph has two partitions: set A holds nodes 0 .. len(aseq)-1 and
    set B starts at node len(aseq).  The size of set B is random: each
    A-stub either spawns a fresh bottom node (with probability ``p``) or
    attaches to an existing bottom node chosen proportionally to its
    current degree.

    Parameters
    ----------
    aseq : list
       Degree sequence for node set A.
    p : float
       Probability that a new bottom node is added.
    create_using : NetworkX graph instance, optional
       Return graph of this type.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    References
    ----------
    .. [1] Guillaume, J.L. and Latapy, M.,
       Bipartite graphs as models of complex networks.
       Physica A: Statistical Mechanics and its Applications,
       2006, 371(2), pp.795-813.
    .. [2] Jean-Loup Guillaume and Matthieu Latapy,
       Bipartite structure of all complex networks,
       Inf. Process. Lett. 90, 2004, pg. 215-221
       https://doi.org/10.1016/j.ipl.2004.03.007

    Notes
    -----
    Each node carries a 'bipartite' attribute (0 for set A, 1 for set B).

    This function is not imported in the main namespace.
    To use it use nx.bipartite.preferential_attachment_graph
    """
    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    if p > 1:
        raise nx.NetworkXError(f"probability {p} > 1")

    naseq = len(aseq)
    G = _add_nodes_with_bipartite_label(G, naseq, 0)
    # Per-node stub lists for the top set: node v contributes aseq[v] stubs.
    stubs = [[v] * aseq[v] for v in range(naseq)]
    while stubs:
        batch = stubs[0]
        while batch:
            source = batch.pop(0)
            if seed.random() < p or len(G) == naseq:
                # Spawn a brand-new bottom node and attach to it.  The
                # len(G) == naseq branch also forces this when no bottom
                # node exists yet.
                target = len(G)
                G.add_node(target, bipartite=1)
                G.add_edge(source, target)
            else:
                # Choose an existing bottom node with probability
                # proportional to its current degree: list each bottom
                # node once per incident edge, then pick uniformly.
                degree_repeated = [
                    b for b in range(naseq, len(G)) for _ in range(G.degree(b))
                ]
                target = seed.choice(degree_repeated)
                G.add_node(target, bipartite=1)
                G.add_edge(source, target)
        stubs.pop(0)
    G.name = "bipartite_preferential_attachment_model"
    return G
439
+
440
+
441
def _geometric_skip_edges(G, n, m, lp, seed, reverse=False):
    """Add one Batagelj-Brandes geometric-skip pass of Bernoulli(p) edges.

    Skips over non-edges geometrically instead of testing all n*m pairs,
    where ``lp = log(1 - p)``.  Edges run top->bottom as (v, n + w), or
    bottom->top as (n + w, v) when ``reverse`` is True (directed pass).
    """
    v = 0
    w = -1
    while v < n:
        lr = math.log(1.0 - seed.random())
        # Geometric jump: number of skipped pairs is floor(lr / lp).
        w = w + 1 + int(lr / lp)
        while w >= m and v < n:
            w = w - m
            v = v + 1
        if v < n:
            if reverse:
                G.add_edge(n + w, v)
            else:
                G.add_edge(v, n + w)


@py_random_state(3)
@nx._dispatchable(graphs=None, returns_graph=True)
def random_graph(n, m, p, seed=None, directed=False):
    """Returns a bipartite random graph.

    This is a bipartite version of the binomial (Erdős-Rényi) graph.
    The graph is composed of two partitions. Set A has nodes 0 to
    (n - 1) and set B has nodes n to (n + m - 1).

    Parameters
    ----------
    n : int
        The number of nodes in the first bipartite set.
    m : int
        The number of nodes in the second bipartite set.
    p : float
        Probability for edge creation.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    directed : bool, optional (default=False)
        If True return a directed graph

    Notes
    -----
    The bipartite random graph algorithm chooses each of the n*m (undirected)
    or 2*nm (directed) possible edges with probability p.

    This algorithm is $O(n+m)$ where $m$ is the expected number of edges.

    The nodes are assigned the attribute 'bipartite' with the value 0 or 1
    to indicate which bipartite set the node belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.random_graph

    See Also
    --------
    gnp_random_graph, configuration_model

    References
    ----------
    .. [1] Vladimir Batagelj and Ulrik Brandes,
       "Efficient generation of large random networks",
       Phys. Rev. E, 71, 036113, 2005.
    """
    G = nx.Graph()
    G = _add_nodes_with_bipartite_label(G, n, m)
    if directed:
        G = nx.DiGraph(G)
    G.name = f"fast_gnp_random_graph({n},{m},{p})"

    # Degenerate probabilities short-circuit the sampling entirely.
    if p <= 0:
        return G
    if p >= 1:
        return nx.complete_bipartite_graph(n, m)

    lp = math.log(1.0 - p)

    # Top -> bottom edges (the only pass for undirected graphs).
    _geometric_skip_edges(G, n, m, lp, seed)

    if directed:
        # Same sampling procedure for the reverse (bottom -> top) edges.
        _geometric_skip_edges(G, n, m, lp, seed, reverse=True)

    return G
526
+
527
+
528
@py_random_state(3)
@nx._dispatchable(graphs=None, returns_graph=True)
def gnmk_random_graph(n, m, k, seed=None, directed=False):
    """Returns a random bipartite graph G_{n,m,k}.

    Produces a bipartite graph chosen randomly out of the set of all graphs
    with n top nodes, m bottom nodes, and k edges.
    The graph is composed of two sets of nodes.
    Set A has nodes 0 to (n - 1) and set B has nodes n to (n + m - 1).

    Parameters
    ----------
    n : int
        The number of nodes in the first bipartite set.
    m : int
        The number of nodes in the second bipartite set.
    k : int
        The number of edges
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    directed : bool, optional (default=False)
        If True return a directed graph

    Examples
    --------
    from nx.algorithms import bipartite
    G = bipartite.gnmk_random_graph(10,20,50)

    See Also
    --------
    gnm_random_graph

    Notes
    -----
    If k > m * n then a complete bipartite graph is returned.

    This graph is a bipartite version of the `G_{nm}` random graph model.

    The nodes are assigned the attribute 'bipartite' with the value 0 or 1
    to indicate which bipartite set the node belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.gnmk_random_graph
    """
    G = nx.Graph()
    G = _add_nodes_with_bipartite_label(G, n, m)
    if directed:
        G = nx.DiGraph(G)
    G.name = f"bipartite_gnm_random_graph({n},{m},{k})"
    # NOTE(review): when either partition is a singleton the function returns
    # immediately with NO edges, even for k > 0 — behavior preserved as-is;
    # confirm against upstream intent before changing.
    if n == 1 or m == 1:
        return G
    max_edges = n * m  # every possible top-bottom pair
    if k >= max_edges:  # Maybe we should raise an exception here
        return nx.complete_bipartite_graph(n, m, create_using=G)

    top = [node for node, data in G.nodes(data=True) if data["bipartite"] == 0]
    bottom = list(set(G) - set(top))
    edge_count = 0
    # Rejection-sample endpoint pairs until k distinct edges exist.
    while edge_count < k:
        u = seed.choice(top)
        v = seed.choice(bottom)
        if v not in G[u]:
            G.add_edge(u, v)
            edge_count += 1
    return G
597
+
598
+
599
def _add_nodes_with_bipartite_label(G, lena, lenb):
    """Add lena + lenb integer nodes to G and label them.

    Nodes 0 .. lena-1 get bipartite=0, nodes lena .. lena+lenb-1 get
    bipartite=1.  Returns G for chaining.
    """
    G.add_nodes_from(range(lena + lenb))
    labels = {v: (0 if v < lena else 1) for v in range(lena + lenb)}
    nx.set_node_attributes(G, labels, "bipartite")
    return G
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/matching.py ADDED
@@ -0,0 +1,590 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This module uses material from the Wikipedia article Hopcroft--Karp algorithm
2
+ # <https://en.wikipedia.org/wiki/Hopcroft%E2%80%93Karp_algorithm>, accessed on
3
+ # January 3, 2015, which is released under the Creative Commons
4
+ # Attribution-Share-Alike License 3.0
5
+ # <http://creativecommons.org/licenses/by-sa/3.0/>. That article includes
6
+ # pseudocode, which has been translated into the corresponding Python code.
7
+ #
8
+ # Portions of this module use code from David Eppstein's Python Algorithms and
9
+ # Data Structures (PADS) library, which is dedicated to the public domain (for
10
+ # proof, see <http://www.ics.uci.edu/~eppstein/PADS/ABOUT-PADS.txt>).
11
+ """Provides functions for computing maximum cardinality matchings and minimum
12
+ weight full matchings in a bipartite graph.
13
+
14
+ If you don't care about the particular implementation of the maximum matching
15
+ algorithm, simply use the :func:`maximum_matching`. If you do care, you can
16
+ import one of the named maximum matching algorithms directly.
17
+
18
+ For example, to find a maximum matching in the complete bipartite graph with
19
+ two vertices on the left and three vertices on the right:
20
+
21
+ >>> G = nx.complete_bipartite_graph(2, 3)
22
+ >>> left, right = nx.bipartite.sets(G)
23
+ >>> list(left)
24
+ [0, 1]
25
+ >>> list(right)
26
+ [2, 3, 4]
27
+ >>> nx.bipartite.maximum_matching(G)
28
+ {0: 2, 1: 3, 2: 0, 3: 1}
29
+
30
+ The dictionary returned by :func:`maximum_matching` includes a mapping for
31
+ vertices in both the left and right vertex sets.
32
+
33
+ Similarly, :func:`minimum_weight_full_matching` produces, for a complete
34
+ weighted bipartite graph, a matching whose cardinality is the cardinality of
35
+ the smaller of the two partitions, and for which the sum of the weights of the
36
+ edges included in the matching is minimal.
37
+
38
+ """
39
+
40
+ import collections
41
+ import itertools
42
+
43
+ import networkx as nx
44
+ from networkx.algorithms.bipartite import sets as bipartite_sets
45
+ from networkx.algorithms.bipartite.matrix import biadjacency_matrix
46
+
47
+ __all__ = [
48
+ "maximum_matching",
49
+ "hopcroft_karp_matching",
50
+ "eppstein_matching",
51
+ "to_vertex_cover",
52
+ "minimum_weight_full_matching",
53
+ ]
54
+
55
+ INFINITY = float("inf")
56
+
57
+
58
+ @nx._dispatchable
59
+ def hopcroft_karp_matching(G, top_nodes=None):
60
+ """Returns the maximum cardinality matching of the bipartite graph `G`.
61
+
62
+ A matching is a set of edges that do not share any nodes. A maximum
63
+ cardinality matching is a matching with the most edges possible. It
64
+ is not always unique. Finding a matching in a bipartite graph can be
65
+ treated as a networkx flow problem.
66
+
67
+ The functions ``hopcroft_karp_matching`` and ``maximum_matching``
68
+ are aliases of the same function.
69
+
70
+ Parameters
71
+ ----------
72
+ G : NetworkX graph
73
+
74
+ Undirected bipartite graph
75
+
76
+ top_nodes : container of nodes
77
+
78
+ Container with all nodes in one bipartite node set. If not supplied
79
+ it will be computed. But if more than one solution exists an exception
80
+ will be raised.
81
+
82
+ Returns
83
+ -------
84
+ matches : dictionary
85
+
86
+ The matching is returned as a dictionary, `matches`, such that
87
+ ``matches[v] == w`` if node `v` is matched to node `w`. Unmatched
88
+ nodes do not occur as a key in `matches`.
89
+
90
+ Raises
91
+ ------
92
+ AmbiguousSolution
93
+ Raised if the input bipartite graph is disconnected and no container
94
+ with all nodes in one bipartite set is provided. When determining
95
+ the nodes in each bipartite set more than one valid solution is
96
+ possible if the input graph is disconnected.
97
+
98
+ Notes
99
+ -----
100
+ This function is implemented with the `Hopcroft--Karp matching algorithm
101
+ <https://en.wikipedia.org/wiki/Hopcroft%E2%80%93Karp_algorithm>`_ for
102
+ bipartite graphs.
103
+
104
+ See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
105
+ for further details on how bipartite graphs are handled in NetworkX.
106
+
107
+ See Also
108
+ --------
109
+ maximum_matching
110
+ hopcroft_karp_matching
111
+ eppstein_matching
112
+
113
+ References
114
+ ----------
115
+ .. [1] John E. Hopcroft and Richard M. Karp. "An n^{5 / 2} Algorithm for
116
+ Maximum Matchings in Bipartite Graphs" In: **SIAM Journal of Computing**
117
+ 2.4 (1973), pp. 225--231. <https://doi.org/10.1137/0202019>.
118
+
119
+ """
120
+
121
+ # First we define some auxiliary search functions.
122
+ #
123
+ # If you are a human reading these auxiliary search functions, the "global"
124
+ # variables `leftmatches`, `rightmatches`, `distances`, etc. are defined
125
+ # below the functions, so that they are initialized close to the initial
126
+ # invocation of the search functions.
127
+ def breadth_first_search():
128
+ for v in left:
129
+ if leftmatches[v] is None:
130
+ distances[v] = 0
131
+ queue.append(v)
132
+ else:
133
+ distances[v] = INFINITY
134
+ distances[None] = INFINITY
135
+ while queue:
136
+ v = queue.popleft()
137
+ if distances[v] < distances[None]:
138
+ for u in G[v]:
139
+ if distances[rightmatches[u]] is INFINITY:
140
+ distances[rightmatches[u]] = distances[v] + 1
141
+ queue.append(rightmatches[u])
142
+ return distances[None] is not INFINITY
143
+
144
+ def depth_first_search(v):
145
+ if v is not None:
146
+ for u in G[v]:
147
+ if distances[rightmatches[u]] == distances[v] + 1:
148
+ if depth_first_search(rightmatches[u]):
149
+ rightmatches[u] = v
150
+ leftmatches[v] = u
151
+ return True
152
+ distances[v] = INFINITY
153
+ return False
154
+ return True
155
+
156
+ # Initialize the "global" variables that maintain state during the search.
157
+ left, right = bipartite_sets(G, top_nodes)
158
+ leftmatches = {v: None for v in left}
159
+ rightmatches = {v: None for v in right}
160
+ distances = {}
161
+ queue = collections.deque()
162
+
163
+ # Implementation note: this counter is incremented as pairs are matched but
164
+ # it is currently not used elsewhere in the computation.
165
+ num_matched_pairs = 0
166
+ while breadth_first_search():
167
+ for v in left:
168
+ if leftmatches[v] is None:
169
+ if depth_first_search(v):
170
+ num_matched_pairs += 1
171
+
172
+ # Strip the entries matched to `None`.
173
+ leftmatches = {k: v for k, v in leftmatches.items() if v is not None}
174
+ rightmatches = {k: v for k, v in rightmatches.items() if v is not None}
175
+
176
+ # At this point, the left matches and the right matches are inverses of one
177
+ # another. In other words,
178
+ #
179
+ # leftmatches == {v, k for k, v in rightmatches.items()}
180
+ #
181
+ # Finally, we combine both the left matches and right matches.
182
+ return dict(itertools.chain(leftmatches.items(), rightmatches.items()))
183
+
184
+
185
+ @nx._dispatchable
186
+ def eppstein_matching(G, top_nodes=None):
187
+ """Returns the maximum cardinality matching of the bipartite graph `G`.
188
+
189
+ Parameters
190
+ ----------
191
+ G : NetworkX graph
192
+
193
+ Undirected bipartite graph
194
+
195
+ top_nodes : container
196
+
197
+ Container with all nodes in one bipartite node set. If not supplied
198
+ it will be computed. But if more than one solution exists an exception
199
+ will be raised.
200
+
201
+ Returns
202
+ -------
203
+ matches : dictionary
204
+
205
+ The matching is returned as a dictionary, `matching`, such that
206
+ ``matching[v] == w`` if node `v` is matched to node `w`. Unmatched
207
+ nodes do not occur as a key in `matching`.
208
+
209
+ Raises
210
+ ------
211
+ AmbiguousSolution
212
+ Raised if the input bipartite graph is disconnected and no container
213
+ with all nodes in one bipartite set is provided. When determining
214
+ the nodes in each bipartite set more than one valid solution is
215
+ possible if the input graph is disconnected.
216
+
217
+ Notes
218
+ -----
219
+ This function is implemented with David Eppstein's version of the algorithm
220
+ Hopcroft--Karp algorithm (see :func:`hopcroft_karp_matching`), which
221
+ originally appeared in the `Python Algorithms and Data Structures library
222
+ (PADS) <http://www.ics.uci.edu/~eppstein/PADS/ABOUT-PADS.txt>`_.
223
+
224
+ See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
225
+ for further details on how bipartite graphs are handled in NetworkX.
226
+
227
+ See Also
228
+ --------
229
+
230
+ hopcroft_karp_matching
231
+
232
+ """
233
+ # Due to its original implementation, a directed graph is needed
234
+ # so that the two sets of bipartite nodes can be distinguished
235
+ left, right = bipartite_sets(G, top_nodes)
236
+ G = nx.DiGraph(G.edges(left))
237
+ # initialize greedy matching (redundant, but faster than full search)
238
+ matching = {}
239
+ for u in G:
240
+ for v in G[u]:
241
+ if v not in matching:
242
+ matching[v] = u
243
+ break
244
+ while True:
245
+ # structure residual graph into layers
246
+ # pred[u] gives the neighbor in the previous layer for u in U
247
+ # preds[v] gives a list of neighbors in the previous layer for v in V
248
+ # unmatched gives a list of unmatched vertices in final layer of V,
249
+ # and is also used as a flag value for pred[u] when u is in the first
250
+ # layer
251
+ preds = {}
252
+ unmatched = []
253
+ pred = {u: unmatched for u in G}
254
+ for v in matching:
255
+ del pred[matching[v]]
256
+ layer = list(pred)
257
+
258
+ # repeatedly extend layering structure by another pair of layers
259
+ while layer and not unmatched:
260
+ newLayer = {}
261
+ for u in layer:
262
+ for v in G[u]:
263
+ if v not in preds:
264
+ newLayer.setdefault(v, []).append(u)
265
+ layer = []
266
+ for v in newLayer:
267
+ preds[v] = newLayer[v]
268
+ if v in matching:
269
+ layer.append(matching[v])
270
+ pred[matching[v]] = v
271
+ else:
272
+ unmatched.append(v)
273
+
274
+ # did we finish layering without finding any alternating paths?
275
+ if not unmatched:
276
+ # TODO - The lines between --- were unused and were thus commented
277
+ # out. This whole commented chunk should be reviewed to determine
278
+ # whether it should be built upon or completely removed.
279
+ # ---
280
+ # unlayered = {}
281
+ # for u in G:
282
+ # # TODO Why is extra inner loop necessary?
283
+ # for v in G[u]:
284
+ # if v not in preds:
285
+ # unlayered[v] = None
286
+ # ---
287
+ # TODO Originally, this function returned a three-tuple:
288
+ #
289
+ # return (matching, list(pred), list(unlayered))
290
+ #
291
+ # For some reason, the documentation for this function
292
+ # indicated that the second and third elements of the returned
293
+ # three-tuple would be the vertices in the left and right vertex
294
+ # sets, respectively, that are also in the maximum independent set.
295
+ # However, what I think the author meant was that the second
296
+ # element is the list of vertices that were unmatched and the third
297
+ # element was the list of vertices that were matched. Since that
298
+ # seems to be the case, they don't really need to be returned,
299
+ # since that information can be inferred from the matching
300
+ # dictionary.
301
+
302
+ # All the matched nodes must be a key in the dictionary
303
+ for key in matching.copy():
304
+ matching[matching[key]] = key
305
+ return matching
306
+
307
+ # recursively search backward through layers to find alternating paths
308
+ # recursion returns true if found path, false otherwise
309
+ def recurse(v):
310
+ if v in preds:
311
+ L = preds.pop(v)
312
+ for u in L:
313
+ if u in pred:
314
+ pu = pred.pop(u)
315
+ if pu is unmatched or recurse(pu):
316
+ matching[v] = u
317
+ return True
318
+ return False
319
+
320
+ for v in unmatched:
321
+ recurse(v)
322
+
323
+
324
+ def _is_connected_by_alternating_path(G, v, matched_edges, unmatched_edges, targets):
325
+ """Returns True if and only if the vertex `v` is connected to one of
326
+ the target vertices by an alternating path in `G`.
327
+
328
+ An *alternating path* is a path in which every other edge is in the
329
+ specified maximum matching (and the remaining edges in the path are not in
330
+ the matching). An alternating path may have matched edges in the even
331
+ positions or in the odd positions, as long as the edges alternate between
332
+ 'matched' and 'unmatched'.
333
+
334
+ `G` is an undirected bipartite NetworkX graph.
335
+
336
+ `v` is a vertex in `G`.
337
+
338
+ `matched_edges` is a set of edges present in a maximum matching in `G`.
339
+
340
+ `unmatched_edges` is a set of edges not present in a maximum
341
+ matching in `G`.
342
+
343
+ `targets` is a set of vertices.
344
+
345
+ """
346
+
347
+ def _alternating_dfs(u, along_matched=True):
348
+ """Returns True if and only if `u` is connected to one of the
349
+ targets by an alternating path.
350
+
351
+ `u` is a vertex in the graph `G`.
352
+
353
+ If `along_matched` is True, this step of the depth-first search
354
+ will continue only through edges in the given matching. Otherwise, it
355
+ will continue only through edges *not* in the given matching.
356
+
357
+ """
358
+ visited = set()
359
+ # Follow matched edges when depth is even,
360
+ # and follow unmatched edges when depth is odd.
361
+ initial_depth = 0 if along_matched else 1
362
+ stack = [(u, iter(G[u]), initial_depth)]
363
+ while stack:
364
+ parent, children, depth = stack[-1]
365
+ valid_edges = matched_edges if depth % 2 else unmatched_edges
366
+ try:
367
+ child = next(children)
368
+ if child not in visited:
369
+ if (parent, child) in valid_edges or (child, parent) in valid_edges:
370
+ if child in targets:
371
+ return True
372
+ visited.add(child)
373
+ stack.append((child, iter(G[child]), depth + 1))
374
+ except StopIteration:
375
+ stack.pop()
376
+ return False
377
+
378
+ # Check for alternating paths starting with edges in the matching, then
379
+ # check for alternating paths starting with edges not in the
380
+ # matching.
381
+ return _alternating_dfs(v, along_matched=True) or _alternating_dfs(
382
+ v, along_matched=False
383
+ )
384
+
385
+
386
def _connected_by_alternating_paths(G, matching, targets):
    """Returns the set of vertices that are connected to one of the target
    vertices by an alternating path in `G` or are themselves a target.

    An *alternating path* is a path in which every other edge is in the
    specified maximum matching (and the remaining edges in the path are not
    in the matching). The matched edges may occupy either the even or the
    odd positions, as long as matched and unmatched edges alternate.

    `G` is an undirected bipartite NetworkX graph.

    `matching` is a dictionary representing a maximum matching in `G`, as
    returned by, for example, :func:`maximum_matching`.

    `targets` is a set of vertices.

    """
    # Represent each matched edge as a frozenset so that an undirected edge
    # compares equal regardless of endpoint order, without requiring the
    # nodes themselves to be orderable. Only one orientation of each edge
    # ends up in the resulting sets.
    matched_frozen = {frozenset(pair) for pair in matching.items()}
    matched_edges = {tuple(pair) for pair in matched_frozen}
    unmatched_edges = {
        (u, v) for u, v in G.edges() if frozenset((u, v)) not in matched_frozen
    }

    connected = set()
    for node in G:
        if node in targets or _is_connected_by_alternating_path(
            G, node, matched_edges, unmatched_edges, targets
        ):
            connected.add(node)
    return connected
422
+
423
+
424
@nx._dispatchable
def to_vertex_cover(G, matching, top_nodes=None):
    """Returns the minimum vertex cover corresponding to the given maximum
    matching of the bipartite graph `G`.

    Parameters
    ----------
    G : NetworkX graph

      Undirected bipartite graph

    matching : dictionary

      A dictionary whose keys are vertices in `G` and whose values are the
      distinct neighbors comprising the maximum matching for `G`, as returned
      by, for example, :func:`maximum_matching`. The dictionary *must*
      represent the maximum matching.

    top_nodes : container

      Container with all nodes in one bipartite node set. If not supplied
      it will be computed. But if more than one solution exists an exception
      will be raised.

    Returns
    -------
    vertex_cover : :class:`set`

      The minimum vertex cover in `G`.

    Raises
    ------
    AmbiguousSolution
      Raised if the input bipartite graph is disconnected and no container
      with all nodes in one bipartite set is provided. When determining
      the nodes in each bipartite set more than one valid solution is
      possible if the input graph is disconnected.

    Notes
    -----
    This function is implemented using the procedure guaranteed by `Konig's
    theorem
    <https://en.wikipedia.org/wiki/K%C3%B6nig%27s_theorem_%28graph_theory%29>`_,
    which proves an equivalence between a maximum matching and a minimum vertex
    cover in bipartite graphs.

    Since a minimum vertex cover is the complement of a maximum independent set
    for any graph, one can compute the maximum independent set of a bipartite
    graph this way:

    >>> G = nx.complete_bipartite_graph(2, 3)
    >>> matching = nx.bipartite.maximum_matching(G)
    >>> vertex_cover = nx.bipartite.to_vertex_cover(G, matching)
    >>> independent_set = set(G) - vertex_cover
    >>> print(list(independent_set))
    [2, 3, 4]

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    """
    # Constructive proof of Konig's theorem, as described at
    # <https://en.wikipedia.org/wiki/K%C3%B6nig%27s_theorem_%28graph_theory%29#Proof>.
    L, R = bipartite_sets(G, top_nodes)
    # U: the unmatched vertices of the left vertex set.
    U = {v for v in L if v not in matching}
    # Z: vertices in U, plus vertices connected to U by alternating paths.
    Z = _connected_by_alternating_paths(G, matching, U)
    # Every edge now has either a right endpoint in Z or a left endpoint
    # outside Z, so these vertices form a minimum vertex cover.
    return (L - Z) | (R & Z)
497
+
498
+
499
#: Returns the maximum cardinality matching in the given bipartite graph.
#:
#: This function is simply an alias for :func:`hopcroft_karp_matching`.
maximum_matching = hopcroft_karp_matching
503
+
504
+
505
@nx._dispatchable(edge_attrs="weight")
def minimum_weight_full_matching(G, top_nodes=None, weight="weight"):
    r"""Returns a minimum weight full matching of the bipartite graph `G`.

    Let :math:`G = ((U, V), E)` be a weighted bipartite graph with real weights
    :math:`w : E \to \mathbb{R}`. This function then produces a matching
    :math:`M \subseteq E` with cardinality

    .. math::
       \lvert M \rvert = \min(\lvert U \rvert, \lvert V \rvert),

    which minimizes the sum of the weights of the edges included in the
    matching, :math:`\sum_{e \in M} w(e)`, or raises an error if no such
    matching exists.

    When :math:`\lvert U \rvert = \lvert V \rvert`, this is commonly
    referred to as a perfect matching; here, since we allow
    :math:`\lvert U \rvert` and :math:`\lvert V \rvert` to differ, we
    follow Karp [1]_ and refer to the matching as *full*.

    Parameters
    ----------
    G : NetworkX graph

      Undirected bipartite graph

    top_nodes : container

      Container with all nodes in one bipartite node set. If not supplied
      it will be computed.

    weight : string, optional (default='weight')

       The edge data key used to provide each value in the matrix.
       If None, then each edge has weight 1.

    Returns
    -------
    matches : dictionary

      The matching is returned as a dictionary, `matches`, such that
      ``matches[v] == w`` if node `v` is matched to node `w`. Unmatched
      nodes do not occur as a key in `matches`.

    Raises
    ------
    ValueError
      Raised if no full matching exists.

    ImportError
      Raised if SciPy is not available.

    Notes
    -----
    The problem of determining a minimum weight full matching is also known as
    the rectangular linear assignment problem. This implementation defers the
    calculation of the assignment to SciPy.

    References
    ----------
    .. [1] Richard Manning Karp:
       An algorithm to Solve the m x n Assignment Problem in Expected Time
       O(mn log n).
       Networks, 10(2):143–152, 1980.

    """
    import numpy as np
    import scipy as sp

    left, right = nx.bipartite.sets(G, top_nodes)
    U = list(left)
    V = list(right)
    # We explicitly create the biadjacency matrix having infinities
    # where edges are missing (as opposed to zeros, which is what one would
    # get by using toarray on the sparse matrix). The infinite entries
    # prevent the assignment solver from ever selecting a non-edge.
    weights_sparse = biadjacency_matrix(
        G, row_order=U, column_order=V, weight=weight, format="coo"
    )
    weights = np.full(weights_sparse.shape, np.inf)
    weights[weights_sparse.row, weights_sparse.col] = weights_sparse.data
    # SciPy raises ValueError here when the cost matrix is infeasible,
    # i.e. when no full matching exists; that exception propagates as
    # documented above.
    left_matches = sp.optimize.linear_sum_assignment(weights)
    d = {U[u]: V[v] for u, v in zip(*left_matches)}
    # d will contain the matching from edges in left to right; we need to
    # add the ones from right to left as well.
    d.update({v: u for u, v in d.items()})
    return d
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/matrix.py ADDED
@@ -0,0 +1,168 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ ====================
3
+ Biadjacency matrices
4
+ ====================
5
+ """
6
+
7
+ import itertools
8
+
9
+ import networkx as nx
10
+ from networkx.convert_matrix import _generate_weighted_edges
11
+
12
+ __all__ = ["biadjacency_matrix", "from_biadjacency_matrix"]
13
+
14
+
15
@nx._dispatchable(edge_attrs="weight")
def biadjacency_matrix(
    G, row_order, column_order=None, dtype=None, weight="weight", format="csr"
):
    r"""Returns the biadjacency matrix of the bipartite graph G.

    Let `G = (U, V, E)` be a bipartite graph with node sets
    `U = u_{1},...,u_{r}` and `V = v_{1},...,v_{s}`. The biadjacency
    matrix [1]_ is the `r` x `s` matrix `B` in which `b_{i,j} = 1`
    if, and only if, `(u_i, v_j) \in E`. If the parameter `weight` is
    not `None` and matches the name of an edge attribute, its value is
    used instead of 1.

    Parameters
    ----------
    G : graph
       A NetworkX graph

    row_order : list of nodes
       The rows of the matrix are ordered according to the list of nodes.

    column_order : list, optional
       The columns of the matrix are ordered according to the list of nodes.
       If column_order is None, then the ordering of columns is arbitrary.

    dtype : NumPy data-type, optional
        A valid NumPy dtype used to initialize the array. If None, then the
        NumPy default is used.

    weight : string or None, optional (default='weight')
       The edge data key used to provide each value in the matrix.
       If None, then each edge has weight 1.

    format : str in {'bsr', 'csr', 'csc', 'coo', 'lil', 'dia', 'dok'}
        The type of the matrix to be returned (default 'csr'). For
        some algorithms different implementations of sparse matrices
        can perform better. See [2]_ for details.

    Returns
    -------
    M : SciPy sparse array
        Biadjacency matrix representation of the bipartite graph G.

    Notes
    -----
    No attempt is made to check that the input graph is bipartite.

    For directed bipartite graphs only successors are considered as neighbors.
    To obtain an adjacency matrix with ones (or weight values) for both
    predecessors and successors you have to generate two biadjacency matrices
    where the rows of one of them are the columns of the other, and then add
    one to the transpose of the other.

    See Also
    --------
    adjacency_matrix
    from_biadjacency_matrix

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph
    .. [2] Scipy Dev. References, "Sparse Matrices",
       https://docs.scipy.org/doc/scipy/reference/sparse.html
    """
    import scipy as sp

    nlen = len(row_order)
    if nlen == 0:
        raise nx.NetworkXError("row_order is empty list")
    if len(row_order) != len(set(row_order)):
        msg = "Ambiguous ordering: `row_order` contained duplicates."
        raise nx.NetworkXError(msg)
    if column_order is None:
        column_order = list(set(G) - set(row_order))
    mlen = len(column_order)
    if len(column_order) != len(set(column_order)):
        msg = "Ambiguous ordering: `column_order` contained duplicates."
        raise nx.NetworkXError(msg)

    row_index = dict(zip(row_order, itertools.count()))
    col_index = dict(zip(column_order, itertools.count()))

    # Collect (row, column, value) triples for edges that fall inside the
    # requested submatrix.  Materializing the list (instead of unpacking a
    # generator with ``zip(*...)``) avoids a ValueError when the graph has
    # edges but none of them connect a row node to a column node (e.g. when
    # the caller passes a `column_order` that omits some neighbors).
    triples = [
        (row_index[u], col_index[v], d.get(weight, 1))
        for u, v, d in G.edges(row_order, data=True)
        if u in row_index and v in col_index
    ]
    if triples:
        row, col, data = zip(*triples)
    else:
        row, col, data = [], [], []
    A = sp.sparse.coo_array((data, (row, col)), shape=(nlen, mlen), dtype=dtype)
    try:
        return A.asformat(format)
    except ValueError as err:
        raise nx.NetworkXError(f"Unknown sparse array format: {format}") from err
112
+
113
+
114
@nx._dispatchable(graphs=None, returns_graph=True)
def from_biadjacency_matrix(A, create_using=None, edge_attribute="weight"):
    r"""Creates a new bipartite graph from a biadjacency matrix given as a
    SciPy sparse array.

    Parameters
    ----------
    A: scipy sparse array
      A biadjacency matrix representation of a graph

    create_using: NetworkX graph
       Use specified graph for result. The default is Graph()

    edge_attribute: string
       Name of edge attribute to store matrix numeric value. The data will
       have the same type as the matrix entry (int, float, (real,imag)).

    Notes
    -----
    The nodes are labeled with the attribute `bipartite` set to an integer
    0 or 1 representing membership in part 0 or part 1 of the bipartite graph.

    If `create_using` is an instance of :class:`networkx.MultiGraph` or
    :class:`networkx.MultiDiGraph` and the entries of `A` are of
    type :class:`int`, then this function returns a multigraph (of the same
    type as `create_using`) with parallel edges. In this case, `edge_attribute`
    will be ignored.

    See Also
    --------
    biadjacency_matrix
    from_numpy_array

    References
    ----------
    [1] https://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph
    """
    G = nx.empty_graph(0, create_using)
    n, m = A.shape
    # Make sure we get even the isolated nodes of the graph.
    # Rows of A become part-0 nodes 0..n-1; columns become part-1 nodes
    # n..n+m-1 (column j is relabeled to node n + j).
    G.add_nodes_from(range(n), bipartite=0)
    G.add_nodes_from(range(n, n + m), bipartite=1)
    # Create an iterable over (u, v, w) triples and for each triple, add an
    # edge from u to v with weight w.
    triples = ((u, n + v, d) for (u, v, d) in _generate_weighted_edges(A))
    # If the entries in the adjacency matrix are integers and the graph is a
    # multigraph, then create parallel edges, each with weight 1, for each
    # entry in the adjacency matrix. Otherwise, create one edge for each
    # positive entry in the adjacency matrix and set the weight of that edge to
    # be the entry in the matrix.
    if A.dtype.kind in ("i", "u") and G.is_multigraph():
        chain = itertools.chain.from_iterable
        # An integer entry w expands into w parallel unit-weight edges.
        triples = chain(((u, v, 1) for d in range(w)) for (u, v, w) in triples)
    G.add_weighted_edges_from(triples, weight=edge_attribute)
    return G
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/projection.py ADDED
@@ -0,0 +1,526 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """One-mode (unipartite) projections of bipartite graphs."""
2
+
3
+ import networkx as nx
4
+ from networkx.exception import NetworkXAlgorithmError
5
+ from networkx.utils import not_implemented_for
6
+
7
+ __all__ = [
8
+ "projected_graph",
9
+ "weighted_projected_graph",
10
+ "collaboration_weighted_projected_graph",
11
+ "overlap_weighted_projected_graph",
12
+ "generic_weighted_projected_graph",
13
+ ]
14
+
15
+
16
@nx._dispatchable(
    graphs="B", preserve_node_attrs=True, preserve_graph_attrs=True, returns_graph=True
)
def projected_graph(B, nodes, multigraph=False):
    r"""Returns the projection of B onto one of its node sets.

    Returns the graph G that is the projection of the bipartite graph B
    onto the specified nodes. They retain their attributes and are connected
    in G if they have a common neighbor in B.

    Parameters
    ----------
    B : NetworkX graph
      The input graph should be bipartite.

    nodes : list or iterable
      Nodes to project onto (the "bottom" nodes).

    multigraph: bool (default=False)
       If True return a multigraph where the multiple edges represent multiple
       shared neighbors. They edge key in the multigraph is assigned to the
       label of the neighbor.

    Returns
    -------
    Graph : NetworkX graph or multigraph
       A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> B = nx.path_graph(4)
    >>> G = bipartite.projected_graph(B, [1, 3])
    >>> list(G)
    [1, 3]
    >>> list(G.edges())
    [(1, 3)]

    If nodes `a`, and `b` are connected through both nodes 1 and 2 then
    building a multigraph results in two edges in the projection onto
    [`a`, `b`]:

    >>> B = nx.Graph()
    >>> B.add_edges_from([("a", 1), ("b", 1), ("a", 2), ("b", 2)])
    >>> G = bipartite.projected_graph(B, ["a", "b"], multigraph=True)
    >>> print([sorted((u, v)) for u, v in G.edges()])
    [['a', 'b'], ['a', 'b']]

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite.
    Returns a simple graph that is the projection of the bipartite graph B
    onto the set of nodes given in list nodes. If multigraph=True then
    a multigraph is returned with an edge for every shared neighbor.

    Directed graphs are allowed as input. The output will also then
    be a directed graph with edges if there is a directed path between
    the nodes.

    The graph and node properties are (shallow) copied to the projected graph.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    is_bipartite,
    is_bipartite_node_set,
    sets,
    weighted_projected_graph,
    collaboration_weighted_projected_graph,
    overlap_weighted_projected_graph,
    generic_weighted_projected_graph
    """
    if B.is_multigraph():
        raise nx.NetworkXError("not defined for multigraphs")
    # Choose the output graph class to match the directedness of B and the
    # requested multigraph-ness of the projection.
    if B.is_directed():
        directed = True
        if multigraph:
            G = nx.MultiDiGraph()
        else:
            G = nx.DiGraph()
    else:
        directed = False
        if multigraph:
            G = nx.MultiGraph()
        else:
            G = nx.Graph()
    G.graph.update(B.graph)
    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
    for u in nodes:
        # nbrs2: nodes at distance exactly two from u (u's second-order
        # neighborhood, excluding u itself).
        nbrs2 = {v for nbr in B[u] for v in B[nbr] if v != u}
        if multigraph:
            for n in nbrs2:
                # The shared top nodes linking u and n become the edge keys
                # of the parallel edges in the multigraph projection.
                if directed:
                    links = set(B[u]) & set(B.pred[n])
                else:
                    links = set(B[u]) & set(B[n])
                for l in links:
                    if not G.has_edge(u, n, l):
                        G.add_edge(u, n, key=l)
        else:
            G.add_edges_from((u, n) for n in nbrs2)
    return G
120
+
121
+
122
@not_implemented_for("multigraph")
@nx._dispatchable(graphs="B", returns_graph=True)
def weighted_projected_graph(B, nodes, ratio=False):
    r"""Returns a weighted projection of B onto one of its node sets.

    The weighted projected graph is the projection of the bipartite
    network B onto the specified nodes with weights representing the
    number of shared neighbors or the ratio between actual shared
    neighbors and possible shared neighbors if ``ratio is True`` [1]_.
    The nodes retain their attributes and are connected in the resulting
    graph if they have an edge to a common node in the original graph.

    Parameters
    ----------
    B : NetworkX graph
        The input graph should be bipartite.

    nodes : list or iterable
        Distinct nodes to project onto (the "bottom" nodes).

    ratio: Bool (default=False)
        If True, edge weight is the ratio between actual shared neighbors
        and maximum possible shared neighbors (i.e., the size of the other
        node set). If False, edges weight is the number of shared neighbors.

    Returns
    -------
    Graph : NetworkX graph
       A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> B = nx.path_graph(4)
    >>> G = bipartite.weighted_projected_graph(B, [1, 3])
    >>> list(G)
    [1, 3]
    >>> list(G.edges(data=True))
    [(1, 3, {'weight': 1})]
    >>> G = bipartite.weighted_projected_graph(B, [1, 3], ratio=True)
    >>> list(G.edges(data=True))
    [(1, 3, {'weight': 0.5})]

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite, or that
    the input nodes are distinct. However, if the length of the input nodes is
    greater than or equal to the nodes in the graph B, an exception is raised.
    If the nodes are not distinct but don't raise this error, the output weights
    will be incorrect.
    The graph and node properties are (shallow) copied to the projected graph.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    is_bipartite,
    is_bipartite_node_set,
    sets,
    collaboration_weighted_projected_graph,
    overlap_weighted_projected_graph,
    generic_weighted_projected_graph
    projected_graph

    References
    ----------
    .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
        Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
        of Social Network Analysis. Sage Publications.
    """
    # For directed graphs the shared neighbors of (u, v) are the successors
    # of u that are also predecessors of v.
    if B.is_directed():
        predecessors = B.pred
        G = nx.DiGraph()
    else:
        predecessors = B.adj
        G = nx.Graph()
    G.graph.update(B.graph)
    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
    # Size of the opposite ("top") node set, used as the denominator when
    # ratio weights are requested.
    n_top = len(B) - len(nodes)

    if n_top < 1:
        raise NetworkXAlgorithmError(
            f"the size of the nodes to project onto ({len(nodes)}) is >= the graph size ({len(B)}).\n"
            "They are either not a valid bipartite partition or contain duplicates"
        )

    for u in nodes:
        u_nbrs = set(B[u])
        # Candidate projection partners: nodes two steps away from u.
        second_order = {w for nbr in u_nbrs for w in B[nbr]} - {u}
        for v in second_order:
            shared = u_nbrs & set(predecessors[v])
            G.add_edge(u, v, weight=len(shared) / n_top if ratio else len(shared))
    return G
221
+
222
+
223
@not_implemented_for("multigraph")
@nx._dispatchable(graphs="B", returns_graph=True)
def collaboration_weighted_projected_graph(B, nodes):
    r"""Newman's weighted projection of B onto one of its node sets.

    The collaboration weighted projection is the projection of the
    bipartite network B onto the specified nodes with weights assigned
    using Newman's collaboration model [1]_:

    .. math::

        w_{u, v} = \sum_k \frac{\delta_{u}^{k} \delta_{v}^{k}}{d_k - 1}

    where `u` and `v` are nodes from the bottom bipartite node set,
    and `k` is a node of the top node set.
    The value `d_k` is the degree of node `k` in the bipartite
    network and `\delta_{u}^{k}` is 1 if node `u` is
    linked to node `k` in the original bipartite graph or 0 otherwise.

    The nodes retain their attributes and are connected in the resulting
    graph if have an edge to a common node in the original bipartite
    graph.

    Parameters
    ----------
    B : NetworkX graph
      The input graph should be bipartite.

    nodes : list or iterable
      Nodes to project onto (the "bottom" nodes).

    Returns
    -------
    Graph : NetworkX graph
       A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> B = nx.path_graph(5)
    >>> B.add_edge(1, 5)
    >>> G = bipartite.collaboration_weighted_projected_graph(B, [0, 2, 4, 5])
    >>> list(G)
    [0, 2, 4, 5]
    >>> for edge in sorted(G.edges(data=True)):
    ...     print(edge)
    (0, 2, {'weight': 0.5})
    (0, 5, {'weight': 0.5})
    (2, 4, {'weight': 1.0})
    (2, 5, {'weight': 0.5})

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite.
    The graph and node properties are (shallow) copied to the projected graph.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    is_bipartite,
    is_bipartite_node_set,
    sets,
    weighted_projected_graph,
    overlap_weighted_projected_graph,
    generic_weighted_projected_graph,
    projected_graph

    References
    ----------
    .. [1] Scientific collaboration networks: II.
        Shortest paths, weighted networks, and centrality,
        M. E. J. Newman, Phys. Rev. E 64, 016132 (2001).
    """
    # For directed graphs the shared top nodes of (u, v) are the successors
    # of u that are also predecessors of v.
    if B.is_directed():
        predecessors = B.pred
        G = nx.DiGraph()
    else:
        predecessors = B.adj
        G = nx.Graph()
    G.graph.update(B.graph)
    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
    for u in nodes:
        u_nbrs = set(B[u])
        # Candidate projection partners: nodes two steps away from u.
        second_order = {w for nbr in u_nbrs for w in B[nbr] if w != u}
        for v in second_order:
            shared = u_nbrs & set(predecessors[v])
            # Each shared top node k contributes 1 / (deg(k) - 1); shared
            # top nodes of degree one contribute nothing.
            wt = sum(1.0 / (len(B[k]) - 1) for k in shared if len(B[k]) > 1)
            G.add_edge(u, v, weight=wt)
    return G
315
+
316
+
317
@not_implemented_for("multigraph")
@nx._dispatchable(graphs="B", returns_graph=True)
def overlap_weighted_projected_graph(B, nodes, jaccard=True):
    r"""Overlap weighted projection of B onto one of its node sets.

    The overlap weighted projection is the projection of the bipartite
    network B onto the specified nodes with weights representing
    the Jaccard index between the neighborhoods of the two nodes in the
    original bipartite network [1]_:

    .. math::

        w_{v, u} = \frac{|N(u) \cap N(v)|}{|N(u) \cup N(v)|}

    or if the parameter 'jaccard' is False, the fraction of common
    neighbors by minimum of both nodes degree in the original
    bipartite graph [1]_:

    .. math::

        w_{v, u} = \frac{|N(u) \cap N(v)|}{min(|N(u)|, |N(v)|)}

    The nodes retain their attributes and are connected in the resulting
    graph if have an edge to a common node in the original bipartite graph.

    Parameters
    ----------
    B : NetworkX graph
        The input graph should be bipartite.

    nodes : list or iterable
        Nodes to project onto (the "bottom" nodes).

    jaccard: Bool (default=True)

    Returns
    -------
    Graph : NetworkX graph
       A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> B = nx.path_graph(5)
    >>> nodes = [0, 2, 4]
    >>> G = bipartite.overlap_weighted_projected_graph(B, nodes)
    >>> list(G)
    [0, 2, 4]
    >>> list(G.edges(data=True))
    [(0, 2, {'weight': 0.5}), (2, 4, {'weight': 0.5})]
    >>> G = bipartite.overlap_weighted_projected_graph(B, nodes, jaccard=False)
    >>> list(G.edges(data=True))
    [(0, 2, {'weight': 1.0}), (2, 4, {'weight': 1.0})]

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite.
    The graph and node properties are (shallow) copied to the projected graph.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    is_bipartite,
    is_bipartite_node_set,
    sets,
    weighted_projected_graph,
    collaboration_weighted_projected_graph,
    generic_weighted_projected_graph,
    projected_graph

    References
    ----------
    .. [1] Borgatti, S.P. and Halgin, D. In press. Analyzing Affiliation
        Networks. In Carrington, P. and Scott, J. (eds) The Sage Handbook
        of Social Network Analysis. Sage Publications.

    """
    # For directed graphs the shared neighbors of (u, v) are the successors
    # of u that are also predecessors of v.
    if B.is_directed():
        predecessors = B.pred
        G = nx.DiGraph()
    else:
        predecessors = B.adj
        G = nx.Graph()
    G.graph.update(B.graph)
    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
    for u in nodes:
        u_nbrs = set(B[u])
        # Candidate projection partners: nodes two steps away from u.
        second_order = {w for nbr in u_nbrs for w in B[nbr]} - {u}
        for v in second_order:
            v_nbrs = set(predecessors[v])
            overlap = len(u_nbrs & v_nbrs)
            denom = len(u_nbrs | v_nbrs) if jaccard else min(len(u_nbrs), len(v_nbrs))
            G.add_edge(u, v, weight=overlap / denom)
    return G
415
+
416
+
417
@not_implemented_for("multigraph")
@nx._dispatchable(graphs="B", preserve_all_attrs=True, returns_graph=True)
def generic_weighted_projected_graph(B, nodes, weight_function=None):
    r"""Weighted projection of B with a user-specified weight function.

    The bipartite network B is projected on to the specified nodes
    with weights computed by a user-specified function. This function
    must accept as a parameter the neighborhood sets of two nodes and
    return an integer or a float.

    The nodes retain their attributes and are connected in the resulting graph
    if they have an edge to a common node in the original graph.

    Parameters
    ----------
    B : NetworkX graph
        The input graph should be bipartite.

    nodes : list or iterable
        Nodes to project onto (the "bottom" nodes).

    weight_function : function
        This function must accept as parameters the same input graph
        that this function, and two nodes; and return an integer or a float.
        The default function computes the number of shared neighbors.

    Returns
    -------
    Graph : NetworkX graph
       A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> # Define some custom weight functions
    >>> def jaccard(G, u, v):
    ...     unbrs = set(G[u])
    ...     vnbrs = set(G[v])
    ...     return float(len(unbrs & vnbrs)) / len(unbrs | vnbrs)
    >>> def my_weight(G, u, v, weight="weight"):
    ...     w = 0
    ...     for nbr in set(G[u]) & set(G[v]):
    ...         w += G[u][nbr].get(weight, 1) + G[v][nbr].get(weight, 1)
    ...     return w
    >>> # A complete bipartite graph with 4 nodes and 4 edges
    >>> B = nx.complete_bipartite_graph(2, 2)
    >>> # Add some arbitrary weight to the edges
    >>> for i, (u, v) in enumerate(B.edges()):
    ...     B.edges[u, v]["weight"] = i + 1
    >>> for edge in B.edges(data=True):
    ...     print(edge)
    (0, 2, {'weight': 1})
    (0, 3, {'weight': 2})
    (1, 2, {'weight': 3})
    (1, 3, {'weight': 4})
    >>> # By default, the weight is the number of shared neighbors
    >>> G = bipartite.generic_weighted_projected_graph(B, [0, 1])
    >>> print(list(G.edges(data=True)))
    [(0, 1, {'weight': 2})]
    >>> # To specify a custom weight function use the weight_function parameter
    >>> G = bipartite.generic_weighted_projected_graph(
    ...     B, [0, 1], weight_function=jaccard
    ... )
    >>> print(list(G.edges(data=True)))
    [(0, 1, {'weight': 1.0})]
    >>> G = bipartite.generic_weighted_projected_graph(
    ...     B, [0, 1], weight_function=my_weight
    ... )
    >>> print(list(G.edges(data=True)))
    [(0, 1, {'weight': 10})]

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite.
    The graph and node properties are (shallow) copied to the projected graph.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    is_bipartite,
    is_bipartite_node_set,
    sets,
    weighted_projected_graph,
    collaboration_weighted_projected_graph,
    overlap_weighted_projected_graph,
    projected_graph

    """
    if B.is_directed():
        pred = B.pred
        G = nx.DiGraph()
    else:
        pred = B.adj
        G = nx.Graph()
    if weight_function is None:
        # Default weight: number of shared neighbors.  ``pred`` (rather
        # than ``G[v]``) is used so the directed case counts the common
        # in-neighbors of ``v``.
        def weight_function(G, u, v):
            return len(set(G[u]) & set(pred[v]))

    G.graph.update(B.graph)
    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
    for u in nodes:
        # Candidate projection partners: nodes two steps away from u.
        candidates = {w for nbr in B[u] for w in B[nbr]} - {u}
        for v in candidates:
            G.add_edge(u, v, weight=weight_function(B, u, v))
    return G
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/redundancy.py ADDED
@@ -0,0 +1,112 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Node redundancy for bipartite graphs."""
2
+
3
+ from itertools import combinations
4
+
5
+ import networkx as nx
6
+ from networkx import NetworkXError
7
+
8
+ __all__ = ["node_redundancy"]
9
+
10
+
11
+ @nx._dispatchable
12
+ def node_redundancy(G, nodes=None):
13
+ r"""Computes the node redundancy coefficients for the nodes in the bipartite
14
+ graph `G`.
15
+
16
+ The redundancy coefficient of a node `v` is the fraction of pairs of
17
+ neighbors of `v` that are both linked to other nodes. In a one-mode
18
+ projection these nodes would be linked together even if `v` were
19
+ not there.
20
+
21
+ More formally, for any vertex `v`, the *redundancy coefficient of `v`* is
22
+ defined by
23
+
24
+ .. math::
25
+
26
+ rc(v) = \frac{|\{\{u, w\} \subseteq N(v),
27
+ \: \exists v' \neq v,\: (v',u) \in E\:
28
+ \mathrm{and}\: (v',w) \in E\}|}{ \frac{|N(v)|(|N(v)|-1)}{2}},
29
+
30
+ where `N(v)` is the set of neighbors of `v` in `G`.
31
+
32
+ Parameters
33
+ ----------
34
+ G : graph
35
+ A bipartite graph
36
+
37
+ nodes : list or iterable (optional)
38
+ Compute redundancy for these nodes. The default is all nodes in G.
39
+
40
+ Returns
41
+ -------
42
+ redundancy : dictionary
43
+ A dictionary keyed by node with the node redundancy value.
44
+
45
+ Examples
46
+ --------
47
+ Compute the redundancy coefficient of each node in a graph::
48
+
49
+ >>> from networkx.algorithms import bipartite
50
+ >>> G = nx.cycle_graph(4)
51
+ >>> rc = bipartite.node_redundancy(G)
52
+ >>> rc[0]
53
+ 1.0
54
+
55
+ Compute the average redundancy for the graph::
56
+
57
+ >>> from networkx.algorithms import bipartite
58
+ >>> G = nx.cycle_graph(4)
59
+ >>> rc = bipartite.node_redundancy(G)
60
+ >>> sum(rc.values()) / len(G)
61
+ 1.0
62
+
63
+ Compute the average redundancy for a set of nodes::
64
+
65
+ >>> from networkx.algorithms import bipartite
66
+ >>> G = nx.cycle_graph(4)
67
+ >>> rc = bipartite.node_redundancy(G)
68
+ >>> nodes = [0, 2]
69
+ >>> sum(rc[n] for n in nodes) / len(nodes)
70
+ 1.0
71
+
72
+ Raises
73
+ ------
74
+ NetworkXError
75
+ If any of the nodes in the graph (or in `nodes`, if specified) has
76
+ (out-)degree less than two (which would result in division by zero,
77
+ according to the definition of the redundancy coefficient).
78
+
79
+ References
80
+ ----------
81
+ .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
82
+ Basic notions for the analysis of large two-mode networks.
83
+ Social Networks 30(1), 31--48.
84
+
85
+ """
86
+ if nodes is None:
87
+ nodes = G
88
+ if any(len(G[v]) < 2 for v in nodes):
89
+ raise NetworkXError(
90
+ "Cannot compute redundancy coefficient for a node"
91
+ " that has fewer than two neighbors."
92
+ )
93
+ # TODO This can be trivially parallelized.
94
+ return {v: _node_redundancy(G, v) for v in nodes}
95
+
96
+
97
+ def _node_redundancy(G, v):
98
+ """Returns the redundancy of the node `v` in the bipartite graph `G`.
99
+
100
+ If `G` is a graph with `n` nodes, the redundancy of a node is the ratio
101
+ of the "overlap" of `v` to the maximum possible overlap of `v`
102
+ according to its degree. The overlap of `v` is the number of pairs of
103
+ neighbors that have mutual neighbors themselves, other than `v`.
104
+
105
+ `v` must have at least two neighbors in `G`.
106
+
107
+ """
108
+ n = len(G[v])
109
+ overlap = sum(
110
+ 1 for (u, w) in combinations(G[v], 2) if (set(G[u]) & set(G[w])) - {v}
111
+ )
112
+ return (2 * overlap) / (n * (n - 1))
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/spectral.py ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Spectral bipartivity measure.
3
+ """
4
+
5
+ import networkx as nx
6
+
7
+ __all__ = ["spectral_bipartivity"]
8
+
9
+
10
+ @nx._dispatchable(edge_attrs="weight")
11
+ def spectral_bipartivity(G, nodes=None, weight="weight"):
12
+ """Returns the spectral bipartivity.
13
+
14
+ Parameters
15
+ ----------
16
+ G : NetworkX graph
17
+
18
+ nodes : list or container optional(default is all nodes)
19
+ Nodes to return value of spectral bipartivity contribution.
20
+
21
+ weight : string or None optional (default = 'weight')
22
+ Edge data key to use for edge weights. If None, weights set to 1.
23
+
24
+ Returns
25
+ -------
26
+ sb : float or dict
27
+ A single number if the keyword nodes is not specified, or
28
+ a dictionary keyed by node with the spectral bipartivity contribution
29
+ of that node as the value.
30
+
31
+ Examples
32
+ --------
33
+ >>> from networkx.algorithms import bipartite
34
+ >>> G = nx.path_graph(4)
35
+ >>> bipartite.spectral_bipartivity(G)
36
+ 1.0
37
+
38
+ Notes
39
+ -----
40
+ This implementation uses Numpy (dense) matrices which are not efficient
41
+ for storing large sparse graphs.
42
+
43
+ See Also
44
+ --------
45
+ color
46
+
47
+ References
48
+ ----------
49
+ .. [1] E. Estrada and J. A. Rodríguez-Velázquez, "Spectral measures of
50
+ bipartivity in complex networks", PhysRev E 72, 046105 (2005)
51
+ """
52
+ import scipy as sp
53
+
54
+ nodelist = list(G) # ordering of nodes in matrix
55
+ A = nx.to_numpy_array(G, nodelist, weight=weight)
56
+ expA = sp.linalg.expm(A)
57
+ expmA = sp.linalg.expm(-A)
58
+ coshA = 0.5 * (expA + expmA)
59
+ if nodes is None:
60
+ # return single number for entire graph
61
+ return float(coshA.diagonal().sum() / expA.diagonal().sum())
62
+ else:
63
+ # contribution for individual nodes
64
+ index = dict(zip(nodelist, range(len(nodelist))))
65
+ sb = {}
66
+ for n in nodes:
67
+ i = index[n]
68
+ sb[n] = coshA.item(i, i) / expA.item(i, i)
69
+ return sb
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__init__.py ADDED
File without changes
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (192 Bytes). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_basic.cpython-310.pyc ADDED
Binary file (6.06 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_centrality.cpython-310.pyc ADDED
Binary file (5.33 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_cluster.cpython-310.pyc ADDED
Binary file (3.35 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_covering.cpython-310.pyc ADDED
Binary file (1.8 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_edgelist.cpython-310.pyc ADDED
Binary file (8.06 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_extendability.cpython-310.pyc ADDED
Binary file (5.23 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_generators.cpython-310.pyc ADDED
Binary file (9.96 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matching.cpython-310.pyc ADDED
Binary file (12.4 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matrix.cpython-310.pyc ADDED
Binary file (4.7 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_project.cpython-310.pyc ADDED
Binary file (11.7 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_redundancy.cpython-310.pyc ADDED
Binary file (1.41 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_spectral_bipartivity.cpython-310.pyc ADDED
Binary file (2.19 kB). View file
 
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_basic.py ADDED
@@ -0,0 +1,125 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms import bipartite
5
+
6
+
7
+ class TestBipartiteBasic:
8
+ def test_is_bipartite(self):
9
+ assert bipartite.is_bipartite(nx.path_graph(4))
10
+ assert bipartite.is_bipartite(nx.DiGraph([(1, 0)]))
11
+ assert not bipartite.is_bipartite(nx.complete_graph(3))
12
+
13
+ def test_bipartite_color(self):
14
+ G = nx.path_graph(4)
15
+ c = bipartite.color(G)
16
+ assert c == {0: 1, 1: 0, 2: 1, 3: 0}
17
+
18
+ def test_not_bipartite_color(self):
19
+ with pytest.raises(nx.NetworkXError):
20
+ c = bipartite.color(nx.complete_graph(4))
21
+
22
+ def test_bipartite_directed(self):
23
+ G = bipartite.random_graph(10, 10, 0.1, directed=True)
24
+ assert bipartite.is_bipartite(G)
25
+
26
+ def test_bipartite_sets(self):
27
+ G = nx.path_graph(4)
28
+ X, Y = bipartite.sets(G)
29
+ assert X == {0, 2}
30
+ assert Y == {1, 3}
31
+
32
+ def test_bipartite_sets_directed(self):
33
+ G = nx.path_graph(4)
34
+ D = G.to_directed()
35
+ X, Y = bipartite.sets(D)
36
+ assert X == {0, 2}
37
+ assert Y == {1, 3}
38
+
39
+ def test_bipartite_sets_given_top_nodes(self):
40
+ G = nx.path_graph(4)
41
+ top_nodes = [0, 2]
42
+ X, Y = bipartite.sets(G, top_nodes)
43
+ assert X == {0, 2}
44
+ assert Y == {1, 3}
45
+
46
+ def test_bipartite_sets_disconnected(self):
47
+ with pytest.raises(nx.AmbiguousSolution):
48
+ G = nx.path_graph(4)
49
+ G.add_edges_from([(5, 6), (6, 7)])
50
+ X, Y = bipartite.sets(G)
51
+
52
+ def test_is_bipartite_node_set(self):
53
+ G = nx.path_graph(4)
54
+
55
+ with pytest.raises(nx.AmbiguousSolution):
56
+ bipartite.is_bipartite_node_set(G, [1, 1, 2, 3])
57
+
58
+ assert bipartite.is_bipartite_node_set(G, [0, 2])
59
+ assert bipartite.is_bipartite_node_set(G, [1, 3])
60
+ assert not bipartite.is_bipartite_node_set(G, [1, 2])
61
+ G.add_edge(10, 20)
62
+ assert bipartite.is_bipartite_node_set(G, [0, 2, 10])
63
+ assert bipartite.is_bipartite_node_set(G, [0, 2, 20])
64
+ assert bipartite.is_bipartite_node_set(G, [1, 3, 10])
65
+ assert bipartite.is_bipartite_node_set(G, [1, 3, 20])
66
+
67
+ def test_bipartite_density(self):
68
+ G = nx.path_graph(5)
69
+ X, Y = bipartite.sets(G)
70
+ density = len(list(G.edges())) / (len(X) * len(Y))
71
+ assert bipartite.density(G, X) == density
72
+ D = nx.DiGraph(G.edges())
73
+ assert bipartite.density(D, X) == density / 2.0
74
+ assert bipartite.density(nx.Graph(), {}) == 0.0
75
+
76
+ def test_bipartite_degrees(self):
77
+ G = nx.path_graph(5)
78
+ X = {1, 3}
79
+ Y = {0, 2, 4}
80
+ u, d = bipartite.degrees(G, Y)
81
+ assert dict(u) == {1: 2, 3: 2}
82
+ assert dict(d) == {0: 1, 2: 2, 4: 1}
83
+
84
+ def test_bipartite_weighted_degrees(self):
85
+ G = nx.path_graph(5)
86
+ G.add_edge(0, 1, weight=0.1, other=0.2)
87
+ X = {1, 3}
88
+ Y = {0, 2, 4}
89
+ u, d = bipartite.degrees(G, Y, weight="weight")
90
+ assert dict(u) == {1: 1.1, 3: 2}
91
+ assert dict(d) == {0: 0.1, 2: 2, 4: 1}
92
+ u, d = bipartite.degrees(G, Y, weight="other")
93
+ assert dict(u) == {1: 1.2, 3: 2}
94
+ assert dict(d) == {0: 0.2, 2: 2, 4: 1}
95
+
96
+ def test_biadjacency_matrix_weight(self):
97
+ pytest.importorskip("scipy")
98
+ G = nx.path_graph(5)
99
+ G.add_edge(0, 1, weight=2, other=4)
100
+ X = [1, 3]
101
+ Y = [0, 2, 4]
102
+ M = bipartite.biadjacency_matrix(G, X, weight="weight")
103
+ assert M[0, 0] == 2
104
+ M = bipartite.biadjacency_matrix(G, X, weight="other")
105
+ assert M[0, 0] == 4
106
+
107
+ def test_biadjacency_matrix(self):
108
+ pytest.importorskip("scipy")
109
+ tops = [2, 5, 10]
110
+ bots = [5, 10, 15]
111
+ for i in range(len(tops)):
112
+ G = bipartite.random_graph(tops[i], bots[i], 0.2)
113
+ top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0]
114
+ M = bipartite.biadjacency_matrix(G, top)
115
+ assert M.shape[0] == tops[i]
116
+ assert M.shape[1] == bots[i]
117
+
118
+ def test_biadjacency_matrix_order(self):
119
+ pytest.importorskip("scipy")
120
+ G = nx.path_graph(5)
121
+ G.add_edge(0, 1, weight=2)
122
+ X = [3, 1]
123
+ Y = [4, 2, 0]
124
+ M = bipartite.biadjacency_matrix(G, X, Y, weight="weight")
125
+ assert M[1, 2] == 2
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_centrality.py ADDED
@@ -0,0 +1,192 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms import bipartite
5
+
6
+
7
+ class TestBipartiteCentrality:
8
+ @classmethod
9
+ def setup_class(cls):
10
+ cls.P4 = nx.path_graph(4)
11
+ cls.K3 = nx.complete_bipartite_graph(3, 3)
12
+ cls.C4 = nx.cycle_graph(4)
13
+ cls.davis = nx.davis_southern_women_graph()
14
+ cls.top_nodes = [
15
+ n for n, d in cls.davis.nodes(data=True) if d["bipartite"] == 0
16
+ ]
17
+
18
+ def test_degree_centrality(self):
19
+ d = bipartite.degree_centrality(self.P4, [1, 3])
20
+ answer = {0: 0.5, 1: 1.0, 2: 1.0, 3: 0.5}
21
+ assert d == answer
22
+ d = bipartite.degree_centrality(self.K3, [0, 1, 2])
23
+ answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0}
24
+ assert d == answer
25
+ d = bipartite.degree_centrality(self.C4, [0, 2])
26
+ answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}
27
+ assert d == answer
28
+
29
+ def test_betweenness_centrality(self):
30
+ c = bipartite.betweenness_centrality(self.P4, [1, 3])
31
+ answer = {0: 0.0, 1: 1.0, 2: 1.0, 3: 0.0}
32
+ assert c == answer
33
+ c = bipartite.betweenness_centrality(self.K3, [0, 1, 2])
34
+ answer = {0: 0.125, 1: 0.125, 2: 0.125, 3: 0.125, 4: 0.125, 5: 0.125}
35
+ assert c == answer
36
+ c = bipartite.betweenness_centrality(self.C4, [0, 2])
37
+ answer = {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25}
38
+ assert c == answer
39
+
40
+ def test_closeness_centrality(self):
41
+ c = bipartite.closeness_centrality(self.P4, [1, 3])
42
+ answer = {0: 2.0 / 3, 1: 1.0, 2: 1.0, 3: 2.0 / 3}
43
+ assert c == answer
44
+ c = bipartite.closeness_centrality(self.K3, [0, 1, 2])
45
+ answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0}
46
+ assert c == answer
47
+ c = bipartite.closeness_centrality(self.C4, [0, 2])
48
+ answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}
49
+ assert c == answer
50
+ G = nx.Graph()
51
+ G.add_node(0)
52
+ G.add_node(1)
53
+ c = bipartite.closeness_centrality(G, [0])
54
+ assert c == {0: 0.0, 1: 0.0}
55
+ c = bipartite.closeness_centrality(G, [1])
56
+ assert c == {0: 0.0, 1: 0.0}
57
+
58
+ def test_bipartite_closeness_centrality_unconnected(self):
59
+ G = nx.complete_bipartite_graph(3, 3)
60
+ G.add_edge(6, 7)
61
+ c = bipartite.closeness_centrality(G, [0, 2, 4, 6], normalized=False)
62
+ answer = {
63
+ 0: 10.0 / 7,
64
+ 2: 10.0 / 7,
65
+ 4: 10.0 / 7,
66
+ 6: 10.0,
67
+ 1: 10.0 / 7,
68
+ 3: 10.0 / 7,
69
+ 5: 10.0 / 7,
70
+ 7: 10.0,
71
+ }
72
+ assert c == answer
73
+
74
+ def test_davis_degree_centrality(self):
75
+ G = self.davis
76
+ deg = bipartite.degree_centrality(G, self.top_nodes)
77
+ answer = {
78
+ "E8": 0.78,
79
+ "E9": 0.67,
80
+ "E7": 0.56,
81
+ "Nora Fayette": 0.57,
82
+ "Evelyn Jefferson": 0.57,
83
+ "Theresa Anderson": 0.57,
84
+ "E6": 0.44,
85
+ "Sylvia Avondale": 0.50,
86
+ "Laura Mandeville": 0.50,
87
+ "Brenda Rogers": 0.50,
88
+ "Katherina Rogers": 0.43,
89
+ "E5": 0.44,
90
+ "Helen Lloyd": 0.36,
91
+ "E3": 0.33,
92
+ "Ruth DeSand": 0.29,
93
+ "Verne Sanderson": 0.29,
94
+ "E12": 0.33,
95
+ "Myra Liddel": 0.29,
96
+ "E11": 0.22,
97
+ "Eleanor Nye": 0.29,
98
+ "Frances Anderson": 0.29,
99
+ "Pearl Oglethorpe": 0.21,
100
+ "E4": 0.22,
101
+ "Charlotte McDowd": 0.29,
102
+ "E10": 0.28,
103
+ "Olivia Carleton": 0.14,
104
+ "Flora Price": 0.14,
105
+ "E2": 0.17,
106
+ "E1": 0.17,
107
+ "Dorothy Murchison": 0.14,
108
+ "E13": 0.17,
109
+ "E14": 0.17,
110
+ }
111
+ for node, value in answer.items():
112
+ assert value == pytest.approx(deg[node], abs=1e-2)
113
+
114
+ def test_davis_betweenness_centrality(self):
115
+ G = self.davis
116
+ bet = bipartite.betweenness_centrality(G, self.top_nodes)
117
+ answer = {
118
+ "E8": 0.24,
119
+ "E9": 0.23,
120
+ "E7": 0.13,
121
+ "Nora Fayette": 0.11,
122
+ "Evelyn Jefferson": 0.10,
123
+ "Theresa Anderson": 0.09,
124
+ "E6": 0.07,
125
+ "Sylvia Avondale": 0.07,
126
+ "Laura Mandeville": 0.05,
127
+ "Brenda Rogers": 0.05,
128
+ "Katherina Rogers": 0.05,
129
+ "E5": 0.04,
130
+ "Helen Lloyd": 0.04,
131
+ "E3": 0.02,
132
+ "Ruth DeSand": 0.02,
133
+ "Verne Sanderson": 0.02,
134
+ "E12": 0.02,
135
+ "Myra Liddel": 0.02,
136
+ "E11": 0.02,
137
+ "Eleanor Nye": 0.01,
138
+ "Frances Anderson": 0.01,
139
+ "Pearl Oglethorpe": 0.01,
140
+ "E4": 0.01,
141
+ "Charlotte McDowd": 0.01,
142
+ "E10": 0.01,
143
+ "Olivia Carleton": 0.01,
144
+ "Flora Price": 0.01,
145
+ "E2": 0.00,
146
+ "E1": 0.00,
147
+ "Dorothy Murchison": 0.00,
148
+ "E13": 0.00,
149
+ "E14": 0.00,
150
+ }
151
+ for node, value in answer.items():
152
+ assert value == pytest.approx(bet[node], abs=1e-2)
153
+
154
+ def test_davis_closeness_centrality(self):
155
+ G = self.davis
156
+ clos = bipartite.closeness_centrality(G, self.top_nodes)
157
+ answer = {
158
+ "E8": 0.85,
159
+ "E9": 0.79,
160
+ "E7": 0.73,
161
+ "Nora Fayette": 0.80,
162
+ "Evelyn Jefferson": 0.80,
163
+ "Theresa Anderson": 0.80,
164
+ "E6": 0.69,
165
+ "Sylvia Avondale": 0.77,
166
+ "Laura Mandeville": 0.73,
167
+ "Brenda Rogers": 0.73,
168
+ "Katherina Rogers": 0.73,
169
+ "E5": 0.59,
170
+ "Helen Lloyd": 0.73,
171
+ "E3": 0.56,
172
+ "Ruth DeSand": 0.71,
173
+ "Verne Sanderson": 0.71,
174
+ "E12": 0.56,
175
+ "Myra Liddel": 0.69,
176
+ "E11": 0.54,
177
+ "Eleanor Nye": 0.67,
178
+ "Frances Anderson": 0.67,
179
+ "Pearl Oglethorpe": 0.67,
180
+ "E4": 0.54,
181
+ "Charlotte McDowd": 0.60,
182
+ "E10": 0.55,
183
+ "Olivia Carleton": 0.59,
184
+ "Flora Price": 0.59,
185
+ "E2": 0.52,
186
+ "E1": 0.52,
187
+ "Dorothy Murchison": 0.65,
188
+ "E13": 0.52,
189
+ "E14": 0.52,
190
+ }
191
+ for node, value in answer.items():
192
+ assert value == pytest.approx(clos[node], abs=1e-2)
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_cluster.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms import bipartite
5
+ from networkx.algorithms.bipartite.cluster import cc_dot, cc_max, cc_min
6
+
7
+
8
+ def test_pairwise_bipartite_cc_functions():
9
+ # Test functions for different kinds of bipartite clustering coefficients
10
+ # between pairs of nodes using 3 example graphs from figure 5 p. 40
11
+ # Latapy et al (2008)
12
+ G1 = nx.Graph([(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7)])
13
+ G2 = nx.Graph([(0, 2), (0, 3), (0, 4), (1, 3), (1, 4), (1, 5)])
14
+ G3 = nx.Graph(
15
+ [(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7), (1, 8), (1, 9)]
16
+ )
17
+ result = {
18
+ 0: [1 / 3.0, 2 / 3.0, 2 / 5.0],
19
+ 1: [1 / 2.0, 2 / 3.0, 2 / 3.0],
20
+ 2: [2 / 8.0, 2 / 5.0, 2 / 5.0],
21
+ }
22
+ for i, G in enumerate([G1, G2, G3]):
23
+ assert bipartite.is_bipartite(G)
24
+ assert cc_dot(set(G[0]), set(G[1])) == result[i][0]
25
+ assert cc_min(set(G[0]), set(G[1])) == result[i][1]
26
+ assert cc_max(set(G[0]), set(G[1])) == result[i][2]
27
+
28
+
29
+ def test_star_graph():
30
+ G = nx.star_graph(3)
31
+ # all modes are the same
32
+ answer = {0: 0, 1: 1, 2: 1, 3: 1}
33
+ assert bipartite.clustering(G, mode="dot") == answer
34
+ assert bipartite.clustering(G, mode="min") == answer
35
+ assert bipartite.clustering(G, mode="max") == answer
36
+
37
+
38
+ def test_not_bipartite():
39
+ with pytest.raises(nx.NetworkXError):
40
+ bipartite.clustering(nx.complete_graph(4))
41
+
42
+
43
+ def test_bad_mode():
44
+ with pytest.raises(nx.NetworkXError):
45
+ bipartite.clustering(nx.path_graph(4), mode="foo")
46
+
47
+
48
+ def test_path_graph():
49
+ G = nx.path_graph(4)
50
+ answer = {0: 0.5, 1: 0.5, 2: 0.5, 3: 0.5}
51
+ assert bipartite.clustering(G, mode="dot") == answer
52
+ assert bipartite.clustering(G, mode="max") == answer
53
+ answer = {0: 1, 1: 1, 2: 1, 3: 1}
54
+ assert bipartite.clustering(G, mode="min") == answer
55
+
56
+
57
+ def test_average_path_graph():
58
+ G = nx.path_graph(4)
59
+ assert bipartite.average_clustering(G, mode="dot") == 0.5
60
+ assert bipartite.average_clustering(G, mode="max") == 0.5
61
+ assert bipartite.average_clustering(G, mode="min") == 1
62
+
63
+
64
+ def test_ra_clustering_davis():
65
+ G = nx.davis_southern_women_graph()
66
+ cc4 = round(bipartite.robins_alexander_clustering(G), 3)
67
+ assert cc4 == 0.468
68
+
69
+
70
+ def test_ra_clustering_square():
71
+ G = nx.path_graph(4)
72
+ G.add_edge(0, 3)
73
+ assert bipartite.robins_alexander_clustering(G) == 1.0
74
+
75
+
76
+ def test_ra_clustering_zero():
77
+ G = nx.Graph()
78
+ assert bipartite.robins_alexander_clustering(G) == 0
79
+ G.add_nodes_from(range(4))
80
+ assert bipartite.robins_alexander_clustering(G) == 0
81
+ G.add_edges_from([(0, 1), (2, 3), (3, 4)])
82
+ assert bipartite.robins_alexander_clustering(G) == 0
83
+ G.add_edge(1, 2)
84
+ assert bipartite.robins_alexander_clustering(G) == 0
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_covering.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import networkx as nx
2
+ from networkx.algorithms import bipartite
3
+
4
+
5
+ class TestMinEdgeCover:
6
+ """Tests for :func:`networkx.algorithms.bipartite.min_edge_cover`"""
7
+
8
+ def test_empty_graph(self):
9
+ G = nx.Graph()
10
+ assert bipartite.min_edge_cover(G) == set()
11
+
12
+ def test_graph_single_edge(self):
13
+ G = nx.Graph()
14
+ G.add_edge(0, 1)
15
+ assert bipartite.min_edge_cover(G) == {(0, 1), (1, 0)}
16
+
17
+ def test_bipartite_default(self):
18
+ G = nx.Graph()
19
+ G.add_nodes_from([1, 2, 3, 4], bipartite=0)
20
+ G.add_nodes_from(["a", "b", "c"], bipartite=1)
21
+ G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
22
+ min_cover = bipartite.min_edge_cover(G)
23
+ assert nx.is_edge_cover(G, min_cover)
24
+ assert len(min_cover) == 8
25
+
26
+ def test_bipartite_explicit(self):
27
+ G = nx.Graph()
28
+ G.add_nodes_from([1, 2, 3, 4], bipartite=0)
29
+ G.add_nodes_from(["a", "b", "c"], bipartite=1)
30
+ G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
31
+ min_cover = bipartite.min_edge_cover(G, bipartite.eppstein_matching)
32
+ assert nx.is_edge_cover(G, min_cover)
33
+ assert len(min_cover) == 8
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_edgelist.py ADDED
@@ -0,0 +1,240 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Unit tests for bipartite edgelists.
3
+ """
4
+
5
+ import io
6
+
7
+ import pytest
8
+
9
+ import networkx as nx
10
+ from networkx.algorithms import bipartite
11
+ from networkx.utils import edges_equal, graphs_equal, nodes_equal
12
+
13
+
14
+ class TestEdgelist:
15
+ @classmethod
16
+ def setup_class(cls):
17
+ cls.G = nx.Graph(name="test")
18
+ e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")]
19
+ cls.G.add_edges_from(e)
20
+ cls.G.add_nodes_from(["a", "c", "e"], bipartite=0)
21
+ cls.G.add_nodes_from(["b", "d", "f"], bipartite=1)
22
+ cls.G.add_node("g", bipartite=0)
23
+ cls.DG = nx.DiGraph(cls.G)
24
+ cls.MG = nx.MultiGraph()
25
+ cls.MG.add_edges_from([(1, 2), (1, 2), (1, 2)])
26
+ cls.MG.add_node(1, bipartite=0)
27
+ cls.MG.add_node(2, bipartite=1)
28
+
29
+ def test_read_edgelist_1(self):
30
+ s = b"""\
31
+ # comment line
32
+ 1 2
33
+ # comment line
34
+ 2 3
35
+ """
36
+ bytesIO = io.BytesIO(s)
37
+ G = bipartite.read_edgelist(bytesIO, nodetype=int)
38
+ assert edges_equal(G.edges(), [(1, 2), (2, 3)])
39
+
40
+ def test_read_edgelist_3(self):
41
+ s = b"""\
42
+ # comment line
43
+ 1 2 {'weight':2.0}
44
+ # comment line
45
+ 2 3 {'weight':3.0}
46
+ """
47
+ bytesIO = io.BytesIO(s)
48
+ G = bipartite.read_edgelist(bytesIO, nodetype=int, data=False)
49
+ assert edges_equal(G.edges(), [(1, 2), (2, 3)])
50
+
51
+ bytesIO = io.BytesIO(s)
52
+ G = bipartite.read_edgelist(bytesIO, nodetype=int, data=True)
53
+ assert edges_equal(
54
+ G.edges(data=True), [(1, 2, {"weight": 2.0}), (2, 3, {"weight": 3.0})]
55
+ )
56
+
57
+ def test_write_edgelist_1(self):
58
+ fh = io.BytesIO()
59
+ G = nx.Graph()
60
+ G.add_edges_from([(1, 2), (2, 3)])
61
+ G.add_node(1, bipartite=0)
62
+ G.add_node(2, bipartite=1)
63
+ G.add_node(3, bipartite=0)
64
+ bipartite.write_edgelist(G, fh, data=False)
65
+ fh.seek(0)
66
+ assert fh.read() == b"1 2\n3 2\n"
67
+
68
+ def test_write_edgelist_2(self):
69
+ fh = io.BytesIO()
70
+ G = nx.Graph()
71
+ G.add_edges_from([(1, 2), (2, 3)])
72
+ G.add_node(1, bipartite=0)
73
+ G.add_node(2, bipartite=1)
74
+ G.add_node(3, bipartite=0)
75
+ bipartite.write_edgelist(G, fh, data=True)
76
+ fh.seek(0)
77
+ assert fh.read() == b"1 2 {}\n3 2 {}\n"
78
+
79
+ def test_write_edgelist_3(self):
80
+ fh = io.BytesIO()
81
+ G = nx.Graph()
82
+ G.add_edge(1, 2, weight=2.0)
83
+ G.add_edge(2, 3, weight=3.0)
84
+ G.add_node(1, bipartite=0)
85
+ G.add_node(2, bipartite=1)
86
+ G.add_node(3, bipartite=0)
87
+ bipartite.write_edgelist(G, fh, data=True)
88
+ fh.seek(0)
89
+ assert fh.read() == b"1 2 {'weight': 2.0}\n3 2 {'weight': 3.0}\n"
90
+
91
+ def test_write_edgelist_4(self):
92
+ fh = io.BytesIO()
93
+ G = nx.Graph()
94
+ G.add_edge(1, 2, weight=2.0)
95
+ G.add_edge(2, 3, weight=3.0)
96
+ G.add_node(1, bipartite=0)
97
+ G.add_node(2, bipartite=1)
98
+ G.add_node(3, bipartite=0)
99
+ bipartite.write_edgelist(G, fh, data=[("weight")])
100
+ fh.seek(0)
101
+ assert fh.read() == b"1 2 2.0\n3 2 3.0\n"
102
+
103
+ def test_unicode(self, tmp_path):
104
+ G = nx.Graph()
105
+ name1 = chr(2344) + chr(123) + chr(6543)
106
+ name2 = chr(5543) + chr(1543) + chr(324)
107
+ G.add_edge(name1, "Radiohead", **{name2: 3})
108
+ G.add_node(name1, bipartite=0)
109
+ G.add_node("Radiohead", bipartite=1)
110
+
111
+ fname = tmp_path / "edgelist.txt"
112
+ bipartite.write_edgelist(G, fname)
113
+ H = bipartite.read_edgelist(fname)
114
+ assert graphs_equal(G, H)
115
+
116
+ def test_latin1_issue(self, tmp_path):
117
+ G = nx.Graph()
118
+ name1 = chr(2344) + chr(123) + chr(6543)
119
+ name2 = chr(5543) + chr(1543) + chr(324)
120
+ G.add_edge(name1, "Radiohead", **{name2: 3})
121
+ G.add_node(name1, bipartite=0)
122
+ G.add_node("Radiohead", bipartite=1)
123
+
124
+ fname = tmp_path / "edgelist.txt"
125
+ with pytest.raises(UnicodeEncodeError):
126
+ bipartite.write_edgelist(G, fname, encoding="latin-1")
127
+
128
+ def test_latin1(self, tmp_path):
129
+ G = nx.Graph()
130
+ name1 = "Bj" + chr(246) + "rk"
131
+ name2 = chr(220) + "ber"
132
+ G.add_edge(name1, "Radiohead", **{name2: 3})
133
+ G.add_node(name1, bipartite=0)
134
+ G.add_node("Radiohead", bipartite=1)
135
+
136
+ fname = tmp_path / "edgelist.txt"
137
+ bipartite.write_edgelist(G, fname, encoding="latin-1")
138
+ H = bipartite.read_edgelist(fname, encoding="latin-1")
139
+ assert graphs_equal(G, H)
140
+
141
+ def test_edgelist_graph(self, tmp_path):
142
+ G = self.G
143
+ fname = tmp_path / "edgelist.txt"
144
+ bipartite.write_edgelist(G, fname)
145
+ H = bipartite.read_edgelist(fname)
146
+ H2 = bipartite.read_edgelist(fname)
147
+ assert H is not H2 # they should be different graphs
148
+ G.remove_node("g") # isolated nodes are not written in edgelist
149
+ assert nodes_equal(list(H), list(G))
150
+ assert edges_equal(list(H.edges()), list(G.edges()))
151
+
152
+ def test_edgelist_integers(self, tmp_path):
153
+ G = nx.convert_node_labels_to_integers(self.G)
154
+ fname = tmp_path / "edgelist.txt"
155
+ bipartite.write_edgelist(G, fname)
156
+ H = bipartite.read_edgelist(fname, nodetype=int)
157
+ # isolated nodes are not written in edgelist
158
+ G.remove_nodes_from(list(nx.isolates(G)))
159
+ assert nodes_equal(list(H), list(G))
160
+ assert edges_equal(list(H.edges()), list(G.edges()))
161
+
162
+ def test_edgelist_multigraph(self, tmp_path):
163
+ G = self.MG
164
+ fname = tmp_path / "edgelist.txt"
165
+ bipartite.write_edgelist(G, fname)
166
+ H = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph())
167
+ H2 = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph())
168
+ assert H is not H2 # they should be different graphs
169
+ assert nodes_equal(list(H), list(G))
170
+ assert edges_equal(list(H.edges()), list(G.edges()))
171
+
172
+ def test_empty_digraph(self):
173
+ with pytest.raises(nx.NetworkXNotImplemented):
174
+ bytesIO = io.BytesIO()
175
+ bipartite.write_edgelist(nx.DiGraph(), bytesIO)
176
+
177
+ def test_raise_attribute(self):
178
+ with pytest.raises(AttributeError):
179
+ G = nx.path_graph(4)
180
+ bytesIO = io.BytesIO()
181
+ bipartite.write_edgelist(G, bytesIO)
182
+
183
+ def test_parse_edgelist(self):
184
+ """Tests for conditions specific to
185
+ parse_edge_list method"""
186
+
187
+ # ignore strings of length less than 2
188
+ lines = ["1 2", "2 3", "3 1", "4", " "]
189
+ G = bipartite.parse_edgelist(lines, nodetype=int)
190
+ assert list(G.nodes) == [1, 2, 3]
191
+
192
+ # Exception raised when node is not convertible
193
+ # to specified data type
194
+ with pytest.raises(TypeError, match=".*Failed to convert nodes"):
195
+ lines = ["a b", "b c", "c a"]
196
+ G = bipartite.parse_edgelist(lines, nodetype=int)
197
+
198
+ # Exception raised when format of data is not
199
+ # convertible to dictionary object
200
+ with pytest.raises(TypeError, match=".*Failed to convert edge data"):
201
+ lines = ["1 2 3", "2 3 4", "3 1 2"]
202
+ G = bipartite.parse_edgelist(lines, nodetype=int)
203
+
204
+ # Exception raised when edge data and data
205
+ # keys are not of same length
206
+ with pytest.raises(IndexError):
207
+ lines = ["1 2 3 4", "2 3 4"]
208
+ G = bipartite.parse_edgelist(
209
+ lines, nodetype=int, data=[("weight", int), ("key", int)]
210
+ )
211
+
212
+ # Exception raised when edge data is not
213
+ # convertible to specified data type
214
+ with pytest.raises(TypeError, match=".*Failed to convert key data"):
215
+ lines = ["1 2 3 a", "2 3 4 b"]
216
+ G = bipartite.parse_edgelist(
217
+ lines, nodetype=int, data=[("weight", int), ("key", int)]
218
+ )
219
+
220
+
221
+ def test_bipartite_edgelist_consistent_strip_handling():
222
+ """See gh-7462
223
+
224
+ Input when printed looks like:
225
+
226
+ A B interaction 2
227
+ B C interaction 4
228
+ C A interaction
229
+
230
+ Note the trailing \\t in the last line, which indicates the existence of
231
+ an empty data field.
232
+ """
233
+ lines = io.StringIO(
234
+ "A\tB\tinteraction\t2\nB\tC\tinteraction\t4\nC\tA\tinteraction\t"
235
+ )
236
+ descr = [("type", str), ("weight", str)]
237
+ # Should not raise
238
+ G = nx.bipartite.parse_edgelist(lines, delimiter="\t", data=descr)
239
+ expected = [("A", "B", "2"), ("A", "C", ""), ("B", "C", "4")]
240
+ assert sorted(G.edges(data="weight")) == expected
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_extendability.py ADDED
@@ -0,0 +1,334 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+
5
+
6
+ def test_selfloops_raises():
7
+ G = nx.ladder_graph(3)
8
+ G.add_edge(0, 0)
9
+ with pytest.raises(nx.NetworkXError, match=".*not bipartite"):
10
+ nx.bipartite.maximal_extendability(G)
11
+
12
+
13
+ def test_disconnected_raises():
14
+ G = nx.ladder_graph(3)
15
+ G.add_node("a")
16
+ with pytest.raises(nx.NetworkXError, match=".*not connected"):
17
+ nx.bipartite.maximal_extendability(G)
18
+
19
+
20
+ def test_not_bipartite_raises():
21
+ G = nx.complete_graph(5)
22
+ with pytest.raises(nx.NetworkXError, match=".*not bipartite"):
23
+ nx.bipartite.maximal_extendability(G)
24
+
25
+
26
+ def test_no_perfect_matching_raises():
27
+ G = nx.Graph([(0, 1), (0, 2)])
28
+ with pytest.raises(nx.NetworkXError, match=".*not contain a perfect matching"):
29
+ nx.bipartite.maximal_extendability(G)
30
+
31
+
32
+ def test_residual_graph_not_strongly_connected_raises():
33
+ G = nx.Graph([(1, 2), (2, 3), (3, 4)])
34
+ with pytest.raises(
35
+ nx.NetworkXError, match="The residual graph of G is not strongly connected"
36
+ ):
37
+ nx.bipartite.maximal_extendability(G)
38
+
39
+
40
+ def test_ladder_graph_is_1():
41
+ G = nx.ladder_graph(3)
42
+ assert nx.bipartite.maximal_extendability(G) == 1
43
+
44
+
45
+ def test_cubical_graph_is_2():
46
+ G = nx.cubical_graph()
47
+ assert nx.bipartite.maximal_extendability(G) == 2
48
+
49
+
50
+ def test_k_is_3():
51
+ G = nx.Graph(
52
+ [
53
+ (1, 6),
54
+ (1, 7),
55
+ (1, 8),
56
+ (1, 9),
57
+ (2, 6),
58
+ (2, 7),
59
+ (2, 8),
60
+ (2, 10),
61
+ (3, 6),
62
+ (3, 8),
63
+ (3, 9),
64
+ (3, 10),
65
+ (4, 7),
66
+ (4, 8),
67
+ (4, 9),
68
+ (4, 10),
69
+ (5, 6),
70
+ (5, 7),
71
+ (5, 9),
72
+ (5, 10),
73
+ ]
74
+ )
75
+ assert nx.bipartite.maximal_extendability(G) == 3
76
+
77
+
78
+ def test_k_is_4():
79
+ G = nx.Graph(
80
+ [
81
+ (8, 1),
82
+ (8, 2),
83
+ (8, 3),
84
+ (8, 4),
85
+ (8, 5),
86
+ (9, 1),
87
+ (9, 2),
88
+ (9, 3),
89
+ (9, 4),
90
+ (9, 7),
91
+ (10, 1),
92
+ (10, 2),
93
+ (10, 3),
94
+ (10, 4),
95
+ (10, 6),
96
+ (11, 1),
97
+ (11, 2),
98
+ (11, 5),
99
+ (11, 6),
100
+ (11, 7),
101
+ (12, 1),
102
+ (12, 3),
103
+ (12, 5),
104
+ (12, 6),
105
+ (12, 7),
106
+ (13, 2),
107
+ (13, 4),
108
+ (13, 5),
109
+ (13, 6),
110
+ (13, 7),
111
+ (14, 3),
112
+ (14, 4),
113
+ (14, 5),
114
+ (14, 6),
115
+ (14, 7),
116
+ ]
117
+ )
118
+ assert nx.bipartite.maximal_extendability(G) == 4
119
+
120
+
121
+ def test_k_is_5():
122
+ G = nx.Graph(
123
+ [
124
+ (8, 1),
125
+ (8, 2),
126
+ (8, 3),
127
+ (8, 4),
128
+ (8, 5),
129
+ (8, 6),
130
+ (9, 1),
131
+ (9, 2),
132
+ (9, 3),
133
+ (9, 4),
134
+ (9, 5),
135
+ (9, 7),
136
+ (10, 1),
137
+ (10, 2),
138
+ (10, 3),
139
+ (10, 4),
140
+ (10, 6),
141
+ (10, 7),
142
+ (11, 1),
143
+ (11, 2),
144
+ (11, 3),
145
+ (11, 5),
146
+ (11, 6),
147
+ (11, 7),
148
+ (12, 1),
149
+ (12, 2),
150
+ (12, 4),
151
+ (12, 5),
152
+ (12, 6),
153
+ (12, 7),
154
+ (13, 1),
155
+ (13, 3),
156
+ (13, 4),
157
+ (13, 5),
158
+ (13, 6),
159
+ (13, 7),
160
+ (14, 2),
161
+ (14, 3),
162
+ (14, 4),
163
+ (14, 5),
164
+ (14, 6),
165
+ (14, 7),
166
+ ]
167
+ )
168
+ assert nx.bipartite.maximal_extendability(G) == 5
169
+
170
+
171
+ def test_k_is_6():
172
+ G = nx.Graph(
173
+ [
174
+ (9, 1),
175
+ (9, 2),
176
+ (9, 3),
177
+ (9, 4),
178
+ (9, 5),
179
+ (9, 6),
180
+ (9, 7),
181
+ (10, 1),
182
+ (10, 2),
183
+ (10, 3),
184
+ (10, 4),
185
+ (10, 5),
186
+ (10, 6),
187
+ (10, 8),
188
+ (11, 1),
189
+ (11, 2),
190
+ (11, 3),
191
+ (11, 4),
192
+ (11, 5),
193
+ (11, 7),
194
+ (11, 8),
195
+ (12, 1),
196
+ (12, 2),
197
+ (12, 3),
198
+ (12, 4),
199
+ (12, 6),
200
+ (12, 7),
201
+ (12, 8),
202
+ (13, 1),
203
+ (13, 2),
204
+ (13, 3),
205
+ (13, 5),
206
+ (13, 6),
207
+ (13, 7),
208
+ (13, 8),
209
+ (14, 1),
210
+ (14, 2),
211
+ (14, 4),
212
+ (14, 5),
213
+ (14, 6),
214
+ (14, 7),
215
+ (14, 8),
216
+ (15, 1),
217
+ (15, 3),
218
+ (15, 4),
219
+ (15, 5),
220
+ (15, 6),
221
+ (15, 7),
222
+ (15, 8),
223
+ (16, 2),
224
+ (16, 3),
225
+ (16, 4),
226
+ (16, 5),
227
+ (16, 6),
228
+ (16, 7),
229
+ (16, 8),
230
+ ]
231
+ )
232
+ assert nx.bipartite.maximal_extendability(G) == 6
233
+
234
+
235
+ def test_k_is_7():
236
+ G = nx.Graph(
237
+ [
238
+ (1, 11),
239
+ (1, 12),
240
+ (1, 13),
241
+ (1, 14),
242
+ (1, 15),
243
+ (1, 16),
244
+ (1, 17),
245
+ (1, 18),
246
+ (2, 11),
247
+ (2, 12),
248
+ (2, 13),
249
+ (2, 14),
250
+ (2, 15),
251
+ (2, 16),
252
+ (2, 17),
253
+ (2, 19),
254
+ (3, 11),
255
+ (3, 12),
256
+ (3, 13),
257
+ (3, 14),
258
+ (3, 15),
259
+ (3, 16),
260
+ (3, 17),
261
+ (3, 20),
262
+ (4, 11),
263
+ (4, 12),
264
+ (4, 13),
265
+ (4, 14),
266
+ (4, 15),
267
+ (4, 16),
268
+ (4, 17),
269
+ (4, 18),
270
+ (4, 19),
271
+ (4, 20),
272
+ (5, 11),
273
+ (5, 12),
274
+ (5, 13),
275
+ (5, 14),
276
+ (5, 15),
277
+ (5, 16),
278
+ (5, 17),
279
+ (5, 18),
280
+ (5, 19),
281
+ (5, 20),
282
+ (6, 11),
283
+ (6, 12),
284
+ (6, 13),
285
+ (6, 14),
286
+ (6, 15),
287
+ (6, 16),
288
+ (6, 17),
289
+ (6, 18),
290
+ (6, 19),
291
+ (6, 20),
292
+ (7, 11),
293
+ (7, 12),
294
+ (7, 13),
295
+ (7, 14),
296
+ (7, 15),
297
+ (7, 16),
298
+ (7, 17),
299
+ (7, 18),
300
+ (7, 19),
301
+ (7, 20),
302
+ (8, 11),
303
+ (8, 12),
304
+ (8, 13),
305
+ (8, 14),
306
+ (8, 15),
307
+ (8, 16),
308
+ (8, 17),
309
+ (8, 18),
310
+ (8, 19),
311
+ (8, 20),
312
+ (9, 11),
313
+ (9, 12),
314
+ (9, 13),
315
+ (9, 14),
316
+ (9, 15),
317
+ (9, 16),
318
+ (9, 17),
319
+ (9, 18),
320
+ (9, 19),
321
+ (9, 20),
322
+ (10, 11),
323
+ (10, 12),
324
+ (10, 13),
325
+ (10, 14),
326
+ (10, 15),
327
+ (10, 16),
328
+ (10, 17),
329
+ (10, 18),
330
+ (10, 19),
331
+ (10, 20),
332
+ ]
333
+ )
334
+ assert nx.bipartite.maximal_extendability(G) == 7
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_generators.py ADDED
@@ -0,0 +1,409 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numbers
2
+
3
+ import pytest
4
+
5
+ import networkx as nx
6
+
7
+ from ..generators import (
8
+ alternating_havel_hakimi_graph,
9
+ complete_bipartite_graph,
10
+ configuration_model,
11
+ gnmk_random_graph,
12
+ havel_hakimi_graph,
13
+ preferential_attachment_graph,
14
+ random_graph,
15
+ reverse_havel_hakimi_graph,
16
+ )
17
+
18
+ """
19
+ Generators - Bipartite
20
+ ----------------------
21
+ """
22
+
23
+
24
+ class TestGeneratorsBipartite:
25
+ def test_complete_bipartite_graph(self):
26
+ G = complete_bipartite_graph(0, 0)
27
+ assert nx.is_isomorphic(G, nx.null_graph())
28
+
29
+ for i in [1, 5]:
30
+ G = complete_bipartite_graph(i, 0)
31
+ assert nx.is_isomorphic(G, nx.empty_graph(i))
32
+ G = complete_bipartite_graph(0, i)
33
+ assert nx.is_isomorphic(G, nx.empty_graph(i))
34
+
35
+ G = complete_bipartite_graph(2, 2)
36
+ assert nx.is_isomorphic(G, nx.cycle_graph(4))
37
+
38
+ G = complete_bipartite_graph(1, 5)
39
+ assert nx.is_isomorphic(G, nx.star_graph(5))
40
+
41
+ G = complete_bipartite_graph(5, 1)
42
+ assert nx.is_isomorphic(G, nx.star_graph(5))
43
+
44
+ # complete_bipartite_graph(m1,m2) is a connected graph with
45
+ # m1+m2 nodes and m1*m2 edges
46
+ for m1, m2 in [(5, 11), (7, 3)]:
47
+ G = complete_bipartite_graph(m1, m2)
48
+ assert nx.number_of_nodes(G) == m1 + m2
49
+ assert nx.number_of_edges(G) == m1 * m2
50
+
51
+ with pytest.raises(nx.NetworkXError):
52
+ complete_bipartite_graph(7, 3, create_using=nx.DiGraph)
53
+ with pytest.raises(nx.NetworkXError):
54
+ complete_bipartite_graph(7, 3, create_using=nx.MultiDiGraph)
55
+
56
+ mG = complete_bipartite_graph(7, 3, create_using=nx.MultiGraph)
57
+ assert mG.is_multigraph()
58
+ assert sorted(mG.edges()) == sorted(G.edges())
59
+
60
+ mG = complete_bipartite_graph(7, 3, create_using=nx.MultiGraph)
61
+ assert mG.is_multigraph()
62
+ assert sorted(mG.edges()) == sorted(G.edges())
63
+
64
+ mG = complete_bipartite_graph(7, 3) # default to Graph
65
+ assert sorted(mG.edges()) == sorted(G.edges())
66
+ assert not mG.is_multigraph()
67
+ assert not mG.is_directed()
68
+
69
+ # specify nodes rather than number of nodes
70
+ for n1, n2 in [([1, 2], "ab"), (3, 2), (3, "ab"), ("ab", 3)]:
71
+ G = complete_bipartite_graph(n1, n2)
72
+ if isinstance(n1, numbers.Integral):
73
+ if isinstance(n2, numbers.Integral):
74
+ n2 = range(n1, n1 + n2)
75
+ n1 = range(n1)
76
+ elif isinstance(n2, numbers.Integral):
77
+ n2 = range(n2)
78
+ edges = {(u, v) for u in n1 for v in n2}
79
+ assert edges == set(G.edges)
80
+ assert G.size() == len(edges)
81
+
82
+ # raise when node sets are not distinct
83
+ for n1, n2 in [([1, 2], 3), (3, [1, 2]), ("abc", "bcd")]:
84
+ pytest.raises(nx.NetworkXError, complete_bipartite_graph, n1, n2)
85
+
86
+ def test_configuration_model(self):
87
+ aseq = []
88
+ bseq = []
89
+ G = configuration_model(aseq, bseq)
90
+ assert len(G) == 0
91
+
92
+ aseq = [0, 0]
93
+ bseq = [0, 0]
94
+ G = configuration_model(aseq, bseq)
95
+ assert len(G) == 4
96
+ assert G.number_of_edges() == 0
97
+
98
+ aseq = [3, 3, 3, 3]
99
+ bseq = [2, 2, 2, 2, 2]
100
+ pytest.raises(nx.NetworkXError, configuration_model, aseq, bseq)
101
+
102
+ aseq = [3, 3, 3, 3]
103
+ bseq = [2, 2, 2, 2, 2, 2]
104
+ G = configuration_model(aseq, bseq)
105
+ assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
106
+
107
+ aseq = [2, 2, 2, 2, 2, 2]
108
+ bseq = [3, 3, 3, 3]
109
+ G = configuration_model(aseq, bseq)
110
+ assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
111
+
112
+ aseq = [2, 2, 2, 1, 1, 1]
113
+ bseq = [3, 3, 3]
114
+ G = configuration_model(aseq, bseq)
115
+ assert G.is_multigraph()
116
+ assert not G.is_directed()
117
+ assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3]
118
+
119
+ GU = nx.projected_graph(nx.Graph(G), range(len(aseq)))
120
+ assert GU.number_of_nodes() == 6
121
+
122
+ GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
123
+ assert GD.number_of_nodes() == 3
124
+
125
+ G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
126
+ assert not G.is_multigraph()
127
+ assert not G.is_directed()
128
+
129
+ pytest.raises(
130
+ nx.NetworkXError, configuration_model, aseq, bseq, create_using=nx.DiGraph()
131
+ )
132
+ pytest.raises(
133
+ nx.NetworkXError, configuration_model, aseq, bseq, create_using=nx.DiGraph
134
+ )
135
+ pytest.raises(
136
+ nx.NetworkXError,
137
+ configuration_model,
138
+ aseq,
139
+ bseq,
140
+ create_using=nx.MultiDiGraph,
141
+ )
142
+
143
+ def test_havel_hakimi_graph(self):
144
+ aseq = []
145
+ bseq = []
146
+ G = havel_hakimi_graph(aseq, bseq)
147
+ assert len(G) == 0
148
+
149
+ aseq = [0, 0]
150
+ bseq = [0, 0]
151
+ G = havel_hakimi_graph(aseq, bseq)
152
+ assert len(G) == 4
153
+ assert G.number_of_edges() == 0
154
+
155
+ aseq = [3, 3, 3, 3]
156
+ bseq = [2, 2, 2, 2, 2]
157
+ pytest.raises(nx.NetworkXError, havel_hakimi_graph, aseq, bseq)
158
+
159
+ bseq = [2, 2, 2, 2, 2, 2]
160
+ G = havel_hakimi_graph(aseq, bseq)
161
+ assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
162
+
163
+ aseq = [2, 2, 2, 2, 2, 2]
164
+ bseq = [3, 3, 3, 3]
165
+ G = havel_hakimi_graph(aseq, bseq)
166
+ assert G.is_multigraph()
167
+ assert not G.is_directed()
168
+ assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
169
+
170
+ GU = nx.projected_graph(nx.Graph(G), range(len(aseq)))
171
+ assert GU.number_of_nodes() == 6
172
+
173
+ GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
174
+ assert GD.number_of_nodes() == 4
175
+
176
+ G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
177
+ assert not G.is_multigraph()
178
+ assert not G.is_directed()
179
+
180
+ pytest.raises(
181
+ nx.NetworkXError, havel_hakimi_graph, aseq, bseq, create_using=nx.DiGraph
182
+ )
183
+ pytest.raises(
184
+ nx.NetworkXError, havel_hakimi_graph, aseq, bseq, create_using=nx.DiGraph
185
+ )
186
+ pytest.raises(
187
+ nx.NetworkXError,
188
+ havel_hakimi_graph,
189
+ aseq,
190
+ bseq,
191
+ create_using=nx.MultiDiGraph,
192
+ )
193
+
194
+ def test_reverse_havel_hakimi_graph(self):
195
+ aseq = []
196
+ bseq = []
197
+ G = reverse_havel_hakimi_graph(aseq, bseq)
198
+ assert len(G) == 0
199
+
200
+ aseq = [0, 0]
201
+ bseq = [0, 0]
202
+ G = reverse_havel_hakimi_graph(aseq, bseq)
203
+ assert len(G) == 4
204
+ assert G.number_of_edges() == 0
205
+
206
+ aseq = [3, 3, 3, 3]
207
+ bseq = [2, 2, 2, 2, 2]
208
+ pytest.raises(nx.NetworkXError, reverse_havel_hakimi_graph, aseq, bseq)
209
+
210
+ bseq = [2, 2, 2, 2, 2, 2]
211
+ G = reverse_havel_hakimi_graph(aseq, bseq)
212
+ assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
213
+
214
+ aseq = [2, 2, 2, 2, 2, 2]
215
+ bseq = [3, 3, 3, 3]
216
+ G = reverse_havel_hakimi_graph(aseq, bseq)
217
+ assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
218
+
219
+ aseq = [2, 2, 2, 1, 1, 1]
220
+ bseq = [3, 3, 3]
221
+ G = reverse_havel_hakimi_graph(aseq, bseq)
222
+ assert G.is_multigraph()
223
+ assert not G.is_directed()
224
+ assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3]
225
+
226
+ GU = nx.projected_graph(nx.Graph(G), range(len(aseq)))
227
+ assert GU.number_of_nodes() == 6
228
+
229
+ GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
230
+ assert GD.number_of_nodes() == 3
231
+
232
+ G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
233
+ assert not G.is_multigraph()
234
+ assert not G.is_directed()
235
+
236
+ pytest.raises(
237
+ nx.NetworkXError,
238
+ reverse_havel_hakimi_graph,
239
+ aseq,
240
+ bseq,
241
+ create_using=nx.DiGraph,
242
+ )
243
+ pytest.raises(
244
+ nx.NetworkXError,
245
+ reverse_havel_hakimi_graph,
246
+ aseq,
247
+ bseq,
248
+ create_using=nx.DiGraph,
249
+ )
250
+ pytest.raises(
251
+ nx.NetworkXError,
252
+ reverse_havel_hakimi_graph,
253
+ aseq,
254
+ bseq,
255
+ create_using=nx.MultiDiGraph,
256
+ )
257
+
258
+ def test_alternating_havel_hakimi_graph(self):
259
+ aseq = []
260
+ bseq = []
261
+ G = alternating_havel_hakimi_graph(aseq, bseq)
262
+ assert len(G) == 0
263
+
264
+ aseq = [0, 0]
265
+ bseq = [0, 0]
266
+ G = alternating_havel_hakimi_graph(aseq, bseq)
267
+ assert len(G) == 4
268
+ assert G.number_of_edges() == 0
269
+
270
+ aseq = [3, 3, 3, 3]
271
+ bseq = [2, 2, 2, 2, 2]
272
+ pytest.raises(nx.NetworkXError, alternating_havel_hakimi_graph, aseq, bseq)
273
+
274
+ bseq = [2, 2, 2, 2, 2, 2]
275
+ G = alternating_havel_hakimi_graph(aseq, bseq)
276
+ assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
277
+
278
+ aseq = [2, 2, 2, 2, 2, 2]
279
+ bseq = [3, 3, 3, 3]
280
+ G = alternating_havel_hakimi_graph(aseq, bseq)
281
+ assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
282
+
283
+ aseq = [2, 2, 2, 1, 1, 1]
284
+ bseq = [3, 3, 3]
285
+ G = alternating_havel_hakimi_graph(aseq, bseq)
286
+ assert G.is_multigraph()
287
+ assert not G.is_directed()
288
+ assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3]
289
+
290
+ GU = nx.projected_graph(nx.Graph(G), range(len(aseq)))
291
+ assert GU.number_of_nodes() == 6
292
+
293
+ GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
294
+ assert GD.number_of_nodes() == 3
295
+
296
+ G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
297
+ assert not G.is_multigraph()
298
+ assert not G.is_directed()
299
+
300
+ pytest.raises(
301
+ nx.NetworkXError,
302
+ alternating_havel_hakimi_graph,
303
+ aseq,
304
+ bseq,
305
+ create_using=nx.DiGraph,
306
+ )
307
+ pytest.raises(
308
+ nx.NetworkXError,
309
+ alternating_havel_hakimi_graph,
310
+ aseq,
311
+ bseq,
312
+ create_using=nx.DiGraph,
313
+ )
314
+ pytest.raises(
315
+ nx.NetworkXError,
316
+ alternating_havel_hakimi_graph,
317
+ aseq,
318
+ bseq,
319
+ create_using=nx.MultiDiGraph,
320
+ )
321
+
322
+ def test_preferential_attachment(self):
323
+ aseq = [3, 2, 1, 1]
324
+ G = preferential_attachment_graph(aseq, 0.5)
325
+ assert G.is_multigraph()
326
+ assert not G.is_directed()
327
+
328
+ G = preferential_attachment_graph(aseq, 0.5, create_using=nx.Graph)
329
+ assert not G.is_multigraph()
330
+ assert not G.is_directed()
331
+
332
+ pytest.raises(
333
+ nx.NetworkXError,
334
+ preferential_attachment_graph,
335
+ aseq,
336
+ 0.5,
337
+ create_using=nx.DiGraph(),
338
+ )
339
+ pytest.raises(
340
+ nx.NetworkXError,
341
+ preferential_attachment_graph,
342
+ aseq,
343
+ 0.5,
344
+ create_using=nx.DiGraph(),
345
+ )
346
+ pytest.raises(
347
+ nx.NetworkXError,
348
+ preferential_attachment_graph,
349
+ aseq,
350
+ 0.5,
351
+ create_using=nx.DiGraph(),
352
+ )
353
+
354
+ def test_random_graph(self):
355
+ n = 10
356
+ m = 20
357
+ G = random_graph(n, m, 0.9)
358
+ assert len(G) == 30
359
+ assert nx.is_bipartite(G)
360
+ X, Y = nx.algorithms.bipartite.sets(G)
361
+ assert set(range(n)) == X
362
+ assert set(range(n, n + m)) == Y
363
+
364
+ def test_random_digraph(self):
365
+ n = 10
366
+ m = 20
367
+ G = random_graph(n, m, 0.9, directed=True)
368
+ assert len(G) == 30
369
+ assert nx.is_bipartite(G)
370
+ X, Y = nx.algorithms.bipartite.sets(G)
371
+ assert set(range(n)) == X
372
+ assert set(range(n, n + m)) == Y
373
+
374
+ def test_gnmk_random_graph(self):
375
+ n = 10
376
+ m = 20
377
+ edges = 100
378
+ # set seed because sometimes it is not connected
379
+ # which raises an error in bipartite.sets(G) below.
380
+ G = gnmk_random_graph(n, m, edges, seed=1234)
381
+ assert len(G) == n + m
382
+ assert nx.is_bipartite(G)
383
+ X, Y = nx.algorithms.bipartite.sets(G)
384
+ # print(X)
385
+ assert set(range(n)) == X
386
+ assert set(range(n, n + m)) == Y
387
+ assert edges == len(list(G.edges()))
388
+
389
+ def test_gnmk_random_graph_complete(self):
390
+ n = 10
391
+ m = 20
392
+ edges = 200
393
+ G = gnmk_random_graph(n, m, edges)
394
+ assert len(G) == n + m
395
+ assert nx.is_bipartite(G)
396
+ X, Y = nx.algorithms.bipartite.sets(G)
397
+ # print(X)
398
+ assert set(range(n)) == X
399
+ assert set(range(n, n + m)) == Y
400
+ assert edges == len(list(G.edges()))
401
+
402
+ @pytest.mark.parametrize("n", (4, range(4), {0, 1, 2, 3}))
403
+ @pytest.mark.parametrize("m", (range(4, 7), {4, 5, 6}))
404
+ def test_complete_bipartite_graph_str(self, n, m):
405
+ """Ensure G.name is consistent for all inputs accepted by nodes_or_number.
406
+ See gh-7396"""
407
+ G = nx.complete_bipartite_graph(n, m)
408
+ ans = "Graph named 'complete_bipartite_graph(4, 3)' with 7 nodes and 12 edges"
409
+ assert str(G) == ans
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_matching.py ADDED
@@ -0,0 +1,327 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Unit tests for the :mod:`networkx.algorithms.bipartite.matching` module."""
2
+
3
+ import itertools
4
+
5
+ import pytest
6
+
7
+ import networkx as nx
8
+ from networkx.algorithms.bipartite.matching import (
9
+ eppstein_matching,
10
+ hopcroft_karp_matching,
11
+ maximum_matching,
12
+ minimum_weight_full_matching,
13
+ to_vertex_cover,
14
+ )
15
+
16
+
17
+ class TestMatching:
18
+ """Tests for bipartite matching algorithms."""
19
+
20
+ def setup_method(self):
21
+ """Creates a bipartite graph for use in testing matching algorithms.
22
+
23
+ The bipartite graph has a maximum cardinality matching that leaves
24
+ vertex 1 and vertex 10 unmatched. The first six numbers are the left
25
+ vertices and the next six numbers are the right vertices.
26
+
27
+ """
28
+ self.simple_graph = nx.complete_bipartite_graph(2, 3)
29
+ self.simple_solution = {0: 2, 1: 3, 2: 0, 3: 1}
30
+
31
+ edges = [(0, 7), (0, 8), (2, 6), (2, 9), (3, 8), (4, 8), (4, 9), (5, 11)]
32
+ self.top_nodes = set(range(6))
33
+ self.graph = nx.Graph()
34
+ self.graph.add_nodes_from(range(12))
35
+ self.graph.add_edges_from(edges)
36
+
37
+ # Example bipartite graph from issue 2127
38
+ G = nx.Graph()
39
+ G.add_nodes_from(
40
+ [
41
+ (1, "C"),
42
+ (1, "B"),
43
+ (0, "G"),
44
+ (1, "F"),
45
+ (1, "E"),
46
+ (0, "C"),
47
+ (1, "D"),
48
+ (1, "I"),
49
+ (0, "A"),
50
+ (0, "D"),
51
+ (0, "F"),
52
+ (0, "E"),
53
+ (0, "H"),
54
+ (1, "G"),
55
+ (1, "A"),
56
+ (0, "I"),
57
+ (0, "B"),
58
+ (1, "H"),
59
+ ]
60
+ )
61
+ G.add_edge((1, "C"), (0, "A"))
62
+ G.add_edge((1, "B"), (0, "A"))
63
+ G.add_edge((0, "G"), (1, "I"))
64
+ G.add_edge((0, "G"), (1, "H"))
65
+ G.add_edge((1, "F"), (0, "A"))
66
+ G.add_edge((1, "F"), (0, "C"))
67
+ G.add_edge((1, "F"), (0, "E"))
68
+ G.add_edge((1, "E"), (0, "A"))
69
+ G.add_edge((1, "E"), (0, "C"))
70
+ G.add_edge((0, "C"), (1, "D"))
71
+ G.add_edge((0, "C"), (1, "I"))
72
+ G.add_edge((0, "C"), (1, "G"))
73
+ G.add_edge((0, "C"), (1, "H"))
74
+ G.add_edge((1, "D"), (0, "A"))
75
+ G.add_edge((1, "I"), (0, "A"))
76
+ G.add_edge((1, "I"), (0, "E"))
77
+ G.add_edge((0, "A"), (1, "G"))
78
+ G.add_edge((0, "A"), (1, "H"))
79
+ G.add_edge((0, "E"), (1, "G"))
80
+ G.add_edge((0, "E"), (1, "H"))
81
+ self.disconnected_graph = G
82
+
83
+ def check_match(self, matching):
84
+ """Asserts that the matching is what we expect from the bipartite graph
85
+ constructed in the :meth:`setup` fixture.
86
+
87
+ """
88
+ # For the sake of brevity, rename `matching` to `M`.
89
+ M = matching
90
+ matched_vertices = frozenset(itertools.chain(*M.items()))
91
+ # Assert that the maximum number of vertices (10) is matched.
92
+ assert matched_vertices == frozenset(range(12)) - {1, 10}
93
+ # Assert that no vertex appears in two edges, or in other words, that
94
+ # the matching (u, v) and (v, u) both appear in the matching
95
+ # dictionary.
96
+ assert all(u == M[M[u]] for u in range(12) if u in M)
97
+
98
+ def check_vertex_cover(self, vertices):
99
+ """Asserts that the given set of vertices is the vertex cover we
100
+ expected from the bipartite graph constructed in the :meth:`setup`
101
+ fixture.
102
+
103
+ """
104
+ # By Konig's theorem, the number of edges in a maximum matching equals
105
+ # the number of vertices in a minimum vertex cover.
106
+ assert len(vertices) == 5
107
+ # Assert that the set is truly a vertex cover.
108
+ for u, v in self.graph.edges():
109
+ assert u in vertices or v in vertices
110
+ # TODO Assert that the vertices are the correct ones.
111
+
112
+ def test_eppstein_matching(self):
113
+ """Tests that David Eppstein's implementation of the Hopcroft--Karp
114
+ algorithm produces a maximum cardinality matching.
115
+
116
+ """
117
+ self.check_match(eppstein_matching(self.graph, self.top_nodes))
118
+
119
+ def test_hopcroft_karp_matching(self):
120
+ """Tests that the Hopcroft--Karp algorithm produces a maximum
121
+ cardinality matching in a bipartite graph.
122
+
123
+ """
124
+ self.check_match(hopcroft_karp_matching(self.graph, self.top_nodes))
125
+
126
+ def test_to_vertex_cover(self):
127
+ """Test for converting a maximum matching to a minimum vertex cover."""
128
+ matching = maximum_matching(self.graph, self.top_nodes)
129
+ vertex_cover = to_vertex_cover(self.graph, matching, self.top_nodes)
130
+ self.check_vertex_cover(vertex_cover)
131
+
132
+ def test_eppstein_matching_simple(self):
133
+ match = eppstein_matching(self.simple_graph)
134
+ assert match == self.simple_solution
135
+
136
+ def test_hopcroft_karp_matching_simple(self):
137
+ match = hopcroft_karp_matching(self.simple_graph)
138
+ assert match == self.simple_solution
139
+
140
+ def test_eppstein_matching_disconnected(self):
141
+ with pytest.raises(nx.AmbiguousSolution):
142
+ match = eppstein_matching(self.disconnected_graph)
143
+
144
+ def test_hopcroft_karp_matching_disconnected(self):
145
+ with pytest.raises(nx.AmbiguousSolution):
146
+ match = hopcroft_karp_matching(self.disconnected_graph)
147
+
148
+ def test_issue_2127(self):
149
+ """Test from issue 2127"""
150
+ # Build the example DAG
151
+ G = nx.DiGraph()
152
+ G.add_edge("A", "C")
153
+ G.add_edge("A", "B")
154
+ G.add_edge("C", "E")
155
+ G.add_edge("C", "D")
156
+ G.add_edge("E", "G")
157
+ G.add_edge("E", "F")
158
+ G.add_edge("G", "I")
159
+ G.add_edge("G", "H")
160
+
161
+ tc = nx.transitive_closure(G)
162
+ btc = nx.Graph()
163
+
164
+ # Create a bipartite graph based on the transitive closure of G
165
+ for v in tc.nodes():
166
+ btc.add_node((0, v))
167
+ btc.add_node((1, v))
168
+
169
+ for u, v in tc.edges():
170
+ btc.add_edge((0, u), (1, v))
171
+
172
+ top_nodes = {n for n in btc if n[0] == 0}
173
+ matching = hopcroft_karp_matching(btc, top_nodes)
174
+ vertex_cover = to_vertex_cover(btc, matching, top_nodes)
175
+ independent_set = set(G) - {v for _, v in vertex_cover}
176
+ assert {"B", "D", "F", "I", "H"} == independent_set
177
+
178
+ def test_vertex_cover_issue_2384(self):
179
+ G = nx.Graph([(0, 3), (1, 3), (1, 4), (2, 3)])
180
+ matching = maximum_matching(G)
181
+ vertex_cover = to_vertex_cover(G, matching)
182
+ for u, v in G.edges():
183
+ assert u in vertex_cover or v in vertex_cover
184
+
185
+ def test_vertex_cover_issue_3306(self):
186
+ G = nx.Graph()
187
+ edges = [(0, 2), (1, 0), (1, 1), (1, 2), (2, 2)]
188
+ G.add_edges_from([((i, "L"), (j, "R")) for i, j in edges])
189
+
190
+ matching = maximum_matching(G)
191
+ vertex_cover = to_vertex_cover(G, matching)
192
+ for u, v in G.edges():
193
+ assert u in vertex_cover or v in vertex_cover
194
+
195
+ def test_unorderable_nodes(self):
196
+ a = object()
197
+ b = object()
198
+ c = object()
199
+ d = object()
200
+ e = object()
201
+ G = nx.Graph([(a, d), (b, d), (b, e), (c, d)])
202
+ matching = maximum_matching(G)
203
+ vertex_cover = to_vertex_cover(G, matching)
204
+ for u, v in G.edges():
205
+ assert u in vertex_cover or v in vertex_cover
206
+
207
+
208
+ def test_eppstein_matching():
209
+ """Test in accordance to issue #1927"""
210
+ G = nx.Graph()
211
+ G.add_nodes_from(["a", 2, 3, 4], bipartite=0)
212
+ G.add_nodes_from([1, "b", "c"], bipartite=1)
213
+ G.add_edges_from([("a", 1), ("a", "b"), (2, "b"), (2, "c"), (3, "c"), (4, 1)])
214
+ matching = eppstein_matching(G)
215
+ assert len(matching) == len(maximum_matching(G))
216
+ assert all(x in set(matching.keys()) for x in set(matching.values()))
217
+
218
+
219
+ class TestMinimumWeightFullMatching:
220
+ @classmethod
221
+ def setup_class(cls):
222
+ pytest.importorskip("scipy")
223
+
224
+ def test_minimum_weight_full_matching_incomplete_graph(self):
225
+ B = nx.Graph()
226
+ B.add_nodes_from([1, 2], bipartite=0)
227
+ B.add_nodes_from([3, 4], bipartite=1)
228
+ B.add_edge(1, 4, weight=100)
229
+ B.add_edge(2, 3, weight=100)
230
+ B.add_edge(2, 4, weight=50)
231
+ matching = minimum_weight_full_matching(B)
232
+ assert matching == {1: 4, 2: 3, 4: 1, 3: 2}
233
+
234
+ def test_minimum_weight_full_matching_with_no_full_matching(self):
235
+ B = nx.Graph()
236
+ B.add_nodes_from([1, 2, 3], bipartite=0)
237
+ B.add_nodes_from([4, 5, 6], bipartite=1)
238
+ B.add_edge(1, 4, weight=100)
239
+ B.add_edge(2, 4, weight=100)
240
+ B.add_edge(3, 4, weight=50)
241
+ B.add_edge(3, 5, weight=50)
242
+ B.add_edge(3, 6, weight=50)
243
+ with pytest.raises(ValueError):
244
+ minimum_weight_full_matching(B)
245
+
246
+ def test_minimum_weight_full_matching_square(self):
247
+ G = nx.complete_bipartite_graph(3, 3)
248
+ G.add_edge(0, 3, weight=400)
249
+ G.add_edge(0, 4, weight=150)
250
+ G.add_edge(0, 5, weight=400)
251
+ G.add_edge(1, 3, weight=400)
252
+ G.add_edge(1, 4, weight=450)
253
+ G.add_edge(1, 5, weight=600)
254
+ G.add_edge(2, 3, weight=300)
255
+ G.add_edge(2, 4, weight=225)
256
+ G.add_edge(2, 5, weight=300)
257
+ matching = minimum_weight_full_matching(G)
258
+ assert matching == {0: 4, 1: 3, 2: 5, 4: 0, 3: 1, 5: 2}
259
+
260
+ def test_minimum_weight_full_matching_smaller_left(self):
261
+ G = nx.complete_bipartite_graph(3, 4)
262
+ G.add_edge(0, 3, weight=400)
263
+ G.add_edge(0, 4, weight=150)
264
+ G.add_edge(0, 5, weight=400)
265
+ G.add_edge(0, 6, weight=1)
266
+ G.add_edge(1, 3, weight=400)
267
+ G.add_edge(1, 4, weight=450)
268
+ G.add_edge(1, 5, weight=600)
269
+ G.add_edge(1, 6, weight=2)
270
+ G.add_edge(2, 3, weight=300)
271
+ G.add_edge(2, 4, weight=225)
272
+ G.add_edge(2, 5, weight=290)
273
+ G.add_edge(2, 6, weight=3)
274
+ matching = minimum_weight_full_matching(G)
275
+ assert matching == {0: 4, 1: 6, 2: 5, 4: 0, 5: 2, 6: 1}
276
+
277
+ def test_minimum_weight_full_matching_smaller_top_nodes_right(self):
278
+ G = nx.complete_bipartite_graph(3, 4)
279
+ G.add_edge(0, 3, weight=400)
280
+ G.add_edge(0, 4, weight=150)
281
+ G.add_edge(0, 5, weight=400)
282
+ G.add_edge(0, 6, weight=1)
283
+ G.add_edge(1, 3, weight=400)
284
+ G.add_edge(1, 4, weight=450)
285
+ G.add_edge(1, 5, weight=600)
286
+ G.add_edge(1, 6, weight=2)
287
+ G.add_edge(2, 3, weight=300)
288
+ G.add_edge(2, 4, weight=225)
289
+ G.add_edge(2, 5, weight=290)
290
+ G.add_edge(2, 6, weight=3)
291
+ matching = minimum_weight_full_matching(G, top_nodes=[3, 4, 5, 6])
292
+ assert matching == {0: 4, 1: 6, 2: 5, 4: 0, 5: 2, 6: 1}
293
+
294
+ def test_minimum_weight_full_matching_smaller_right(self):
295
+ G = nx.complete_bipartite_graph(4, 3)
296
+ G.add_edge(0, 4, weight=400)
297
+ G.add_edge(0, 5, weight=400)
298
+ G.add_edge(0, 6, weight=300)
299
+ G.add_edge(1, 4, weight=150)
300
+ G.add_edge(1, 5, weight=450)
301
+ G.add_edge(1, 6, weight=225)
302
+ G.add_edge(2, 4, weight=400)
303
+ G.add_edge(2, 5, weight=600)
304
+ G.add_edge(2, 6, weight=290)
305
+ G.add_edge(3, 4, weight=1)
306
+ G.add_edge(3, 5, weight=2)
307
+ G.add_edge(3, 6, weight=3)
308
+ matching = minimum_weight_full_matching(G)
309
+ assert matching == {1: 4, 2: 6, 3: 5, 4: 1, 5: 3, 6: 2}
310
+
311
+ def test_minimum_weight_full_matching_negative_weights(self):
312
+ G = nx.complete_bipartite_graph(2, 2)
313
+ G.add_edge(0, 2, weight=-2)
314
+ G.add_edge(0, 3, weight=0.2)
315
+ G.add_edge(1, 2, weight=-2)
316
+ G.add_edge(1, 3, weight=0.3)
317
+ matching = minimum_weight_full_matching(G)
318
+ assert matching == {0: 3, 1: 2, 2: 1, 3: 0}
319
+
320
+ def test_minimum_weight_full_matching_different_weight_key(self):
321
+ G = nx.complete_bipartite_graph(2, 2)
322
+ G.add_edge(0, 2, mass=2)
323
+ G.add_edge(0, 3, mass=0.2)
324
+ G.add_edge(1, 2, mass=1)
325
+ G.add_edge(1, 3, mass=2)
326
+ matching = minimum_weight_full_matching(G, weight="mass")
327
+ assert matching == {0: 3, 1: 2, 2: 1, 3: 0}
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_matrix.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ np = pytest.importorskip("numpy")
4
+ sp = pytest.importorskip("scipy")
5
+ sparse = pytest.importorskip("scipy.sparse")
6
+
7
+
8
+ import networkx as nx
9
+ from networkx.algorithms import bipartite
10
+ from networkx.utils import edges_equal
11
+
12
+
13
+ class TestBiadjacencyMatrix:
14
+ def test_biadjacency_matrix_weight(self):
15
+ G = nx.path_graph(5)
16
+ G.add_edge(0, 1, weight=2, other=4)
17
+ X = [1, 3]
18
+ Y = [0, 2, 4]
19
+ M = bipartite.biadjacency_matrix(G, X, weight="weight")
20
+ assert M[0, 0] == 2
21
+ M = bipartite.biadjacency_matrix(G, X, weight="other")
22
+ assert M[0, 0] == 4
23
+
24
+ def test_biadjacency_matrix(self):
25
+ tops = [2, 5, 10]
26
+ bots = [5, 10, 15]
27
+ for i in range(len(tops)):
28
+ G = bipartite.random_graph(tops[i], bots[i], 0.2)
29
+ top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0]
30
+ M = bipartite.biadjacency_matrix(G, top)
31
+ assert M.shape[0] == tops[i]
32
+ assert M.shape[1] == bots[i]
33
+
34
+ def test_biadjacency_matrix_order(self):
35
+ G = nx.path_graph(5)
36
+ G.add_edge(0, 1, weight=2)
37
+ X = [3, 1]
38
+ Y = [4, 2, 0]
39
+ M = bipartite.biadjacency_matrix(G, X, Y, weight="weight")
40
+ assert M[1, 2] == 2
41
+
42
+ def test_biadjacency_matrix_empty_graph(self):
43
+ G = nx.empty_graph(2)
44
+ M = nx.bipartite.biadjacency_matrix(G, [0])
45
+ assert np.array_equal(M.toarray(), np.array([[0]]))
46
+
47
+ def test_null_graph(self):
48
+ with pytest.raises(nx.NetworkXError):
49
+ bipartite.biadjacency_matrix(nx.Graph(), [])
50
+
51
+ def test_empty_graph(self):
52
+ with pytest.raises(nx.NetworkXError):
53
+ bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [])
54
+
55
+ def test_duplicate_row(self):
56
+ with pytest.raises(nx.NetworkXError):
57
+ bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [1, 1])
58
+
59
+ def test_duplicate_col(self):
60
+ with pytest.raises(nx.NetworkXError):
61
+ bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], [1, 1])
62
+
63
+ def test_format_keyword(self):
64
+ with pytest.raises(nx.NetworkXError):
65
+ bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], format="foo")
66
+
67
+ def test_from_biadjacency_roundtrip(self):
68
+ B1 = nx.path_graph(5)
69
+ M = bipartite.biadjacency_matrix(B1, [0, 2, 4])
70
+ B2 = bipartite.from_biadjacency_matrix(M)
71
+ assert nx.is_isomorphic(B1, B2)
72
+
73
+ def test_from_biadjacency_weight(self):
74
+ M = sparse.csc_matrix([[1, 2], [0, 3]])
75
+ B = bipartite.from_biadjacency_matrix(M)
76
+ assert edges_equal(B.edges(), [(0, 2), (0, 3), (1, 3)])
77
+ B = bipartite.from_biadjacency_matrix(M, edge_attribute="weight")
78
+ e = [(0, 2, {"weight": 1}), (0, 3, {"weight": 2}), (1, 3, {"weight": 3})]
79
+ assert edges_equal(B.edges(data=True), e)
80
+
81
+ def test_from_biadjacency_multigraph(self):
82
+ M = sparse.csc_matrix([[1, 2], [0, 3]])
83
+ B = bipartite.from_biadjacency_matrix(M, create_using=nx.MultiGraph())
84
+ assert edges_equal(B.edges(), [(0, 2), (0, 3), (0, 3), (1, 3), (1, 3), (1, 3)])
deepseekvl2/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_project.py ADDED
@@ -0,0 +1,407 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms import bipartite
5
+ from networkx.utils import edges_equal, nodes_equal
6
+
7
+
8
+ class TestBipartiteProject:
9
+ def test_path_projected_graph(self):
10
+ G = nx.path_graph(4)
11
+ P = bipartite.projected_graph(G, [1, 3])
12
+ assert nodes_equal(list(P), [1, 3])
13
+ assert edges_equal(list(P.edges()), [(1, 3)])
14
+ P = bipartite.projected_graph(G, [0, 2])
15
+ assert nodes_equal(list(P), [0, 2])
16
+ assert edges_equal(list(P.edges()), [(0, 2)])
17
+ G = nx.MultiGraph([(0, 1)])
18
+ with pytest.raises(nx.NetworkXError, match="not defined for multigraphs"):
19
+ bipartite.projected_graph(G, [0])
20
+
21
+ def test_path_projected_properties_graph(self):
22
+ G = nx.path_graph(4)
23
+ G.add_node(1, name="one")
24
+ G.add_node(2, name="two")
25
+ P = bipartite.projected_graph(G, [1, 3])
26
+ assert nodes_equal(list(P), [1, 3])
27
+ assert edges_equal(list(P.edges()), [(1, 3)])
28
+ assert P.nodes[1]["name"] == G.nodes[1]["name"]
29
+ P = bipartite.projected_graph(G, [0, 2])
30
+ assert nodes_equal(list(P), [0, 2])
31
+ assert edges_equal(list(P.edges()), [(0, 2)])
32
+ assert P.nodes[2]["name"] == G.nodes[2]["name"]
33
+
34
+ def test_path_collaboration_projected_graph(self):
35
+ G = nx.path_graph(4)
36
+ P = bipartite.collaboration_weighted_projected_graph(G, [1, 3])
37
+ assert nodes_equal(list(P), [1, 3])
38
+ assert edges_equal(list(P.edges()), [(1, 3)])
39
+ P[1][3]["weight"] = 1
40
+ P = bipartite.collaboration_weighted_projected_graph(G, [0, 2])
41
+ assert nodes_equal(list(P), [0, 2])
42
+ assert edges_equal(list(P.edges()), [(0, 2)])
43
+ P[0][2]["weight"] = 1
44
+
45
+ def test_directed_path_collaboration_projected_graph(self):
46
+ G = nx.DiGraph()
47
+ nx.add_path(G, range(4))
48
+ P = bipartite.collaboration_weighted_projected_graph(G, [1, 3])
49
+ assert nodes_equal(list(P), [1, 3])
50
+ assert edges_equal(list(P.edges()), [(1, 3)])
51
+ P[1][3]["weight"] = 1
52
+ P = bipartite.collaboration_weighted_projected_graph(G, [0, 2])
53
+ assert nodes_equal(list(P), [0, 2])
54
+ assert edges_equal(list(P.edges()), [(0, 2)])
55
+ P[0][2]["weight"] = 1
56
+
57
+ def test_path_weighted_projected_graph(self):
58
+ G = nx.path_graph(4)
59
+
60
+ with pytest.raises(nx.NetworkXAlgorithmError):
61
+ bipartite.weighted_projected_graph(G, [1, 2, 3, 3])
62
+
63
+ P = bipartite.weighted_projected_graph(G, [1, 3])
64
+ assert nodes_equal(list(P), [1, 3])
65
+ assert edges_equal(list(P.edges()), [(1, 3)])
66
+ P[1][3]["weight"] = 1
67
+ P = bipartite.weighted_projected_graph(G, [0, 2])
68
+ assert nodes_equal(list(P), [0, 2])
69
+ assert edges_equal(list(P.edges()), [(0, 2)])
70
+ P[0][2]["weight"] = 1
71
+
72
+ def test_digraph_weighted_projection(self):
73
+ G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4)])
74
+ P = bipartite.overlap_weighted_projected_graph(G, [1, 3])
75
+ assert nx.get_edge_attributes(P, "weight") == {(1, 3): 1.0}
76
+ assert len(P) == 2
77
+
78
+ def test_path_weighted_projected_directed_graph(self):
79
+ G = nx.DiGraph()
80
+ nx.add_path(G, range(4))
81
+ P = bipartite.weighted_projected_graph(G, [1, 3])
82
+ assert nodes_equal(list(P), [1, 3])
83
+ assert edges_equal(list(P.edges()), [(1, 3)])
84
+ P[1][3]["weight"] = 1
85
+ P = bipartite.weighted_projected_graph(G, [0, 2])
86
+ assert nodes_equal(list(P), [0, 2])
87
+ assert edges_equal(list(P.edges()), [(0, 2)])
88
+ P[0][2]["weight"] = 1
89
+
90
+ def test_star_projected_graph(self):
91
+ G = nx.star_graph(3)
92
+ P = bipartite.projected_graph(G, [1, 2, 3])
93
+ assert nodes_equal(list(P), [1, 2, 3])
94
+ assert edges_equal(list(P.edges()), [(1, 2), (1, 3), (2, 3)])
95
+ P = bipartite.weighted_projected_graph(G, [1, 2, 3])
96
+ assert nodes_equal(list(P), [1, 2, 3])
97
+ assert edges_equal(list(P.edges()), [(1, 2), (1, 3), (2, 3)])
98
+
99
+ P = bipartite.projected_graph(G, [0])
100
+ assert nodes_equal(list(P), [0])
101
+ assert edges_equal(list(P.edges()), [])
102
+
103
+ def test_project_multigraph(self):
104
+ G = nx.Graph()
105
+ G.add_edge("a", 1)
106
+ G.add_edge("b", 1)
107
+ G.add_edge("a", 2)
108
+ G.add_edge("b", 2)
109
+ P = bipartite.projected_graph(G, "ab")
110
+ assert edges_equal(list(P.edges()), [("a", "b")])
111
+ P = bipartite.weighted_projected_graph(G, "ab")
112
+ assert edges_equal(list(P.edges()), [("a", "b")])
113
+ P = bipartite.projected_graph(G, "ab", multigraph=True)
114
+ assert edges_equal(list(P.edges()), [("a", "b"), ("a", "b")])
115
+
116
+ def test_project_collaboration(self):
117
+ G = nx.Graph()
118
+ G.add_edge("a", 1)
119
+ G.add_edge("b", 1)
120
+ G.add_edge("b", 2)
121
+ G.add_edge("c", 2)
122
+ G.add_edge("c", 3)
123
+ G.add_edge("c", 4)
124
+ G.add_edge("b", 4)
125
+ P = bipartite.collaboration_weighted_projected_graph(G, "abc")
126
+ assert P["a"]["b"]["weight"] == 1
127
+ assert P["b"]["c"]["weight"] == 2
128
+
129
+ def test_directed_projection(self):
130
+ G = nx.DiGraph()
131
+ G.add_edge("A", 1)
132
+ G.add_edge(1, "B")
133
+ G.add_edge("A", 2)
134
+ G.add_edge("B", 2)
135
+ P = bipartite.projected_graph(G, "AB")
136
+ assert edges_equal(list(P.edges()), [("A", "B")])
137
+ P = bipartite.weighted_projected_graph(G, "AB")
138
+ assert edges_equal(list(P.edges()), [("A", "B")])
139
+ assert P["A"]["B"]["weight"] == 1
140
+
141
+ P = bipartite.projected_graph(G, "AB", multigraph=True)
142
+ assert edges_equal(list(P.edges()), [("A", "B")])
143
+
144
+ G = nx.DiGraph()
145
+ G.add_edge("A", 1)
146
+ G.add_edge(1, "B")
147
+ G.add_edge("A", 2)
148
+ G.add_edge(2, "B")
149
+ P = bipartite.projected_graph(G, "AB")
150
+ assert edges_equal(list(P.edges()), [("A", "B")])
151
+ P = bipartite.weighted_projected_graph(G, "AB")
152
+ assert edges_equal(list(P.edges()), [("A", "B")])
153
+ assert P["A"]["B"]["weight"] == 2
154
+
155
+ P = bipartite.projected_graph(G, "AB", multigraph=True)
156
+ assert edges_equal(list(P.edges()), [("A", "B"), ("A", "B")])
157
+
158
+
159
+ class TestBipartiteWeightedProjection:
160
+ @classmethod
161
+ def setup_class(cls):
162
+ # Tore Opsahl's example
163
+ # http://toreopsahl.com/2009/05/01/projecting-two-mode-networks-onto-weighted-one-mode-networks/
164
+ cls.G = nx.Graph()
165
+ cls.G.add_edge("A", 1)
166
+ cls.G.add_edge("A", 2)
167
+ cls.G.add_edge("B", 1)
168
+ cls.G.add_edge("B", 2)
169
+ cls.G.add_edge("B", 3)
170
+ cls.G.add_edge("B", 4)
171
+ cls.G.add_edge("B", 5)
172
+ cls.G.add_edge("C", 1)
173
+ cls.G.add_edge("D", 3)
174
+ cls.G.add_edge("E", 4)
175
+ cls.G.add_edge("E", 5)
176
+ cls.G.add_edge("E", 6)
177
+ cls.G.add_edge("F", 6)
178
+ # Graph based on figure 6 from Newman (2001)
179
+ cls.N = nx.Graph()
180
+ cls.N.add_edge("A", 1)
181
+ cls.N.add_edge("A", 2)
182
+ cls.N.add_edge("A", 3)
183
+ cls.N.add_edge("B", 1)
184
+ cls.N.add_edge("B", 2)
185
+ cls.N.add_edge("B", 3)
186
+ cls.N.add_edge("C", 1)
187
+ cls.N.add_edge("D", 1)
188
+ cls.N.add_edge("E", 3)
189
+
190
+ def test_project_weighted_shared(self):
191
+ edges = [
192
+ ("A", "B", 2),
193
+ ("A", "C", 1),
194
+ ("B", "C", 1),
195
+ ("B", "D", 1),
196
+ ("B", "E", 2),
197
+ ("E", "F", 1),
198
+ ]
199
+ Panswer = nx.Graph()
200
+ Panswer.add_weighted_edges_from(edges)
201
+ P = bipartite.weighted_projected_graph(self.G, "ABCDEF")
202
+ assert edges_equal(list(P.edges()), Panswer.edges())
203
+ for u, v in list(P.edges()):
204
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
205
+
206
+ edges = [
207
+ ("A", "B", 3),
208
+ ("A", "E", 1),
209
+ ("A", "C", 1),
210
+ ("A", "D", 1),
211
+ ("B", "E", 1),
212
+ ("B", "C", 1),
213
+ ("B", "D", 1),
214
+ ("C", "D", 1),
215
+ ]
216
+ Panswer = nx.Graph()
217
+ Panswer.add_weighted_edges_from(edges)
218
+ P = bipartite.weighted_projected_graph(self.N, "ABCDE")
219
+ assert edges_equal(list(P.edges()), Panswer.edges())
220
+ for u, v in list(P.edges()):
221
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
222
+
223
+ def test_project_weighted_newman(self):
224
+ edges = [
225
+ ("A", "B", 1.5),
226
+ ("A", "C", 0.5),
227
+ ("B", "C", 0.5),
228
+ ("B", "D", 1),
229
+ ("B", "E", 2),
230
+ ("E", "F", 1),
231
+ ]
232
+ Panswer = nx.Graph()
233
+ Panswer.add_weighted_edges_from(edges)
234
+ P = bipartite.collaboration_weighted_projected_graph(self.G, "ABCDEF")
235
+ assert edges_equal(list(P.edges()), Panswer.edges())
236
+ for u, v in list(P.edges()):
237
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
238
+
239
+ edges = [
240
+ ("A", "B", 11 / 6.0),
241
+ ("A", "E", 1 / 2.0),
242
+ ("A", "C", 1 / 3.0),
243
+ ("A", "D", 1 / 3.0),
244
+ ("B", "E", 1 / 2.0),
245
+ ("B", "C", 1 / 3.0),
246
+ ("B", "D", 1 / 3.0),
247
+ ("C", "D", 1 / 3.0),
248
+ ]
249
+ Panswer = nx.Graph()
250
+ Panswer.add_weighted_edges_from(edges)
251
+ P = bipartite.collaboration_weighted_projected_graph(self.N, "ABCDE")
252
+ assert edges_equal(list(P.edges()), Panswer.edges())
253
+ for u, v in list(P.edges()):
254
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
255
+
256
+ def test_project_weighted_ratio(self):
257
+ edges = [
258
+ ("A", "B", 2 / 6.0),
259
+ ("A", "C", 1 / 6.0),
260
+ ("B", "C", 1 / 6.0),
261
+ ("B", "D", 1 / 6.0),
262
+ ("B", "E", 2 / 6.0),
263
+ ("E", "F", 1 / 6.0),
264
+ ]
265
+ Panswer = nx.Graph()
266
+ Panswer.add_weighted_edges_from(edges)
267
+ P = bipartite.weighted_projected_graph(self.G, "ABCDEF", ratio=True)
268
+ assert edges_equal(list(P.edges()), Panswer.edges())
269
+ for u, v in list(P.edges()):
270
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
271
+
272
+ edges = [
273
+ ("A", "B", 3 / 3.0),
274
+ ("A", "E", 1 / 3.0),
275
+ ("A", "C", 1 / 3.0),
276
+ ("A", "D", 1 / 3.0),
277
+ ("B", "E", 1 / 3.0),
278
+ ("B", "C", 1 / 3.0),
279
+ ("B", "D", 1 / 3.0),
280
+ ("C", "D", 1 / 3.0),
281
+ ]
282
+ Panswer = nx.Graph()
283
+ Panswer.add_weighted_edges_from(edges)
284
+ P = bipartite.weighted_projected_graph(self.N, "ABCDE", ratio=True)
285
+ assert edges_equal(list(P.edges()), Panswer.edges())
286
+ for u, v in list(P.edges()):
287
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
288
+
289
+ def test_project_weighted_overlap(self):
290
+ edges = [
291
+ ("A", "B", 2 / 2.0),
292
+ ("A", "C", 1 / 1.0),
293
+ ("B", "C", 1 / 1.0),
294
+ ("B", "D", 1 / 1.0),
295
+ ("B", "E", 2 / 3.0),
296
+ ("E", "F", 1 / 1.0),
297
+ ]
298
+ Panswer = nx.Graph()
299
+ Panswer.add_weighted_edges_from(edges)
300
+ P = bipartite.overlap_weighted_projected_graph(self.G, "ABCDEF", jaccard=False)
301
+ assert edges_equal(list(P.edges()), Panswer.edges())
302
+ for u, v in list(P.edges()):
303
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
304
+
305
+ edges = [
306
+ ("A", "B", 3 / 3.0),
307
+ ("A", "E", 1 / 1.0),
308
+ ("A", "C", 1 / 1.0),
309
+ ("A", "D", 1 / 1.0),
310
+ ("B", "E", 1 / 1.0),
311
+ ("B", "C", 1 / 1.0),
312
+ ("B", "D", 1 / 1.0),
313
+ ("C", "D", 1 / 1.0),
314
+ ]
315
+ Panswer = nx.Graph()
316
+ Panswer.add_weighted_edges_from(edges)
317
+ P = bipartite.overlap_weighted_projected_graph(self.N, "ABCDE", jaccard=False)
318
+ assert edges_equal(list(P.edges()), Panswer.edges())
319
+ for u, v in list(P.edges()):
320
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
321
+
322
+ def test_project_weighted_jaccard(self):
323
+ edges = [
324
+ ("A", "B", 2 / 5.0),
325
+ ("A", "C", 1 / 2.0),
326
+ ("B", "C", 1 / 5.0),
327
+ ("B", "D", 1 / 5.0),
328
+ ("B", "E", 2 / 6.0),
329
+ ("E", "F", 1 / 3.0),
330
+ ]
331
+ Panswer = nx.Graph()
332
+ Panswer.add_weighted_edges_from(edges)
333
+ P = bipartite.overlap_weighted_projected_graph(self.G, "ABCDEF")
334
+ assert edges_equal(list(P.edges()), Panswer.edges())
335
+ for u, v in list(P.edges()):
336
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
337
+
338
+ edges = [
339
+ ("A", "B", 3 / 3.0),
340
+ ("A", "E", 1 / 3.0),
341
+ ("A", "C", 1 / 3.0),
342
+ ("A", "D", 1 / 3.0),
343
+ ("B", "E", 1 / 3.0),
344
+ ("B", "C", 1 / 3.0),
345
+ ("B", "D", 1 / 3.0),
346
+ ("C", "D", 1 / 1.0),
347
+ ]
348
+ Panswer = nx.Graph()
349
+ Panswer.add_weighted_edges_from(edges)
350
+ P = bipartite.overlap_weighted_projected_graph(self.N, "ABCDE")
351
+ assert edges_equal(list(P.edges()), Panswer.edges())
352
+ for u, v in P.edges():
353
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
354
+
355
+ def test_generic_weighted_projected_graph_simple(self):
356
+ def shared(G, u, v):
357
+ return len(set(G[u]) & set(G[v]))
358
+
359
+ B = nx.path_graph(5)
360
+ G = bipartite.generic_weighted_projected_graph(
361
+ B, [0, 2, 4], weight_function=shared
362
+ )
363
+ assert nodes_equal(list(G), [0, 2, 4])
364
+ assert edges_equal(
365
+ list(G.edges(data=True)),
366
+ [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})],
367
+ )
368
+
369
+ G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4])
370
+ assert nodes_equal(list(G), [0, 2, 4])
371
+ assert edges_equal(
372
+ list(G.edges(data=True)),
373
+ [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})],
374
+ )
375
+ B = nx.DiGraph()
376
+ nx.add_path(B, range(5))
377
+ G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4])
378
+ assert nodes_equal(list(G), [0, 2, 4])
379
+ assert edges_equal(
380
+ list(G.edges(data=True)), [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})]
381
+ )
382
+
383
+ def test_generic_weighted_projected_graph_custom(self):
384
+ def jaccard(G, u, v):
385
+ unbrs = set(G[u])
386
+ vnbrs = set(G[v])
387
+ return len(unbrs & vnbrs) / len(unbrs | vnbrs)
388
+
389
+ def my_weight(G, u, v, weight="weight"):
390
+ w = 0
391
+ for nbr in set(G[u]) & set(G[v]):
392
+ w += G.edges[u, nbr].get(weight, 1) + G.edges[v, nbr].get(weight, 1)
393
+ return w
394
+
395
+ B = nx.bipartite.complete_bipartite_graph(2, 2)
396
+ for i, (u, v) in enumerate(B.edges()):
397
+ B.edges[u, v]["weight"] = i + 1
398
+ G = bipartite.generic_weighted_projected_graph(
399
+ B, [0, 1], weight_function=jaccard
400
+ )
401
+ assert edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 1.0})])
402
+ G = bipartite.generic_weighted_projected_graph(
403
+ B, [0, 1], weight_function=my_weight
404
+ )
405
+ assert edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 10})])
406
+ G = bipartite.generic_weighted_projected_graph(B, [0, 1])
407
+ assert edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 2})])