ZTWHHH committed on
Commit
4b76759
·
verified ·
1 Parent(s): 4588967

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. wemm/lib/python3.10/site-packages/botocore/data/cloudwatch/2010-08-01/paginators-1.json +47 -0
  2. wemm/lib/python3.10/site-packages/botocore/data/cloudwatch/2010-08-01/service-2.json +0 -0
  3. wemm/lib/python3.10/site-packages/botocore/data/dataexchange/2017-07-25/paginators-1.json +34 -0
  4. wemm/lib/python3.10/site-packages/botocore/data/dataexchange/2017-07-25/service-2.json +0 -0
  5. wemm/lib/python3.10/site-packages/botocore/data/guardduty/2017-11-28/service-2.json +0 -0
  6. wemm/lib/python3.10/site-packages/botocore/data/iot1click-projects/2018-05-14/endpoint-rule-set-1.json.gz +3 -0
  7. wemm/lib/python3.10/site-packages/botocore/data/voice-id/2021-09-27/service-2.json +0 -0
  8. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/basic.cpython-310.pyc +0 -0
  9. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/centrality.cpython-310.pyc +0 -0
  10. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/cluster.cpython-310.pyc +0 -0
  11. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/covering.cpython-310.pyc +0 -0
  12. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/edgelist.cpython-310.pyc +0 -0
  13. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/extendability.cpython-310.pyc +0 -0
  14. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/matching.cpython-310.pyc +0 -0
  15. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/matrix.cpython-310.pyc +0 -0
  16. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/projection.cpython-310.pyc +0 -0
  17. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/redundancy.cpython-310.pyc +0 -0
  18. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/spectral.cpython-310.pyc +0 -0
  19. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/basic.py +322 -0
  20. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/cluster.py +278 -0
  21. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/extendability.py +105 -0
  22. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/generators.py +604 -0
  23. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__init__.py +0 -0
  24. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_centrality.cpython-310.pyc +0 -0
  25. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_covering.cpython-310.pyc +0 -0
  26. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_edgelist.cpython-310.pyc +0 -0
  27. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_extendability.cpython-310.pyc +0 -0
  28. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_generators.cpython-310.pyc +0 -0
  29. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matching.cpython-310.pyc +0 -0
  30. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matrix.cpython-310.pyc +0 -0
  31. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_redundancy.cpython-310.pyc +0 -0
  32. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_spectral_bipartivity.cpython-310.pyc +0 -0
  33. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_centrality.py +192 -0
  34. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_cluster.py +84 -0
  35. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_generators.py +409 -0
  36. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_project.py +407 -0
  37. wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_redundancy.py +35 -0
  38. wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/__init__.py +20 -0
  39. wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/__pycache__/closeness.cpython-310.pyc +0 -0
  40. wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_closeness.cpython-310.pyc +0 -0
  41. wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/__pycache__/degree_alg.cpython-310.pyc +0 -0
  42. wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/betweenness.py +436 -0
  43. wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/betweenness_subset.py +275 -0
  44. wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/closeness.py +282 -0
  45. wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/current_flow_betweenness_subset.py +227 -0
  46. wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/current_flow_closeness.py +96 -0
  47. wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/degree_alg.py +150 -0
  48. wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/eigenvector.py +357 -0
  49. wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/harmonic.py +89 -0
  50. wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/katz.py +331 -0
wemm/lib/python3.10/site-packages/botocore/data/cloudwatch/2010-08-01/paginators-1.json ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "pagination": {
3
+ "DescribeAlarmHistory": {
4
+ "input_token": "NextToken",
5
+ "output_token": "NextToken",
6
+ "limit_key": "MaxRecords",
7
+ "result_key": "AlarmHistoryItems"
8
+ },
9
+ "DescribeAlarms": {
10
+ "input_token": "NextToken",
11
+ "output_token": "NextToken",
12
+ "limit_key": "MaxRecords",
13
+ "result_key": [
14
+ "MetricAlarms",
15
+ "CompositeAlarms"
16
+ ]
17
+ },
18
+ "ListDashboards": {
19
+ "input_token": "NextToken",
20
+ "output_token": "NextToken",
21
+ "result_key": "DashboardEntries"
22
+ },
23
+ "ListMetrics": {
24
+ "input_token": "NextToken",
25
+ "output_token": "NextToken",
26
+ "result_key": [
27
+ "Metrics",
28
+ "OwningAccounts"
29
+ ]
30
+ },
31
+ "GetMetricData": {
32
+ "input_token": "NextToken",
33
+ "limit_key": "MaxDatapoints",
34
+ "output_token": "NextToken",
35
+ "result_key": [
36
+ "MetricDataResults",
37
+ "Messages"
38
+ ]
39
+ },
40
+ "DescribeAnomalyDetectors": {
41
+ "input_token": "NextToken",
42
+ "limit_key": "MaxResults",
43
+ "output_token": "NextToken",
44
+ "result_key": "AnomalyDetectors"
45
+ }
46
+ }
47
+ }
wemm/lib/python3.10/site-packages/botocore/data/cloudwatch/2010-08-01/service-2.json ADDED
The diff for this file is too large to render. See raw diff
 
wemm/lib/python3.10/site-packages/botocore/data/dataexchange/2017-07-25/paginators-1.json ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "pagination": {
3
+ "ListDataSetRevisions": {
4
+ "input_token": "NextToken",
5
+ "output_token": "NextToken",
6
+ "limit_key": "MaxResults",
7
+ "result_key": "Revisions"
8
+ },
9
+ "ListDataSets": {
10
+ "input_token": "NextToken",
11
+ "output_token": "NextToken",
12
+ "limit_key": "MaxResults",
13
+ "result_key": "DataSets"
14
+ },
15
+ "ListJobs": {
16
+ "input_token": "NextToken",
17
+ "output_token": "NextToken",
18
+ "limit_key": "MaxResults",
19
+ "result_key": "Jobs"
20
+ },
21
+ "ListRevisionAssets": {
22
+ "input_token": "NextToken",
23
+ "output_token": "NextToken",
24
+ "limit_key": "MaxResults",
25
+ "result_key": "Assets"
26
+ },
27
+ "ListEventActions": {
28
+ "input_token": "NextToken",
29
+ "output_token": "NextToken",
30
+ "limit_key": "MaxResults",
31
+ "result_key": "EventActions"
32
+ }
33
+ }
34
+ }
wemm/lib/python3.10/site-packages/botocore/data/dataexchange/2017-07-25/service-2.json ADDED
The diff for this file is too large to render. See raw diff
 
wemm/lib/python3.10/site-packages/botocore/data/guardduty/2017-11-28/service-2.json ADDED
The diff for this file is too large to render. See raw diff
 
wemm/lib/python3.10/site-packages/botocore/data/iot1click-projects/2018-05-14/endpoint-rule-set-1.json.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ad9329e1001844795dcced93fdd36961b09643c69bdac478e81a0a6f6ac21632
3
+ size 1154
wemm/lib/python3.10/site-packages/botocore/data/voice-id/2021-09-27/service-2.json ADDED
The diff for this file is too large to render. See raw diff
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/basic.cpython-310.pyc ADDED
Binary file (8.46 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/centrality.cpython-310.pyc ADDED
Binary file (9.13 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/cluster.cpython-310.pyc ADDED
Binary file (7.49 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/covering.cpython-310.pyc ADDED
Binary file (2.26 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/edgelist.cpython-310.pyc ADDED
Binary file (10.8 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/extendability.cpython-310.pyc ADDED
Binary file (4.05 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/matching.cpython-310.pyc ADDED
Binary file (16.2 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/matrix.cpython-310.pyc ADDED
Binary file (6.03 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/projection.cpython-310.pyc ADDED
Binary file (18 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/redundancy.cpython-310.pyc ADDED
Binary file (4.03 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/spectral.cpython-310.pyc ADDED
Binary file (1.92 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/basic.py ADDED
@@ -0,0 +1,322 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ ==========================
3
+ Bipartite Graph Algorithms
4
+ ==========================
5
+ """
6
+
7
+ import networkx as nx
8
+ from networkx.algorithms.components import connected_components
9
+ from networkx.exception import AmbiguousSolution
10
+
11
+ __all__ = [
12
+ "is_bipartite",
13
+ "is_bipartite_node_set",
14
+ "color",
15
+ "sets",
16
+ "density",
17
+ "degrees",
18
+ ]
19
+
20
+
21
@nx._dispatchable
def color(G):
    """Return a two-coloring of the graph.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    color : dict
        Maps each node of ``G`` to 0 or 1 so that adjacent nodes
        always receive different values.

    Raises
    ------
    NetworkXError
        If the graph is not two-colorable (i.e. not bipartite).

    Notes
    -----
    Disconnected graphs are handled by restarting the traversal from
    every unvisited node; isolated nodes are assigned color 0.
    """
    # Directed graphs are treated as undirected: a node is adjacent to
    # both its predecessors and its successors.
    if G.is_directed():
        import itertools

        def adjacent(node):
            return itertools.chain.from_iterable([G.predecessors(node), G.successors(node)])

    else:
        adjacent = G.neighbors

    coloring = {}
    for start in G:  # restart in every component of a disconnected graph
        if start in coloring or len(G[start]) == 0:  # isolates colored later
            continue
        coloring[start] = 1
        stack = [start]
        while stack:
            node = stack.pop()
            opposite = 1 - coloring[node]  # color every neighbor must take
            for nbr in adjacent(node):
                if nbr not in coloring:
                    coloring[nbr] = opposite
                    stack.append(nbr)
                elif coloring[nbr] == coloring[node]:
                    # Two adjacent nodes with the same color: odd cycle.
                    raise nx.NetworkXError("Graph is not bipartite.")
    # Isolated nodes may take either color; use 0 by convention.
    coloring.update(dict.fromkeys(nx.isolates(G), 0))
    return coloring
85
+
86
+
87
@nx._dispatchable
def is_bipartite(G):
    """Return ``True`` if graph ``G`` is bipartite, ``False`` otherwise.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    bool
        Whether ``G`` admits a valid two-coloring.

    See Also
    --------
    color, is_bipartite_node_set
    """
    # A graph is bipartite exactly when a two-coloring exists; reuse
    # color() and translate its failure exception into False.
    try:
        color(G)
    except nx.NetworkXError:
        return False
    return True
111
+
112
+
113
@nx._dispatchable
def is_bipartite_node_set(G, nodes):
    """Return ``True`` if ``nodes`` and its complement bipartition ``G``.

    Parameters
    ----------
    G : NetworkX graph

    nodes : list or container
        Candidate for one side of the bipartition.

    Returns
    -------
    bool

    Raises
    ------
    AmbiguousSolution
        If ``nodes`` contains duplicate entries, since duplicates make
        several bipartite algorithms produce incorrect results.

    Notes
    -----
    For connected graphs the bipartite sets are unique; this function
    checks each connected component separately so disconnected graphs
    are handled correctly.
    """
    candidate = set(nodes)

    if len(candidate) < len(nodes):
        # this should maybe just return False?
        raise AmbiguousSolution(
            "The input node set contains duplicates.\n"
            "This may lead to incorrect results when using it in bipartite algorithms.\n"
            "Consider using set(nodes) as the input"
        )

    # In every component, the candidate set must fully contain exactly
    # one of the two bipartite sides and be disjoint from the other.
    for component in connected_components(G):
        X, Y = sets(G.subgraph(component).copy())
        x_side = X.issubset(candidate) and Y.isdisjoint(candidate)
        y_side = Y.issubset(candidate) and X.isdisjoint(candidate)
        if not (x_side or y_side):
            return False
    return True
156
+
157
+
158
@nx._dispatchable
def sets(G, top_nodes=None):
    """Return the two bipartite node sets of graph ``G``.

    Parameters
    ----------
    G : NetworkX graph

    top_nodes : container, optional
        All nodes of one bipartite set. When omitted the partition is
        computed from a two-coloring, which requires ``G`` to be
        connected (otherwise more than one valid answer exists).

    Returns
    -------
    X : set
        Nodes from one side of the bipartite graph.
    Y : set
        Nodes from the other side.

    Raises
    ------
    AmbiguousSolution
        If ``G`` is disconnected and ``top_nodes`` was not given.
    NetworkXError
        If ``G`` is not bipartite.

    See Also
    --------
    color
    """
    # When the caller supplies one side explicitly, the other side is
    # simply the complement — no connectivity requirement applies.
    if top_nodes is not None:
        X = set(top_nodes)
        Y = set(G) - X
        return (X, Y)

    # Weak connectivity is the right notion for directed graphs since
    # edge direction is irrelevant to bipartiteness.
    if G.is_directed():
        is_connected = nx.is_weakly_connected
    else:
        is_connected = nx.is_connected
    if not is_connected(G):
        msg = "Disconnected graph: Ambiguous solution for bipartite sets."
        raise nx.AmbiguousSolution(msg)
    c = color(G)
    X = {node for node, top in c.items() if top}
    Y = {node for node, top in c.items() if not top}
    return (X, Y)
223
+
224
+
225
@nx._dispatchable(graphs="B")
def density(B, nodes):
    """Return the density of bipartite graph ``B``.

    Parameters
    ----------
    B : NetworkX graph

    nodes : list or container
        All nodes of one bipartite set of ``B``.

    Returns
    -------
    float
        Ratio of existing edges to the maximum possible number of
        edges between the two sets.

    Notes
    -----
    ``nodes`` must contain every node of one bipartite set to avoid
    ambiguity for disconnected graphs.

    See Also
    --------
    color
    """
    edge_count = nx.number_of_edges(B)
    if edge_count == 0:  # also covers the 0- and 1-node graphs
        return 0.0
    bottom = len(nodes)
    top = len(B) - bottom
    denominator = bottom * top
    # A directed bipartite graph can host an edge in each direction
    # between any cross-set pair, doubling the maximum edge count.
    if B.is_directed():
        denominator *= 2
    return edge_count / denominator
276
+
277
+
278
@nx._dispatchable(graphs="B", edge_attrs="weight")
def degrees(B, nodes, weight=None):
    """Return the degrees of the two node sets of bipartite graph ``B``.

    Parameters
    ----------
    B : NetworkX graph

    nodes : list or container
        All nodes of one bipartite set of ``B``.

    weight : string or None, optional (default=None)
        Edge attribute holding the numerical weight; each edge counts
        as 1 when ``None``. A node's degree is the sum of the weights
        of its incident edges.

    Returns
    -------
    (degX, degY) : tuple of DegreeViews
        Degrees of the complement set and of ``nodes``, in that order.

    Notes
    -----
    ``nodes`` must contain every node of one bipartite set to avoid
    ambiguity for disconnected graphs.

    See Also
    --------
    color, density
    """
    one_side = set(nodes)
    other_side = set(B) - one_side
    # Return order matches the historical API: complement first.
    return (B.degree(other_side, weight), B.degree(one_side, weight))
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/cluster.py ADDED
@@ -0,0 +1,278 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for computing clustering of pairs"""
2
+
3
+ import itertools
4
+
5
+ import networkx as nx
6
+
7
+ __all__ = [
8
+ "clustering",
9
+ "average_clustering",
10
+ "latapy_clustering",
11
+ "robins_alexander_clustering",
12
+ ]
13
+
14
+
15
def cc_dot(nu, nv):
    # Jaccard-style overlap: shared neighbors over the union of both
    # neighborhoods.
    shared = nu & nv
    return len(shared) / len(nu | nv)


def cc_max(nu, nv):
    # Overlap normalized by the larger of the two neighborhoods.
    return len(nu & nv) / max(len(nu), len(nv))


def cc_min(nu, nv):
    # Overlap normalized by the smaller of the two neighborhoods.
    return len(nu & nv) / min(len(nu), len(nv))


# Dispatch table from mode name to the pairwise clustering function
# used by latapy_clustering.
modes = {"dot": cc_dot, "min": cc_min, "max": cc_max}
28
+
29
+
30
@nx._dispatchable
def latapy_clustering(G, nodes=None, mode="dot"):
    r"""Compute the Latapy et al. bipartite clustering coefficient.

    For each node the coefficient is the average pairwise clustering
    with its second-order neighbors [1]_:

    .. math::

       c_u = \frac{\sum_{v \in N(N(u))} c_{uv} }{|N(N(u))|}

    where `N(N(u))` are the second-order neighbors of `u` (excluding
    `u` itself) and `c_{uv}` is chosen by ``mode``:

    ``dot``   : :math:`|N(u)\cap N(v)| / |N(u)\cup N(v)|`

    ``min``   : :math:`|N(u)\cap N(v)| / \min(|N(u)|,|N(v)|)`

    ``max``   : :math:`|N(u)\cap N(v)| / \max(|N(u)|,|N(v)|)`

    Parameters
    ----------
    G : graph
        A bipartite graph.

    nodes : list or iterable (optional)
        Nodes for which to compute the coefficient; defaults to all
        nodes of ``G``.

    mode : string
        One of ``"dot"``, ``"max"`` or ``"min"``.

    Returns
    -------
    dict
        Clustering coefficient keyed by node.

    Raises
    ------
    NetworkXError
        If ``G`` is not bipartite or ``mode`` is unknown.

    See Also
    --------
    robins_alexander_clustering
    average_clustering
    networkx.algorithms.cluster.square_clustering

    References
    ----------
    .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
       Basic notions for the analysis of large two-mode networks.
       Social Networks 30(1), 31--48.
    """
    if not nx.algorithms.bipartite.is_bipartite(G):
        raise nx.NetworkXError("Graph is not bipartite")

    try:
        pairwise = modes[mode]
    except KeyError as err:
        raise nx.NetworkXError(
            "Mode for bipartite clustering must be: dot, min or max"
        ) from err

    if nodes is None:
        nodes = G
    coefficients = {}
    for v in nodes:
        # Second-order neighborhood of v, excluding v itself.
        second_nbrs = {u for nbr in G[v] for u in G[nbr]} - {v}
        total = 0.0
        for u in second_nbrs:
            total += pairwise(set(G[u]), set(G[v]))
        # Every second-order neighbor shares at least one neighbor
        # with v, so total > 0 exactly when second_nbrs is non-empty.
        if total > 0.0:
            total /= len(second_nbrs)
        coefficients[v] = total
    return coefficients
130
+
131
+
132
# Public alias: the Latapy et al. coefficient is the default bipartite
# clustering measure exposed as ``bipartite.clustering``.
clustering = latapy_clustering
133
+
134
+
135
@nx._dispatchable(name="bipartite_average_clustering")
def average_clustering(G, nodes=None, mode="dot"):
    r"""Compute the average bipartite clustering coefficient.

    The whole-graph average is

    .. math::

       C = \frac{1}{n}\sum_{v \in G} c_v,

    with `n` the number of nodes; restricting ``nodes`` to one
    bipartite set `X` instead gives [1]_

    .. math::

       C_X = \frac{1}{|X|}\sum_{v \in X} c_v.

    Parameters
    ----------
    G : graph
        A bipartite graph.

    nodes : list or iterable, optional
        Nodes to average over — either the entire graph (the default)
        or one complete bipartite set.

    mode : string
        Pairwise clustering method: ``"dot"``, ``"max"`` or ``"min"``.

    Returns
    -------
    float
        The average bipartite clustering over ``nodes``.

    See Also
    --------
    clustering

    Notes
    -----
    ``nodes`` must hold all nodes of one bipartite set ("top" or
    "bottom") for the per-set averages to be meaningful.

    References
    ----------
    .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
       Basic notions for the analysis of large two-mode networks.
       Social Networks 30(1), 31--48.
    """
    if nodes is None:
        nodes = G
    per_node = latapy_clustering(G, nodes=nodes, mode=mode)
    return sum(per_node[v] for v in nodes) / len(nodes)
210
+
211
+
212
@nx._dispatchable
def robins_alexander_clustering(G):
    r"""Compute the Robins–Alexander bipartite clustering of ``G``.

    Defined [1]_ as four times the number of four-cycles `C_4`
    divided by the number of three-paths `L_3`:

    .. math::

       CC_4 = \frac{4 * C_4}{L_3}

    Parameters
    ----------
    G : graph
        A bipartite graph.

    Returns
    -------
    float
        The Robins and Alexander bipartite clustering of ``G``
        (``0`` when the graph has no three-path).

    See Also
    --------
    latapy_clustering
    networkx.algorithms.cluster.square_clustering

    References
    ----------
    .. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking
       directors: Network structure and distance in bipartite graphs.
       Computational & Mathematical Organization Theory 10(1), 69–94.
    """
    # Fewer than 4 nodes or 3 edges cannot contain a three-path.
    if G.order() < 4 or G.size() < 3:
        return 0
    three_paths = _threepaths(G)
    if three_paths == 0:  # avoid division by zero
        return 0
    return (4.0 * _four_cycles(G)) / three_paths
260
+
261
+
262
+ def _four_cycles(G):
263
+ cycles = 0
264
+ for v in G:
265
+ for u, w in itertools.combinations(G[v], 2):
266
+ cycles += len((set(G[u]) & set(G[w])) - {v})
267
+ return cycles / 4
268
+
269
+
270
+ def _threepaths(G):
271
+ paths = 0
272
+ for v in G:
273
+ for u in G[v]:
274
+ for w in set(G[u]) - {v}:
275
+ paths += len(set(G[w]) - {v, u})
276
+ # Divide by two because we count each three path twice
277
+ # one for each possible starting point
278
+ return paths / 2
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/extendability.py ADDED
@@ -0,0 +1,105 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Provides a function for computing the extendability of a graph which is
2
+ undirected, simple, connected and bipartite and contains at least one perfect matching."""
3
+
4
+ import networkx as nx
5
+ from networkx.utils import not_implemented_for
6
+
7
+ __all__ = ["maximal_extendability"]
8
+
9
+
10
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def maximal_extendability(G):
    """Computes the extendability of a graph.

    The extendability of a graph is defined as the maximum $k$ for which `G`
    is $k$-extendable. Graph `G` is $k$-extendable if and only if `G` has a
    perfect matching and every set of $k$ independent edges can be extended
    to a perfect matching in `G`.

    Parameters
    ----------
    G : NetworkX Graph
        A fully-connected bipartite graph without self-loops

    Returns
    -------
    extendability : int

    Raises
    ------
    NetworkXError
        If the graph `G` is disconnected.
        If the graph `G` is not bipartite.
        If the graph `G` does not contain a perfect matching.
        If the residual graph of `G` is not strongly connected.

    Notes
    -----
    Definition:
    Let `G` be a simple, connected, undirected and bipartite graph with a perfect
    matching M and bipartition (U,V). The residual graph of `G`, denoted by $G_M$,
    is the graph obtained from G by directing the edges of M from V to U and the
    edges that do not belong to M from U to V.

    Lemma [1]_ :
    Let M be a perfect matching of `G`. `G` is $k$-extendable if and only if its residual
    graph $G_M$ is strongly connected and there are $k$ vertex-disjoint directed
    paths between every vertex of U and every vertex of V.

    Assuming that input graph `G` is undirected, simple, connected, bipartite and contains
    a perfect matching M, this function constructs the residual graph $G_M$ of G and
    returns the minimum value among the maximum vertex-disjoint directed paths between
    every vertex of U and every vertex of V in $G_M$. By combining the definitions
    and the lemma, this value represents the extendability of the graph `G`.

    Time complexity O($n^3$ $m^2$)) where $n$ is the number of vertices
    and $m$ is the number of edges.

    References
    ----------
    .. [1] "A polynomial algorithm for the extendability problem in bipartite graphs",
       J. Lakhal, L. Litzler, Information Processing Letters, 1998.
    .. [2] "On n-extendible graphs", M. D. Plummer, Discrete Mathematics, 31:201–210, 1980
       https://doi.org/10.1016/0012-365X(80)90037-0

    """
    # The lemma requires a connected bipartite graph; validate both up front.
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph G is not connected")

    if not nx.bipartite.is_bipartite(G):
        raise nx.NetworkXError("Graph G is not bipartite")

    U, V = nx.bipartite.sets(G)

    maximum_matching = nx.bipartite.hopcroft_karp_matching(G)

    # The extendability is only defined for graphs with a perfect matching.
    if not nx.is_perfect_matching(G, maximum_matching):
        raise nx.NetworkXError("Graph G does not contain a perfect matching")

    # list of edges in perfect matching, directed from V to U
    pm = [(node, maximum_matching[node]) for node in V & maximum_matching.keys()]

    # Direct all the edges of G, from V to U if in matching, else from U to V
    directed_edges = [
        (x, y) if (x in V and (x, y) in pm) or (x in U and (y, x) not in pm) else (y, x)
        for x, y in G.edges
    ]

    # Construct the residual graph of G
    residual_G = nx.DiGraph()
    residual_G.add_nodes_from(G)
    residual_G.add_edges_from(directed_edges)

    # By the lemma, k-extendability presupposes a strongly connected residual.
    if not nx.is_strongly_connected(residual_G):
        raise nx.NetworkXError("The residual graph of G is not strongly connected")

    # For node-pairs between V & U, keep min of max number of node-disjoint paths
    # Variable $k$ stands for the extendability of graph G
    k = float("inf")
    for u in U:
        for v in V:
            num_paths = sum(1 for _ in nx.node_disjoint_paths(residual_G, u, v))
            k = k if k < num_paths else num_paths
    return k
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/generators.py ADDED
@@ -0,0 +1,604 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Generators and functions for bipartite graphs.
3
+ """
4
+
5
+ import math
6
+ import numbers
7
+ from functools import reduce
8
+
9
+ import networkx as nx
10
+ from networkx.utils import nodes_or_number, py_random_state
11
+
12
+ __all__ = [
13
+ "configuration_model",
14
+ "havel_hakimi_graph",
15
+ "reverse_havel_hakimi_graph",
16
+ "alternating_havel_hakimi_graph",
17
+ "preferential_attachment_graph",
18
+ "random_graph",
19
+ "gnmk_random_graph",
20
+ "complete_bipartite_graph",
21
+ ]
22
+
23
+
24
@nx._dispatchable(graphs=None, returns_graph=True)
@nodes_or_number([0, 1])
def complete_bipartite_graph(n1, n2, create_using=None):
    """Return the complete bipartite graph `K_{n_1,n_2}`.

    The graph has two partitions: nodes 0 to (n1 - 1) in the first and
    nodes n1 to (n1 + n2 - 1) in the second.  Every node of the first
    partition is joined to every node of the second.

    Parameters
    ----------
    n1, n2 : integer or iterable container of nodes
        If integers, nodes are from `range(n1)` and `range(n1, n1 + n2)`.
        If a container, the elements are the nodes.
    create_using : NetworkX graph instance, (default: nx.Graph)
        Return graph of this type.

    Notes
    -----
    Nodes are the integers 0 to `n1 + n2 - 1` unless either n1 or n2 are
    containers of nodes.  If only one of n1 or n2 is an integer, that
    integer is replaced by `range` of that integer.

    Each node gets a 'bipartite' attribute (0 or 1) recording which
    partition it belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.complete_bipartite_graph
    """
    G = nx.empty_graph(0, create_using)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    # @nodes_or_number turns each argument into a (value, node_list) pair.
    n1, left_nodes = n1
    n2, right_nodes = n2
    if isinstance(n1, numbers.Integral) and isinstance(n2, numbers.Integral):
        # Shift the second integer range so the two partitions are disjoint.
        right_nodes = [n1 + i for i in right_nodes]
    G.add_nodes_from(left_nodes, bipartite=0)
    G.add_nodes_from(right_nodes, bipartite=1)
    if len(G) != len(left_nodes) + len(right_nodes):
        raise nx.NetworkXError("Inputs n1 and n2 must contain distinct nodes")
    G.add_edges_from((u, v) for u in left_nodes for v in right_nodes)
    G.graph["name"] = f"complete_bipartite_graph({len(left_nodes)}, {len(right_nodes)})"
    return G
68
+
69
+
70
@py_random_state(3)
@nx._dispatchable(name="bipartite_configuration_model", graphs=None, returns_graph=True)
def configuration_model(aseq, bseq, create_using=None, seed=None):
    """Return a random bipartite graph from two given degree sequences.

    Set A has nodes 0 to (len(aseq) - 1); set B has the following
    len(bseq) nodes.  Free stubs, one per unit of degree, are paired
    uniformly at random between the two sets.

    Parameters
    ----------
    aseq : list
        Degree sequence for node set A.
    bseq : list
        Degree sequence for node set B.
    create_using : NetworkX graph instance, optional
        Return graph of this type.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Notes
    -----
    The two sequences must have equal sums: sum(aseq) == sum(bseq).
    If no graph type is specified a MultiGraph with parallel edges is
    used.  With create_using=Graph() parallel edges collapse, so the
    resulting degree sequences might not be exact.

    Each node gets a 'bipartite' attribute (0 or 1) recording which
    partition it belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.configuration_model
    """
    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    num_a, num_b = len(aseq), len(bseq)
    suma, sumb = sum(aseq), sum(bseq)

    if suma != sumb:
        raise nx.NetworkXError(
            f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
        )

    G = _add_nodes_with_bipartite_label(G, num_a, num_b)

    if num_a == 0 or max(aseq) == 0:
        return G  # nothing to wire up

    # Each node appears in its stub list once per unit of degree.
    astubs = [v for v in range(num_a) for _ in range(aseq[v])]
    bstubs = [v for v in range(num_a, num_a + num_b) for _ in range(bseq[v - num_a])]

    # Random pairing: shuffle both stub lists, then join them position-wise.
    seed.shuffle(astubs)
    seed.shuffle(bstubs)
    G.add_edges_from(zip(astubs, bstubs))

    G.name = "bipartite_configuration_model"
    return G
140
+
141
+
142
@nx._dispatchable(name="bipartite_havel_hakimi_graph", graphs=None, returns_graph=True)
def havel_hakimi_graph(aseq, bseq, create_using=None):
    """Return a bipartite graph from two degree sequences, built with a
    Havel-Hakimi style construction.

    Set A has nodes 0 to (len(aseq) - 1); set B has the following
    len(bseq) nodes.  The highest-degree node of set A is repeatedly
    connected to the highest-degree nodes of set B until all stubs are
    used.

    Parameters
    ----------
    aseq : list
        Degree sequence for node set A.
    bseq : list
        Degree sequence for node set B.
    create_using : NetworkX graph instance, optional
        Return graph of this type.

    Notes
    -----
    The two sequences must have equal sums: sum(aseq) == sum(bseq).
    If no graph type is specified a MultiGraph with parallel edges is
    used.  With create_using=Graph() parallel edges collapse, so the
    resulting degree sequences might not be exact.

    Each node gets a 'bipartite' attribute (0 or 1) recording which
    partition it belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.havel_hakimi_graph
    """
    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    num_a, num_b = len(aseq), len(bseq)
    suma, sumb = sum(aseq), sum(bseq)

    if suma != sumb:
        raise nx.NetworkXError(
            f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
        )

    G = _add_nodes_with_bipartite_label(G, num_a, num_b)

    if num_a == 0 or max(aseq) == 0:
        return G  # nothing to wire up

    # Mutable [remaining_degree, node] records; sorting orders by degree.
    astubs = [[aseq[v], v] for v in range(num_a)]
    bstubs = [[bseq[v - num_a], v] for v in range(num_a, num_a + num_b)]
    astubs.sort()
    while astubs:
        degree, u = astubs.pop()  # largest remaining degree in set A
        if degree == 0:
            break  # all remaining degrees are zero
        # Attach u to the `degree` highest-degree nodes of set B.
        bstubs.sort()
        for target in bstubs[-degree:]:
            v = target[1]
            G.add_edge(u, v)
            target[0] -= 1  # in-place update, visible in bstubs too
            if target[0] == 0:
                bstubs.remove(target)

    G.name = "bipartite_havel_hakimi_graph"
    return G
215
+
216
+
217
@nx._dispatchable(graphs=None, returns_graph=True)
def reverse_havel_hakimi_graph(aseq, bseq, create_using=None):
    """Return a bipartite graph from two degree sequences, built with a
    Havel-Hakimi style construction.

    Set A has nodes 0 to (len(aseq) - 1); set B has the following
    len(bseq) nodes.  The highest-degree node of set A is repeatedly
    connected to the *lowest*-degree nodes of set B until all stubs are
    used.

    Parameters
    ----------
    aseq : list
        Degree sequence for node set A.
    bseq : list
        Degree sequence for node set B.
    create_using : NetworkX graph instance, optional
        Return graph of this type.

    Notes
    -----
    The two sequences must have equal sums: sum(aseq) == sum(bseq).
    If no graph type is specified a MultiGraph with parallel edges is
    used.  With create_using=Graph() parallel edges collapse, so the
    resulting degree sequences might not be exact.

    Each node gets a 'bipartite' attribute (0 or 1) recording which
    partition it belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.reverse_havel_hakimi_graph
    """
    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    num_a, num_b = len(aseq), len(bseq)
    suma, sumb = sum(aseq), sum(bseq)

    if suma != sumb:
        raise nx.NetworkXError(
            f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
        )

    G = _add_nodes_with_bipartite_label(G, num_a, num_b)

    if num_a == 0 or max(aseq) == 0:
        return G  # nothing to wire up

    # Mutable [remaining_degree, node] records; sorting orders by degree.
    astubs = [[aseq[v], v] for v in range(num_a)]
    bstubs = [[bseq[v - num_a], v] for v in range(num_a, num_a + num_b)]
    astubs.sort()
    bstubs.sort()
    while astubs:
        degree, u = astubs.pop()  # largest remaining degree in set A
        if degree == 0:
            break  # all remaining degrees are zero
        # Attach u to the `degree` smallest-degree nodes of set B.
        for target in bstubs[:degree]:
            v = target[1]
            G.add_edge(u, v)
            target[0] -= 1  # in-place update, visible in bstubs too
            if target[0] == 0:
                bstubs.remove(target)

    G.name = "bipartite_reverse_havel_hakimi_graph"
    return G
289
+
290
+
291
@nx._dispatchable(graphs=None, returns_graph=True)
def alternating_havel_hakimi_graph(aseq, bseq, create_using=None):
    """Return a bipartite graph from two degree sequences, built with an
    alternating Havel-Hakimi style construction.

    Set A has nodes 0 to (len(aseq) - 1); set B has the following
    len(bseq) nodes.  The highest-degree node of set A is repeatedly
    connected to alternately the highest- and the lowest-degree nodes of
    set B until all stubs are used.

    Parameters
    ----------
    aseq : list
        Degree sequence for node set A.
    bseq : list
        Degree sequence for node set B.
    create_using : NetworkX graph instance, optional
        Return graph of this type.

    Notes
    -----
    The two sequences must have equal sums: sum(aseq) == sum(bseq).
    If no graph type is specified a MultiGraph with parallel edges is
    used.  With create_using=Graph() parallel edges collapse, so the
    resulting degree sequences might not be exact.

    Each node gets a 'bipartite' attribute (0 or 1) recording which
    partition it belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.alternating_havel_hakimi_graph
    """
    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    num_a, num_b = len(aseq), len(bseq)
    suma, sumb = sum(aseq), sum(bseq)

    if suma != sumb:
        raise nx.NetworkXError(
            f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
        )

    G = _add_nodes_with_bipartite_label(G, num_a, num_b)

    if num_a == 0 or max(aseq) == 0:
        return G  # nothing to wire up

    # Mutable [remaining_degree, node] records; sorting orders by degree.
    astubs = [[aseq[v], v] for v in range(num_a)]
    bstubs = [[bseq[v - num_a], v] for v in range(num_a, num_a + num_b)]
    while astubs:
        astubs.sort()
        degree, u = astubs.pop()  # largest remaining degree in set A
        if degree == 0:
            break  # all remaining degrees are zero
        bstubs.sort()
        small = bstubs[: degree // 2]  # lowest-degree targets
        large = bstubs[-(degree - degree // 2):]  # highest-degree targets
        # Interleave targets: high, low, high, low, ...
        targets = [t for pair in zip(large, small) for t in pair]
        if len(targets) < len(small) + len(large):
            # odd degree: zip dropped the unpaired (last) element of `large`
            targets.append(large.pop())
        for target in targets:
            v = target[1]
            G.add_edge(u, v)
            target[0] -= 1  # in-place update, visible in bstubs too
            if target[0] == 0:
                bstubs.remove(target)

    G.name = "bipartite_alternating_havel_hakimi_graph"
    return G
367
+
368
+
369
@py_random_state(3)
@nx._dispatchable(graphs=None, returns_graph=True)
def preferential_attachment_graph(aseq, p, create_using=None, seed=None):
    """Create a bipartite graph with a preferential attachment model from
    a given single degree sequence.

    Set A has nodes 0 to (len(aseq) - 1); set B starts at node len(aseq)
    and grows randomly: each stub of an A-node either spawns a brand-new
    B-node (with probability `p`) or attaches to an existing B-node
    chosen proportionally to its current degree.  The number of nodes in
    set B is therefore random.

    Parameters
    ----------
    aseq : list
        Degree sequence for node set A.
    p : float
        Probability that a new bottom node is added; must be in [0, 1].
    create_using : NetworkX graph instance, optional
        Return graph of this type.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Raises
    ------
    NetworkXError
        If the graph type is directed or `p` lies outside [0, 1].

    References
    ----------
    .. [1] Guillaume, J.L. and Latapy, M.,
       Bipartite graphs as models of complex networks.
       Physica A: Statistical Mechanics and its Applications,
       2006, 371(2), pp.795-813.
    .. [2] Jean-Loup Guillaume and Matthieu Latapy,
       Bipartite structure of all complex networks,
       Inf. Process. Lett. 90, 2004, pg. 215-221
       https://doi.org/10.1016/j.ipl.2004.03.007

    Notes
    -----
    Each node gets a 'bipartite' attribute (0 or 1) recording which
    bipartite set the node belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.preferential_attachment_graph
    """
    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    if p > 1:
        raise nx.NetworkXError(f"probability {p} > 1")
    # Previously a negative probability was silently accepted (behaving
    # like p == 0); reject it explicitly.
    if p < 0:
        raise nx.NetworkXError(f"probability {p} < 0")

    naseq = len(aseq)
    G = _add_nodes_with_bipartite_label(G, naseq, 0)
    # One inner list per A-node, holding one stub per unit of degree.
    vv = [[v] * aseq[v] for v in range(naseq)]
    while vv:
        while vv[0]:
            source = vv[0].pop(0)  # consume the next stub of this A-node
            if seed.random() < p or len(G) == naseq:
                # Spawn a fresh bottom node (forced while none exist yet).
                target = len(G)
                G.add_node(target, bipartite=1)
                G.add_edge(source, target)
            else:
                # Preferential attachment: pick an existing bottom node
                # with probability proportional to its current degree.
                bbstubs = [
                    b for b in range(naseq, len(G)) for _ in range(G.degree(b))
                ]
                target = seed.choice(bbstubs)
                # `target` already exists with bipartite=1; just connect.
                G.add_edge(source, target)
        vv.pop(0)  # this A-node's stubs are exhausted
    G.name = "bipartite_preferential_attachment_model"
    return G
439
+
440
+
441
@py_random_state(3)
@nx._dispatchable(graphs=None, returns_graph=True)
def random_graph(n, m, p, seed=None, directed=False):
    """Return a bipartite random graph.

    This is a bipartite version of the binomial (Erdős-Rényi) graph.
    Set A has nodes 0 to (n - 1); set B has nodes n to (n + m - 1).

    Parameters
    ----------
    n : int
        The number of nodes in the first bipartite set.
    m : int
        The number of nodes in the second bipartite set.
    p : float
        Probability for edge creation.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    directed : bool, optional (default=False)
        If True return a directed graph

    Notes
    -----
    Each of the n*m (undirected) or 2*n*m (directed) candidate edges is
    included independently with probability p.  The geometric skipping
    method of Batagelj and Brandes makes this $O(n+m)$ where $m$ is the
    expected number of edges.

    Each node gets a 'bipartite' attribute (0 or 1) recording which
    bipartite set the node belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.random_graph

    See Also
    --------
    gnp_random_graph, configuration_model

    References
    ----------
    .. [1] Vladimir Batagelj and Ulrik Brandes,
       "Efficient generation of large random networks",
       Phys. Rev. E, 71, 036113, 2005.
    """
    G = nx.Graph()
    G = _add_nodes_with_bipartite_label(G, n, m)
    if directed:
        G = nx.DiGraph(G)
    G.name = f"fast_gnp_random_graph({n},{m},{p})"

    if p <= 0:
        return G
    if p >= 1:
        # NOTE(review): this path ignores `directed` and returns an
        # undirected complete bipartite graph — confirm intended.
        return nx.complete_bipartite_graph(n, m)

    lp = math.log(1.0 - p)

    def _scatter_edges(reverse):
        # Batagelj–Brandes geometric skipping: jump over runs of absent
        # candidate edges instead of testing each pair individually.
        u, w = 0, -1
        while u < n:
            lr = math.log(1.0 - seed.random())
            w = w + 1 + int(lr / lp)
            while w >= m and u < n:
                w -= m
                u += 1
            if u < n:
                if reverse:
                    G.add_edge(n + w, u)
                else:
                    G.add_edge(u, n + w)

    _scatter_edges(reverse=False)
    if directed:
        # Same sweep again for edges oriented from the B set to the A set.
        _scatter_edges(reverse=True)

    return G
526
+
527
+
528
@py_random_state(3)
@nx._dispatchable(graphs=None, returns_graph=True)
def gnmk_random_graph(n, m, k, seed=None, directed=False):
    """Return a random bipartite graph G_{n,m,k}.

    Chosen randomly from the set of all bipartite graphs with n top
    nodes (0 to n - 1), m bottom nodes (n to n + m - 1) and k edges.

    Parameters
    ----------
    n : int
        The number of nodes in the first bipartite set.
    m : int
        The number of nodes in the second bipartite set.
    k : int
        The number of edges
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    directed : bool, optional (default=False)
        If True return a directed graph

    Examples
    --------
    from nx.algorithms import bipartite
    G = bipartite.gnmk_random_graph(10,20,50)

    See Also
    --------
    gnm_random_graph

    Notes
    -----
    If k > m * n then a complete bipartite graph is returned.

    This graph is a bipartite version of the `G_{nm}` random graph model.

    Each node gets a 'bipartite' attribute (0 or 1) recording which
    bipartite set the node belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.gnmk_random_graph
    """
    G = nx.Graph()
    G = _add_nodes_with_bipartite_label(G, n, m)
    if directed:
        G = nx.DiGraph(G)
    G.name = f"bipartite_gnm_random_graph({n},{m},{k})"
    # NOTE(review): when either side has a single node the graph is
    # returned with no edges at all, regardless of k — confirm intended.
    if n == 1 or m == 1:
        return G
    max_edges = n * m  # upper bound on edges in a simple bipartite graph
    if k >= max_edges:  # Maybe we should raise an exception here
        return nx.complete_bipartite_graph(n, m, create_using=G)

    top = [node for node, data in G.nodes(data=True) if data["bipartite"] == 0]
    bottom = list(set(G) - set(top))
    # Rejection sampling: draw random pairs until k distinct edges exist.
    edge_count = 0
    while edge_count < k:
        u = seed.choice(top)
        v = seed.choice(bottom)
        if v not in G[u]:
            G.add_edge(u, v)
            edge_count += 1
    return G
597
+
598
+
599
def _add_nodes_with_bipartite_label(G, lena, lenb):
    """Add nodes 0 .. lena+lenb-1 to G, labeling the first `lena` nodes
    with bipartite=0 and the remaining `lenb` nodes with bipartite=1."""
    G.add_nodes_from(range(lena + lenb))
    labels = {v: 0 if v < lena else 1 for v in range(lena + lenb)}
    nx.set_node_attributes(G, labels, "bipartite")
    return G
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__init__.py ADDED
File without changes
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_centrality.cpython-310.pyc ADDED
Binary file (5.32 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_covering.cpython-310.pyc ADDED
Binary file (1.79 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_edgelist.cpython-310.pyc ADDED
Binary file (8.05 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_extendability.cpython-310.pyc ADDED
Binary file (5.22 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_generators.cpython-310.pyc ADDED
Binary file (9.95 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matching.cpython-310.pyc ADDED
Binary file (12.4 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matrix.cpython-310.pyc ADDED
Binary file (4.69 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_redundancy.cpython-310.pyc ADDED
Binary file (1.41 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_spectral_bipartivity.cpython-310.pyc ADDED
Binary file (2.18 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_centrality.py ADDED
@@ -0,0 +1,192 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms import bipartite
5
+
6
+
7
class TestBipartiteCentrality:
    """Tests for bipartite degree, betweenness and closeness centrality,
    on small canonical graphs and on the Davis southern women dataset."""

    @classmethod
    def setup_class(cls):
        # Small fixtures plus the classic Davis affiliation network.
        cls.P4 = nx.path_graph(4)
        cls.K3 = nx.complete_bipartite_graph(3, 3)
        cls.C4 = nx.cycle_graph(4)
        cls.davis = nx.davis_southern_women_graph()
        cls.top_nodes = [
            n for n, d in cls.davis.nodes(data=True) if d["bipartite"] == 0
        ]

    def test_degree_centrality(self):
        cases = [
            (self.P4, [1, 3], {0: 0.5, 1: 1.0, 2: 1.0, 3: 0.5}),
            (
                self.K3,
                [0, 1, 2],
                {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0},
            ),
            (self.C4, [0, 2], {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}),
        ]
        for graph, nodes, expected in cases:
            assert bipartite.degree_centrality(graph, nodes) == expected

    def test_betweenness_centrality(self):
        cases = [
            (self.P4, [1, 3], {0: 0.0, 1: 1.0, 2: 1.0, 3: 0.0}),
            (
                self.K3,
                [0, 1, 2],
                {0: 0.125, 1: 0.125, 2: 0.125, 3: 0.125, 4: 0.125, 5: 0.125},
            ),
            (self.C4, [0, 2], {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25}),
        ]
        for graph, nodes, expected in cases:
            assert bipartite.betweenness_centrality(graph, nodes) == expected

    def test_closeness_centrality(self):
        cases = [
            (self.P4, [1, 3], {0: 2.0 / 3, 1: 1.0, 2: 1.0, 3: 2.0 / 3}),
            (
                self.K3,
                [0, 1, 2],
                {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0},
            ),
            (self.C4, [0, 2], {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}),
        ]
        for graph, nodes, expected in cases:
            assert bipartite.closeness_centrality(graph, nodes) == expected
        # Two isolated nodes: closeness is zero whichever side is "top".
        G = nx.Graph()
        G.add_node(0)
        G.add_node(1)
        assert bipartite.closeness_centrality(G, [0]) == {0: 0.0, 1: 0.0}
        assert bipartite.closeness_centrality(G, [1]) == {0: 0.0, 1: 0.0}

    def test_bipartite_closeness_centrality_unconnected(self):
        G = nx.complete_bipartite_graph(3, 3)
        G.add_edge(6, 7)  # disconnected extra component
        c = bipartite.closeness_centrality(G, [0, 2, 4, 6], normalized=False)
        answer = {
            0: 10.0 / 7,
            2: 10.0 / 7,
            4: 10.0 / 7,
            6: 10.0,
            1: 10.0 / 7,
            3: 10.0 / 7,
            5: 10.0 / 7,
            7: 10.0,
        }
        assert c == answer

    def test_davis_degree_centrality(self):
        deg = bipartite.degree_centrality(self.davis, self.top_nodes)
        answer = {
            "E8": 0.78,
            "E9": 0.67,
            "E7": 0.56,
            "Nora Fayette": 0.57,
            "Evelyn Jefferson": 0.57,
            "Theresa Anderson": 0.57,
            "E6": 0.44,
            "Sylvia Avondale": 0.50,
            "Laura Mandeville": 0.50,
            "Brenda Rogers": 0.50,
            "Katherina Rogers": 0.43,
            "E5": 0.44,
            "Helen Lloyd": 0.36,
            "E3": 0.33,
            "Ruth DeSand": 0.29,
            "Verne Sanderson": 0.29,
            "E12": 0.33,
            "Myra Liddel": 0.29,
            "E11": 0.22,
            "Eleanor Nye": 0.29,
            "Frances Anderson": 0.29,
            "Pearl Oglethorpe": 0.21,
            "E4": 0.22,
            "Charlotte McDowd": 0.29,
            "E10": 0.28,
            "Olivia Carleton": 0.14,
            "Flora Price": 0.14,
            "E2": 0.17,
            "E1": 0.17,
            "Dorothy Murchison": 0.14,
            "E13": 0.17,
            "E14": 0.17,
        }
        for node, value in answer.items():
            assert value == pytest.approx(deg[node], abs=1e-2)

    def test_davis_betweenness_centrality(self):
        bet = bipartite.betweenness_centrality(self.davis, self.top_nodes)
        answer = {
            "E8": 0.24,
            "E9": 0.23,
            "E7": 0.13,
            "Nora Fayette": 0.11,
            "Evelyn Jefferson": 0.10,
            "Theresa Anderson": 0.09,
            "E6": 0.07,
            "Sylvia Avondale": 0.07,
            "Laura Mandeville": 0.05,
            "Brenda Rogers": 0.05,
            "Katherina Rogers": 0.05,
            "E5": 0.04,
            "Helen Lloyd": 0.04,
            "E3": 0.02,
            "Ruth DeSand": 0.02,
            "Verne Sanderson": 0.02,
            "E12": 0.02,
            "Myra Liddel": 0.02,
            "E11": 0.02,
            "Eleanor Nye": 0.01,
            "Frances Anderson": 0.01,
            "Pearl Oglethorpe": 0.01,
            "E4": 0.01,
            "Charlotte McDowd": 0.01,
            "E10": 0.01,
            "Olivia Carleton": 0.01,
            "Flora Price": 0.01,
            "E2": 0.00,
            "E1": 0.00,
            "Dorothy Murchison": 0.00,
            "E13": 0.00,
            "E14": 0.00,
        }
        for node, value in answer.items():
            assert value == pytest.approx(bet[node], abs=1e-2)

    def test_davis_closeness_centrality(self):
        clos = bipartite.closeness_centrality(self.davis, self.top_nodes)
        answer = {
            "E8": 0.85,
            "E9": 0.79,
            "E7": 0.73,
            "Nora Fayette": 0.80,
            "Evelyn Jefferson": 0.80,
            "Theresa Anderson": 0.80,
            "E6": 0.69,
            "Sylvia Avondale": 0.77,
            "Laura Mandeville": 0.73,
            "Brenda Rogers": 0.73,
            "Katherina Rogers": 0.73,
            "E5": 0.59,
            "Helen Lloyd": 0.73,
            "E3": 0.56,
            "Ruth DeSand": 0.71,
            "Verne Sanderson": 0.71,
            "E12": 0.56,
            "Myra Liddel": 0.69,
            "E11": 0.54,
            "Eleanor Nye": 0.67,
            "Frances Anderson": 0.67,
            "Pearl Oglethorpe": 0.67,
            "E4": 0.54,
            "Charlotte McDowd": 0.60,
            "E10": 0.55,
            "Olivia Carleton": 0.59,
            "Flora Price": 0.59,
            "E2": 0.52,
            "E1": 0.52,
            "Dorothy Murchison": 0.65,
            "E13": 0.52,
            "E14": 0.52,
        }
        for node, value in answer.items():
            assert value == pytest.approx(clos[node], abs=1e-2)
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_cluster.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms import bipartite
5
+ from networkx.algorithms.bipartite.cluster import cc_dot, cc_max, cc_min
6
+
7
+
8
+ def test_pairwise_bipartite_cc_functions():
9
+ # Test functions for different kinds of bipartite clustering coefficients
10
+ # between pairs of nodes using 3 example graphs from figure 5 p. 40
11
+ # Latapy et al (2008)
12
+ G1 = nx.Graph([(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7)])
13
+ G2 = nx.Graph([(0, 2), (0, 3), (0, 4), (1, 3), (1, 4), (1, 5)])
14
+ G3 = nx.Graph(
15
+ [(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7), (1, 8), (1, 9)]
16
+ )
17
+ result = {
18
+ 0: [1 / 3.0, 2 / 3.0, 2 / 5.0],
19
+ 1: [1 / 2.0, 2 / 3.0, 2 / 3.0],
20
+ 2: [2 / 8.0, 2 / 5.0, 2 / 5.0],
21
+ }
22
+ for i, G in enumerate([G1, G2, G3]):
23
+ assert bipartite.is_bipartite(G)
24
+ assert cc_dot(set(G[0]), set(G[1])) == result[i][0]
25
+ assert cc_min(set(G[0]), set(G[1])) == result[i][1]
26
+ assert cc_max(set(G[0]), set(G[1])) == result[i][2]
27
+
28
+
29
+ def test_star_graph():
30
+ G = nx.star_graph(3)
31
+ # all modes are the same
32
+ answer = {0: 0, 1: 1, 2: 1, 3: 1}
33
+ assert bipartite.clustering(G, mode="dot") == answer
34
+ assert bipartite.clustering(G, mode="min") == answer
35
+ assert bipartite.clustering(G, mode="max") == answer
36
+
37
+
38
+ def test_not_bipartite():
39
+ with pytest.raises(nx.NetworkXError):
40
+ bipartite.clustering(nx.complete_graph(4))
41
+
42
+
43
+ def test_bad_mode():
44
+ with pytest.raises(nx.NetworkXError):
45
+ bipartite.clustering(nx.path_graph(4), mode="foo")
46
+
47
+
48
+ def test_path_graph():
49
+ G = nx.path_graph(4)
50
+ answer = {0: 0.5, 1: 0.5, 2: 0.5, 3: 0.5}
51
+ assert bipartite.clustering(G, mode="dot") == answer
52
+ assert bipartite.clustering(G, mode="max") == answer
53
+ answer = {0: 1, 1: 1, 2: 1, 3: 1}
54
+ assert bipartite.clustering(G, mode="min") == answer
55
+
56
+
57
+ def test_average_path_graph():
58
+ G = nx.path_graph(4)
59
+ assert bipartite.average_clustering(G, mode="dot") == 0.5
60
+ assert bipartite.average_clustering(G, mode="max") == 0.5
61
+ assert bipartite.average_clustering(G, mode="min") == 1
62
+
63
+
64
+ def test_ra_clustering_davis():
65
+ G = nx.davis_southern_women_graph()
66
+ cc4 = round(bipartite.robins_alexander_clustering(G), 3)
67
+ assert cc4 == 0.468
68
+
69
+
70
+ def test_ra_clustering_square():
71
+ G = nx.path_graph(4)
72
+ G.add_edge(0, 3)
73
+ assert bipartite.robins_alexander_clustering(G) == 1.0
74
+
75
+
76
+ def test_ra_clustering_zero():
77
+ G = nx.Graph()
78
+ assert bipartite.robins_alexander_clustering(G) == 0
79
+ G.add_nodes_from(range(4))
80
+ assert bipartite.robins_alexander_clustering(G) == 0
81
+ G.add_edges_from([(0, 1), (2, 3), (3, 4)])
82
+ assert bipartite.robins_alexander_clustering(G) == 0
83
+ G.add_edge(1, 2)
84
+ assert bipartite.robins_alexander_clustering(G) == 0
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_generators.py ADDED
@@ -0,0 +1,409 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numbers
2
+
3
+ import pytest
4
+
5
+ import networkx as nx
6
+
7
+ from ..generators import (
8
+ alternating_havel_hakimi_graph,
9
+ complete_bipartite_graph,
10
+ configuration_model,
11
+ gnmk_random_graph,
12
+ havel_hakimi_graph,
13
+ preferential_attachment_graph,
14
+ random_graph,
15
+ reverse_havel_hakimi_graph,
16
+ )
17
+
18
+ """
19
+ Generators - Bipartite
20
+ ----------------------
21
+ """
22
+
23
+
24
+ class TestGeneratorsBipartite:
25
+ def test_complete_bipartite_graph(self):
26
+ G = complete_bipartite_graph(0, 0)
27
+ assert nx.is_isomorphic(G, nx.null_graph())
28
+
29
+ for i in [1, 5]:
30
+ G = complete_bipartite_graph(i, 0)
31
+ assert nx.is_isomorphic(G, nx.empty_graph(i))
32
+ G = complete_bipartite_graph(0, i)
33
+ assert nx.is_isomorphic(G, nx.empty_graph(i))
34
+
35
+ G = complete_bipartite_graph(2, 2)
36
+ assert nx.is_isomorphic(G, nx.cycle_graph(4))
37
+
38
+ G = complete_bipartite_graph(1, 5)
39
+ assert nx.is_isomorphic(G, nx.star_graph(5))
40
+
41
+ G = complete_bipartite_graph(5, 1)
42
+ assert nx.is_isomorphic(G, nx.star_graph(5))
43
+
44
+ # complete_bipartite_graph(m1,m2) is a connected graph with
45
+ # m1+m2 nodes and m1*m2 edges
46
+ for m1, m2 in [(5, 11), (7, 3)]:
47
+ G = complete_bipartite_graph(m1, m2)
48
+ assert nx.number_of_nodes(G) == m1 + m2
49
+ assert nx.number_of_edges(G) == m1 * m2
50
+
51
+ with pytest.raises(nx.NetworkXError):
52
+ complete_bipartite_graph(7, 3, create_using=nx.DiGraph)
53
+ with pytest.raises(nx.NetworkXError):
54
+ complete_bipartite_graph(7, 3, create_using=nx.MultiDiGraph)
55
+
56
+ mG = complete_bipartite_graph(7, 3, create_using=nx.MultiGraph)
57
+ assert mG.is_multigraph()
58
+ assert sorted(mG.edges()) == sorted(G.edges())
59
+
60
+ mG = complete_bipartite_graph(7, 3, create_using=nx.MultiGraph)
61
+ assert mG.is_multigraph()
62
+ assert sorted(mG.edges()) == sorted(G.edges())
63
+
64
+ mG = complete_bipartite_graph(7, 3) # default to Graph
65
+ assert sorted(mG.edges()) == sorted(G.edges())
66
+ assert not mG.is_multigraph()
67
+ assert not mG.is_directed()
68
+
69
+ # specify nodes rather than number of nodes
70
+ for n1, n2 in [([1, 2], "ab"), (3, 2), (3, "ab"), ("ab", 3)]:
71
+ G = complete_bipartite_graph(n1, n2)
72
+ if isinstance(n1, numbers.Integral):
73
+ if isinstance(n2, numbers.Integral):
74
+ n2 = range(n1, n1 + n2)
75
+ n1 = range(n1)
76
+ elif isinstance(n2, numbers.Integral):
77
+ n2 = range(n2)
78
+ edges = {(u, v) for u in n1 for v in n2}
79
+ assert edges == set(G.edges)
80
+ assert G.size() == len(edges)
81
+
82
+ # raise when node sets are not distinct
83
+ for n1, n2 in [([1, 2], 3), (3, [1, 2]), ("abc", "bcd")]:
84
+ pytest.raises(nx.NetworkXError, complete_bipartite_graph, n1, n2)
85
+
86
+ def test_configuration_model(self):
87
+ aseq = []
88
+ bseq = []
89
+ G = configuration_model(aseq, bseq)
90
+ assert len(G) == 0
91
+
92
+ aseq = [0, 0]
93
+ bseq = [0, 0]
94
+ G = configuration_model(aseq, bseq)
95
+ assert len(G) == 4
96
+ assert G.number_of_edges() == 0
97
+
98
+ aseq = [3, 3, 3, 3]
99
+ bseq = [2, 2, 2, 2, 2]
100
+ pytest.raises(nx.NetworkXError, configuration_model, aseq, bseq)
101
+
102
+ aseq = [3, 3, 3, 3]
103
+ bseq = [2, 2, 2, 2, 2, 2]
104
+ G = configuration_model(aseq, bseq)
105
+ assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
106
+
107
+ aseq = [2, 2, 2, 2, 2, 2]
108
+ bseq = [3, 3, 3, 3]
109
+ G = configuration_model(aseq, bseq)
110
+ assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
111
+
112
+ aseq = [2, 2, 2, 1, 1, 1]
113
+ bseq = [3, 3, 3]
114
+ G = configuration_model(aseq, bseq)
115
+ assert G.is_multigraph()
116
+ assert not G.is_directed()
117
+ assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3]
118
+
119
+ GU = nx.projected_graph(nx.Graph(G), range(len(aseq)))
120
+ assert GU.number_of_nodes() == 6
121
+
122
+ GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
123
+ assert GD.number_of_nodes() == 3
124
+
125
+ G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
126
+ assert not G.is_multigraph()
127
+ assert not G.is_directed()
128
+
129
+ pytest.raises(
130
+ nx.NetworkXError, configuration_model, aseq, bseq, create_using=nx.DiGraph()
131
+ )
132
+ pytest.raises(
133
+ nx.NetworkXError, configuration_model, aseq, bseq, create_using=nx.DiGraph
134
+ )
135
+ pytest.raises(
136
+ nx.NetworkXError,
137
+ configuration_model,
138
+ aseq,
139
+ bseq,
140
+ create_using=nx.MultiDiGraph,
141
+ )
142
+
143
+ def test_havel_hakimi_graph(self):
144
+ aseq = []
145
+ bseq = []
146
+ G = havel_hakimi_graph(aseq, bseq)
147
+ assert len(G) == 0
148
+
149
+ aseq = [0, 0]
150
+ bseq = [0, 0]
151
+ G = havel_hakimi_graph(aseq, bseq)
152
+ assert len(G) == 4
153
+ assert G.number_of_edges() == 0
154
+
155
+ aseq = [3, 3, 3, 3]
156
+ bseq = [2, 2, 2, 2, 2]
157
+ pytest.raises(nx.NetworkXError, havel_hakimi_graph, aseq, bseq)
158
+
159
+ bseq = [2, 2, 2, 2, 2, 2]
160
+ G = havel_hakimi_graph(aseq, bseq)
161
+ assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
162
+
163
+ aseq = [2, 2, 2, 2, 2, 2]
164
+ bseq = [3, 3, 3, 3]
165
+ G = havel_hakimi_graph(aseq, bseq)
166
+ assert G.is_multigraph()
167
+ assert not G.is_directed()
168
+ assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
169
+
170
+ GU = nx.projected_graph(nx.Graph(G), range(len(aseq)))
171
+ assert GU.number_of_nodes() == 6
172
+
173
+ GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
174
+ assert GD.number_of_nodes() == 4
175
+
176
+ G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
177
+ assert not G.is_multigraph()
178
+ assert not G.is_directed()
179
+
180
+ pytest.raises(
181
+ nx.NetworkXError, havel_hakimi_graph, aseq, bseq, create_using=nx.DiGraph
182
+ )
183
+ pytest.raises(
184
+ nx.NetworkXError, havel_hakimi_graph, aseq, bseq, create_using=nx.DiGraph
185
+ )
186
+ pytest.raises(
187
+ nx.NetworkXError,
188
+ havel_hakimi_graph,
189
+ aseq,
190
+ bseq,
191
+ create_using=nx.MultiDiGraph,
192
+ )
193
+
194
+ def test_reverse_havel_hakimi_graph(self):
195
+ aseq = []
196
+ bseq = []
197
+ G = reverse_havel_hakimi_graph(aseq, bseq)
198
+ assert len(G) == 0
199
+
200
+ aseq = [0, 0]
201
+ bseq = [0, 0]
202
+ G = reverse_havel_hakimi_graph(aseq, bseq)
203
+ assert len(G) == 4
204
+ assert G.number_of_edges() == 0
205
+
206
+ aseq = [3, 3, 3, 3]
207
+ bseq = [2, 2, 2, 2, 2]
208
+ pytest.raises(nx.NetworkXError, reverse_havel_hakimi_graph, aseq, bseq)
209
+
210
+ bseq = [2, 2, 2, 2, 2, 2]
211
+ G = reverse_havel_hakimi_graph(aseq, bseq)
212
+ assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
213
+
214
+ aseq = [2, 2, 2, 2, 2, 2]
215
+ bseq = [3, 3, 3, 3]
216
+ G = reverse_havel_hakimi_graph(aseq, bseq)
217
+ assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
218
+
219
+ aseq = [2, 2, 2, 1, 1, 1]
220
+ bseq = [3, 3, 3]
221
+ G = reverse_havel_hakimi_graph(aseq, bseq)
222
+ assert G.is_multigraph()
223
+ assert not G.is_directed()
224
+ assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3]
225
+
226
+ GU = nx.projected_graph(nx.Graph(G), range(len(aseq)))
227
+ assert GU.number_of_nodes() == 6
228
+
229
+ GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
230
+ assert GD.number_of_nodes() == 3
231
+
232
+ G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
233
+ assert not G.is_multigraph()
234
+ assert not G.is_directed()
235
+
236
+ pytest.raises(
237
+ nx.NetworkXError,
238
+ reverse_havel_hakimi_graph,
239
+ aseq,
240
+ bseq,
241
+ create_using=nx.DiGraph,
242
+ )
243
+ pytest.raises(
244
+ nx.NetworkXError,
245
+ reverse_havel_hakimi_graph,
246
+ aseq,
247
+ bseq,
248
+ create_using=nx.DiGraph,
249
+ )
250
+ pytest.raises(
251
+ nx.NetworkXError,
252
+ reverse_havel_hakimi_graph,
253
+ aseq,
254
+ bseq,
255
+ create_using=nx.MultiDiGraph,
256
+ )
257
+
258
+ def test_alternating_havel_hakimi_graph(self):
259
+ aseq = []
260
+ bseq = []
261
+ G = alternating_havel_hakimi_graph(aseq, bseq)
262
+ assert len(G) == 0
263
+
264
+ aseq = [0, 0]
265
+ bseq = [0, 0]
266
+ G = alternating_havel_hakimi_graph(aseq, bseq)
267
+ assert len(G) == 4
268
+ assert G.number_of_edges() == 0
269
+
270
+ aseq = [3, 3, 3, 3]
271
+ bseq = [2, 2, 2, 2, 2]
272
+ pytest.raises(nx.NetworkXError, alternating_havel_hakimi_graph, aseq, bseq)
273
+
274
+ bseq = [2, 2, 2, 2, 2, 2]
275
+ G = alternating_havel_hakimi_graph(aseq, bseq)
276
+ assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
277
+
278
+ aseq = [2, 2, 2, 2, 2, 2]
279
+ bseq = [3, 3, 3, 3]
280
+ G = alternating_havel_hakimi_graph(aseq, bseq)
281
+ assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
282
+
283
+ aseq = [2, 2, 2, 1, 1, 1]
284
+ bseq = [3, 3, 3]
285
+ G = alternating_havel_hakimi_graph(aseq, bseq)
286
+ assert G.is_multigraph()
287
+ assert not G.is_directed()
288
+ assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3]
289
+
290
+ GU = nx.projected_graph(nx.Graph(G), range(len(aseq)))
291
+ assert GU.number_of_nodes() == 6
292
+
293
+ GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
294
+ assert GD.number_of_nodes() == 3
295
+
296
+ G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
297
+ assert not G.is_multigraph()
298
+ assert not G.is_directed()
299
+
300
+ pytest.raises(
301
+ nx.NetworkXError,
302
+ alternating_havel_hakimi_graph,
303
+ aseq,
304
+ bseq,
305
+ create_using=nx.DiGraph,
306
+ )
307
+ pytest.raises(
308
+ nx.NetworkXError,
309
+ alternating_havel_hakimi_graph,
310
+ aseq,
311
+ bseq,
312
+ create_using=nx.DiGraph,
313
+ )
314
+ pytest.raises(
315
+ nx.NetworkXError,
316
+ alternating_havel_hakimi_graph,
317
+ aseq,
318
+ bseq,
319
+ create_using=nx.MultiDiGraph,
320
+ )
321
+
322
+ def test_preferential_attachment(self):
323
+ aseq = [3, 2, 1, 1]
324
+ G = preferential_attachment_graph(aseq, 0.5)
325
+ assert G.is_multigraph()
326
+ assert not G.is_directed()
327
+
328
+ G = preferential_attachment_graph(aseq, 0.5, create_using=nx.Graph)
329
+ assert not G.is_multigraph()
330
+ assert not G.is_directed()
331
+
332
+ pytest.raises(
333
+ nx.NetworkXError,
334
+ preferential_attachment_graph,
335
+ aseq,
336
+ 0.5,
337
+ create_using=nx.DiGraph(),
338
+ )
339
+ pytest.raises(
340
+ nx.NetworkXError,
341
+ preferential_attachment_graph,
342
+ aseq,
343
+ 0.5,
344
+ create_using=nx.DiGraph(),
345
+ )
346
+ pytest.raises(
347
+ nx.NetworkXError,
348
+ preferential_attachment_graph,
349
+ aseq,
350
+ 0.5,
351
+ create_using=nx.DiGraph(),
352
+ )
353
+
354
+ def test_random_graph(self):
355
+ n = 10
356
+ m = 20
357
+ G = random_graph(n, m, 0.9)
358
+ assert len(G) == 30
359
+ assert nx.is_bipartite(G)
360
+ X, Y = nx.algorithms.bipartite.sets(G)
361
+ assert set(range(n)) == X
362
+ assert set(range(n, n + m)) == Y
363
+
364
+ def test_random_digraph(self):
365
+ n = 10
366
+ m = 20
367
+ G = random_graph(n, m, 0.9, directed=True)
368
+ assert len(G) == 30
369
+ assert nx.is_bipartite(G)
370
+ X, Y = nx.algorithms.bipartite.sets(G)
371
+ assert set(range(n)) == X
372
+ assert set(range(n, n + m)) == Y
373
+
374
+ def test_gnmk_random_graph(self):
375
+ n = 10
376
+ m = 20
377
+ edges = 100
378
+ # set seed because sometimes it is not connected
379
+ # which raises an error in bipartite.sets(G) below.
380
+ G = gnmk_random_graph(n, m, edges, seed=1234)
381
+ assert len(G) == n + m
382
+ assert nx.is_bipartite(G)
383
+ X, Y = nx.algorithms.bipartite.sets(G)
384
+ # print(X)
385
+ assert set(range(n)) == X
386
+ assert set(range(n, n + m)) == Y
387
+ assert edges == len(list(G.edges()))
388
+
389
+ def test_gnmk_random_graph_complete(self):
390
+ n = 10
391
+ m = 20
392
+ edges = 200
393
+ G = gnmk_random_graph(n, m, edges)
394
+ assert len(G) == n + m
395
+ assert nx.is_bipartite(G)
396
+ X, Y = nx.algorithms.bipartite.sets(G)
397
+ # print(X)
398
+ assert set(range(n)) == X
399
+ assert set(range(n, n + m)) == Y
400
+ assert edges == len(list(G.edges()))
401
+
402
+ @pytest.mark.parametrize("n", (4, range(4), {0, 1, 2, 3}))
403
+ @pytest.mark.parametrize("m", (range(4, 7), {4, 5, 6}))
404
+ def test_complete_bipartite_graph_str(self, n, m):
405
+ """Ensure G.name is consistent for all inputs accepted by nodes_or_number.
406
+ See gh-7396"""
407
+ G = nx.complete_bipartite_graph(n, m)
408
+ ans = "Graph named 'complete_bipartite_graph(4, 3)' with 7 nodes and 12 edges"
409
+ assert str(G) == ans
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_project.py ADDED
@@ -0,0 +1,407 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms import bipartite
5
+ from networkx.utils import edges_equal, nodes_equal
6
+
7
+
8
+ class TestBipartiteProject:
9
+ def test_path_projected_graph(self):
10
+ G = nx.path_graph(4)
11
+ P = bipartite.projected_graph(G, [1, 3])
12
+ assert nodes_equal(list(P), [1, 3])
13
+ assert edges_equal(list(P.edges()), [(1, 3)])
14
+ P = bipartite.projected_graph(G, [0, 2])
15
+ assert nodes_equal(list(P), [0, 2])
16
+ assert edges_equal(list(P.edges()), [(0, 2)])
17
+ G = nx.MultiGraph([(0, 1)])
18
+ with pytest.raises(nx.NetworkXError, match="not defined for multigraphs"):
19
+ bipartite.projected_graph(G, [0])
20
+
21
+ def test_path_projected_properties_graph(self):
22
+ G = nx.path_graph(4)
23
+ G.add_node(1, name="one")
24
+ G.add_node(2, name="two")
25
+ P = bipartite.projected_graph(G, [1, 3])
26
+ assert nodes_equal(list(P), [1, 3])
27
+ assert edges_equal(list(P.edges()), [(1, 3)])
28
+ assert P.nodes[1]["name"] == G.nodes[1]["name"]
29
+ P = bipartite.projected_graph(G, [0, 2])
30
+ assert nodes_equal(list(P), [0, 2])
31
+ assert edges_equal(list(P.edges()), [(0, 2)])
32
+ assert P.nodes[2]["name"] == G.nodes[2]["name"]
33
+
34
+ def test_path_collaboration_projected_graph(self):
35
+ G = nx.path_graph(4)
36
+ P = bipartite.collaboration_weighted_projected_graph(G, [1, 3])
37
+ assert nodes_equal(list(P), [1, 3])
38
+ assert edges_equal(list(P.edges()), [(1, 3)])
39
+ P[1][3]["weight"] = 1
40
+ P = bipartite.collaboration_weighted_projected_graph(G, [0, 2])
41
+ assert nodes_equal(list(P), [0, 2])
42
+ assert edges_equal(list(P.edges()), [(0, 2)])
43
+ P[0][2]["weight"] = 1
44
+
45
+ def test_directed_path_collaboration_projected_graph(self):
46
+ G = nx.DiGraph()
47
+ nx.add_path(G, range(4))
48
+ P = bipartite.collaboration_weighted_projected_graph(G, [1, 3])
49
+ assert nodes_equal(list(P), [1, 3])
50
+ assert edges_equal(list(P.edges()), [(1, 3)])
51
+ P[1][3]["weight"] = 1
52
+ P = bipartite.collaboration_weighted_projected_graph(G, [0, 2])
53
+ assert nodes_equal(list(P), [0, 2])
54
+ assert edges_equal(list(P.edges()), [(0, 2)])
55
+ P[0][2]["weight"] = 1
56
+
57
+ def test_path_weighted_projected_graph(self):
58
+ G = nx.path_graph(4)
59
+
60
+ with pytest.raises(nx.NetworkXAlgorithmError):
61
+ bipartite.weighted_projected_graph(G, [1, 2, 3, 3])
62
+
63
+ P = bipartite.weighted_projected_graph(G, [1, 3])
64
+ assert nodes_equal(list(P), [1, 3])
65
+ assert edges_equal(list(P.edges()), [(1, 3)])
66
+ P[1][3]["weight"] = 1
67
+ P = bipartite.weighted_projected_graph(G, [0, 2])
68
+ assert nodes_equal(list(P), [0, 2])
69
+ assert edges_equal(list(P.edges()), [(0, 2)])
70
+ P[0][2]["weight"] = 1
71
+
72
+ def test_digraph_weighted_projection(self):
73
+ G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4)])
74
+ P = bipartite.overlap_weighted_projected_graph(G, [1, 3])
75
+ assert nx.get_edge_attributes(P, "weight") == {(1, 3): 1.0}
76
+ assert len(P) == 2
77
+
78
+ def test_path_weighted_projected_directed_graph(self):
79
+ G = nx.DiGraph()
80
+ nx.add_path(G, range(4))
81
+ P = bipartite.weighted_projected_graph(G, [1, 3])
82
+ assert nodes_equal(list(P), [1, 3])
83
+ assert edges_equal(list(P.edges()), [(1, 3)])
84
+ P[1][3]["weight"] = 1
85
+ P = bipartite.weighted_projected_graph(G, [0, 2])
86
+ assert nodes_equal(list(P), [0, 2])
87
+ assert edges_equal(list(P.edges()), [(0, 2)])
88
+ P[0][2]["weight"] = 1
89
+
90
+ def test_star_projected_graph(self):
91
+ G = nx.star_graph(3)
92
+ P = bipartite.projected_graph(G, [1, 2, 3])
93
+ assert nodes_equal(list(P), [1, 2, 3])
94
+ assert edges_equal(list(P.edges()), [(1, 2), (1, 3), (2, 3)])
95
+ P = bipartite.weighted_projected_graph(G, [1, 2, 3])
96
+ assert nodes_equal(list(P), [1, 2, 3])
97
+ assert edges_equal(list(P.edges()), [(1, 2), (1, 3), (2, 3)])
98
+
99
+ P = bipartite.projected_graph(G, [0])
100
+ assert nodes_equal(list(P), [0])
101
+ assert edges_equal(list(P.edges()), [])
102
+
103
+ def test_project_multigraph(self):
104
+ G = nx.Graph()
105
+ G.add_edge("a", 1)
106
+ G.add_edge("b", 1)
107
+ G.add_edge("a", 2)
108
+ G.add_edge("b", 2)
109
+ P = bipartite.projected_graph(G, "ab")
110
+ assert edges_equal(list(P.edges()), [("a", "b")])
111
+ P = bipartite.weighted_projected_graph(G, "ab")
112
+ assert edges_equal(list(P.edges()), [("a", "b")])
113
+ P = bipartite.projected_graph(G, "ab", multigraph=True)
114
+ assert edges_equal(list(P.edges()), [("a", "b"), ("a", "b")])
115
+
116
+ def test_project_collaboration(self):
117
+ G = nx.Graph()
118
+ G.add_edge("a", 1)
119
+ G.add_edge("b", 1)
120
+ G.add_edge("b", 2)
121
+ G.add_edge("c", 2)
122
+ G.add_edge("c", 3)
123
+ G.add_edge("c", 4)
124
+ G.add_edge("b", 4)
125
+ P = bipartite.collaboration_weighted_projected_graph(G, "abc")
126
+ assert P["a"]["b"]["weight"] == 1
127
+ assert P["b"]["c"]["weight"] == 2
128
+
129
+ def test_directed_projection(self):
130
+ G = nx.DiGraph()
131
+ G.add_edge("A", 1)
132
+ G.add_edge(1, "B")
133
+ G.add_edge("A", 2)
134
+ G.add_edge("B", 2)
135
+ P = bipartite.projected_graph(G, "AB")
136
+ assert edges_equal(list(P.edges()), [("A", "B")])
137
+ P = bipartite.weighted_projected_graph(G, "AB")
138
+ assert edges_equal(list(P.edges()), [("A", "B")])
139
+ assert P["A"]["B"]["weight"] == 1
140
+
141
+ P = bipartite.projected_graph(G, "AB", multigraph=True)
142
+ assert edges_equal(list(P.edges()), [("A", "B")])
143
+
144
+ G = nx.DiGraph()
145
+ G.add_edge("A", 1)
146
+ G.add_edge(1, "B")
147
+ G.add_edge("A", 2)
148
+ G.add_edge(2, "B")
149
+ P = bipartite.projected_graph(G, "AB")
150
+ assert edges_equal(list(P.edges()), [("A", "B")])
151
+ P = bipartite.weighted_projected_graph(G, "AB")
152
+ assert edges_equal(list(P.edges()), [("A", "B")])
153
+ assert P["A"]["B"]["weight"] == 2
154
+
155
+ P = bipartite.projected_graph(G, "AB", multigraph=True)
156
+ assert edges_equal(list(P.edges()), [("A", "B"), ("A", "B")])
157
+
158
+
159
+ class TestBipartiteWeightedProjection:
160
+ @classmethod
161
+ def setup_class(cls):
162
+ # Tore Opsahl's example
163
+ # http://toreopsahl.com/2009/05/01/projecting-two-mode-networks-onto-weighted-one-mode-networks/
164
+ cls.G = nx.Graph()
165
+ cls.G.add_edge("A", 1)
166
+ cls.G.add_edge("A", 2)
167
+ cls.G.add_edge("B", 1)
168
+ cls.G.add_edge("B", 2)
169
+ cls.G.add_edge("B", 3)
170
+ cls.G.add_edge("B", 4)
171
+ cls.G.add_edge("B", 5)
172
+ cls.G.add_edge("C", 1)
173
+ cls.G.add_edge("D", 3)
174
+ cls.G.add_edge("E", 4)
175
+ cls.G.add_edge("E", 5)
176
+ cls.G.add_edge("E", 6)
177
+ cls.G.add_edge("F", 6)
178
+ # Graph based on figure 6 from Newman (2001)
179
+ cls.N = nx.Graph()
180
+ cls.N.add_edge("A", 1)
181
+ cls.N.add_edge("A", 2)
182
+ cls.N.add_edge("A", 3)
183
+ cls.N.add_edge("B", 1)
184
+ cls.N.add_edge("B", 2)
185
+ cls.N.add_edge("B", 3)
186
+ cls.N.add_edge("C", 1)
187
+ cls.N.add_edge("D", 1)
188
+ cls.N.add_edge("E", 3)
189
+
190
+ def test_project_weighted_shared(self):
191
+ edges = [
192
+ ("A", "B", 2),
193
+ ("A", "C", 1),
194
+ ("B", "C", 1),
195
+ ("B", "D", 1),
196
+ ("B", "E", 2),
197
+ ("E", "F", 1),
198
+ ]
199
+ Panswer = nx.Graph()
200
+ Panswer.add_weighted_edges_from(edges)
201
+ P = bipartite.weighted_projected_graph(self.G, "ABCDEF")
202
+ assert edges_equal(list(P.edges()), Panswer.edges())
203
+ for u, v in list(P.edges()):
204
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
205
+
206
+ edges = [
207
+ ("A", "B", 3),
208
+ ("A", "E", 1),
209
+ ("A", "C", 1),
210
+ ("A", "D", 1),
211
+ ("B", "E", 1),
212
+ ("B", "C", 1),
213
+ ("B", "D", 1),
214
+ ("C", "D", 1),
215
+ ]
216
+ Panswer = nx.Graph()
217
+ Panswer.add_weighted_edges_from(edges)
218
+ P = bipartite.weighted_projected_graph(self.N, "ABCDE")
219
+ assert edges_equal(list(P.edges()), Panswer.edges())
220
+ for u, v in list(P.edges()):
221
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
222
+
223
+ def test_project_weighted_newman(self):
224
+ edges = [
225
+ ("A", "B", 1.5),
226
+ ("A", "C", 0.5),
227
+ ("B", "C", 0.5),
228
+ ("B", "D", 1),
229
+ ("B", "E", 2),
230
+ ("E", "F", 1),
231
+ ]
232
+ Panswer = nx.Graph()
233
+ Panswer.add_weighted_edges_from(edges)
234
+ P = bipartite.collaboration_weighted_projected_graph(self.G, "ABCDEF")
235
+ assert edges_equal(list(P.edges()), Panswer.edges())
236
+ for u, v in list(P.edges()):
237
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
238
+
239
+ edges = [
240
+ ("A", "B", 11 / 6.0),
241
+ ("A", "E", 1 / 2.0),
242
+ ("A", "C", 1 / 3.0),
243
+ ("A", "D", 1 / 3.0),
244
+ ("B", "E", 1 / 2.0),
245
+ ("B", "C", 1 / 3.0),
246
+ ("B", "D", 1 / 3.0),
247
+ ("C", "D", 1 / 3.0),
248
+ ]
249
+ Panswer = nx.Graph()
250
+ Panswer.add_weighted_edges_from(edges)
251
+ P = bipartite.collaboration_weighted_projected_graph(self.N, "ABCDE")
252
+ assert edges_equal(list(P.edges()), Panswer.edges())
253
+ for u, v in list(P.edges()):
254
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
255
+
256
+ def test_project_weighted_ratio(self):
257
+ edges = [
258
+ ("A", "B", 2 / 6.0),
259
+ ("A", "C", 1 / 6.0),
260
+ ("B", "C", 1 / 6.0),
261
+ ("B", "D", 1 / 6.0),
262
+ ("B", "E", 2 / 6.0),
263
+ ("E", "F", 1 / 6.0),
264
+ ]
265
+ Panswer = nx.Graph()
266
+ Panswer.add_weighted_edges_from(edges)
267
+ P = bipartite.weighted_projected_graph(self.G, "ABCDEF", ratio=True)
268
+ assert edges_equal(list(P.edges()), Panswer.edges())
269
+ for u, v in list(P.edges()):
270
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
271
+
272
+ edges = [
273
+ ("A", "B", 3 / 3.0),
274
+ ("A", "E", 1 / 3.0),
275
+ ("A", "C", 1 / 3.0),
276
+ ("A", "D", 1 / 3.0),
277
+ ("B", "E", 1 / 3.0),
278
+ ("B", "C", 1 / 3.0),
279
+ ("B", "D", 1 / 3.0),
280
+ ("C", "D", 1 / 3.0),
281
+ ]
282
+ Panswer = nx.Graph()
283
+ Panswer.add_weighted_edges_from(edges)
284
+ P = bipartite.weighted_projected_graph(self.N, "ABCDE", ratio=True)
285
+ assert edges_equal(list(P.edges()), Panswer.edges())
286
+ for u, v in list(P.edges()):
287
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
288
+
289
+ def test_project_weighted_overlap(self):
290
+ edges = [
291
+ ("A", "B", 2 / 2.0),
292
+ ("A", "C", 1 / 1.0),
293
+ ("B", "C", 1 / 1.0),
294
+ ("B", "D", 1 / 1.0),
295
+ ("B", "E", 2 / 3.0),
296
+ ("E", "F", 1 / 1.0),
297
+ ]
298
+ Panswer = nx.Graph()
299
+ Panswer.add_weighted_edges_from(edges)
300
+ P = bipartite.overlap_weighted_projected_graph(self.G, "ABCDEF", jaccard=False)
301
+ assert edges_equal(list(P.edges()), Panswer.edges())
302
+ for u, v in list(P.edges()):
303
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
304
+
305
+ edges = [
306
+ ("A", "B", 3 / 3.0),
307
+ ("A", "E", 1 / 1.0),
308
+ ("A", "C", 1 / 1.0),
309
+ ("A", "D", 1 / 1.0),
310
+ ("B", "E", 1 / 1.0),
311
+ ("B", "C", 1 / 1.0),
312
+ ("B", "D", 1 / 1.0),
313
+ ("C", "D", 1 / 1.0),
314
+ ]
315
+ Panswer = nx.Graph()
316
+ Panswer.add_weighted_edges_from(edges)
317
+ P = bipartite.overlap_weighted_projected_graph(self.N, "ABCDE", jaccard=False)
318
+ assert edges_equal(list(P.edges()), Panswer.edges())
319
+ for u, v in list(P.edges()):
320
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
321
+
322
+ def test_project_weighted_jaccard(self):
323
+ edges = [
324
+ ("A", "B", 2 / 5.0),
325
+ ("A", "C", 1 / 2.0),
326
+ ("B", "C", 1 / 5.0),
327
+ ("B", "D", 1 / 5.0),
328
+ ("B", "E", 2 / 6.0),
329
+ ("E", "F", 1 / 3.0),
330
+ ]
331
+ Panswer = nx.Graph()
332
+ Panswer.add_weighted_edges_from(edges)
333
+ P = bipartite.overlap_weighted_projected_graph(self.G, "ABCDEF")
334
+ assert edges_equal(list(P.edges()), Panswer.edges())
335
+ for u, v in list(P.edges()):
336
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
337
+
338
+ edges = [
339
+ ("A", "B", 3 / 3.0),
340
+ ("A", "E", 1 / 3.0),
341
+ ("A", "C", 1 / 3.0),
342
+ ("A", "D", 1 / 3.0),
343
+ ("B", "E", 1 / 3.0),
344
+ ("B", "C", 1 / 3.0),
345
+ ("B", "D", 1 / 3.0),
346
+ ("C", "D", 1 / 1.0),
347
+ ]
348
+ Panswer = nx.Graph()
349
+ Panswer.add_weighted_edges_from(edges)
350
+ P = bipartite.overlap_weighted_projected_graph(self.N, "ABCDE")
351
+ assert edges_equal(list(P.edges()), Panswer.edges())
352
+ for u, v in P.edges():
353
+ assert P[u][v]["weight"] == Panswer[u][v]["weight"]
354
+
355
+ def test_generic_weighted_projected_graph_simple(self):
356
+ def shared(G, u, v):
357
+ return len(set(G[u]) & set(G[v]))
358
+
359
+ B = nx.path_graph(5)
360
+ G = bipartite.generic_weighted_projected_graph(
361
+ B, [0, 2, 4], weight_function=shared
362
+ )
363
+ assert nodes_equal(list(G), [0, 2, 4])
364
+ assert edges_equal(
365
+ list(G.edges(data=True)),
366
+ [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})],
367
+ )
368
+
369
+ G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4])
370
+ assert nodes_equal(list(G), [0, 2, 4])
371
+ assert edges_equal(
372
+ list(G.edges(data=True)),
373
+ [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})],
374
+ )
375
+ B = nx.DiGraph()
376
+ nx.add_path(B, range(5))
377
+ G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4])
378
+ assert nodes_equal(list(G), [0, 2, 4])
379
+ assert edges_equal(
380
+ list(G.edges(data=True)), [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})]
381
+ )
382
+
383
+ def test_generic_weighted_projected_graph_custom(self):
384
+ def jaccard(G, u, v):
385
+ unbrs = set(G[u])
386
+ vnbrs = set(G[v])
387
+ return len(unbrs & vnbrs) / len(unbrs | vnbrs)
388
+
389
+ def my_weight(G, u, v, weight="weight"):
390
+ w = 0
391
+ for nbr in set(G[u]) & set(G[v]):
392
+ w += G.edges[u, nbr].get(weight, 1) + G.edges[v, nbr].get(weight, 1)
393
+ return w
394
+
395
+ B = nx.bipartite.complete_bipartite_graph(2, 2)
396
+ for i, (u, v) in enumerate(B.edges()):
397
+ B.edges[u, v]["weight"] = i + 1
398
+ G = bipartite.generic_weighted_projected_graph(
399
+ B, [0, 1], weight_function=jaccard
400
+ )
401
+ assert edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 1.0})])
402
+ G = bipartite.generic_weighted_projected_graph(
403
+ B, [0, 1], weight_function=my_weight
404
+ )
405
+ assert edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 10})])
406
+ G = bipartite.generic_weighted_projected_graph(B, [0, 1])
407
+ assert edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 2})])
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_redundancy.py ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Unit tests for the :mod:`networkx.algorithms.bipartite.redundancy` module."""
2
+
3
+ import pytest
4
+
5
+ from networkx import NetworkXError, cycle_graph
6
+ from networkx.algorithms.bipartite import complete_bipartite_graph, node_redundancy
7
+
8
+
9
+ def test_no_redundant_nodes():
10
+ G = complete_bipartite_graph(2, 2)
11
+
12
+ # when nodes is None
13
+ rc = node_redundancy(G)
14
+ assert all(redundancy == 1 for redundancy in rc.values())
15
+
16
+ # when set of nodes is specified
17
+ rc = node_redundancy(G, (2, 3))
18
+ assert rc == {2: 1.0, 3: 1.0}
19
+
20
+
21
+ def test_redundant_nodes():
22
+ G = cycle_graph(6)
23
+ edge = {0, 3}
24
+ G.add_edge(*edge)
25
+ redundancy = node_redundancy(G)
26
+ for v in edge:
27
+ assert redundancy[v] == 2 / 3
28
+ for v in set(G) - edge:
29
+ assert redundancy[v] == 1
30
+
31
+
32
+ def test_not_enough_neighbors():
33
+ with pytest.raises(NetworkXError):
34
+ G = complete_bipartite_graph(1, 2)
35
+ node_redundancy(G)
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/__init__.py ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from .betweenness import *
2
+ from .betweenness_subset import *
3
+ from .closeness import *
4
+ from .current_flow_betweenness import *
5
+ from .current_flow_betweenness_subset import *
6
+ from .current_flow_closeness import *
7
+ from .degree_alg import *
8
+ from .dispersion import *
9
+ from .eigenvector import *
10
+ from .group import *
11
+ from .harmonic import *
12
+ from .katz import *
13
+ from .load import *
14
+ from .percolation import *
15
+ from .reaching import *
16
+ from .second_order import *
17
+ from .subgraph_alg import *
18
+ from .trophic import *
19
+ from .voterank_alg import *
20
+ from .laplacian import *
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/__pycache__/closeness.cpython-310.pyc ADDED
Binary file (9.16 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_closeness.cpython-310.pyc ADDED
Binary file (3.36 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/__pycache__/degree_alg.cpython-310.pyc ADDED
Binary file (4.55 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/betweenness.py ADDED
@@ -0,0 +1,436 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Betweenness centrality measures."""
2
+
3
+ from collections import deque
4
+ from heapq import heappop, heappush
5
+ from itertools import count
6
+
7
+ import networkx as nx
8
+ from networkx.algorithms.shortest_paths.weighted import _weight_function
9
+ from networkx.utils import py_random_state
10
+ from networkx.utils.decorators import not_implemented_for
11
+
12
+ __all__ = ["betweenness_centrality", "edge_betweenness_centrality"]
13
+
14
+
15
@py_random_state(5)
@nx._dispatchable(edge_attrs="weight")
def betweenness_centrality(
    G, k=None, normalized=True, weight=None, endpoints=False, seed=None
):
    r"""Compute shortest-path betweenness centrality for nodes.

    Betweenness centrality of a node $v$ is the sum over node pairs
    $(s, t)$ of the fraction of shortest $(s, t)$-paths passing
    through $v$:

    .. math::

       c_B(v) =\sum_{s,t \in V} \frac{\sigma(s, t|v)}{\sigma(s, t)}

    where $\sigma(s, t)$ is the number of shortest $(s, t)$-paths and
    $\sigma(s, t|v)$ counts those passing through $v \notin \{s, t\}$.

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    k : int, optional (default=None)
        If not None, estimate betweenness from ``k`` sampled source
        nodes (``k <= n``). Larger ``k`` gives a better approximation.

    normalized : bool, optional
        If True, values are scaled by ``2/((n-1)(n-2))`` for undirected
        graphs and ``1/((n-1)(n-2))`` for directed graphs, where ``n``
        is the number of nodes in G.

    weight : None or string, optional (default=None)
        If None, all edges count as distance 1. Otherwise the name of
        the edge attribute used as a distance.

    endpoints : bool, optional
        If True, include the path endpoints in the shortest-path counts.

    seed : integer, random_state, or None (default)
        Random-number state; only used when ``k`` is not None.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    nodes : dictionary
        Dictionary mapping each node to its betweenness centrality.

    See Also
    --------
    edge_betweenness_centrality
    load_centrality

    Notes
    -----
    Implements Brandes' algorithm [1]_. For weighted graphs all edge
    weights must be positive: zero weights can create infinitely many
    equal-length paths. For undirected graphs each pair's paths are
    counted once as undirected paths.

    References
    ----------
    .. [1] Ulrik Brandes:
       A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       https://doi.org/10.1080/0022250X.2001.9990249
    """
    betweenness = dict.fromkeys(G, 0.0)
    # Exact computation uses every node as a source; the approximation
    # samples k pivot sources instead.
    sources = G if k is None else seed.sample(list(G.nodes()), k)
    for s in sources:
        # Stage 1: single-source shortest-path data (BFS or Dijkstra).
        if weight is None:
            S, P, sigma, _ = _single_source_shortest_path_basic(G, s)
        else:
            S, P, sigma, _ = _single_source_dijkstra_path_basic(G, s, weight)
        # Stage 2: back-propagate path dependencies onto the nodes.
        if endpoints:
            betweenness, _ = _accumulate_endpoints(betweenness, S, P, sigma, s)
        else:
            betweenness, _ = _accumulate_basic(betweenness, S, P, sigma, s)
    # Stage 3: normalization / undirected double-count correction.
    return _rescale(
        betweenness,
        len(G),
        normalized=normalized,
        directed=G.is_directed(),
        k=k,
        endpoints=endpoints,
    )
155
+
156
+
157
@py_random_state(4)
@nx._dispatchable(edge_attrs="weight")
def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, seed=None):
    r"""Compute betweenness centrality for edges.

    Betweenness centrality of an edge $e$ is the sum over node pairs
    $(s, t)$ of the fraction of shortest $(s, t)$-paths that traverse
    $e$:

    .. math::

       c_B(e) =\sum_{s,t \in V} \frac{\sigma(s, t|e)}{\sigma(s, t)}

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    k : int, optional (default=None)
        If not None, estimate betweenness from ``k`` sampled source
        nodes (``k <= n``). Larger ``k`` gives a better approximation.

    normalized : bool, optional
        If True, values are scaled by $2/(n(n-1))$ for undirected
        graphs and $1/(n(n-1))$ for directed graphs.

    weight : None or string, optional (default=None)
        If None, all edges count as distance 1. Otherwise the name of
        the edge attribute used as a distance.

    seed : integer, random_state, or None (default)
        Random-number state; only used when ``k`` is not None.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    edges : dictionary
        Dictionary mapping each edge to its betweenness centrality.

    See Also
    --------
    betweenness_centrality
    edge_load

    Notes
    -----
    Implements Brandes' algorithm [1]_. Edge weights must be positive;
    zero weights can create infinitely many equal-length paths.

    References
    ----------
    .. [1] A Faster Algorithm for Betweenness Centrality. Ulrik Brandes,
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       https://doi.org/10.1080/0022250X.2001.9990249
    """
    # Accumulate over both nodes and edges; node entries are dropped
    # before returning.
    betweenness = dict.fromkeys(G, 0.0)
    betweenness.update(dict.fromkeys(G.edges(), 0.0))
    sources = G if k is None else seed.sample(list(G.nodes()), k)
    for s in sources:
        # Stage 1: single-source shortest-path data (BFS or Dijkstra).
        if weight is None:
            S, P, sigma, _ = _single_source_shortest_path_basic(G, s)
        else:
            S, P, sigma, _ = _single_source_dijkstra_path_basic(G, s, weight)
        # Stage 2: back-propagate dependencies onto the edges.
        betweenness = _accumulate_edges(betweenness, S, P, sigma, s)
    # Stage 3: keep only edge entries, then normalize.
    for node in G:
        del betweenness[node]
    betweenness = _rescale_e(
        betweenness, len(G), normalized=normalized, directed=G.is_directed()
    )
    if G.is_multigraph():
        # Split each pair's value across its parallel edge keys.
        betweenness = _add_edge_keys(G, betweenness, weight=weight)
    return betweenness
251
+
252
+
253
+ # helpers for betweenness centrality
254
+
255
+
256
+ def _single_source_shortest_path_basic(G, s):
257
+ S = []
258
+ P = {}
259
+ for v in G:
260
+ P[v] = []
261
+ sigma = dict.fromkeys(G, 0.0) # sigma[v]=0 for v in G
262
+ D = {}
263
+ sigma[s] = 1.0
264
+ D[s] = 0
265
+ Q = deque([s])
266
+ while Q: # use BFS to find shortest paths
267
+ v = Q.popleft()
268
+ S.append(v)
269
+ Dv = D[v]
270
+ sigmav = sigma[v]
271
+ for w in G[v]:
272
+ if w not in D:
273
+ Q.append(w)
274
+ D[w] = Dv + 1
275
+ if D[w] == Dv + 1: # this is a shortest path, count paths
276
+ sigma[w] += sigmav
277
+ P[w].append(v) # predecessors
278
+ return S, P, sigma, D
279
+
280
+
281
def _single_source_dijkstra_path_basic(G, s, weight):
    """Brandes' Dijkstra stage: weighted shortest-path data from ``s``.

    Returns ``(S, P, sigma, D)``: nodes in non-decreasing distance
    order, shortest-path predecessors, shortest-path counts, and
    finalized distances.
    """
    weight = _weight_function(G, weight)
    order = []  # S: nodes in order of non-decreasing distance from s
    preds = {node: [] for node in G}
    sigma = dict.fromkeys(G, 0.0)
    dist = {}  # finalized distances
    sigma[s] = 1.0
    seen = {s: 0}  # tentative distances
    tiebreak = count()  # avoids comparing nodes inside the heap
    heap = [(0, next(tiebreak), s, s)]
    while heap:
        d, _, pred, u = heappop(heap)
        if u in dist:
            continue  # already finalized
        sigma[u] += sigma[pred]  # count the paths arriving via pred
        order.append(u)
        dist[u] = d
        for nbr, edgedata in G[u].items():
            cand = d + weight(u, nbr, edgedata)
            if nbr not in dist and (nbr not in seen or cand < seen[nbr]):
                # Strictly shorter route found: restart nbr's path count.
                seen[nbr] = cand
                heappush(heap, (cand, next(tiebreak), u, nbr))
                sigma[nbr] = 0.0
                preds[nbr] = [u]
            elif cand == seen[nbr]:  # another shortest path to nbr
                sigma[nbr] += sigma[u]
                preds[nbr].append(u)
    return order, preds, sigma, dist
315
+
316
+
317
+ def _accumulate_basic(betweenness, S, P, sigma, s):
318
+ delta = dict.fromkeys(S, 0)
319
+ while S:
320
+ w = S.pop()
321
+ coeff = (1 + delta[w]) / sigma[w]
322
+ for v in P[w]:
323
+ delta[v] += sigma[v] * coeff
324
+ if w != s:
325
+ betweenness[w] += delta[w]
326
+ return betweenness, delta
327
+
328
+
329
+ def _accumulate_endpoints(betweenness, S, P, sigma, s):
330
+ betweenness[s] += len(S) - 1
331
+ delta = dict.fromkeys(S, 0)
332
+ while S:
333
+ w = S.pop()
334
+ coeff = (1 + delta[w]) / sigma[w]
335
+ for v in P[w]:
336
+ delta[v] += sigma[v] * coeff
337
+ if w != s:
338
+ betweenness[w] += delta[w] + 1
339
+ return betweenness, delta
340
+
341
+
342
+ def _accumulate_edges(betweenness, S, P, sigma, s):
343
+ delta = dict.fromkeys(S, 0)
344
+ while S:
345
+ w = S.pop()
346
+ coeff = (1 + delta[w]) / sigma[w]
347
+ for v in P[w]:
348
+ c = sigma[v] * coeff
349
+ if (v, w) not in betweenness:
350
+ betweenness[(w, v)] += c
351
+ else:
352
+ betweenness[(v, w)] += c
353
+ delta[v] += c
354
+ if w != s:
355
+ betweenness[w] += delta[w]
356
+ return betweenness
357
+
358
+
359
+ def _rescale(betweenness, n, normalized, directed=False, k=None, endpoints=False):
360
+ if normalized:
361
+ if endpoints:
362
+ if n < 2:
363
+ scale = None # no normalization
364
+ else:
365
+ # Scale factor should include endpoint nodes
366
+ scale = 1 / (n * (n - 1))
367
+ elif n <= 2:
368
+ scale = None # no normalization b=0 for all nodes
369
+ else:
370
+ scale = 1 / ((n - 1) * (n - 2))
371
+ else: # rescale by 2 for undirected graphs
372
+ if not directed:
373
+ scale = 0.5
374
+ else:
375
+ scale = None
376
+ if scale is not None:
377
+ if k is not None:
378
+ scale = scale * n / k
379
+ for v in betweenness:
380
+ betweenness[v] *= scale
381
+ return betweenness
382
+
383
+
384
+ def _rescale_e(betweenness, n, normalized, directed=False, k=None):
385
+ if normalized:
386
+ if n <= 1:
387
+ scale = None # no normalization b=0 for all nodes
388
+ else:
389
+ scale = 1 / (n * (n - 1))
390
+ else: # rescale by 2 for undirected graphs
391
+ if not directed:
392
+ scale = 0.5
393
+ else:
394
+ scale = None
395
+ if scale is not None:
396
+ if k is not None:
397
+ scale = scale * n / k
398
+ for v in betweenness:
399
+ betweenness[v] *= scale
400
+ return betweenness
401
+
402
+
403
@not_implemented_for("graph")
def _add_edge_keys(G, betweenness, weight=None):
    r"""Distribute multigraph edge betweenness over parallel edge keys.

    Parameters
    ----------
    G : NetworkX multigraph.

    betweenness : dictionary
        Maps (u, v) node pairs to betweenness centrality values.

    weight : string or function
        See `_weight_function` for details. Defaults to `None`.

    Returns
    -------
    edges : dictionary
        Betweenness keyed by (u, v, key); each pair's value is split
        evenly among its minimum-weight parallel edges.
    """
    _weight = _weight_function(G, weight)

    edge_bc = dict.fromkeys(G.edges, 0.0)
    for u, v in betweenness:
        data = G[u][v]
        pair_wt = _weight(u, v, data)
        # Keys of the parallel edges that realize the pair's weight.
        keys = [key for key in data if _weight(u, v, {key: data[key]}) == pair_wt]
        share = betweenness[(u, v)] / len(keys)
        for key in keys:
            edge_bc[(u, v, key)] = share

    return edge_bc
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/betweenness_subset.py ADDED
@@ -0,0 +1,275 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Betweenness centrality measures for subsets of nodes."""
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms.centrality.betweenness import (
5
+ _add_edge_keys,
6
+ )
7
+ from networkx.algorithms.centrality.betweenness import (
8
+ _single_source_dijkstra_path_basic as dijkstra,
9
+ )
10
+ from networkx.algorithms.centrality.betweenness import (
11
+ _single_source_shortest_path_basic as shortest_path,
12
+ )
13
+
14
+ __all__ = [
15
+ "betweenness_centrality_subset",
16
+ "edge_betweenness_centrality_subset",
17
+ ]
18
+
19
+
20
@nx._dispatchable(edge_attrs="weight")
def betweenness_centrality_subset(G, sources, targets, normalized=False, weight=None):
    r"""Compute betweenness centrality for a subset of nodes.

    .. math::

       c_B(v) =\sum_{s\in S, t \in T} \frac{\sigma(s, t|v)}{\sigma(s, t)}

    where $S$ is the set of sources, $T$ the set of targets,
    $\sigma(s, t)$ the number of shortest $(s, t)$-paths, and
    $\sigma(s, t|v)$ counts those through $v \notin \{s, t\}$.

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    sources: list of nodes
        Nodes to use as sources for shortest paths in betweenness

    targets: list of nodes
        Nodes to use as targets for shortest paths in betweenness

    normalized : bool, optional
        If True, values are scaled by $2/((n-1)(n-2))$ for undirected
        graphs and $1/((n-1)(n-2))$ for directed graphs.

    weight : None or string, optional (default=None)
        If None, all edges count as distance 1. Otherwise the name of
        the edge attribute used as a distance.

    Returns
    -------
    nodes : dictionary
        Dictionary mapping each node to its betweenness centrality.

    See Also
    --------
    edge_betweenness_centrality
    load_centrality

    Notes
    -----
    Uses Brandes' algorithm [1]_. Edge weights must be positive. The
    normalization is chosen so that ``betweenness_centrality(G)`` equals
    ``betweenness_centrality_subset(G, sources=G.nodes(), targets=G.nodes())``.
    When the source and target subsets differ, undirected paths are
    counted in a single direction only.

    References
    ----------
    .. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       https://doi.org/10.1080/0022250X.2001.9990249
    """
    b = dict.fromkeys(G, 0.0)
    for s in sources:
        # Single-source shortest paths: BFS when unweighted, else Dijkstra.
        if weight is None:
            S, P, sigma, _ = shortest_path(G, s)
        else:
            S, P, sigma, _ = dijkstra(G, s, weight)
        # Accumulate dependencies restricted to the target set.
        b = _accumulate_subset(b, S, P, sigma, s, targets)
    return _rescale(b, len(G), normalized=normalized, directed=G.is_directed())
116
+
117
+
118
@nx._dispatchable(edge_attrs="weight")
def edge_betweenness_centrality_subset(
    G, sources, targets, normalized=False, weight=None
):
    r"""Compute betweenness centrality for edges for a subset of nodes.

    .. math::

       c_B(v) =\sum_{s\in S,t \in T} \frac{\sigma(s, t|e)}{\sigma(s, t)}

    where $S$ is the set of sources, $T$ the set of targets,
    $\sigma(s, t)$ the number of shortest $(s, t)$-paths, and
    $\sigma(s, t|e)$ counts those traversing edge $e$.

    Parameters
    ----------
    G : graph
        A networkx graph.

    sources: list of nodes
        Nodes to use as sources for shortest paths in betweenness

    targets: list of nodes
        Nodes to use as targets for shortest paths in betweenness

    normalized : bool, optional
        If True, values are scaled by `2/(n(n-1))` for undirected
        graphs and `1/(n(n-1))` for directed graphs.

    weight : None or string, optional (default=None)
        If None, all edges count as distance 1. Otherwise the name of
        the edge attribute used as a distance.

    Returns
    -------
    edges : dictionary
        Dictionary mapping each edge to its betweenness centrality.

    See Also
    --------
    betweenness_centrality
    edge_load

    Notes
    -----
    Uses Brandes' algorithm [1]_. Edge weights must be positive. The
    normalization matches ``edge_betweenness_centrality`` so that the
    full-node-set subset call reproduces it.

    References
    ----------
    .. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       https://doi.org/10.1080/0022250X.2001.9990249
    """
    # Accumulate over both nodes and edges; node entries are dropped
    # before returning.
    b = dict.fromkeys(G, 0.0)
    b.update(dict.fromkeys(G.edges(), 0.0))
    for s in sources:
        # Single-source shortest paths: BFS when unweighted, else Dijkstra.
        if weight is None:
            S, P, sigma, _ = shortest_path(G, s)
        else:
            S, P, sigma, _ = dijkstra(G, s, weight)
        b = _accumulate_edges_subset(b, S, P, sigma, s, targets)
    for node in G:
        del b[node]
    b = _rescale_e(b, len(G), normalized=normalized, directed=G.is_directed())
    if G.is_multigraph():
        # Split each pair's value across its parallel edge keys.
        b = _add_edge_keys(G, b, weight=weight)
    return b
203
+
204
+
205
+ def _accumulate_subset(betweenness, S, P, sigma, s, targets):
206
+ delta = dict.fromkeys(S, 0.0)
207
+ target_set = set(targets) - {s}
208
+ while S:
209
+ w = S.pop()
210
+ if w in target_set:
211
+ coeff = (delta[w] + 1.0) / sigma[w]
212
+ else:
213
+ coeff = delta[w] / sigma[w]
214
+ for v in P[w]:
215
+ delta[v] += sigma[v] * coeff
216
+ if w != s:
217
+ betweenness[w] += delta[w]
218
+ return betweenness
219
+
220
+
221
+ def _accumulate_edges_subset(betweenness, S, P, sigma, s, targets):
222
+ """edge_betweenness_centrality_subset helper."""
223
+ delta = dict.fromkeys(S, 0)
224
+ target_set = set(targets)
225
+ while S:
226
+ w = S.pop()
227
+ for v in P[w]:
228
+ if w in target_set:
229
+ c = (sigma[v] / sigma[w]) * (1.0 + delta[w])
230
+ else:
231
+ c = delta[w] / len(P[w])
232
+ if (v, w) not in betweenness:
233
+ betweenness[(w, v)] += c
234
+ else:
235
+ betweenness[(v, w)] += c
236
+ delta[v] += c
237
+ if w != s:
238
+ betweenness[w] += delta[w]
239
+ return betweenness
240
+
241
+
242
+ def _rescale(betweenness, n, normalized, directed=False):
243
+ """betweenness_centrality_subset helper."""
244
+ if normalized:
245
+ if n <= 2:
246
+ scale = None # no normalization b=0 for all nodes
247
+ else:
248
+ scale = 1.0 / ((n - 1) * (n - 2))
249
+ else: # rescale by 2 for undirected graphs
250
+ if not directed:
251
+ scale = 0.5
252
+ else:
253
+ scale = None
254
+ if scale is not None:
255
+ for v in betweenness:
256
+ betweenness[v] *= scale
257
+ return betweenness
258
+
259
+
260
+ def _rescale_e(betweenness, n, normalized, directed=False):
261
+ """edge_betweenness_centrality_subset helper."""
262
+ if normalized:
263
+ if n <= 1:
264
+ scale = None # no normalization b=0 for all nodes
265
+ else:
266
+ scale = 1.0 / (n * (n - 1))
267
+ else: # rescale by 2 for undirected graphs
268
+ if not directed:
269
+ scale = 0.5
270
+ else:
271
+ scale = None
272
+ if scale is not None:
273
+ for v in betweenness:
274
+ betweenness[v] *= scale
275
+ return betweenness
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/closeness.py ADDED
@@ -0,0 +1,282 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Closeness centrality measures.
3
+ """
4
+
5
+ import functools
6
+
7
+ import networkx as nx
8
+ from networkx.exception import NetworkXError
9
+ from networkx.utils.decorators import not_implemented_for
10
+
11
+ __all__ = ["closeness_centrality", "incremental_closeness_centrality"]
12
+
13
+
14
@nx._dispatchable(edge_attrs="distance")
def closeness_centrality(G, u=None, distance=None, wf_improved=True):
    r"""Compute closeness centrality for nodes.

    Closeness centrality [1]_ of a node `u` is the reciprocal of the
    average shortest-path distance to `u` over all `n-1` reachable nodes:

    .. math::

        C(u) = \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},

    For directed graphs the *incoming* distance to `u` is used; apply
    the function to ``G.reverse()`` for outward distance. With
    ``wf_improved`` the Wasserman-Faust variant additionally scales by
    the fraction of nodes reachable, so nodes in small components get
    smaller values [2]_:

    .. math::

        C_{WF}(u) = \frac{n-1}{N-1} \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},

    Parameters
    ----------
    G : graph
        A NetworkX graph

    u : node, optional
        Return only the value for node u

    distance : edge attribute key, optional (default=None)
        Use the specified edge attribute as the edge distance in
        shortest path calculations. If `None` (the default) all edges
        have a distance of 1. Absent edge attributes are assigned a
        distance of 1; no check is performed that edges carry the
        attribute.

    wf_improved : bool, optional (default=True)
        If True, scale by the fraction of nodes reachable
        (Wasserman-Faust). Identical to the original formula on a
        single-component graph.

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with closeness centrality as the value.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.closeness_centrality(G)
    {0: 1.0, 1: 1.0, 2: 0.75, 3: 0.75}

    See Also
    --------
    betweenness_centrality, load_centrality, eigenvector_centrality,
    degree_centrality, incremental_closeness_centrality

    Notes
    -----
    Values are normalized to `(n-1)/(|G|-1)` where `n` is the number of
    nodes in the connected part containing the node, so a disconnected
    graph is handled per component. When ``distance`` is set, shortest
    paths are computed with Dijkstra's algorithm using that attribute
    as edge weight.

    References
    ----------
    .. [1] Linton C. Freeman: Centrality in networks: I.
       Conceptual clarification. Social Networks 1:215-239, 1979.
       https://doi.org/10.1016/0378-8733(78)90021-7
    .. [2] pg. 201 of Wasserman, S. and Faust, K.,
       Social Network Analysis: Methods and Applications, 1994,
       Cambridge University Press.
    """
    if G.is_directed():
        G = G.reverse()  # closeness uses incoming distance (reversed view)

    if distance is None:
        path_length = nx.single_source_shortest_path_length
    else:
        # Dijkstra with the given edge attribute as the distance.
        path_length = functools.partial(
            nx.single_source_dijkstra_path_length, weight=distance
        )

    nodes = G.nodes if u is None else [u]
    closeness = {}
    for node in nodes:
        sp = path_length(G, node)
        total_dist = sum(sp.values())
        n_total = len(G)
        value = 0.0
        if total_dist > 0.0 and n_total > 1:
            reachable = len(sp) - 1.0
            value = reachable / total_dist
            if wf_improved:
                # Wasserman-Faust: scale by the fraction of reachable nodes.
                value *= reachable / (n_total - 1)
        closeness[node] = value
    if u is not None:
        return closeness[u]
    return closeness
+ return closeness_dict
138
+
139
+
140
@not_implemented_for("directed")
@nx._dispatchable(mutates_input=True)
def incremental_closeness_centrality(
    G, edge, prev_cc=None, insertion=True, wf_improved=True
):
    r"""Incremental closeness centrality for nodes.

    Compute closeness centrality for nodes using level-based work filtering
    as described in Incremental Algorithms for Closeness Centrality by Sariyuce et al.

    Level-based work filtering detects unnecessary updates to the closeness
    centrality and filters them out.

    ---
    From "Incremental Algorithms for Closeness Centrality":

    Theorem 1: Let :math:`G = (V, E)` be a graph and u and v be two vertices in V
    such that there is no edge (u, v) in E. Let :math:`G' = (V, E \cup uv)`
    Then :math:`cc[s] = cc'[s]` if and only if :math:`\left|dG(s, u) - dG(s, v)\right| \leq 1`.

    Where :math:`dG(u, v)` denotes the length of the shortest path between
    two vertices u, v in a graph G, cc[s] is the closeness centrality for a
    vertex s in V, and cc'[s] is the closeness centrality for a
    vertex s in V, with the (u, v) edge added.
    ---

    We use Theorem 1 to filter out updates when adding or removing an edge.
    When adding an edge (u, v), we compute the shortest path lengths from all
    other nodes to u and to v before the node is added. When removing an edge,
    we compute the shortest path lengths after the edge is removed. Then we
    apply Theorem 1 to use previously computed closeness centrality for nodes
    where :math:`\left|dG(s, u) - dG(s, v)\right| \leq 1`. This works only for
    undirected, unweighted graphs; the distance argument is not supported.

    Closeness centrality [1]_ of a node `u` is the reciprocal of the
    sum of the shortest path distances from `u` to all `n-1` other nodes.
    Since the sum of distances depends on the number of nodes in the
    graph, closeness is normalized by the sum of minimum possible
    distances `n-1`.

    .. math::

        C(u) = \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},

    where `d(v, u)` is the shortest-path distance between `v` and `u`,
    and `n` is the number of nodes in the graph.

    Notice that higher values of closeness indicate higher centrality.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    edge : tuple
        The modified edge (u, v) in the graph.

    prev_cc : dictionary
        The previous closeness centrality for all nodes in the graph.

    insertion : bool, optional
        If True (default) the edge was inserted, otherwise it was deleted from the graph.

    wf_improved : bool, optional (default=True)
        If True, scale by the fraction of nodes reachable. This gives the
        Wasserman and Faust improved formula. For single component graphs
        it is the same as the original formula.

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with closeness centrality as the value.

    See Also
    --------
    betweenness_centrality, load_centrality, eigenvector_centrality,
    degree_centrality, closeness_centrality

    Notes
    -----
    The closeness centrality is normalized to `(n-1)/(|G|-1)` where
    `n` is the number of nodes in the connected part of graph
    containing the node. If the graph is not completely connected,
    this algorithm computes the closeness centrality for each
    connected part separately.

    References
    ----------
    .. [1] Freeman, L.C., 1979. Centrality in networks: I.
        Conceptual clarification. Social Networks 1, 215--239.
        https://doi.org/10.1016/0378-8733(78)90021-7
    .. [2] Sariyuce, A.E. ; Kaya, K. ; Saule, E. ; Catalyiirek, U.V. Incremental
        Algorithms for Closeness Centrality. 2013 IEEE International Conference on Big Data
        http://sariyuce.com/papers/bigdata13.pdf
    """
    # Theorem 1 only lets us reuse prev_cc entries if it describes exactly
    # this node set; anything else would silently give wrong results.
    if prev_cc is not None and set(prev_cc.keys()) != set(G.nodes()):
        raise NetworkXError("prev_cc and G do not have the same nodes")

    # Unpack edge
    (u, v) = edge
    # Unweighted BFS only: the filtering theorem assumes hop distances.
    path_length = nx.single_source_shortest_path_length

    if insertion:
        # For edge insertion, we want shortest paths before the edge is inserted
        du = path_length(G, u)
        dv = path_length(G, v)

        # NOTE: mutates G in place (declared via mutates_input=True);
        # the mutation is undone at the end of the function.
        G.add_edge(u, v)
    else:
        G.remove_edge(u, v)

        # For edge removal, we want shortest paths after the edge is removed
        du = path_length(G, u)
        dv = path_length(G, v)

    # Without a previous result there is nothing to filter against:
    # fall back to a full recomputation on the modified graph.
    if prev_cc is None:
        return nx.closeness_centrality(G)

    nodes = G.nodes()
    closeness_dict = {}
    for n in nodes:
        if n in du and n in dv and abs(du[n] - dv[n]) <= 1:
            # Theorem 1: the modified edge cannot have changed this
            # node's closeness, so reuse the cached value.
            closeness_dict[n] = prev_cc[n]
        else:
            # Recompute from scratch for this node (same formula as
            # closeness_centrality).
            sp = path_length(G, n)
            totsp = sum(sp.values())
            len_G = len(G)
            _closeness_centrality = 0.0
            if totsp > 0.0 and len_G > 1:
                _closeness_centrality = (len(sp) - 1.0) / totsp
                # normalize to number of nodes-1 in connected part
                if wf_improved:
                    s = (len(sp) - 1.0) / (len_G - 1)
                    _closeness_centrality *= s
            closeness_dict[n] = _closeness_centrality

    # Leave the graph as we found it
    if insertion:
        G.remove_edge(u, v)
    else:
        G.add_edge(u, v)

    return closeness_dict
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/current_flow_betweenness_subset.py ADDED
@@ -0,0 +1,227 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Current-flow betweenness centrality measures for subsets of nodes."""
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms.centrality.flow_matrix import flow_matrix_row
5
+ from networkx.utils import not_implemented_for, reverse_cuthill_mckee_ordering
6
+
7
+ __all__ = [
8
+ "current_flow_betweenness_centrality_subset",
9
+ "edge_current_flow_betweenness_centrality_subset",
10
+ ]
11
+
12
+
13
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def current_flow_betweenness_centrality_subset(
    G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu"
):
    r"""Compute current-flow betweenness centrality for subsets of nodes.

    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.

    Current-flow betweenness centrality is also known as
    random-walk betweenness centrality [2]_.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    sources: list of nodes
        Nodes to use as sources for current

    targets: list of nodes
        Nodes to use as sinks for current

    normalized : bool, optional (default=True)
        If True the betweenness values are normalized by b=b/(n-1)(n-2) where
        n is the number of nodes in G.

    weight : string or None, optional (default=None)
        Key for edge data used as the edge weight.
        If None, then use 1 as each edge weight.
        The weight reflects the capacity or the strength of the
        edge.

    dtype: data type (float)
        Default data type for internal matrices.
        Set to np.float32 for lower memory consumption.

    solver: string (default='lu')
        Type of linear solver to use for computing the flow matrix.
        Options are "full" (uses most memory), "lu" (recommended), and
        "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    approximate_current_flow_betweenness_centrality
    betweenness_centrality
    edge_betweenness_centrality
    edge_current_flow_betweenness_centrality

    Notes
    -----
    Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
    time [1]_, where $I(n-1)$ is the time needed to compute the
    inverse Laplacian. For a full matrix this is $O(n^3)$ but using
    sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
    Laplacian matrix condition number.

    The space required is $O(nw)$ where $w$ is the width of the sparse
    Laplacian matrix. Worse case is $w=n$ for $O(n^2)$.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm. Unspecified weights are set to 1.

    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       https://doi.org/10.1007/978-3-540-31856-9_44

    .. [2] A measure of betweenness centrality based on random walks,
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    # Deferred import: numpy is an optional dependency, required by
    # flow_matrix_row; importing here fails early with a clear error.
    import numpy as np

    from networkx.utils import reverse_cuthill_mckee_ordering

    # The electrical-flow model is only defined on a connected graph
    # (the Laplacian system has no solution otherwise).
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    N = G.number_of_nodes()
    # Reverse Cuthill-McKee ordering reduces the bandwidth of the sparse
    # Laplacian, which speeds up the linear solvers used below.
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    mapping = dict(zip(ordering, range(N)))
    H = nx.relabel_nodes(G, mapping)
    betweenness = dict.fromkeys(H, 0.0)  # b[n]=0 for n in H
    # flow_matrix_row yields, per edge (s, t), a row of currents for every
    # source/target pair; each endpoint gets half the absolute current.
    for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
        for ss in sources:
            i = mapping[ss]
            for tt in targets:
                j = mapping[tt]
                betweenness[s] += 0.5 * abs(row.item(i) - row.item(j))
                betweenness[t] += 0.5 * abs(row.item(i) - row.item(j))
    if normalized:
        nb = (N - 1.0) * (N - 2.0)  # normalization factor
    else:
        nb = 2.0
    for node in H:
        # Constant offset term comes from the normalization in [1].
        betweenness[node] = betweenness[node] / nb + 1.0 / (2 - N)
    # Map integer labels back to the original node labels.
    return {ordering[node]: value for node, value in betweenness.items()}
121
+
122
+
123
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def edge_current_flow_betweenness_centrality_subset(
    G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu"
):
    r"""Compute current-flow betweenness centrality for edges using subsets
    of nodes.

    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.

    Current-flow betweenness centrality is also known as
    random-walk betweenness centrality [2]_.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    sources: list of nodes
        Nodes to use as sources for current

    targets: list of nodes
        Nodes to use as sinks for current

    normalized : bool, optional (default=True)
        If True the betweenness values are normalized by b=b/(n-1)(n-2) where
        n is the number of nodes in G.

    weight : string or None, optional (default=None)
        Key for edge data used as the edge weight.
        If None, then use 1 as each edge weight.
        The weight reflects the capacity or the strength of the
        edge.

    dtype: data type (float)
        Default data type for internal matrices.
        Set to np.float32 for lower memory consumption.

    solver: string (default='lu')
        Type of linear solver to use for computing the flow matrix.
        Options are "full" (uses most memory), "lu" (recommended), and
        "cg" (uses least memory).

    Returns
    -------
    nodes : dict
        Dictionary of edge tuples with betweenness centrality as the value.

    See Also
    --------
    betweenness_centrality
    edge_betweenness_centrality
    current_flow_betweenness_centrality

    Notes
    -----
    Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
    time [1]_, where $I(n-1)$ is the time needed to compute the
    inverse Laplacian. For a full matrix this is $O(n^3)$ but using
    sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
    Laplacian matrix condition number.

    The space required is $O(nw)$ where $w$ is the width of the sparse
    Laplacian matrix. Worse case is $w=n$ for $O(n^2)$.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm. Unspecified weights are set to 1.

    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       https://doi.org/10.1007/978-3-540-31856-9_44

    .. [2] A measure of betweenness centrality based on random walks,
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    # Deferred import: numpy is an optional dependency, required by
    # flow_matrix_row; importing here fails early with a clear error.
    import numpy as np

    # The electrical-flow model is only defined on a connected graph.
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    N = G.number_of_nodes()
    # Reverse Cuthill-McKee ordering reduces the bandwidth of the sparse
    # Laplacian, which speeds up the linear solvers used below.
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    mapping = dict(zip(ordering, range(N)))
    H = nx.relabel_nodes(G, mapping)
    # Canonicalize each undirected edge as a sorted tuple so it has a
    # single dictionary key regardless of traversal direction.
    edges = (tuple(sorted((u, v))) for u, v in H.edges())
    betweenness = dict.fromkeys(edges, 0.0)
    if normalized:
        nb = (N - 1.0) * (N - 2.0)  # normalization factor
    else:
        nb = 2.0
    # flow_matrix_row yields one row of currents per edge e; accumulate the
    # absolute current for every source/target pair, then normalize.
    for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
        for ss in sources:
            i = mapping[ss]
            for tt in targets:
                j = mapping[tt]
                betweenness[e] += 0.5 * abs(row.item(i) - row.item(j))
        betweenness[e] /= nb
    # Map integer edge labels back to the original node labels.
    return {(ordering[s], ordering[t]): value for (s, t), value in betweenness.items()}
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/current_flow_closeness.py ADDED
@@ -0,0 +1,96 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Current-flow closeness centrality measures."""
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms.centrality.flow_matrix import (
5
+ CGInverseLaplacian,
6
+ FullInverseLaplacian,
7
+ SuperLUInverseLaplacian,
8
+ )
9
+ from networkx.utils import not_implemented_for, reverse_cuthill_mckee_ordering
10
+
11
+ __all__ = ["current_flow_closeness_centrality", "information_centrality"]
12
+
13
+
14
+ @not_implemented_for("directed")
15
+ @nx._dispatchable(edge_attrs="weight")
16
+ def current_flow_closeness_centrality(G, weight=None, dtype=float, solver="lu"):
17
+ """Compute current-flow closeness centrality for nodes.
18
+
19
+ Current-flow closeness centrality is variant of closeness
20
+ centrality based on effective resistance between nodes in
21
+ a network. This metric is also known as information centrality.
22
+
23
+ Parameters
24
+ ----------
25
+ G : graph
26
+ A NetworkX graph.
27
+
28
+ weight : None or string, optional (default=None)
29
+ If None, all edge weights are considered equal.
30
+ Otherwise holds the name of the edge attribute used as weight.
31
+ The weight reflects the capacity or the strength of the
32
+ edge.
33
+
34
+ dtype: data type (default=float)
35
+ Default data type for internal matrices.
36
+ Set to np.float32 for lower memory consumption.
37
+
38
+ solver: string (default='lu')
39
+ Type of linear solver to use for computing the flow matrix.
40
+ Options are "full" (uses most memory), "lu" (recommended), and
41
+ "cg" (uses least memory).
42
+
43
+ Returns
44
+ -------
45
+ nodes : dictionary
46
+ Dictionary of nodes with current flow closeness centrality as the value.
47
+
48
+ See Also
49
+ --------
50
+ closeness_centrality
51
+
52
+ Notes
53
+ -----
54
+ The algorithm is from Brandes [1]_.
55
+
56
+ See also [2]_ for the original definition of information centrality.
57
+
58
+ References
59
+ ----------
60
+ .. [1] Ulrik Brandes and Daniel Fleischer,
61
+ Centrality Measures Based on Current Flow.
62
+ Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
63
+ LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
64
+ https://doi.org/10.1007/978-3-540-31856-9_44
65
+
66
+ .. [2] Karen Stephenson and Marvin Zelen:
67
+ Rethinking centrality: Methods and examples.
68
+ Social Networks 11(1):1-37, 1989.
69
+ https://doi.org/10.1016/0378-8733(89)90016-6
70
+ """
71
+ if not nx.is_connected(G):
72
+ raise nx.NetworkXError("Graph not connected.")
73
+ solvername = {
74
+ "full": FullInverseLaplacian,
75
+ "lu": SuperLUInverseLaplacian,
76
+ "cg": CGInverseLaplacian,
77
+ }
78
+ N = G.number_of_nodes()
79
+ ordering = list(reverse_cuthill_mckee_ordering(G))
80
+ # make a copy with integer labels according to rcm ordering
81
+ # this could be done without a copy if we really wanted to
82
+ H = nx.relabel_nodes(G, dict(zip(ordering, range(N))))
83
+ betweenness = dict.fromkeys(H, 0.0) # b[n]=0 for n in H
84
+ N = H.number_of_nodes()
85
+ L = nx.laplacian_matrix(H, nodelist=range(N), weight=weight).asformat("csc")
86
+ L = L.astype(dtype)
87
+ C2 = solvername[solver](L, width=1, dtype=dtype) # initialize solver
88
+ for v in H:
89
+ col = C2.get_row(v)
90
+ for w in H:
91
+ betweenness[v] += col.item(v) - 2 * col.item(w)
92
+ betweenness[w] += col.item(v)
93
+ return {ordering[node]: 1 / value for node, value in betweenness.items()}
94
+
95
+
96
+ information_centrality = current_flow_closeness_centrality
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/degree_alg.py ADDED
@@ -0,0 +1,150 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Degree centrality measures."""
2
+
3
+ import networkx as nx
4
+ from networkx.utils.decorators import not_implemented_for
5
+
6
+ __all__ = ["degree_centrality", "in_degree_centrality", "out_degree_centrality"]
7
+
8
+
9
@nx._dispatchable
def degree_centrality(G):
    """Return the degree centrality of every node in `G`.

    The degree centrality of a node is its degree divided by ``n - 1``,
    the maximum degree possible in a simple graph on ``n`` nodes.

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    Returns
    -------
    dict
        Mapping from each node to its degree centrality.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.degree_centrality(G)
    {0: 1.0, 1: 1.0, 2: 0.6666666666666666, 3: 0.6666666666666666}

    See Also
    --------
    betweenness_centrality, load_centrality, eigenvector_centrality

    Notes
    -----
    For multigraphs or graphs with self loops the degree of a node can
    exceed ``n - 1``, so centrality values greater than 1 are possible.
    """
    node_count = len(G)
    # With zero or one node there is nothing to normalize by; by
    # convention every node present gets centrality 1.
    if node_count <= 1:
        return dict.fromkeys(G, 1)

    scale = 1.0 / (node_count - 1.0)
    return {node: degree * scale for node, degree in G.degree()}
51
+
52
+
53
@not_implemented_for("undirected")
@nx._dispatchable
def in_degree_centrality(G):
    """Return the in-degree centrality of every node in `G`.

    The in-degree centrality of a node is its in-degree divided by
    ``n - 1``, the maximum possible in-degree in a simple digraph on
    ``n`` nodes.

    Parameters
    ----------
    G : graph
        A NetworkX directed graph.

    Returns
    -------
    dict
        Mapping from each node to its in-degree centrality.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.in_degree_centrality(G)
    {0: 0.0, 1: 0.3333333333333333, 2: 0.6666666666666666, 3: 0.6666666666666666}

    See Also
    --------
    degree_centrality, out_degree_centrality

    Notes
    -----
    For multigraphs or graphs with self loops the in-degree of a node
    can exceed ``n - 1``, so centrality values greater than 1 are
    possible.
    """
    node_count = len(G)
    # With zero or one node there is nothing to normalize by; by
    # convention every node present gets centrality 1.
    if node_count <= 1:
        return dict.fromkeys(G, 1)

    scale = 1.0 / (node_count - 1.0)
    return {node: degree * scale for node, degree in G.in_degree()}
101
+
102
+
103
@not_implemented_for("undirected")
@nx._dispatchable
def out_degree_centrality(G):
    """Return the out-degree centrality of every node in `G`.

    The out-degree centrality of a node is its out-degree divided by
    ``n - 1``, the maximum possible out-degree in a simple digraph on
    ``n`` nodes.

    Parameters
    ----------
    G : graph
        A NetworkX directed graph.

    Returns
    -------
    dict
        Mapping from each node to its out-degree centrality.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.out_degree_centrality(G)
    {0: 1.0, 1: 0.6666666666666666, 2: 0.0, 3: 0.0}

    See Also
    --------
    degree_centrality, in_degree_centrality

    Notes
    -----
    For multigraphs or graphs with self loops the out-degree of a node
    can exceed ``n - 1``, so centrality values greater than 1 are
    possible.
    """
    node_count = len(G)
    # With zero or one node there is nothing to normalize by; by
    # convention every node present gets centrality 1.
    if node_count <= 1:
        return dict.fromkeys(G, 1)

    scale = 1.0 / (node_count - 1.0)
    return {node: degree * scale for node, degree in G.out_degree()}
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/eigenvector.py ADDED
@@ -0,0 +1,357 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for computing eigenvector centrality."""
2
+
3
+ import math
4
+
5
+ import networkx as nx
6
+ from networkx.utils import not_implemented_for
7
+
8
+ __all__ = ["eigenvector_centrality", "eigenvector_centrality_numpy"]
9
+
10
+
11
@not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight")
def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None, weight=None):
    r"""Compute the eigenvector centrality for the graph G.

    The centrality of node $i$ is the $i$-th entry of a nonnegative left
    eigenvector $x$ of the adjacency matrix $A$ associated with the
    eigenvalue $\lambda$ of maximum modulus:

    .. math::

        \lambda x^T = x^T A,
        \qquad\text{equivalently}\qquad
        \lambda x_i = \sum_{j\to i} x_j.

    In other words, a node is central in proportion to the centrality of
    its predecessors.  For undirected graphs $x$ also satisfies the
    right-eigenvector equation $Ax = \lambda x$.  By the
    Perron--Frobenius theorem the eigenvector is unique and strictly
    positive whenever G is (strongly) connected; otherwise some entries
    may be zero.

    Parameters
    ----------
    G : graph
        A networkx graph.

    max_iter : integer, optional (default=100)
        Maximum number of power iterations.

    tol : float, optional (default=1.0e-6)
        Error tolerance used to check convergence: iteration stops once
        the L1 change between successive vectors is below
        ``G.number_of_nodes() * tol``.

    nstart : dictionary, optional (default=None)
        Starting vector for the power iteration, keyed by node.  It must
        have a nonzero projection on the dominant eigenvector; the
        default all-ones vector is always a safe choice.

    weight : None or string, optional (default=None)
        If None, every edge has weight 1.  Otherwise the name of the
        edge attribute to use as connection strength.

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with eigenvector centrality as the value.
        The vector has unit Euclidean norm and nonnegative entries.

    Raises
    ------
    NetworkXPointlessConcept
        If the graph G is the null graph.

    NetworkXError
        If each value in `nstart` is zero.

    PowerIterationFailedConvergence
        If the power iteration does not converge within `max_iter`
        iterations at the requested tolerance.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> centrality = nx.eigenvector_centrality(G)
    >>> sorted((v, f"{c:0.2f}") for v, c in centrality.items())
    [(0, '0.37'), (1, '0.60'), (2, '0.60'), (3, '0.37')]

    See Also
    --------
    eigenvector_centrality_numpy
    :func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
    :func:`~networkx.algorithms.link_analysis.hits_alg.hits`

    Notes
    -----
    This function computes the left dominant eigenvector, i.e. it sums
    the centrality of predecessors; to sum over successors, pass
    ``G.reverse()``.  The iteration multiplies by $(A + I)$ rather than
    $A$: the shift leaves the eigenvectors unchanged while guaranteeing
    convergence even when the eigenvalue of maximum modulus is negative.
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept(
            "cannot compute centrality for the null graph"
        )
    # Default starting point: the all-ones vector, which always has a
    # nonzero projection on the dominant eigenvector.
    if nstart is None:
        nstart = {node: 1 for node in G}
    if all(value == 0 for value in nstart.values()):
        raise nx.NetworkXError("initial vector cannot have all zero values")
    # Scale the starting vector to sum 1; the all-zero case was rejected
    # above, so the denominator is nonzero.
    total = sum(nstart.values())
    vec = {node: value / total for node, value in nstart.items()}
    n_nodes = G.number_of_nodes()

    for _ in range(max_iter):
        prev = vec
        # Seed with the previous vector: this is the I part of (A + I).
        vec = dict(prev)
        # Left multiplication: push each node's score to its successors.
        for node, prev_score in prev.items():
            for nbr, data in G[node].items():
                edge_weight = data.get(weight, 1) if weight else 1
                vec[nbr] += prev_score * edge_weight
        # Renormalize to unit Euclidean length.  Perron--Frobenius rules
        # out a zero norm mathematically; guard against round-off anyway.
        norm = math.hypot(*vec.values()) or 1
        vec = {node: score / norm for node, score in vec.items()}
        # Converged when the L1 change drops below n * tol.
        if sum(abs(vec[node] - prev[node]) for node in vec) < n_nodes * tol:
            return vec

    raise nx.PowerIterationFailedConvergence(max_iter)
195
+
196
+
197
@nx._dispatchable(edge_attrs="weight")
def eigenvector_centrality_numpy(G, weight=None, max_iter=50, tol=0):
    r"""Compute the eigenvector centrality for the graph `G`.

    The centrality of node $i$ is the $i$-th entry of the dominant left
    eigenvector $x$ of the adjacency matrix $A$, i.e. the positive solution of

    .. math::

        \lambda x^T = x^T A,

    so each node accumulates the centrality of its predecessors. For
    undirected graphs this coincides with the familiar right-eigenvector
    equation $Ax = \lambda x$. By the Perron--Frobenius theorem the dominant
    eigenvector of a (strongly) connected graph is unique up to scale and
    strictly positive; for disconnected graphs the eigenvector is not unique
    and round-off makes the result non-deterministic, so only (strongly)
    connected graphs are accepted.

    Parameters
    ----------
    G : graph
        A connected NetworkX graph.

    weight : None or string, optional (default=None)
        If ``None``, all edge weights are considered equal. Otherwise holds
        the name of the edge attribute used as weight, interpreted as the
        connection strength.

    max_iter : integer, optional (default=50)
        Maximum number of Arnoldi update iterations allowed.

    tol : float, optional (default=0)
        Relative accuracy for eigenvalues (stopping criterion).
        The default value of 0 implies machine precision.

    Returns
    -------
    nodes : dict of nodes
        Dictionary of nodes with eigenvector centrality as the value. The
        associated vector has unit Euclidean norm and the values are
        nonnegative.

    Raises
    ------
    NetworkXPointlessConcept
        If the graph `G` is the null graph.

    ArpackNoConvergence
        When the requested convergence is not obtained. The currently
        converged eigenvalues and eigenvectors can be found as attributes
        of the exception object.

    AmbiguousSolution
        If `G` is not (strongly) connected.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> centrality = nx.eigenvector_centrality_numpy(G)
    >>> print([f"{node} {centrality[node]:0.2f}" for node in centrality])
    ['0 0.37', '1 0.60', '2 0.60', '3 0.37']

    See Also
    --------
    :func:`scipy.sparse.linalg.eigs`
    eigenvector_centrality
    :func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
    :func:`~networkx.algorithms.link_analysis.hits_alg.hits`

    Notes
    -----
    This implementation delegates to the
    :func:`SciPy sparse eigenvalue solver<scipy.sparse.linalg.eigs>` (ARPACK)
    to find the eigenvalue/eigenvector pair of largest real part via Arnoldi
    iteration. To rank by successors instead of predecessors, reverse the
    graph first with ``G.reverse()``.
    """
    import numpy as np
    import scipy as sp

    if len(G) == 0:
        raise nx.NetworkXPointlessConcept(
            "cannot compute centrality for the null graph"
        )
    # gh-6888: disconnected graphs have a non-unique dominant eigenvector,
    # which makes the ARPACK result depend on round-off — reject them.
    if G.is_directed():
        fully_connected = nx.is_strongly_connected(G)
    else:
        fully_connected = nx.is_connected(G)
    if not fully_connected:
        raise nx.AmbiguousSolution(
            "`eigenvector_centrality_numpy` does not give consistent results for disconnected graphs"
        )

    adjacency = nx.to_scipy_sparse_array(G, nodelist=list(G), weight=weight, dtype=float)
    # The dominant ("LR": largest real part) eigenvector of A^T is the left
    # eigenvector of A, i.e. predecessor-based centrality.
    _, vecs = sp.sparse.linalg.eigs(
        adjacency.T, k=1, which="LR", maxiter=max_iter, tol=tol
    )
    dominant = vecs.flatten().real
    # Rescale to unit Euclidean norm; the sign factor flips the vector if
    # ARPACK returned the all-negative representative.
    scale = np.sign(dominant.sum()) * sp.linalg.norm(dominant)
    return {node: value for node, value in zip(G, (dominant / scale).tolist())}
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/harmonic.py ADDED
@@ -0,0 +1,89 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for computing the harmonic centrality of a graph."""
2
+
3
+ from functools import partial
4
+
5
+ import networkx as nx
6
+
7
+ __all__ = ["harmonic_centrality"]
8
+
9
+
10
@nx._dispatchable(edge_attrs="distance")
def harmonic_centrality(G, nbunch=None, distance=None, sources=None):
    r"""Compute harmonic centrality for nodes.

    Harmonic centrality [1]_ of a node `u` is the sum of the reciprocal
    of the shortest path distances from all other nodes to `u`

    .. math::

        C(u) = \sum_{v \neq u} \frac{1}{d(v, u)}

    where `d(v, u)` is the shortest-path distance between `v` and `u`.
    When `sources` is given, the sum runs only over the nodes in `sources`
    instead of over all nodes. Higher values indicate higher centrality.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    nbunch : container (default: all nodes in G)
        Container of nodes for which harmonic centrality values are calculated.

    sources : container (default: all nodes in G)
        Container of nodes `v` over which reciprocal distances are computed.
        Nodes not in `G` are silently ignored.

    distance : edge attribute key, optional (default=None)
        Use the specified edge attribute as the edge distance in shortest
        path calculations. If `None`, then each edge will have distance equal
        to 1. When set, shortest-path lengths are computed with Dijkstra's
        algorithm using that attribute as the edge weight.

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with harmonic centrality as the value.

    See Also
    --------
    betweenness_centrality, load_centrality, eigenvector_centrality,
    degree_centrality, closeness_centrality

    References
    ----------
    .. [1] Boldi, Paolo, and Sebastiano Vigna. "Axioms for centrality."
       Internet Mathematics 10.3-4 (2014): 222-262.
    """
    targets = set(G.nodes if nbunch is None else G.nbunch_iter(nbunch))
    origins = set(G.nodes if sources is None else G.nbunch_iter(sources))

    centrality = dict.fromkeys(targets, 0)

    # Each single-source search starts from an origin, so when there are
    # fewer targets than origins we swap the roles to run fewer searches.
    # For digraphs the edges must then be reversed so distances still flow
    # origin -> target.
    swapped = len(targets) < len(origins)
    if swapped:
        targets, origins = origins, targets
        if nx.is_directed(G):
            G = nx.reverse(G, copy=False)

    for origin in origins:
        lengths = nx.shortest_path_length(G, origin, weight=distance)
        for target in targets.intersection(lengths):
            d = lengths[target]
            if d == 0:  # handle target == origin and edges with 0 weight
                continue
            centrality[origin if swapped else target] += 1 / d

    return centrality
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/katz.py ADDED
@@ -0,0 +1,331 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Katz centrality."""
2
+
3
+ import math
4
+
5
+ import networkx as nx
6
+ from networkx.utils import not_implemented_for
7
+
8
+ __all__ = ["katz_centrality", "katz_centrality_numpy"]
9
+
10
+
11
@not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight")
def katz_centrality(
    G,
    alpha=0.1,
    beta=1.0,
    max_iter=1000,
    tol=1.0e-6,
    nstart=None,
    normalized=True,
    weight=None,
):
    r"""Compute the Katz centrality for the nodes of the graph G.

    Katz centrality generalizes eigenvector centrality: the centrality of
    node $i$ is

    .. math::

        x_i = \alpha \sum_{j} A_{ij} x_j + \beta,

    where $A$ is the adjacency matrix of G. The attenuation factor $\alpha$
    must be strictly less than the inverse of the largest eigenvalue of $A$
    for the iteration to converge, and $\beta$ sets the weight given to the
    immediate neighborhood. See [1]_ for details.

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    alpha : float, optional (default=0.1)
        Attenuation factor

    beta : scalar or dictionary, optional (default=1.0)
        Weight attributed to the immediate neighborhood. If not a scalar, the
        dictionary must have a value for every node.

    max_iter : integer, optional (default=1000)
        Maximum number of iterations in power method.

    tol : float, optional (default=1.0e-6)
        Error tolerance used to check convergence in power method iteration.

    nstart : dictionary, optional
        Starting value of Katz iteration for each node.

    normalized : bool, optional (default=True)
        If True normalize the resulting values.

    weight : None or string, optional (default=None)
        If None, all edge weights are considered equal.
        Otherwise holds the name of the edge attribute used as weight.
        In this measure the weight is interpreted as the connection strength.

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with Katz centrality as the value.

    Raises
    ------
    NetworkXError
        If the parameter `beta` is not a scalar but lacks a value for at
        least one node

    PowerIterationFailedConvergence
        If the algorithm fails to converge to the specified tolerance
        within the specified number of iterations of the power iteration
        method.

    Examples
    --------
    >>> import math
    >>> G = nx.path_graph(4)
    >>> phi = (1 + math.sqrt(5)) / 2.0  # largest eigenvalue of adj matrix
    >>> centrality = nx.katz_centrality(G, 1 / phi - 0.01)
    >>> for n, c in sorted(centrality.items()):
    ...     print(f"{n} {c:.2f}")
    0 0.37
    1 0.60
    2 0.60
    3 0.37

    See Also
    --------
    katz_centrality_numpy
    eigenvector_centrality
    eigenvector_centrality_numpy
    :func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
    :func:`~networkx.algorithms.link_analysis.hits_alg.hits`

    Notes
    -----
    Katz centrality was introduced by [2]_. This implementation uses the
    power method; the iteration stops after ``max_iter`` iterations or once
    the total change drops below ``number_of_nodes(G) * tol``. For directed
    graphs this finds "left" eigenvectors corresponding to in-edges; for
    out-edge centrality, first reverse the graph with ``G.reverse()``.

    References
    ----------
    .. [1] Mark E. J. Newman:
       Networks: An Introduction.
       Oxford University Press, USA, 2010, p. 720.
    .. [2] Leo Katz:
       A New Status Index Derived from Sociometric Index.
       Psychometrika 18(1):39–43, 1953
       https://link.springer.com/content/pdf/10.1007/BF02289026.pdf
    """
    if len(G) == 0:
        return {}

    n_nodes = G.number_of_nodes()

    # Start from the all-zero vector unless the caller supplied one.
    x = dict.fromkeys(G, 0) if nstart is None else nstart

    # Broadcast a scalar beta over every node; otherwise beta must be a
    # mapping that covers the whole node set.
    try:
        b = dict.fromkeys(G, float(beta))
    except (TypeError, ValueError, AttributeError) as err:
        b = beta
        if set(beta) != set(G):
            raise nx.NetworkXError(
                "beta dictionary must have a value for every node"
            ) from err

    for _ in range(max_iter):
        xlast = x
        x = dict.fromkeys(xlast, 0)
        # One power-method step: y^T = alpha * x^T A + beta
        for node in x:
            outgoing = xlast[node]
            for nbr, edge_data in G[node].items():
                x[nbr] += outgoing * edge_data.get(weight, 1)
        for node in x:
            x[node] = alpha * x[node] + b[node]

        # Converged when the L1 change is below n * tol.
        if sum(abs(x[node] - xlast[node]) for node in x) < n_nodes * tol:
            if normalized:
                try:
                    scale = 1.0 / math.hypot(*x.values())
                except ZeroDivisionError:
                    # Degenerate all-zero vector; leave it unscaled.
                    scale = 1.0
            else:
                scale = 1
            return {node: value * scale for node, value in x.items()}
    raise nx.PowerIterationFailedConvergence(max_iter)
195
+
196
+
197
@not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight")
def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, weight=None):
    r"""Compute the Katz centrality for the graph G.

    Katz centrality generalizes eigenvector centrality: the centrality of
    node $i$ is

    .. math::

        x_i = \alpha \sum_{j} A_{ij} x_j + \beta,

    where $A$ is the adjacency matrix of G. The attenuation factor $\alpha$
    must be strictly less than the inverse of the largest eigenvalue of $A$
    for a solution to exist, and $\beta$ sets the weight given to the
    immediate neighborhood. See [1]_ for details.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    alpha : float
        Attenuation factor

    beta : scalar or dictionary, optional (default=1.0)
        Weight attributed to the immediate neighborhood. If not a scalar the
        dictionary must have an value for every node.

    normalized : bool
        If True normalize the resulting values.

    weight : None or string, optional
        If None, all edge weights are considered equal.
        Otherwise holds the name of the edge attribute used as weight.
        In this measure the weight is interpreted as the connection strength.

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with Katz centrality as the value.

    Raises
    ------
    NetworkXError
        If the parameter `beta` is not a scalar but lacks a value for at
        least one node

    Examples
    --------
    >>> import math
    >>> G = nx.path_graph(4)
    >>> phi = (1 + math.sqrt(5)) / 2.0  # largest eigenvalue of adj matrix
    >>> centrality = nx.katz_centrality_numpy(G, 1 / phi)
    >>> for n, c in sorted(centrality.items()):
    ...     print(f"{n} {c:.2f}")
    0 0.37
    1 0.60
    2 0.60
    3 0.37

    See Also
    --------
    katz_centrality
    eigenvector_centrality_numpy
    eigenvector_centrality
    :func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
    :func:`~networkx.algorithms.link_analysis.hits_alg.hits`

    Notes
    -----
    Katz centrality was introduced by [2]_. This implementation solves the
    linear system $(I - \alpha A^T) x = \beta$ directly. For directed graphs
    this finds "left" eigenvectors corresponding to in-edges; for out-edge
    centrality, first reverse the graph with ``G.reverse()``.

    References
    ----------
    .. [1] Mark E. J. Newman:
       Networks: An Introduction.
       Oxford University Press, USA, 2010, p. 173.
    .. [2] Leo Katz:
       A New Status Index Derived from Sociometric Index.
       Psychometrika 18(1):39–43, 1953
       https://link.springer.com/content/pdf/10.1007/BF02289026.pdf
    """
    import numpy as np

    if len(G) == 0:
        return {}

    # Build the right-hand side: either from a per-node beta mapping (which
    # must cover every node of G) or by broadcasting a scalar beta.
    try:
        nodelist = beta.keys()
        if set(nodelist) != set(G):
            raise nx.NetworkXError("beta dictionary must have a value for every node")
        rhs = np.array(list(beta.values()), dtype=float)
    except AttributeError:
        nodelist = list(G)
        try:
            rhs = np.ones((len(nodelist), 1)) * beta
        except (TypeError, ValueError, AttributeError) as err:
            raise nx.NetworkXError("beta must be a number") from err

    # Use A^T so that predecessors contribute (the "left" eigenproblem).
    adj_t = nx.adjacency_matrix(G, nodelist=nodelist, weight=weight).todense().T
    dim = adj_t.shape[0]
    centrality = np.linalg.solve(np.eye(dim, dim) - (alpha * adj_t), rhs).squeeze()

    # Rescale to unit Euclidean norm; the sign factor makes entries
    # nonnegative. tolist() converts back to plain Python floats.
    scale = np.sign(sum(centrality)) * np.linalg.norm(centrality) if normalized else 1
    return dict(zip(nodelist, (centrality / scale).tolist()))