ZTWHHH committed on
Commit
00a020e
·
verified ·
1 Parent(s): fd5aa4d

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-310.pyc +0 -0
  2. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/clique.cpython-310.pyc +0 -0
  3. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/clustering_coefficient.cpython-310.pyc +0 -0
  4. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/connectivity.cpython-310.pyc +0 -0
  5. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/distance_measures.cpython-310.pyc +0 -0
  6. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/dominating_set.cpython-310.pyc +0 -0
  7. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/kcomponents.cpython-310.pyc +0 -0
  8. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-310.pyc +0 -0
  9. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/maxcut.cpython-310.pyc +0 -0
  10. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/ramsey.cpython-310.pyc +0 -0
  11. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/steinertree.cpython-310.pyc +0 -0
  12. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/traveling_salesman.cpython-310.pyc +0 -0
  13. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/treewidth.cpython-310.pyc +0 -0
  14. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/vertex_cover.cpython-310.pyc +0 -0
  15. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/__init__.cpython-310.pyc +0 -0
  16. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_approx_clust_coeff.cpython-310.pyc +0 -0
  17. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_clique.cpython-310.pyc +0 -0
  18. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_connectivity.cpython-310.pyc +0 -0
  19. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_distance_measures.cpython-310.pyc +0 -0
  20. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_dominating_set.cpython-310.pyc +0 -0
  21. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_matching.cpython-310.pyc +0 -0
  22. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_maxcut.cpython-310.pyc +0 -0
  23. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_ramsey.cpython-310.pyc +0 -0
  24. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_steinertree.cpython-310.pyc +0 -0
  25. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_traveling_salesman.cpython-310.pyc +0 -0
  26. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_treewidth.cpython-310.pyc +0 -0
  27. minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_vertex_cover.cpython-310.pyc +0 -0
  28. minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/__init__.py +2 -0
  29. minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/__pycache__/__init__.cpython-310.pyc +0 -0
  30. minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/__pycache__/hits_alg.cpython-310.pyc +0 -0
  31. minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/__pycache__/pagerank_alg.cpython-310.pyc +0 -0
  32. minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/hits_alg.py +337 -0
  33. minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/pagerank_alg.py +500 -0
  34. minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/__init__.py +0 -0
  35. minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/__init__.cpython-310.pyc +0 -0
  36. minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/test_hits.cpython-310.pyc +0 -0
  37. minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/test_pagerank.cpython-310.pyc +0 -0
  38. minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/test_hits.py +78 -0
  39. minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/test_pagerank.py +214 -0
  40. minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/__init__.py +0 -0
  41. minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_boundary.py +154 -0
  42. minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_bridges.py +144 -0
  43. minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_broadcasting.py +82 -0
  44. minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_chains.py +141 -0
  45. minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_chordal.py +129 -0
  46. minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_clique.py +291 -0
  47. minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_communicability.py +80 -0
  48. minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_core.py +266 -0
  49. minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_covering.py +85 -0
  50. minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_cuts.py +171 -0
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.34 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/clique.cpython-310.pyc ADDED
Binary file (7.95 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/clustering_coefficient.cpython-310.pyc ADDED
Binary file (2.52 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/connectivity.cpython-310.pyc ADDED
Binary file (11.2 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/distance_measures.cpython-310.pyc ADDED
Binary file (5.02 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/dominating_set.cpython-310.pyc ADDED
Binary file (4.45 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/kcomponents.cpython-310.pyc ADDED
Binary file (13.9 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-310.pyc ADDED
Binary file (1.43 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/maxcut.cpython-310.pyc ADDED
Binary file (4.31 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/ramsey.cpython-310.pyc ADDED
Binary file (1.56 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/steinertree.cpython-310.pyc ADDED
Binary file (7.74 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/traveling_salesman.cpython-310.pyc ADDED
Binary file (46.1 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/treewidth.cpython-310.pyc ADDED
Binary file (6.63 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/vertex_cover.cpython-310.pyc ADDED
Binary file (2.86 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (193 Bytes). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_approx_clust_coeff.cpython-310.pyc ADDED
Binary file (1.4 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_clique.cpython-310.pyc ADDED
Binary file (4.4 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_connectivity.cpython-310.pyc ADDED
Binary file (5.83 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_distance_measures.cpython-310.pyc ADDED
Binary file (3.09 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_dominating_set.cpython-310.pyc ADDED
Binary file (2.55 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_matching.cpython-310.pyc ADDED
Binary file (505 Bytes). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_maxcut.cpython-310.pyc ADDED
Binary file (3.02 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_ramsey.cpython-310.pyc ADDED
Binary file (1.17 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_steinertree.cpython-310.pyc ADDED
Binary file (8.19 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_traveling_salesman.cpython-310.pyc ADDED
Binary file (26.9 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_treewidth.cpython-310.pyc ADDED
Binary file (7.16 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_vertex_cover.cpython-310.pyc ADDED
Binary file (2.81 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/__init__.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ from networkx.algorithms.link_analysis.hits_alg import *
2
+ from networkx.algorithms.link_analysis.pagerank_alg import *
minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (304 Bytes). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/__pycache__/hits_alg.cpython-310.pyc ADDED
Binary file (9.7 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/__pycache__/pagerank_alg.cpython-310.pyc ADDED
Binary file (16.3 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/hits_alg.py ADDED
@@ -0,0 +1,337 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Hubs and authorities analysis of graph structure."""
2
+
3
+ import networkx as nx
4
+
5
+ __all__ = ["hits"]
6
+
7
+
8
@nx._dispatchable(preserve_edge_attrs={"G": {"weight": 1}})
def hits(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True):
    """Return HITS hubs and authorities values for the nodes of `G`.

    The HITS algorithm assigns two scores to every node: the authority
    score estimates a node's value from its incoming links, while the hub
    score estimates it from its outgoing links.

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    max_iter : integer, optional
        Maximum number of iterations for the underlying SVD solver.

    tol : float, optional
        Error tolerance used to check convergence.

    nstart : dictionary, optional
        Starting value of each node for the iteration.

    normalized : bool (default=True)
        Normalize results by the sum of all of the values.

    Returns
    -------
    (hubs, authorities) : two-tuple of dictionaries
        Two dictionaries keyed by node containing the hub and authority
        values.

    Raises
    ------
    PowerIterationFailedConvergence
        If the solver fails to converge to the specified tolerance within
        the specified number of iterations, or if ``max_iter <= 0``.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> h, a = nx.hits(G)

    Notes
    -----
    The computation is delegated to SciPy's sparse SVD and has no
    guarantee of convergence.  HITS was designed for directed graphs, but
    this function does not check directedness and will also run on
    undirected graphs.

    References
    ----------
    .. [1] A. Langville and C. Meyer,
       "A survey of eigenvector methods of web information retrieval."
       http://citeseer.ist.psu.edu/713792.html
    .. [2] Jon Kleinberg,
       Authoritative sources in a hyperlinked environment
       Journal of the ACM 46 (5): 604-32, 1999.
       doi:10.1145/324133.324140.
       http://www.cs.cornell.edu/home/kleinber/auth.pdf.
    """
    import numpy as np
    import scipy as sp

    if len(G) == 0:
        return {}, {}
    # A non-positive iteration budget can never converge.
    if max_iter <= 0:
        raise nx.PowerIterationFailedConvergence(max_iter)
    adj = nx.adjacency_matrix(G, nodelist=list(G), dtype=float)

    # SciPy expects the optional starting vector as a flat array.
    v0 = None if nstart is None else np.array(list(nstart.values()))

    # The authority vector is the dominant right singular vector of A.
    try:
        _, _, vt = sp.sparse.linalg.svds(adj, k=1, v0=v0, maxiter=max_iter, tol=tol)
    except sp.sparse.linalg.ArpackNoConvergence as exc:
        raise nx.PowerIterationFailedConvergence(max_iter) from exc

    authority = vt.flatten().real
    # Hubs are one adjacency multiplication away from authorities.
    hub = adj @ authority
    if normalized:
        hub /= hub.sum()
        authority /= authority.sum()
    return dict(zip(G, map(float, hub))), dict(zip(G, map(float, authority)))
97
+
98
+
99
def _hits_python(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True):
    """Pure-Python power-iteration implementation of HITS.

    Returns a ``(hubs, authorities)`` pair of dictionaries keyed by node.
    Raises :exc:`networkx.PowerIterationFailedConvergence` if the l1 change
    of the hub vector does not drop below ``tol`` within ``max_iter``
    iterations.  Not defined for multigraphs.  If ``nstart`` is supplied it
    is normalized in place and used as the working hub vector.
    """
    if isinstance(G, nx.MultiGraph | nx.MultiDiGraph):
        raise Exception("hits() not defined for graphs with multiedges.")
    if len(G) == 0:
        return {}, {}
    # Starting hub vector: uniform unless the caller supplied one.
    if nstart is None:
        h = dict.fromkeys(G, 1.0 / G.number_of_nodes())
    else:
        h = nstart
    # Scale the starting vector so its entries sum to one.
    scale = 1.0 / sum(h.values())
    for node in h:
        h[node] *= scale
    for _ in range(max_iter):  # power iteration, at most max_iter sweeps
        hlast = h
        h = {node: 0 for node in hlast}
        a = {node: 0 for node in hlast}
        # Authority pass: left-multiply, a^T = hlast^T * G.
        for node in h:
            for nbr in G[node]:
                a[nbr] += hlast[node] * G[node][nbr].get("weight", 1)
        # Hub pass: h = G * a.
        for node in h:
            for nbr in G[node]:
                h[node] += a[nbr] * G[node][nbr].get("weight", 1)
        # Rescale each vector by its largest entry to keep values bounded.
        scale = 1.0 / max(h.values())
        for node in h:
            h[node] *= scale
        scale = 1.0 / max(a.values())
        for node in a:
            a[node] *= scale
        # Converged once the l1 change of the hub vector is below tol.
        if sum(abs(h[node] - hlast[node]) for node in h) < tol:
            break
    else:
        raise nx.PowerIterationFailedConvergence(max_iter)
    if normalized:
        scale = 1.0 / sum(a.values())
        for node in a:
            a[node] *= scale
        scale = 1.0 / sum(h.values())
        for node in h:
            h[node] *= scale
    return h, a
148
+
149
+
150
def _hits_numpy(G, normalized=True):
    """Compute HITS hubs and authorities via a dense eigendecomposition.

    With ``A`` the adjacency matrix of `G`, the hub scores are the entries
    of the eigenvector belonging to the largest eigenvalue of the hub
    matrix ``A @ A.T``, and the authority scores come likewise from the
    authority matrix ``A.T @ A``.  The eigenvector calculation uses NumPy's
    interface to LAPACK, so this is only practical for small graphs.

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    normalized : bool (default=True)
        If True, scale each result vector by its sum; otherwise scale by
        its maximum entry.

    Returns
    -------
    (hubs, authorities) : two-tuple of dictionaries
        Two dictionaries keyed by node containing the hub and authority
        values.

    Notes
    -----
    HITS was designed for directed graphs, but this function does not
    check directedness and will also run on undirected graphs.

    References
    ----------
    .. [1] A. Langville and C. Meyer,
       "A survey of eigenvector methods of web information retrieval."
       http://citeseer.ist.psu.edu/713792.html
    .. [2] Jon Kleinberg,
       Authoritative sources in a hyperlinked environment
       Journal of the ACM 46 (5): 604-32, 1999.
       doi:10.1145/324133.324140.
       http://www.cs.cornell.edu/home/kleinber/auth.pdf.
    """
    import numpy as np

    if len(G) == 0:
        return {}, {}

    def dominant_eigenvector(M):
        # Eigenvector paired with the maximum eigenvalue of M.
        eigvals, eigvecs = np.linalg.eig(M)
        return eigvecs[:, np.argmax(eigvals)]

    adj = nx.to_numpy_array(G)
    hub_vec = dominant_eigenvector(adj @ adj.T)  # hub matrix
    auth_vec = dominant_eigenvector(adj.T @ adj)  # authority matrix
    if normalized:
        hub_vec /= hub_vec.sum()
        auth_vec /= auth_vec.sum()
    else:
        hub_vec /= hub_vec.max()
        auth_vec /= auth_vec.max()
    return dict(zip(G, map(float, hub_vec))), dict(zip(G, map(float, auth_vec)))
232
+
233
+
234
def _hits_scipy(G, max_iter=100, tol=1.0e-6, nstart=None, normalized=True):
    """Return HITS hubs and authorities values for nodes.

    Sparse power-iteration implementation of HITS: the authority vector is
    iterated with the authority matrix ``A.T @ A`` and the hub vector is
    recovered as ``A @ a``.

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    max_iter : integer, optional
        Maximum number of iterations in the power method.

    tol : float, optional
        Error tolerance (l1 norm) used to check convergence.

    nstart : dictionary, optional
        Starting value of each node for the power method iteration.

    normalized : bool (default=True)
        Normalize results by the sum of all of the values.

    Returns
    -------
    (hubs, authorities) : two-tuple of dictionaries
        Two dictionaries keyed by node containing the hub and authority
        values.

    Raises
    ------
    PowerIterationFailedConvergence
        If the algorithm fails to converge to the specified tolerance
        within the specified number of iterations of the power iteration
        method.

    Notes
    -----
    HITS was designed for directed graphs, but this function does not
    check directedness and will also run on undirected graphs.

    References
    ----------
    .. [1] A. Langville and C. Meyer,
       "A survey of eigenvector methods of web information retrieval."
       http://citeseer.ist.psu.edu/713792.html
    .. [2] Jon Kleinberg,
       Authoritative sources in a hyperlinked environment
       Journal of the ACM 46 (5): 604-632, 1999.
       doi:10.1145/324133.324140.
       http://www.cs.cornell.edu/home/kleinber/auth.pdf.
    """
    import numpy as np

    if len(G) == 0:
        return {}, {}
    A = nx.to_scipy_sparse_array(G, nodelist=list(G))
    (n, _) = A.shape  # should be square
    ATA = A.T @ A  # authority matrix
    # Choose a fixed (uniform) starting vector if none was given.  Both
    # branches produce a flat length-n array; the previous code mixed an
    # (n, 1) column with a 1-D array depending on `nstart`.
    if nstart is None:
        x = np.full(n, 1.0 / n)
    else:
        x = np.array([nstart.get(node, 0) for node in G], dtype=float)
        x /= x.sum()

    # Power iteration on the authority matrix, at most max_iter steps.
    # (Fixes an off-by-one in the previous while-loop, which performed up
    # to max_iter + 2 multiplications before raising.)
    for _ in range(max_iter):
        xlast = x
        x = ATA @ x
        x /= x.max()
        # Check convergence, l1 norm.
        if np.absolute(x - xlast).sum() < tol:
            break
    else:
        raise nx.PowerIterationFailedConvergence(max_iter)

    a = x.flatten()
    h = A @ a  # hub vector from the converged authority vector
    if normalized:
        h /= h.sum()
        a /= a.sum()
    hubs = dict(zip(G, map(float, h)))
    authorities = dict(zip(G, map(float, a)))
    return hubs, authorities
minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/pagerank_alg.py ADDED
@@ -0,0 +1,500 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """PageRank analysis of graph structure."""
2
+
3
+ from warnings import warn
4
+
5
+ import networkx as nx
6
+
7
+ __all__ = ["pagerank", "google_matrix"]
8
+
9
+
10
@nx._dispatchable(edge_attrs="weight")
def pagerank(
    G,
    alpha=0.85,
    personalization=None,
    max_iter=100,
    tol=1.0e-6,
    nstart=None,
    weight="weight",
    dangling=None,
):
    """Return the PageRank of the nodes in the graph.

    PageRank ranks the nodes of `G` by the structure of their incoming
    links; it was originally designed to rank web pages.

    Parameters
    ----------
    G : graph
        A NetworkX graph.  Undirected graphs will be converted to a
        directed graph with two directed edges for each undirected edge.

    alpha : float, optional
        Damping parameter for PageRank, default=0.85.

    personalization : dict, optional
        The "personalization vector": a dictionary mapping some subset of
        the graph's nodes to personalization values.  At least one value
        must be non-zero.  Nodes not listed get value zero.  By default a
        uniform distribution is used.

    max_iter : integer, optional
        Maximum number of iterations in the power method eigenvalue solver.

    tol : float, optional
        Error tolerance used to check convergence in the power method
        solver.  The iteration stops after a tolerance of ``len(G) * tol``
        is reached.

    nstart : dictionary, optional
        Starting value of the PageRank iteration for each node.

    weight : key, optional
        Edge data key to use as weight.  If None, all weights are 1.

    dangling : dict, optional
        The out-edges to be assigned to any "dangling" nodes (nodes with no
        out-edges).  Keys are the nodes the out-edge points to and values
        are the edge weights.  By default, dangling nodes are given
        out-edges according to the personalization vector (uniform if not
        specified).  This must be chosen so the transition matrix is
        irreducible (see notes under :func:`google_matrix`).

    Returns
    -------
    pagerank : dictionary
        Dictionary of nodes with PageRank as value.

    Examples
    --------
    >>> G = nx.DiGraph(nx.path_graph(4))
    >>> pr = nx.pagerank(G, alpha=0.9)

    Raises
    ------
    PowerIterationFailedConvergence
        If the algorithm fails to converge to the specified tolerance
        within the specified number of iterations of the power iteration
        method.

    Notes
    -----
    The eigenvector calculation is done by power iteration and has no
    guarantee of convergence; if ``max_iter`` is exceeded a
    :exc:`networkx.exception.PowerIterationFailedConvergence` is raised.
    PageRank was designed for directed graphs, but this function does not
    check directedness and runs on undirected graphs by converting each
    edge into two directed edges.

    See Also
    --------
    google_matrix

    References
    ----------
    .. [1] A. Langville and C. Meyer,
       "A survey of eigenvector methods of web information retrieval."
       http://citeseer.ist.psu.edu/713792.html
    .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry,
       The PageRank citation ranking: Bringing order to the Web. 1999
       http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf
    """
    # The public entry point delegates to the sparse (SciPy) implementation.
    return _pagerank_scipy(
        G, alpha, personalization, max_iter, tol, nstart, weight, dangling
    )
114
+
115
+
116
def _pagerank_python(
    G,
    alpha=0.85,
    personalization=None,
    max_iter=100,
    tol=1.0e-6,
    nstart=None,
    weight="weight",
    dangling=None,
):
    """Pure-Python PageRank by power iteration.

    Same contract as :func:`pagerank`.  Raises
    :exc:`networkx.PowerIterationFailedConvergence` when the l1 change does
    not fall below ``len(G) * tol`` within ``max_iter`` iterations.
    """
    if len(G) == 0:
        return {}

    directed = G.to_directed()

    # Right-stochastic copy: each node's out-edge weights sum to one.
    W = nx.stochastic_graph(directed, weight=weight)
    N = W.number_of_nodes()

    # Starting distribution: uniform unless the caller supplied one.
    if nstart is None:
        x = dict.fromkeys(W, 1.0 / N)
    else:
        total = sum(nstart.values())
        x = {node: value / total for node, value in nstart.items()}

    # Personalization vector (uniform by default).
    if personalization is None:
        p = dict.fromkeys(W, 1.0 / N)
    else:
        total = sum(personalization.values())
        p = {node: value / total for node, value in personalization.items()}

    # Dangling nodes redistribute their mass via `dangling` (or `p`).
    if dangling is None:
        dangling_weights = p
    else:
        total = sum(dangling.values())
        dangling_weights = {node: value / total for node, value in dangling.items()}
    dangling_nodes = [node for node in W if W.out_degree(node, weight=weight) == 0.0]

    # Power iteration: at most max_iter sweeps.
    for _ in range(max_iter):
        xlast = x
        x = dict.fromkeys(xlast.keys(), 0)
        danglesum = alpha * sum(xlast[node] for node in dangling_nodes)
        for node in x:
            # Left-multiply: x^T = xlast^T * W.
            for _src, nbr, wt in W.edges(node, data=weight):
                x[nbr] += alpha * xlast[node] * wt
            x[node] += danglesum * dangling_weights.get(node, 0) + (
                1.0 - alpha
            ) * p.get(node, 0)
        # Converged once the l1 change drops below N * tol.
        if sum(abs(x[node] - xlast[node]) for node in x) < N * tol:
            return x
    raise nx.PowerIterationFailedConvergence(max_iter)
174
+
175
+
176
@nx._dispatchable(edge_attrs="weight")
def google_matrix(
    G, alpha=0.85, personalization=None, nodelist=None, weight="weight", dangling=None
):
    """Return the Google matrix of the graph.

    Parameters
    ----------
    G : graph
        A NetworkX graph.  Undirected graphs will be converted to a
        directed graph with two directed edges for each undirected edge.

    alpha : float
        The damping factor.

    personalization : dict, optional
        The "personalization vector": a dictionary mapping some subset of
        the graph's nodes to personalization values.  At least one value
        must be non-zero.  Nodes not listed get value zero.  By default a
        uniform distribution is used.

    nodelist : list, optional
        The rows and columns are ordered according to the nodes in
        nodelist.  If nodelist is None, the ordering is produced by
        ``G.nodes()``.

    weight : key, optional
        Edge data key to use as weight.  If None, all weights are 1.

    dangling : dict, optional
        The out-edges to be assigned to any "dangling" nodes (nodes with no
        out-edges).  Keys are the nodes the out-edge points to and values
        are the edge weights.  By default, dangling nodes are given
        out-edges according to the personalization vector (uniform if not
        specified).  This must be chosen so the transition matrix is
        irreducible (see notes below).

    Returns
    -------
    A : 2D NumPy ndarray
        Google matrix of the graph.

    Notes
    -----
    The returned array is the transition matrix of the Markov chain used
    in PageRank.  For PageRank to converge to a unique stationary
    distribution, the transition matrix must be irreducible: there must be
    a path between every pair of nodes, or "rank sinks" are possible.

    This implementation works with Multi(Di)Graphs.  For multigraphs the
    weight between two nodes is the sum of all edge weights between them.

    See Also
    --------
    pagerank
    """
    import numpy as np

    if nodelist is None:
        nodelist = list(G)

    M = nx.to_numpy_array(G, nodelist=nodelist, weight=weight)
    N = len(G)
    if N == 0:
        return M

    # Personalization vector (uniform unless supplied).
    if personalization is None:
        p = np.repeat(1.0 / N, N)
    else:
        p = np.array([personalization.get(node, 0) for node in nodelist], dtype=float)
        if p.sum() == 0:
            raise ZeroDivisionError
        p /= p.sum()

    # Distribution used to replace the rows of dangling nodes.
    if dangling is None:
        dangling_weights = p
    else:
        # Convert the dangling dictionary into an array in nodelist order.
        dangling_weights = np.array(
            [dangling.get(node, 0) for node in nodelist], dtype=float
        )
        dangling_weights /= dangling_weights.sum()
    dangling_nodes = np.where(M.sum(axis=1) == 0)[0]

    # Assign dangling_weights to any dangling nodes (nodes with no out links).
    M[dangling_nodes] = dangling_weights

    M /= M.sum(axis=1)[:, np.newaxis]  # normalize rows to sum to 1

    return alpha * M + (1 - alpha) * p
270
+
271
+
272
+ def _pagerank_numpy(
273
+ G, alpha=0.85, personalization=None, weight="weight", dangling=None
274
+ ):
275
+ """Returns the PageRank of the nodes in the graph.
276
+
277
+ PageRank computes a ranking of the nodes in the graph G based on
278
+ the structure of the incoming links. It was originally designed as
279
+ an algorithm to rank web pages.
280
+
281
+ Parameters
282
+ ----------
283
+ G : graph
284
+ A NetworkX graph. Undirected graphs will be converted to a directed
285
+ graph with two directed edges for each undirected edge.
286
+
287
+ alpha : float, optional
288
+ Damping parameter for PageRank, default=0.85.
289
+
290
+ personalization: dict, optional
291
+ The "personalization vector" consisting of a dictionary with a
292
+ key some subset of graph nodes and personalization value each of those.
293
+ At least one personalization value must be non-zero.
294
+ If not specified, a nodes personalization value will be zero.
295
+ By default, a uniform distribution is used.
296
+
297
+ weight : key, optional
298
+ Edge data key to use as weight. If None weights are set to 1.
299
+
300
+ dangling: dict, optional
301
+ The outedges to be assigned to any "dangling" nodes, i.e., nodes without
302
+ any outedges. The dict key is the node the outedge points to and the dict
303
+ value is the weight of that outedge. By default, dangling nodes are given
304
+ outedges according to the personalization vector (uniform if not
305
+ specified) This must be selected to result in an irreducible transition
306
+ matrix (see notes under google_matrix). It may be common to have the
307
+ dangling dict to be the same as the personalization dict.
308
+
309
+ Returns
310
+ -------
311
+ pagerank : dictionary
312
+ Dictionary of nodes with PageRank as value.
313
+
314
+ Examples
315
+ --------
316
+ >>> from networkx.algorithms.link_analysis.pagerank_alg import _pagerank_numpy
317
+ >>> G = nx.DiGraph(nx.path_graph(4))
318
+ >>> pr = _pagerank_numpy(G, alpha=0.9)
319
+
320
+ Notes
321
+ -----
322
+ The eigenvector calculation uses NumPy's interface to the LAPACK
323
+ eigenvalue solvers. This will be the fastest and most accurate
324
+ for small graphs.
325
+
326
+ This implementation works with Multi(Di)Graphs. For multigraphs the
327
+ weight between two nodes is set to be the sum of all edge weights
328
+ between those nodes.
329
+
330
+ See Also
331
+ --------
332
+ pagerank, google_matrix
333
+
334
+ References
335
+ ----------
336
+ .. [1] A. Langville and C. Meyer,
337
+ "A survey of eigenvector methods of web information retrieval."
338
+ http://citeseer.ist.psu.edu/713792.html
339
+ .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry,
340
+ The PageRank citation ranking: Bringing order to the Web. 1999
341
+ http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf
342
+ """
343
+ import numpy as np
344
+
345
+ if len(G) == 0:
346
+ return {}
347
+ M = google_matrix(
348
+ G, alpha, personalization=personalization, weight=weight, dangling=dangling
349
+ )
350
+ # use numpy LAPACK solver
351
+ eigenvalues, eigenvectors = np.linalg.eig(M.T)
352
+ ind = np.argmax(eigenvalues)
353
+ # eigenvector of largest eigenvalue is at ind, normalized
354
+ largest = np.array(eigenvectors[:, ind]).flatten().real
355
+ norm = largest.sum()
356
+ return dict(zip(G, map(float, largest / norm)))
357
+
358
+
359
+ def _pagerank_scipy(
360
+ G,
361
+ alpha=0.85,
362
+ personalization=None,
363
+ max_iter=100,
364
+ tol=1.0e-6,
365
+ nstart=None,
366
+ weight="weight",
367
+ dangling=None,
368
+ ):
369
+ """Returns the PageRank of the nodes in the graph.
370
+
371
+ PageRank computes a ranking of the nodes in the graph G based on
372
+ the structure of the incoming links. It was originally designed as
373
+ an algorithm to rank web pages.
374
+
375
+ Parameters
376
+ ----------
377
+ G : graph
378
+ A NetworkX graph. Undirected graphs will be converted to a directed
379
+ graph with two directed edges for each undirected edge.
380
+
381
+ alpha : float, optional
382
+ Damping parameter for PageRank, default=0.85.
383
+
384
+ personalization: dict, optional
385
+ The "personalization vector" consisting of a dictionary with a
386
+ key some subset of graph nodes and personalization value each of those.
387
+ At least one personalization value must be non-zero.
388
+ If not specified, a nodes personalization value will be zero.
389
+ By default, a uniform distribution is used.
390
+
391
+ max_iter : integer, optional
392
+ Maximum number of iterations in power method eigenvalue solver.
393
+
394
+ tol : float, optional
395
+ Error tolerance used to check convergence in power method solver.
396
+ The iteration will stop after a tolerance of ``len(G) * tol`` is reached.
397
+
398
+ nstart : dictionary, optional
399
+ Starting value of PageRank iteration for each node.
400
+
401
+ weight : key, optional
402
+ Edge data key to use as weight. If None weights are set to 1.
403
+
404
+ dangling: dict, optional
405
+ The outedges to be assigned to any "dangling" nodes, i.e., nodes without
406
+ any outedges. The dict key is the node the outedge points to and the dict
407
+ value is the weight of that outedge. By default, dangling nodes are given
408
+ outedges according to the personalization vector (uniform if not
409
+ specified) This must be selected to result in an irreducible transition
410
+ matrix (see notes under google_matrix). It may be common to have the
411
+ dangling dict to be the same as the personalization dict.
412
+
413
+ Returns
414
+ -------
415
+ pagerank : dictionary
416
+ Dictionary of nodes with PageRank as value
417
+
418
+ Examples
419
+ --------
420
+ >>> from networkx.algorithms.link_analysis.pagerank_alg import _pagerank_scipy
421
+ >>> G = nx.DiGraph(nx.path_graph(4))
422
+ >>> pr = _pagerank_scipy(G, alpha=0.9)
423
+
424
+ Notes
425
+ -----
426
+ The eigenvector calculation uses power iteration with a SciPy
427
+ sparse matrix representation.
428
+
429
+ This implementation works with Multi(Di)Graphs. For multigraphs the
430
+ weight between two nodes is set to be the sum of all edge weights
431
+ between those nodes.
432
+
433
+ See Also
434
+ --------
435
+ pagerank
436
+
437
+ Raises
438
+ ------
439
+ PowerIterationFailedConvergence
440
+ If the algorithm fails to converge to the specified tolerance
441
+ within the specified number of iterations of the power iteration
442
+ method.
443
+
444
+ References
445
+ ----------
446
+ .. [1] A. Langville and C. Meyer,
447
+ "A survey of eigenvector methods of web information retrieval."
448
+ http://citeseer.ist.psu.edu/713792.html
449
+ .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry,
450
+ The PageRank citation ranking: Bringing order to the Web. 1999
451
+ http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf
452
+ """
453
+ import numpy as np
454
+ import scipy as sp
455
+
456
+ N = len(G)
457
+ if N == 0:
458
+ return {}
459
+
460
+ nodelist = list(G)
461
+ A = nx.to_scipy_sparse_array(G, nodelist=nodelist, weight=weight, dtype=float)
462
+ S = A.sum(axis=1)
463
+ S[S != 0] = 1.0 / S[S != 0]
464
+ # TODO: csr_array
465
+ Q = sp.sparse.csr_array(sp.sparse.spdiags(S.T, 0, *A.shape))
466
+ A = Q @ A
467
+
468
+ # initial vector
469
+ if nstart is None:
470
+ x = np.repeat(1.0 / N, N)
471
+ else:
472
+ x = np.array([nstart.get(n, 0) for n in nodelist], dtype=float)
473
+ x /= x.sum()
474
+
475
+ # Personalization vector
476
+ if personalization is None:
477
+ p = np.repeat(1.0 / N, N)
478
+ else:
479
+ p = np.array([personalization.get(n, 0) for n in nodelist], dtype=float)
480
+ if p.sum() == 0:
481
+ raise ZeroDivisionError
482
+ p /= p.sum()
483
+ # Dangling nodes
484
+ if dangling is None:
485
+ dangling_weights = p
486
+ else:
487
+ # Convert the dangling dictionary into an array in nodelist order
488
+ dangling_weights = np.array([dangling.get(n, 0) for n in nodelist], dtype=float)
489
+ dangling_weights /= dangling_weights.sum()
490
+ is_dangling = np.where(S == 0)[0]
491
+
492
+ # power iteration: make up to max_iter iterations
493
+ for _ in range(max_iter):
494
+ xlast = x
495
+ x = alpha * (x @ A + sum(x[is_dangling]) * dangling_weights) + (1 - alpha) * p
496
+ # check convergence, l1 norm
497
+ err = np.absolute(x - xlast).sum()
498
+ if err < N * tol:
499
+ return dict(zip(nodelist, map(float, x)))
500
+ raise nx.PowerIterationFailedConvergence(max_iter)
minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/__init__.py ADDED
File without changes
minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (193 Bytes). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/test_hits.cpython-310.pyc ADDED
Binary file (2.96 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/test_pagerank.cpython-310.pyc ADDED
Binary file (7.59 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/test_hits.py ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+
5
+ np = pytest.importorskip("numpy")
6
+ sp = pytest.importorskip("scipy")
7
+
8
+ from networkx.algorithms.link_analysis.hits_alg import (
9
+ _hits_numpy,
10
+ _hits_python,
11
+ _hits_scipy,
12
+ )
13
+
14
+ # Example from
15
+ # A. Langville and C. Meyer, "A survey of eigenvector methods of web
16
+ # information retrieval." http://citeseer.ist.psu.edu/713792.html
17
+
18
+
19
+ class TestHITS:
20
+ @classmethod
21
+ def setup_class(cls):
22
+ G = nx.DiGraph()
23
+
24
+ edges = [(1, 3), (1, 5), (2, 1), (3, 5), (5, 4), (5, 3), (6, 5)]
25
+
26
+ G.add_edges_from(edges, weight=1)
27
+ cls.G = G
28
+ cls.G.a = dict(
29
+ zip(sorted(G), [0.000000, 0.000000, 0.366025, 0.133975, 0.500000, 0.000000])
30
+ )
31
+ cls.G.h = dict(
32
+ zip(sorted(G), [0.366025, 0.000000, 0.211325, 0.000000, 0.211325, 0.211325])
33
+ )
34
+
35
+ def test_hits_numpy(self):
36
+ G = self.G
37
+ h, a = _hits_numpy(G)
38
+ for n in G:
39
+ assert h[n] == pytest.approx(G.h[n], abs=1e-4)
40
+ for n in G:
41
+ assert a[n] == pytest.approx(G.a[n], abs=1e-4)
42
+
43
+ @pytest.mark.parametrize("hits_alg", (nx.hits, _hits_python, _hits_scipy))
44
+ def test_hits(self, hits_alg):
45
+ G = self.G
46
+ h, a = hits_alg(G, tol=1.0e-08)
47
+ for n in G:
48
+ assert h[n] == pytest.approx(G.h[n], abs=1e-4)
49
+ for n in G:
50
+ assert a[n] == pytest.approx(G.a[n], abs=1e-4)
51
+ nstart = {i: 1.0 / 2 for i in G}
52
+ h, a = hits_alg(G, nstart=nstart)
53
+ for n in G:
54
+ assert h[n] == pytest.approx(G.h[n], abs=1e-4)
55
+ for n in G:
56
+ assert a[n] == pytest.approx(G.a[n], abs=1e-4)
57
+
58
+ def test_empty(self):
59
+ G = nx.Graph()
60
+ assert nx.hits(G) == ({}, {})
61
+ assert _hits_numpy(G) == ({}, {})
62
+ assert _hits_python(G) == ({}, {})
63
+ assert _hits_scipy(G) == ({}, {})
64
+
65
+ def test_hits_not_convergent(self):
66
+ G = nx.path_graph(50)
67
+ with pytest.raises(nx.PowerIterationFailedConvergence):
68
+ _hits_scipy(G, max_iter=1)
69
+ with pytest.raises(nx.PowerIterationFailedConvergence):
70
+ _hits_python(G, max_iter=1)
71
+ with pytest.raises(nx.PowerIterationFailedConvergence):
72
+ _hits_scipy(G, max_iter=0)
73
+ with pytest.raises(nx.PowerIterationFailedConvergence):
74
+ _hits_python(G, max_iter=0)
75
+ with pytest.raises(nx.PowerIterationFailedConvergence):
76
+ nx.hits(G, max_iter=0)
77
+ with pytest.raises(nx.PowerIterationFailedConvergence):
78
+ nx.hits(G, max_iter=1)
minigpt2/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/test_pagerank.py ADDED
@@ -0,0 +1,214 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import random
2
+
3
+ import pytest
4
+
5
+ import networkx as nx
6
+ from networkx.classes.tests import dispatch_interface
7
+
8
+ np = pytest.importorskip("numpy")
9
+ pytest.importorskip("scipy")
10
+
11
+ from networkx.algorithms.link_analysis.pagerank_alg import (
12
+ _pagerank_numpy,
13
+ _pagerank_python,
14
+ _pagerank_scipy,
15
+ )
16
+
17
+ # Example from
18
+ # A. Langville and C. Meyer, "A survey of eigenvector methods of web
19
+ # information retrieval." http://citeseer.ist.psu.edu/713792.html
20
+
21
+
22
+ class TestPageRank:
23
+ @classmethod
24
+ def setup_class(cls):
25
+ G = nx.DiGraph()
26
+ edges = [
27
+ (1, 2),
28
+ (1, 3),
29
+ # 2 is a dangling node
30
+ (3, 1),
31
+ (3, 2),
32
+ (3, 5),
33
+ (4, 5),
34
+ (4, 6),
35
+ (5, 4),
36
+ (5, 6),
37
+ (6, 4),
38
+ ]
39
+ G.add_edges_from(edges)
40
+ cls.G = G
41
+ cls.G.pagerank = dict(
42
+ zip(
43
+ sorted(G),
44
+ [
45
+ 0.03721197,
46
+ 0.05395735,
47
+ 0.04150565,
48
+ 0.37508082,
49
+ 0.20599833,
50
+ 0.28624589,
51
+ ],
52
+ )
53
+ )
54
+ cls.dangling_node_index = 1
55
+ cls.dangling_edges = {1: 2, 2: 3, 3: 0, 4: 0, 5: 0, 6: 0}
56
+ cls.G.dangling_pagerank = dict(
57
+ zip(
58
+ sorted(G),
59
+ [0.10844518, 0.18618601, 0.0710892, 0.2683668, 0.15919783, 0.20671497],
60
+ )
61
+ )
62
+
63
+ @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python))
64
+ def test_pagerank(self, alg):
65
+ G = self.G
66
+ p = alg(G, alpha=0.9, tol=1.0e-08)
67
+ for n in G:
68
+ assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4)
69
+
70
+ nstart = {n: random.random() for n in G}
71
+ p = alg(G, alpha=0.9, tol=1.0e-08, nstart=nstart)
72
+ for n in G:
73
+ assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4)
74
+
75
+ @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python))
76
+ def test_pagerank_max_iter(self, alg):
77
+ with pytest.raises(nx.PowerIterationFailedConvergence):
78
+ alg(self.G, max_iter=0)
79
+
80
+ def test_numpy_pagerank(self):
81
+ G = self.G
82
+ p = _pagerank_numpy(G, alpha=0.9)
83
+ for n in G:
84
+ assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4)
85
+
86
+ def test_google_matrix(self):
87
+ G = self.G
88
+ M = nx.google_matrix(G, alpha=0.9, nodelist=sorted(G))
89
+ _, ev = np.linalg.eig(M.T)
90
+ p = ev[:, 0] / ev[:, 0].sum()
91
+ for a, b in zip(p, self.G.pagerank.values()):
92
+ assert a == pytest.approx(b, abs=1e-7)
93
+
94
+ @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python, _pagerank_numpy))
95
+ def test_personalization(self, alg):
96
+ G = nx.complete_graph(4)
97
+ personalize = {0: 1, 1: 1, 2: 4, 3: 4}
98
+ answer = {
99
+ 0: 0.23246732615667579,
100
+ 1: 0.23246732615667579,
101
+ 2: 0.267532673843324,
102
+ 3: 0.2675326738433241,
103
+ }
104
+ p = alg(G, alpha=0.85, personalization=personalize)
105
+ for n in G:
106
+ assert p[n] == pytest.approx(answer[n], abs=1e-4)
107
+
108
+ @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python, nx.google_matrix))
109
+ def test_zero_personalization_vector(self, alg):
110
+ G = nx.complete_graph(4)
111
+ personalize = {0: 0, 1: 0, 2: 0, 3: 0}
112
+ pytest.raises(ZeroDivisionError, alg, G, personalization=personalize)
113
+
114
+ @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python))
115
+ def test_one_nonzero_personalization_value(self, alg):
116
+ G = nx.complete_graph(4)
117
+ personalize = {0: 0, 1: 0, 2: 0, 3: 1}
118
+ answer = {
119
+ 0: 0.22077931820379187,
120
+ 1: 0.22077931820379187,
121
+ 2: 0.22077931820379187,
122
+ 3: 0.3376620453886241,
123
+ }
124
+ p = alg(G, alpha=0.85, personalization=personalize)
125
+ for n in G:
126
+ assert p[n] == pytest.approx(answer[n], abs=1e-4)
127
+
128
+ @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python))
129
+ def test_incomplete_personalization(self, alg):
130
+ G = nx.complete_graph(4)
131
+ personalize = {3: 1}
132
+ answer = {
133
+ 0: 0.22077931820379187,
134
+ 1: 0.22077931820379187,
135
+ 2: 0.22077931820379187,
136
+ 3: 0.3376620453886241,
137
+ }
138
+ p = alg(G, alpha=0.85, personalization=personalize)
139
+ for n in G:
140
+ assert p[n] == pytest.approx(answer[n], abs=1e-4)
141
+
142
+ def test_dangling_matrix(self):
143
+ """
144
+ Tests that the google_matrix doesn't change except for the dangling
145
+ nodes.
146
+ """
147
+ G = self.G
148
+ dangling = self.dangling_edges
149
+ dangling_sum = sum(dangling.values())
150
+ M1 = nx.google_matrix(G, personalization=dangling)
151
+ M2 = nx.google_matrix(G, personalization=dangling, dangling=dangling)
152
+ for i in range(len(G)):
153
+ for j in range(len(G)):
154
+ if i == self.dangling_node_index and (j + 1) in dangling:
155
+ assert M2[i, j] == pytest.approx(
156
+ dangling[j + 1] / dangling_sum, abs=1e-4
157
+ )
158
+ else:
159
+ assert M2[i, j] == pytest.approx(M1[i, j], abs=1e-4)
160
+
161
+ @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python, _pagerank_numpy))
162
+ def test_dangling_pagerank(self, alg):
163
+ pr = alg(self.G, dangling=self.dangling_edges)
164
+ for n in self.G:
165
+ assert pr[n] == pytest.approx(self.G.dangling_pagerank[n], abs=1e-4)
166
+
167
+ def test_empty(self):
168
+ G = nx.Graph()
169
+ assert nx.pagerank(G) == {}
170
+ assert _pagerank_python(G) == {}
171
+ assert _pagerank_numpy(G) == {}
172
+ assert nx.google_matrix(G).shape == (0, 0)
173
+
174
+ @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python))
175
+ def test_multigraph(self, alg):
176
+ G = nx.MultiGraph()
177
+ G.add_edges_from([(1, 2), (1, 2), (1, 2), (2, 3), (2, 3), ("3", 3), ("3", 3)])
178
+ answer = {
179
+ 1: 0.21066048614468322,
180
+ 2: 0.3395308825985378,
181
+ 3: 0.28933951385531687,
182
+ "3": 0.16046911740146227,
183
+ }
184
+ p = alg(G)
185
+ for n in G:
186
+ assert p[n] == pytest.approx(answer[n], abs=1e-4)
187
+
188
+
189
+ class TestPageRankScipy(TestPageRank):
190
+ def test_scipy_pagerank(self):
191
+ G = self.G
192
+ p = _pagerank_scipy(G, alpha=0.9, tol=1.0e-08)
193
+ for n in G:
194
+ assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4)
195
+ personalize = {n: random.random() for n in G}
196
+ p = _pagerank_scipy(G, alpha=0.9, tol=1.0e-08, personalization=personalize)
197
+
198
+ nstart = {n: random.random() for n in G}
199
+ p = _pagerank_scipy(G, alpha=0.9, tol=1.0e-08, nstart=nstart)
200
+ for n in G:
201
+ assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4)
202
+
203
+ def test_scipy_pagerank_max_iter(self):
204
+ with pytest.raises(nx.PowerIterationFailedConvergence):
205
+ _pagerank_scipy(self.G, max_iter=0)
206
+
207
+ def test_dangling_scipy_pagerank(self):
208
+ pr = _pagerank_scipy(self.G, dangling=self.dangling_edges)
209
+ for n in self.G:
210
+ assert pr[n] == pytest.approx(self.G.dangling_pagerank[n], abs=1e-4)
211
+
212
+ def test_empty_scipy(self):
213
+ G = nx.Graph()
214
+ assert _pagerank_scipy(G) == {}
minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/__init__.py ADDED
File without changes
minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_boundary.py ADDED
@@ -0,0 +1,154 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Unit tests for the :mod:`networkx.algorithms.boundary` module."""
2
+
3
+ from itertools import combinations
4
+
5
+ import pytest
6
+
7
+ import networkx as nx
8
+ from networkx import convert_node_labels_to_integers as cnlti
9
+ from networkx.utils import edges_equal
10
+
11
+
12
+ class TestNodeBoundary:
13
+ """Unit tests for the :func:`~networkx.node_boundary` function."""
14
+
15
+ def test_null_graph(self):
16
+ """Tests that the null graph has empty node boundaries."""
17
+ null = nx.null_graph()
18
+ assert nx.node_boundary(null, []) == set()
19
+ assert nx.node_boundary(null, [], []) == set()
20
+ assert nx.node_boundary(null, [1, 2, 3]) == set()
21
+ assert nx.node_boundary(null, [1, 2, 3], [4, 5, 6]) == set()
22
+ assert nx.node_boundary(null, [1, 2, 3], [3, 4, 5]) == set()
23
+
24
+ def test_path_graph(self):
25
+ P10 = cnlti(nx.path_graph(10), first_label=1)
26
+ assert nx.node_boundary(P10, []) == set()
27
+ assert nx.node_boundary(P10, [], []) == set()
28
+ assert nx.node_boundary(P10, [1, 2, 3]) == {4}
29
+ assert nx.node_boundary(P10, [4, 5, 6]) == {3, 7}
30
+ assert nx.node_boundary(P10, [3, 4, 5, 6, 7]) == {2, 8}
31
+ assert nx.node_boundary(P10, [8, 9, 10]) == {7}
32
+ assert nx.node_boundary(P10, [4, 5, 6], [9, 10]) == set()
33
+
34
+ def test_complete_graph(self):
35
+ K10 = cnlti(nx.complete_graph(10), first_label=1)
36
+ assert nx.node_boundary(K10, []) == set()
37
+ assert nx.node_boundary(K10, [], []) == set()
38
+ assert nx.node_boundary(K10, [1, 2, 3]) == {4, 5, 6, 7, 8, 9, 10}
39
+ assert nx.node_boundary(K10, [4, 5, 6]) == {1, 2, 3, 7, 8, 9, 10}
40
+ assert nx.node_boundary(K10, [3, 4, 5, 6, 7]) == {1, 2, 8, 9, 10}
41
+ assert nx.node_boundary(K10, [4, 5, 6], []) == set()
42
+ assert nx.node_boundary(K10, K10) == set()
43
+ assert nx.node_boundary(K10, [1, 2, 3], [3, 4, 5]) == {4, 5}
44
+
45
+ def test_petersen(self):
46
+ """Check boundaries in the petersen graph
47
+
48
+ cheeger(G,k)=min(|bdy(S)|/|S| for |S|=k, 0<k<=|V(G)|/2)
49
+
50
+ """
51
+
52
+ def cheeger(G, k):
53
+ return min(len(nx.node_boundary(G, nn)) / k for nn in combinations(G, k))
54
+
55
+ P = nx.petersen_graph()
56
+ assert cheeger(P, 1) == pytest.approx(3.00, abs=1e-2)
57
+ assert cheeger(P, 2) == pytest.approx(2.00, abs=1e-2)
58
+ assert cheeger(P, 3) == pytest.approx(1.67, abs=1e-2)
59
+ assert cheeger(P, 4) == pytest.approx(1.00, abs=1e-2)
60
+ assert cheeger(P, 5) == pytest.approx(0.80, abs=1e-2)
61
+
62
+ def test_directed(self):
63
+ """Tests the node boundary of a directed graph."""
64
+ G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)])
65
+ S = {0, 1}
66
+ boundary = nx.node_boundary(G, S)
67
+ expected = {2}
68
+ assert boundary == expected
69
+
70
+ def test_multigraph(self):
71
+ """Tests the node boundary of a multigraph."""
72
+ G = nx.MultiGraph(list(nx.cycle_graph(5).edges()) * 2)
73
+ S = {0, 1}
74
+ boundary = nx.node_boundary(G, S)
75
+ expected = {2, 4}
76
+ assert boundary == expected
77
+
78
+ def test_multidigraph(self):
79
+ """Tests the edge boundary of a multidigraph."""
80
+ edges = [(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)]
81
+ G = nx.MultiDiGraph(edges * 2)
82
+ S = {0, 1}
83
+ boundary = nx.node_boundary(G, S)
84
+ expected = {2}
85
+ assert boundary == expected
86
+
87
+
88
+ class TestEdgeBoundary:
89
+ """Unit tests for the :func:`~networkx.edge_boundary` function."""
90
+
91
+ def test_null_graph(self):
92
+ null = nx.null_graph()
93
+ assert list(nx.edge_boundary(null, [])) == []
94
+ assert list(nx.edge_boundary(null, [], [])) == []
95
+ assert list(nx.edge_boundary(null, [1, 2, 3])) == []
96
+ assert list(nx.edge_boundary(null, [1, 2, 3], [4, 5, 6])) == []
97
+ assert list(nx.edge_boundary(null, [1, 2, 3], [3, 4, 5])) == []
98
+
99
+ def test_path_graph(self):
100
+ P10 = cnlti(nx.path_graph(10), first_label=1)
101
+ assert list(nx.edge_boundary(P10, [])) == []
102
+ assert list(nx.edge_boundary(P10, [], [])) == []
103
+ assert list(nx.edge_boundary(P10, [1, 2, 3])) == [(3, 4)]
104
+ assert sorted(nx.edge_boundary(P10, [4, 5, 6])) == [(4, 3), (6, 7)]
105
+ assert sorted(nx.edge_boundary(P10, [3, 4, 5, 6, 7])) == [(3, 2), (7, 8)]
106
+ assert list(nx.edge_boundary(P10, [8, 9, 10])) == [(8, 7)]
107
+ assert sorted(nx.edge_boundary(P10, [4, 5, 6], [9, 10])) == []
108
+ assert list(nx.edge_boundary(P10, [1, 2, 3], [3, 4, 5])) == [(2, 3), (3, 4)]
109
+
110
+ def test_complete_graph(self):
111
+ K10 = cnlti(nx.complete_graph(10), first_label=1)
112
+
113
+ def ilen(iterable):
114
+ return sum(1 for i in iterable)
115
+
116
+ assert list(nx.edge_boundary(K10, [])) == []
117
+ assert list(nx.edge_boundary(K10, [], [])) == []
118
+ assert ilen(nx.edge_boundary(K10, [1, 2, 3])) == 21
119
+ assert ilen(nx.edge_boundary(K10, [4, 5, 6, 7])) == 24
120
+ assert ilen(nx.edge_boundary(K10, [3, 4, 5, 6, 7])) == 25
121
+ assert ilen(nx.edge_boundary(K10, [8, 9, 10])) == 21
122
+ assert edges_equal(
123
+ nx.edge_boundary(K10, [4, 5, 6], [9, 10]),
124
+ [(4, 9), (4, 10), (5, 9), (5, 10), (6, 9), (6, 10)],
125
+ )
126
+ assert edges_equal(
127
+ nx.edge_boundary(K10, [1, 2, 3], [3, 4, 5]),
128
+ [(1, 3), (1, 4), (1, 5), (2, 3), (2, 4), (2, 5), (3, 4), (3, 5)],
129
+ )
130
+
131
+ def test_directed(self):
132
+ """Tests the edge boundary of a directed graph."""
133
+ G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)])
134
+ S = {0, 1}
135
+ boundary = list(nx.edge_boundary(G, S))
136
+ expected = [(1, 2)]
137
+ assert boundary == expected
138
+
139
+ def test_multigraph(self):
140
+ """Tests the edge boundary of a multigraph."""
141
+ G = nx.MultiGraph(list(nx.cycle_graph(5).edges()) * 2)
142
+ S = {0, 1}
143
+ boundary = list(nx.edge_boundary(G, S))
144
+ expected = [(0, 4), (0, 4), (1, 2), (1, 2)]
145
+ assert boundary == expected
146
+
147
+ def test_multidigraph(self):
148
+ """Tests the edge boundary of a multidigraph."""
149
+ edges = [(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)]
150
+ G = nx.MultiDiGraph(edges * 2)
151
+ S = {0, 1}
152
+ boundary = list(nx.edge_boundary(G, S))
153
+ expected = [(1, 2), (1, 2)]
154
+ assert boundary == expected
minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_bridges.py ADDED
@@ -0,0 +1,144 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Unit tests for bridge-finding algorithms."""
2
+
3
+ import pytest
4
+
5
+ import networkx as nx
6
+
7
+
8
+ class TestBridges:
9
+ """Unit tests for the bridge-finding function."""
10
+
11
+ def test_single_bridge(self):
12
+ edges = [
13
+ # DFS tree edges.
14
+ (1, 2),
15
+ (2, 3),
16
+ (3, 4),
17
+ (3, 5),
18
+ (5, 6),
19
+ (6, 7),
20
+ (7, 8),
21
+ (5, 9),
22
+ (9, 10),
23
+ # Nontree edges.
24
+ (1, 3),
25
+ (1, 4),
26
+ (2, 5),
27
+ (5, 10),
28
+ (6, 8),
29
+ ]
30
+ G = nx.Graph(edges)
31
+ source = 1
32
+ bridges = list(nx.bridges(G, source))
33
+ assert bridges == [(5, 6)]
34
+
35
+ def test_barbell_graph(self):
36
+ # The (3, 0) barbell graph has two triangles joined by a single edge.
37
+ G = nx.barbell_graph(3, 0)
38
+ source = 0
39
+ bridges = list(nx.bridges(G, source))
40
+ assert bridges == [(2, 3)]
41
+
42
+ def test_multiedge_bridge(self):
43
+ edges = [
44
+ (0, 1),
45
+ (0, 2),
46
+ (1, 2),
47
+ (1, 2),
48
+ (2, 3),
49
+ (3, 4),
50
+ (3, 4),
51
+ ]
52
+ G = nx.MultiGraph(edges)
53
+ assert list(nx.bridges(G)) == [(2, 3)]
54
+
55
+
56
+ class TestHasBridges:
57
+ """Unit tests for the has bridges function."""
58
+
59
+ def test_single_bridge(self):
60
+ edges = [
61
+ # DFS tree edges.
62
+ (1, 2),
63
+ (2, 3),
64
+ (3, 4),
65
+ (3, 5),
66
+ (5, 6), # The only bridge edge
67
+ (6, 7),
68
+ (7, 8),
69
+ (5, 9),
70
+ (9, 10),
71
+ # Nontree edges.
72
+ (1, 3),
73
+ (1, 4),
74
+ (2, 5),
75
+ (5, 10),
76
+ (6, 8),
77
+ ]
78
+ G = nx.Graph(edges)
79
+ assert nx.has_bridges(G) # Default root
80
+ assert nx.has_bridges(G, root=1) # arbitrary root in G
81
+
82
+ def test_has_bridges_raises_root_not_in_G(self):
83
+ G = nx.Graph()
84
+ G.add_nodes_from([1, 2, 3])
85
+ with pytest.raises(nx.NodeNotFound):
86
+ nx.has_bridges(G, root=6)
87
+
88
+ def test_multiedge_bridge(self):
89
+ edges = [
90
+ (0, 1),
91
+ (0, 2),
92
+ (1, 2),
93
+ (1, 2),
94
+ (2, 3),
95
+ (3, 4),
96
+ (3, 4),
97
+ ]
98
+ G = nx.MultiGraph(edges)
99
+ assert nx.has_bridges(G)
100
+ # Make every edge a multiedge
101
+ G.add_edges_from([(0, 1), (0, 2), (2, 3)])
102
+ assert not nx.has_bridges(G)
103
+
104
+ def test_bridges_multiple_components(self):
105
+ G = nx.Graph()
106
+ nx.add_path(G, [0, 1, 2]) # One connected component
107
+ nx.add_path(G, [4, 5, 6]) # Another connected component
108
+ assert list(nx.bridges(G, root=4)) == [(4, 5), (5, 6)]
109
+
110
+
111
+ class TestLocalBridges:
112
+ """Unit tests for the local_bridge function."""
113
+
114
+ @classmethod
115
+ def setup_class(cls):
116
+ cls.BB = nx.barbell_graph(4, 0)
117
+ cls.square = nx.cycle_graph(4)
118
+ cls.tri = nx.cycle_graph(3)
119
+
120
+ def test_nospan(self):
121
+ expected = {(3, 4), (4, 3)}
122
+ assert next(nx.local_bridges(self.BB, with_span=False)) in expected
123
+ assert set(nx.local_bridges(self.square, with_span=False)) == self.square.edges
124
+ assert list(nx.local_bridges(self.tri, with_span=False)) == []
125
+
126
+ def test_no_weight(self):
127
+ inf = float("inf")
128
+ expected = {(3, 4, inf), (4, 3, inf)}
129
+ assert next(nx.local_bridges(self.BB)) in expected
130
+ expected = {(u, v, 3) for u, v in self.square.edges}
131
+ assert set(nx.local_bridges(self.square)) == expected
132
+ assert list(nx.local_bridges(self.tri)) == []
133
+
134
+ def test_weight(self):
135
+ inf = float("inf")
136
+ G = self.square.copy()
137
+
138
+ G.edges[1, 2]["weight"] = 2
139
+ expected = {(u, v, 5 - wt) for u, v, wt in G.edges(data="weight", default=1)}
140
+ assert set(nx.local_bridges(G, weight="weight")) == expected
141
+
142
+ expected = {(u, v, 6) for u, v in G.edges}
143
+ lb = nx.local_bridges(G, weight=lambda u, v, d: 2)
144
+ assert set(lb) == expected
minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_broadcasting.py ADDED
@@ -0,0 +1,82 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Unit tests for the broadcasting module."""
2
+
3
+ import math
4
+
5
+ import networkx as nx
6
+
7
+
8
+ def test_example_tree_broadcast():
9
+ """
10
+ Test the BROADCAST algorithm on the example in the paper titled: "Information Dissemination in Trees"
11
+ """
12
+ edge_list = [
13
+ (0, 1),
14
+ (1, 2),
15
+ (2, 7),
16
+ (3, 4),
17
+ (5, 4),
18
+ (4, 7),
19
+ (6, 7),
20
+ (7, 9),
21
+ (8, 9),
22
+ (9, 13),
23
+ (13, 14),
24
+ (14, 15),
25
+ (14, 16),
26
+ (14, 17),
27
+ (13, 11),
28
+ (11, 10),
29
+ (11, 12),
30
+ (13, 18),
31
+ (18, 19),
32
+ (18, 20),
33
+ ]
34
+ G = nx.Graph(edge_list)
35
+ b_T, b_C = nx.tree_broadcast_center(G)
36
+ assert b_T == 6
37
+ assert b_C == {13, 9}
38
+ # test broadcast time from specific vertex
39
+ assert nx.tree_broadcast_time(G, 17) == 8
40
+ assert nx.tree_broadcast_time(G, 3) == 9
41
+ # test broadcast time of entire tree
42
+ assert nx.tree_broadcast_time(G) == 10
43
+
44
+
45
+ def test_path_broadcast():
46
+ for i in range(2, 12):
47
+ G = nx.path_graph(i)
48
+ b_T, b_C = nx.tree_broadcast_center(G)
49
+ assert b_T == math.ceil(i / 2)
50
+ assert b_C == {
51
+ math.ceil(i / 2),
52
+ math.floor(i / 2),
53
+ math.ceil(i / 2 - 1),
54
+ math.floor(i / 2 - 1),
55
+ }
56
+ assert nx.tree_broadcast_time(G) == i - 1
57
+
58
+
59
+ def test_empty_graph_broadcast():
60
+ H = nx.empty_graph(1)
61
+ b_T, b_C = nx.tree_broadcast_center(H)
62
+ assert b_T == 0
63
+ assert b_C == {0}
64
+ assert nx.tree_broadcast_time(H) == 0
65
+
66
+
67
+ def test_star_broadcast():
68
+ for i in range(4, 12):
69
+ G = nx.star_graph(i)
70
+ b_T, b_C = nx.tree_broadcast_center(G)
71
+ assert b_T == i
72
+ assert b_C == set(G.nodes())
73
+ assert nx.tree_broadcast_time(G) == b_T
74
+
75
+
76
+ def test_binomial_tree_broadcast():
77
+ for i in range(2, 8):
78
+ G = nx.binomial_tree(i)
79
+ b_T, b_C = nx.tree_broadcast_center(G)
80
+ assert b_T == i
81
+ assert b_C == {0, 2 ** (i - 1)}
82
+ assert nx.tree_broadcast_time(G) == 2 * i - 1
minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_chains.py ADDED
@@ -0,0 +1,141 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Unit tests for the chain decomposition functions."""
2
+
3
+ from itertools import cycle, islice
4
+
5
+ import pytest
6
+
7
+ import networkx as nx
8
+
9
+
10
+ def cycles(seq):
11
+ """Yields cyclic permutations of the given sequence.
12
+
13
+ For example::
14
+
15
+ >>> list(cycles("abc"))
16
+ [('a', 'b', 'c'), ('b', 'c', 'a'), ('c', 'a', 'b')]
17
+
18
+ """
19
+ n = len(seq)
20
+ cycled_seq = cycle(seq)
21
+ for x in seq:
22
+ yield tuple(islice(cycled_seq, n))
23
+ next(cycled_seq)
24
+
25
+
26
+ def cyclic_equals(seq1, seq2):
27
+ """Decide whether two sequences are equal up to cyclic permutations.
28
+
29
+ For example::
30
+
31
+ >>> cyclic_equals("xyz", "zxy")
32
+ True
33
+ >>> cyclic_equals("xyz", "zyx")
34
+ False
35
+
36
+ """
37
+ # Cast seq2 to a tuple since `cycles()` yields tuples.
38
+ seq2 = tuple(seq2)
39
+ return any(x == tuple(seq2) for x in cycles(seq1))
40
+
41
+
42
+ class TestChainDecomposition:
43
+ """Unit tests for the chain decomposition function."""
44
+
45
+ def assertContainsChain(self, chain, expected):
46
+ # A cycle could be expressed in two different orientations, one
47
+ # forward and one backward, so we need to check for cyclic
48
+ # equality in both orientations.
49
+ reversed_chain = list(reversed([tuple(reversed(e)) for e in chain]))
50
+ for candidate in expected:
51
+ if cyclic_equals(chain, candidate):
52
+ break
53
+ if cyclic_equals(reversed_chain, candidate):
54
+ break
55
+ else:
56
+ self.fail("chain not found")
57
+
58
+ def test_decomposition(self):
59
+ edges = [
60
+ # DFS tree edges.
61
+ (1, 2),
62
+ (2, 3),
63
+ (3, 4),
64
+ (3, 5),
65
+ (5, 6),
66
+ (6, 7),
67
+ (7, 8),
68
+ (5, 9),
69
+ (9, 10),
70
+ # Nontree edges.
71
+ (1, 3),
72
+ (1, 4),
73
+ (2, 5),
74
+ (5, 10),
75
+ (6, 8),
76
+ ]
77
+ G = nx.Graph(edges)
78
+ expected = [
79
+ [(1, 3), (3, 2), (2, 1)],
80
+ [(1, 4), (4, 3)],
81
+ [(2, 5), (5, 3)],
82
+ [(5, 10), (10, 9), (9, 5)],
83
+ [(6, 8), (8, 7), (7, 6)],
84
+ ]
85
+ chains = list(nx.chain_decomposition(G, root=1))
86
+ assert len(chains) == len(expected)
87
+
88
+ # This chain decomposition isn't unique
89
+ # for chain in chains:
90
+ # print(chain)
91
+ # self.assertContainsChain(chain, expected)
92
+
93
+ def test_barbell_graph(self):
94
+ # The (3, 0) barbell graph has two triangles joined by a single edge.
95
+ G = nx.barbell_graph(3, 0)
96
+ chains = list(nx.chain_decomposition(G, root=0))
97
+ expected = [[(0, 1), (1, 2), (2, 0)], [(3, 4), (4, 5), (5, 3)]]
98
+ assert len(chains) == len(expected)
99
+ for chain in chains:
100
+ self.assertContainsChain(chain, expected)
101
+
102
+ def test_disconnected_graph(self):
103
+ """Test for a graph with multiple connected components."""
104
+ G = nx.barbell_graph(3, 0)
105
+ H = nx.barbell_graph(3, 0)
106
+ mapping = dict(zip(range(6), "abcdef"))
107
+ nx.relabel_nodes(H, mapping, copy=False)
108
+ G = nx.union(G, H)
109
+ chains = list(nx.chain_decomposition(G))
110
+ expected = [
111
+ [(0, 1), (1, 2), (2, 0)],
112
+ [(3, 4), (4, 5), (5, 3)],
113
+ [("a", "b"), ("b", "c"), ("c", "a")],
114
+ [("d", "e"), ("e", "f"), ("f", "d")],
115
+ ]
116
+ assert len(chains) == len(expected)
117
+ for chain in chains:
118
+ self.assertContainsChain(chain, expected)
119
+
120
+ def test_disconnected_graph_root_node(self):
121
+ """Test for a single component of a disconnected graph."""
122
+ G = nx.barbell_graph(3, 0)
123
+ H = nx.barbell_graph(3, 0)
124
+ mapping = dict(zip(range(6), "abcdef"))
125
+ nx.relabel_nodes(H, mapping, copy=False)
126
+ G = nx.union(G, H)
127
+ chains = list(nx.chain_decomposition(G, root="a"))
128
+ expected = [
129
+ [("a", "b"), ("b", "c"), ("c", "a")],
130
+ [("d", "e"), ("e", "f"), ("f", "d")],
131
+ ]
132
+ assert len(chains) == len(expected)
133
+ for chain in chains:
134
+ self.assertContainsChain(chain, expected)
135
+
136
+ def test_chain_decomposition_root_not_in_G(self):
137
+ """Test chain decomposition when root is not in graph"""
138
+ G = nx.Graph()
139
+ G.add_nodes_from([1, 2, 3])
140
+ with pytest.raises(nx.NodeNotFound):
141
+ nx.has_bridges(G, root=6)
minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_chordal.py ADDED
@@ -0,0 +1,129 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+
5
+
6
+ class TestMCS:
7
+ @classmethod
8
+ def setup_class(cls):
9
+ # simple graph
10
+ connected_chordal_G = nx.Graph()
11
+ connected_chordal_G.add_edges_from(
12
+ [
13
+ (1, 2),
14
+ (1, 3),
15
+ (2, 3),
16
+ (2, 4),
17
+ (3, 4),
18
+ (3, 5),
19
+ (3, 6),
20
+ (4, 5),
21
+ (4, 6),
22
+ (5, 6),
23
+ ]
24
+ )
25
+ cls.connected_chordal_G = connected_chordal_G
26
+
27
+ chordal_G = nx.Graph()
28
+ chordal_G.add_edges_from(
29
+ [
30
+ (1, 2),
31
+ (1, 3),
32
+ (2, 3),
33
+ (2, 4),
34
+ (3, 4),
35
+ (3, 5),
36
+ (3, 6),
37
+ (4, 5),
38
+ (4, 6),
39
+ (5, 6),
40
+ (7, 8),
41
+ ]
42
+ )
43
+ chordal_G.add_node(9)
44
+ cls.chordal_G = chordal_G
45
+
46
+ non_chordal_G = nx.Graph()
47
+ non_chordal_G.add_edges_from([(1, 2), (1, 3), (2, 4), (2, 5), (3, 4), (3, 5)])
48
+ cls.non_chordal_G = non_chordal_G
49
+
50
+ self_loop_G = nx.Graph()
51
+ self_loop_G.add_edges_from([(1, 1)])
52
+ cls.self_loop_G = self_loop_G
53
+
54
+ @pytest.mark.parametrize("G", (nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()))
55
+ def test_is_chordal_not_implemented(self, G):
56
+ with pytest.raises(nx.NetworkXNotImplemented):
57
+ nx.is_chordal(G)
58
+
59
+ def test_is_chordal(self):
60
+ assert not nx.is_chordal(self.non_chordal_G)
61
+ assert nx.is_chordal(self.chordal_G)
62
+ assert nx.is_chordal(self.connected_chordal_G)
63
+ assert nx.is_chordal(nx.Graph())
64
+ assert nx.is_chordal(nx.complete_graph(3))
65
+ assert nx.is_chordal(nx.cycle_graph(3))
66
+ assert not nx.is_chordal(nx.cycle_graph(5))
67
+ assert nx.is_chordal(self.self_loop_G)
68
+
69
+ def test_induced_nodes(self):
70
+ G = nx.generators.classic.path_graph(10)
71
+ Induced_nodes = nx.find_induced_nodes(G, 1, 9, 2)
72
+ assert Induced_nodes == {1, 2, 3, 4, 5, 6, 7, 8, 9}
73
+ pytest.raises(
74
+ nx.NetworkXTreewidthBoundExceeded, nx.find_induced_nodes, G, 1, 9, 1
75
+ )
76
+ Induced_nodes = nx.find_induced_nodes(self.chordal_G, 1, 6)
77
+ assert Induced_nodes == {1, 2, 4, 6}
78
+ pytest.raises(nx.NetworkXError, nx.find_induced_nodes, self.non_chordal_G, 1, 5)
79
+
80
+ def test_graph_treewidth(self):
81
+ with pytest.raises(nx.NetworkXError, match="Input graph is not chordal"):
82
+ nx.chordal_graph_treewidth(self.non_chordal_G)
83
+
84
+ def test_chordal_find_cliques(self):
85
+ cliques = {
86
+ frozenset([9]),
87
+ frozenset([7, 8]),
88
+ frozenset([1, 2, 3]),
89
+ frozenset([2, 3, 4]),
90
+ frozenset([3, 4, 5, 6]),
91
+ }
92
+ assert set(nx.chordal_graph_cliques(self.chordal_G)) == cliques
93
+ with pytest.raises(nx.NetworkXError, match="Input graph is not chordal"):
94
+ set(nx.chordal_graph_cliques(self.non_chordal_G))
95
+ with pytest.raises(nx.NetworkXError, match="Input graph is not chordal"):
96
+ set(nx.chordal_graph_cliques(self.self_loop_G))
97
+
98
+ def test_chordal_find_cliques_path(self):
99
+ G = nx.path_graph(10)
100
+ cliqueset = nx.chordal_graph_cliques(G)
101
+ for u, v in G.edges():
102
+ assert frozenset([u, v]) in cliqueset or frozenset([v, u]) in cliqueset
103
+
104
+ def test_chordal_find_cliquesCC(self):
105
+ cliques = {frozenset([1, 2, 3]), frozenset([2, 3, 4]), frozenset([3, 4, 5, 6])}
106
+ cgc = nx.chordal_graph_cliques
107
+ assert set(cgc(self.connected_chordal_G)) == cliques
108
+
109
+ def test_complete_to_chordal_graph(self):
110
+ fgrg = nx.fast_gnp_random_graph
111
+ test_graphs = [
112
+ nx.barbell_graph(6, 2),
113
+ nx.cycle_graph(15),
114
+ nx.wheel_graph(20),
115
+ nx.grid_graph([10, 4]),
116
+ nx.ladder_graph(15),
117
+ nx.star_graph(5),
118
+ nx.bull_graph(),
119
+ fgrg(20, 0.3, seed=1),
120
+ ]
121
+ for G in test_graphs:
122
+ H, a = nx.complete_to_chordal_graph(G)
123
+ assert nx.is_chordal(H)
124
+ assert len(a) == H.number_of_nodes()
125
+ if nx.is_chordal(G):
126
+ assert G.number_of_edges() == H.number_of_edges()
127
+ assert set(a.values()) == {0}
128
+ else:
129
+ assert len(set(a.values())) == H.number_of_nodes()
minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_clique.py ADDED
@@ -0,0 +1,291 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx import convert_node_labels_to_integers as cnlti
5
+
6
+
7
+ class TestCliques:
8
+ def setup_method(self):
9
+ z = [3, 4, 3, 4, 2, 4, 2, 1, 1, 1, 1]
10
+ self.G = cnlti(nx.generators.havel_hakimi_graph(z), first_label=1)
11
+ self.cl = list(nx.find_cliques(self.G))
12
+ H = nx.complete_graph(6)
13
+ H = nx.relabel_nodes(H, {i: i + 1 for i in range(6)})
14
+ H.remove_edges_from([(2, 6), (2, 5), (2, 4), (1, 3), (5, 3)])
15
+ self.H = H
16
+
17
+ def test_find_cliques1(self):
18
+ cl = list(nx.find_cliques(self.G))
19
+ rcl = nx.find_cliques_recursive(self.G)
20
+ expected = [[2, 6, 1, 3], [2, 6, 4], [5, 4, 7], [8, 9], [10, 11]]
21
+ assert sorted(map(sorted, cl)) == sorted(map(sorted, rcl))
22
+ assert sorted(map(sorted, cl)) == sorted(map(sorted, expected))
23
+
24
+ def test_selfloops(self):
25
+ self.G.add_edge(1, 1)
26
+ cl = list(nx.find_cliques(self.G))
27
+ rcl = list(nx.find_cliques_recursive(self.G))
28
+ assert set(map(frozenset, cl)) == set(map(frozenset, rcl))
29
+ answer = [{2, 6, 1, 3}, {2, 6, 4}, {5, 4, 7}, {8, 9}, {10, 11}]
30
+ assert len(answer) == len(cl)
31
+ assert all(set(c) in answer for c in cl)
32
+
33
+ def test_find_cliques2(self):
34
+ hcl = list(nx.find_cliques(self.H))
35
+ assert sorted(map(sorted, hcl)) == [[1, 2], [1, 4, 5, 6], [2, 3], [3, 4, 6]]
36
+
37
+ def test_find_cliques3(self):
38
+ # all cliques are [[2, 6, 1, 3], [2, 6, 4], [5, 4, 7], [8, 9], [10, 11]]
39
+
40
+ cl = list(nx.find_cliques(self.G, [2]))
41
+ rcl = nx.find_cliques_recursive(self.G, [2])
42
+ expected = [[2, 6, 1, 3], [2, 6, 4]]
43
+ assert sorted(map(sorted, rcl)) == sorted(map(sorted, expected))
44
+ assert sorted(map(sorted, cl)) == sorted(map(sorted, expected))
45
+
46
+ cl = list(nx.find_cliques(self.G, [2, 3]))
47
+ rcl = nx.find_cliques_recursive(self.G, [2, 3])
48
+ expected = [[2, 6, 1, 3]]
49
+ assert sorted(map(sorted, rcl)) == sorted(map(sorted, expected))
50
+ assert sorted(map(sorted, cl)) == sorted(map(sorted, expected))
51
+
52
+ cl = list(nx.find_cliques(self.G, [2, 6, 4]))
53
+ rcl = nx.find_cliques_recursive(self.G, [2, 6, 4])
54
+ expected = [[2, 6, 4]]
55
+ assert sorted(map(sorted, rcl)) == sorted(map(sorted, expected))
56
+ assert sorted(map(sorted, cl)) == sorted(map(sorted, expected))
57
+
58
+ cl = list(nx.find_cliques(self.G, [2, 6, 4]))
59
+ rcl = nx.find_cliques_recursive(self.G, [2, 6, 4])
60
+ expected = [[2, 6, 4]]
61
+ assert sorted(map(sorted, rcl)) == sorted(map(sorted, expected))
62
+ assert sorted(map(sorted, cl)) == sorted(map(sorted, expected))
63
+
64
+ with pytest.raises(ValueError):
65
+ list(nx.find_cliques(self.G, [2, 6, 4, 1]))
66
+
67
+ with pytest.raises(ValueError):
68
+ list(nx.find_cliques_recursive(self.G, [2, 6, 4, 1]))
69
+
70
+ def test_number_of_cliques(self):
71
+ G = self.G
72
+ assert nx.number_of_cliques(G, 1) == 1
73
+ assert list(nx.number_of_cliques(G, [1]).values()) == [1]
74
+ assert list(nx.number_of_cliques(G, [1, 2]).values()) == [1, 2]
75
+ assert nx.number_of_cliques(G, [1, 2]) == {1: 1, 2: 2}
76
+ assert nx.number_of_cliques(G, 2) == 2
77
+ assert nx.number_of_cliques(G) == {
78
+ 1: 1,
79
+ 2: 2,
80
+ 3: 1,
81
+ 4: 2,
82
+ 5: 1,
83
+ 6: 2,
84
+ 7: 1,
85
+ 8: 1,
86
+ 9: 1,
87
+ 10: 1,
88
+ 11: 1,
89
+ }
90
+ assert nx.number_of_cliques(G, nodes=list(G)) == {
91
+ 1: 1,
92
+ 2: 2,
93
+ 3: 1,
94
+ 4: 2,
95
+ 5: 1,
96
+ 6: 2,
97
+ 7: 1,
98
+ 8: 1,
99
+ 9: 1,
100
+ 10: 1,
101
+ 11: 1,
102
+ }
103
+ assert nx.number_of_cliques(G, nodes=[2, 3, 4]) == {2: 2, 3: 1, 4: 2}
104
+ assert nx.number_of_cliques(G, cliques=self.cl) == {
105
+ 1: 1,
106
+ 2: 2,
107
+ 3: 1,
108
+ 4: 2,
109
+ 5: 1,
110
+ 6: 2,
111
+ 7: 1,
112
+ 8: 1,
113
+ 9: 1,
114
+ 10: 1,
115
+ 11: 1,
116
+ }
117
+ assert nx.number_of_cliques(G, list(G), cliques=self.cl) == {
118
+ 1: 1,
119
+ 2: 2,
120
+ 3: 1,
121
+ 4: 2,
122
+ 5: 1,
123
+ 6: 2,
124
+ 7: 1,
125
+ 8: 1,
126
+ 9: 1,
127
+ 10: 1,
128
+ 11: 1,
129
+ }
130
+
131
+ def test_node_clique_number(self):
132
+ G = self.G
133
+ assert nx.node_clique_number(G, 1) == 4
134
+ assert list(nx.node_clique_number(G, [1]).values()) == [4]
135
+ assert list(nx.node_clique_number(G, [1, 2]).values()) == [4, 4]
136
+ assert nx.node_clique_number(G, [1, 2]) == {1: 4, 2: 4}
137
+ assert nx.node_clique_number(G, 1) == 4
138
+ assert nx.node_clique_number(G) == {
139
+ 1: 4,
140
+ 2: 4,
141
+ 3: 4,
142
+ 4: 3,
143
+ 5: 3,
144
+ 6: 4,
145
+ 7: 3,
146
+ 8: 2,
147
+ 9: 2,
148
+ 10: 2,
149
+ 11: 2,
150
+ }
151
+ assert nx.node_clique_number(G, cliques=self.cl) == {
152
+ 1: 4,
153
+ 2: 4,
154
+ 3: 4,
155
+ 4: 3,
156
+ 5: 3,
157
+ 6: 4,
158
+ 7: 3,
159
+ 8: 2,
160
+ 9: 2,
161
+ 10: 2,
162
+ 11: 2,
163
+ }
164
+ assert nx.node_clique_number(G, [1, 2], cliques=self.cl) == {1: 4, 2: 4}
165
+ assert nx.node_clique_number(G, 1, cliques=self.cl) == 4
166
+
167
+ def test_make_clique_bipartite(self):
168
+ G = self.G
169
+ B = nx.make_clique_bipartite(G)
170
+ assert sorted(B) == [-5, -4, -3, -2, -1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
171
+ # Project onto the nodes of the original graph.
172
+ H = nx.projected_graph(B, range(1, 12))
173
+ assert H.adj == G.adj
174
+ # Project onto the nodes representing the cliques.
175
+ H1 = nx.projected_graph(B, range(-5, 0))
176
+ # Relabel the negative numbers as positive ones.
177
+ H1 = nx.relabel_nodes(H1, {-v: v for v in range(1, 6)})
178
+ assert sorted(H1) == [1, 2, 3, 4, 5]
179
+
180
+ def test_make_max_clique_graph(self):
181
+ """Tests that the maximal clique graph is the same as the bipartite
182
+ clique graph after being projected onto the nodes representing the
183
+ cliques.
184
+
185
+ """
186
+ G = self.G
187
+ B = nx.make_clique_bipartite(G)
188
+ # Project onto the nodes representing the cliques.
189
+ H1 = nx.projected_graph(B, range(-5, 0))
190
+ # Relabel the negative numbers as nonnegative ones, starting at
191
+ # 0.
192
+ H1 = nx.relabel_nodes(H1, {-v: v - 1 for v in range(1, 6)})
193
+ H2 = nx.make_max_clique_graph(G)
194
+ assert H1.adj == H2.adj
195
+
196
+ def test_directed(self):
197
+ with pytest.raises(nx.NetworkXNotImplemented):
198
+ next(nx.find_cliques(nx.DiGraph()))
199
+
200
+ def test_find_cliques_trivial(self):
201
+ G = nx.Graph()
202
+ assert sorted(nx.find_cliques(G)) == []
203
+ assert sorted(nx.find_cliques_recursive(G)) == []
204
+
205
+ def test_make_max_clique_graph_create_using(self):
206
+ G = nx.Graph([(1, 2), (3, 1), (4, 1), (5, 6)])
207
+ E = nx.Graph([(0, 1), (0, 2), (1, 2)])
208
+ E.add_node(3)
209
+ assert nx.is_isomorphic(nx.make_max_clique_graph(G, create_using=nx.Graph), E)
210
+
211
+
212
+ class TestEnumerateAllCliques:
213
+ def test_paper_figure_4(self):
214
+ # Same graph as given in Fig. 4 of paper enumerate_all_cliques is
215
+ # based on.
216
+ # http://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=1559964&isnumber=33129
217
+ G = nx.Graph()
218
+ edges_fig_4 = [
219
+ ("a", "b"),
220
+ ("a", "c"),
221
+ ("a", "d"),
222
+ ("a", "e"),
223
+ ("b", "c"),
224
+ ("b", "d"),
225
+ ("b", "e"),
226
+ ("c", "d"),
227
+ ("c", "e"),
228
+ ("d", "e"),
229
+ ("f", "b"),
230
+ ("f", "c"),
231
+ ("f", "g"),
232
+ ("g", "f"),
233
+ ("g", "c"),
234
+ ("g", "d"),
235
+ ("g", "e"),
236
+ ]
237
+ G.add_edges_from(edges_fig_4)
238
+
239
+ cliques = list(nx.enumerate_all_cliques(G))
240
+ clique_sizes = list(map(len, cliques))
241
+ assert sorted(clique_sizes) == clique_sizes
242
+
243
+ expected_cliques = [
244
+ ["a"],
245
+ ["b"],
246
+ ["c"],
247
+ ["d"],
248
+ ["e"],
249
+ ["f"],
250
+ ["g"],
251
+ ["a", "b"],
252
+ ["a", "b", "d"],
253
+ ["a", "b", "d", "e"],
254
+ ["a", "b", "e"],
255
+ ["a", "c"],
256
+ ["a", "c", "d"],
257
+ ["a", "c", "d", "e"],
258
+ ["a", "c", "e"],
259
+ ["a", "d"],
260
+ ["a", "d", "e"],
261
+ ["a", "e"],
262
+ ["b", "c"],
263
+ ["b", "c", "d"],
264
+ ["b", "c", "d", "e"],
265
+ ["b", "c", "e"],
266
+ ["b", "c", "f"],
267
+ ["b", "d"],
268
+ ["b", "d", "e"],
269
+ ["b", "e"],
270
+ ["b", "f"],
271
+ ["c", "d"],
272
+ ["c", "d", "e"],
273
+ ["c", "d", "e", "g"],
274
+ ["c", "d", "g"],
275
+ ["c", "e"],
276
+ ["c", "e", "g"],
277
+ ["c", "f"],
278
+ ["c", "f", "g"],
279
+ ["c", "g"],
280
+ ["d", "e"],
281
+ ["d", "e", "g"],
282
+ ["d", "g"],
283
+ ["e", "g"],
284
+ ["f", "g"],
285
+ ["a", "b", "c"],
286
+ ["a", "b", "c", "d"],
287
+ ["a", "b", "c", "d", "e"],
288
+ ["a", "b", "c", "e"],
289
+ ]
290
+
291
+ assert sorted(map(sorted, cliques)) == sorted(map(sorted, expected_cliques))
minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_communicability.py ADDED
@@ -0,0 +1,80 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from collections import defaultdict
2
+
3
+ import pytest
4
+
5
+ pytest.importorskip("numpy")
6
+ pytest.importorskip("scipy")
7
+
8
+ import networkx as nx
9
+ from networkx.algorithms.communicability_alg import communicability, communicability_exp
10
+
11
+
12
+ class TestCommunicability:
13
+ def test_communicability(self):
14
+ answer = {
15
+ 0: {0: 1.5430806348152435, 1: 1.1752011936438012},
16
+ 1: {0: 1.1752011936438012, 1: 1.5430806348152435},
17
+ }
18
+ # answer={(0, 0): 1.5430806348152435,
19
+ # (0, 1): 1.1752011936438012,
20
+ # (1, 0): 1.1752011936438012,
21
+ # (1, 1): 1.5430806348152435}
22
+
23
+ result = communicability(nx.path_graph(2))
24
+ for k1, val in result.items():
25
+ for k2 in val:
26
+ assert answer[k1][k2] == pytest.approx(result[k1][k2], abs=1e-7)
27
+
28
+ def test_communicability2(self):
29
+ answer_orig = {
30
+ ("1", "1"): 1.6445956054135658,
31
+ ("1", "Albert"): 0.7430186221096251,
32
+ ("1", "Aric"): 0.7430186221096251,
33
+ ("1", "Dan"): 1.6208126320442937,
34
+ ("1", "Franck"): 0.42639707170035257,
35
+ ("Albert", "1"): 0.7430186221096251,
36
+ ("Albert", "Albert"): 2.4368257358712189,
37
+ ("Albert", "Aric"): 1.4368257358712191,
38
+ ("Albert", "Dan"): 2.0472097037446453,
39
+ ("Albert", "Franck"): 1.8340111678944691,
40
+ ("Aric", "1"): 0.7430186221096251,
41
+ ("Aric", "Albert"): 1.4368257358712191,
42
+ ("Aric", "Aric"): 2.4368257358712193,
43
+ ("Aric", "Dan"): 2.0472097037446457,
44
+ ("Aric", "Franck"): 1.8340111678944691,
45
+ ("Dan", "1"): 1.6208126320442937,
46
+ ("Dan", "Albert"): 2.0472097037446453,
47
+ ("Dan", "Aric"): 2.0472097037446457,
48
+ ("Dan", "Dan"): 3.1306328496328168,
49
+ ("Dan", "Franck"): 1.4860372442192515,
50
+ ("Franck", "1"): 0.42639707170035257,
51
+ ("Franck", "Albert"): 1.8340111678944691,
52
+ ("Franck", "Aric"): 1.8340111678944691,
53
+ ("Franck", "Dan"): 1.4860372442192515,
54
+ ("Franck", "Franck"): 2.3876142275231915,
55
+ }
56
+
57
+ answer = defaultdict(dict)
58
+ for (k1, k2), v in answer_orig.items():
59
+ answer[k1][k2] = v
60
+
61
+ G1 = nx.Graph(
62
+ [
63
+ ("Franck", "Aric"),
64
+ ("Aric", "Dan"),
65
+ ("Dan", "Albert"),
66
+ ("Albert", "Franck"),
67
+ ("Dan", "1"),
68
+ ("Franck", "Albert"),
69
+ ]
70
+ )
71
+
72
+ result = communicability(G1)
73
+ for k1, val in result.items():
74
+ for k2 in val:
75
+ assert answer[k1][k2] == pytest.approx(result[k1][k2], abs=1e-7)
76
+
77
+ result = communicability_exp(G1)
78
+ for k1, val in result.items():
79
+ for k2 in val:
80
+ assert answer[k1][k2] == pytest.approx(result[k1][k2], abs=1e-7)
minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_core.py ADDED
@@ -0,0 +1,266 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx.utils import nodes_equal
5
+
6
+
7
+ class TestCore:
8
+ @classmethod
9
+ def setup_class(cls):
10
+ # G is the example graph in Figure 1 from Batagelj and
11
+ # Zaversnik's paper titled An O(m) Algorithm for Cores
12
+ # Decomposition of Networks, 2003,
13
+ # http://arXiv.org/abs/cs/0310049. With nodes labeled as
14
+ # shown, the 3-core is given by nodes 1-8, the 2-core by nodes
15
+ # 9-16, the 1-core by nodes 17-20 and node 21 is in the
16
+ # 0-core.
17
+ t1 = nx.convert_node_labels_to_integers(nx.tetrahedral_graph(), 1)
18
+ t2 = nx.convert_node_labels_to_integers(t1, 5)
19
+ G = nx.union(t1, t2)
20
+ G.add_edges_from(
21
+ [
22
+ (3, 7),
23
+ (2, 11),
24
+ (11, 5),
25
+ (11, 12),
26
+ (5, 12),
27
+ (12, 19),
28
+ (12, 18),
29
+ (3, 9),
30
+ (7, 9),
31
+ (7, 10),
32
+ (9, 10),
33
+ (9, 20),
34
+ (17, 13),
35
+ (13, 14),
36
+ (14, 15),
37
+ (15, 16),
38
+ (16, 13),
39
+ ]
40
+ )
41
+ G.add_node(21)
42
+ cls.G = G
43
+
44
+ # Create the graph H resulting from the degree sequence
45
+ # [0, 1, 2, 2, 2, 2, 3] when using the Havel-Hakimi algorithm.
46
+
47
+ degseq = [0, 1, 2, 2, 2, 2, 3]
48
+ H = nx.havel_hakimi_graph(degseq)
49
+ mapping = {6: 0, 0: 1, 4: 3, 5: 6, 3: 4, 1: 2, 2: 5}
50
+ cls.H = nx.relabel_nodes(H, mapping)
51
+
52
+ def test_trivial(self):
53
+ """Empty graph"""
54
+ G = nx.Graph()
55
+ assert nx.core_number(G) == {}
56
+
57
+ def test_core_number(self):
58
+ core = nx.core_number(self.G)
59
+ nodes_by_core = [sorted(n for n in core if core[n] == val) for val in range(4)]
60
+ assert nodes_equal(nodes_by_core[0], [21])
61
+ assert nodes_equal(nodes_by_core[1], [17, 18, 19, 20])
62
+ assert nodes_equal(nodes_by_core[2], [9, 10, 11, 12, 13, 14, 15, 16])
63
+ assert nodes_equal(nodes_by_core[3], [1, 2, 3, 4, 5, 6, 7, 8])
64
+
65
+ def test_core_number2(self):
66
+ core = nx.core_number(self.H)
67
+ nodes_by_core = [sorted(n for n in core if core[n] == val) for val in range(3)]
68
+ assert nodes_equal(nodes_by_core[0], [0])
69
+ assert nodes_equal(nodes_by_core[1], [1, 3])
70
+ assert nodes_equal(nodes_by_core[2], [2, 4, 5, 6])
71
+
72
+ def test_core_number_multigraph(self):
73
+ G = nx.complete_graph(3)
74
+ G = nx.MultiGraph(G)
75
+ G.add_edge(1, 2)
76
+ with pytest.raises(
77
+ nx.NetworkXNotImplemented, match="not implemented for multigraph type"
78
+ ):
79
+ nx.core_number(G)
80
+
81
+ def test_core_number_self_loop(self):
82
+ G = nx.cycle_graph(3)
83
+ G.add_edge(0, 0)
84
+ with pytest.raises(
85
+ nx.NetworkXNotImplemented, match="Input graph has self loops"
86
+ ):
87
+ nx.core_number(G)
88
+
89
+ def test_directed_core_number(self):
90
+ """core number had a bug for directed graphs found in issue #1959"""
91
+ # small example where too timid edge removal can make cn[2] = 3
92
+ G = nx.DiGraph()
93
+ edges = [(1, 2), (2, 1), (2, 3), (2, 4), (3, 4), (4, 3)]
94
+ G.add_edges_from(edges)
95
+ assert nx.core_number(G) == {1: 2, 2: 2, 3: 2, 4: 2}
96
+ # small example where too aggressive edge removal can make cn[2] = 2
97
+ more_edges = [(1, 5), (3, 5), (4, 5), (3, 6), (4, 6), (5, 6)]
98
+ G.add_edges_from(more_edges)
99
+ assert nx.core_number(G) == {1: 3, 2: 3, 3: 3, 4: 3, 5: 3, 6: 3}
100
+
101
+ def test_main_core(self):
102
+ main_core_subgraph = nx.k_core(self.H)
103
+ assert sorted(main_core_subgraph.nodes()) == [2, 4, 5, 6]
104
+
105
+ def test_k_core(self):
106
+ # k=0
107
+ k_core_subgraph = nx.k_core(self.H, k=0)
108
+ assert sorted(k_core_subgraph.nodes()) == sorted(self.H.nodes())
109
+ # k=1
110
+ k_core_subgraph = nx.k_core(self.H, k=1)
111
+ assert sorted(k_core_subgraph.nodes()) == [1, 2, 3, 4, 5, 6]
112
+ # k = 2
113
+ k_core_subgraph = nx.k_core(self.H, k=2)
114
+ assert sorted(k_core_subgraph.nodes()) == [2, 4, 5, 6]
115
+
116
+ def test_k_core_multigraph(self):
117
+ core_number = nx.core_number(self.H)
118
+ H = nx.MultiGraph(self.H)
119
+ with pytest.deprecated_call():
120
+ nx.k_core(H, k=0, core_number=core_number)
121
+
122
+ def test_main_crust(self):
123
+ main_crust_subgraph = nx.k_crust(self.H)
124
+ assert sorted(main_crust_subgraph.nodes()) == [0, 1, 3]
125
+
126
+ def test_k_crust(self):
127
+ # k = 0
128
+ k_crust_subgraph = nx.k_crust(self.H, k=2)
129
+ assert sorted(k_crust_subgraph.nodes()) == sorted(self.H.nodes())
130
+ # k=1
131
+ k_crust_subgraph = nx.k_crust(self.H, k=1)
132
+ assert sorted(k_crust_subgraph.nodes()) == [0, 1, 3]
133
+ # k=2
134
+ k_crust_subgraph = nx.k_crust(self.H, k=0)
135
+ assert sorted(k_crust_subgraph.nodes()) == [0]
136
+
137
+ def test_k_crust_multigraph(self):
138
+ core_number = nx.core_number(self.H)
139
+ H = nx.MultiGraph(self.H)
140
+ with pytest.deprecated_call():
141
+ nx.k_crust(H, k=0, core_number=core_number)
142
+
143
+ def test_main_shell(self):
144
+ main_shell_subgraph = nx.k_shell(self.H)
145
+ assert sorted(main_shell_subgraph.nodes()) == [2, 4, 5, 6]
146
+
147
+ def test_k_shell(self):
148
+ # k=0
149
+ k_shell_subgraph = nx.k_shell(self.H, k=2)
150
+ assert sorted(k_shell_subgraph.nodes()) == [2, 4, 5, 6]
151
+ # k=1
152
+ k_shell_subgraph = nx.k_shell(self.H, k=1)
153
+ assert sorted(k_shell_subgraph.nodes()) == [1, 3]
154
+ # k=2
155
+ k_shell_subgraph = nx.k_shell(self.H, k=0)
156
+ assert sorted(k_shell_subgraph.nodes()) == [0]
157
+
158
+ def test_k_shell_multigraph(self):
159
+ core_number = nx.core_number(self.H)
160
+ H = nx.MultiGraph(self.H)
161
+ with pytest.deprecated_call():
162
+ nx.k_shell(H, k=0, core_number=core_number)
163
+
164
+ def test_k_corona(self):
165
+ # k=0
166
+ k_corona_subgraph = nx.k_corona(self.H, k=2)
167
+ assert sorted(k_corona_subgraph.nodes()) == [2, 4, 5, 6]
168
+ # k=1
169
+ k_corona_subgraph = nx.k_corona(self.H, k=1)
170
+ assert sorted(k_corona_subgraph.nodes()) == [1]
171
+ # k=2
172
+ k_corona_subgraph = nx.k_corona(self.H, k=0)
173
+ assert sorted(k_corona_subgraph.nodes()) == [0]
174
+
175
+ def test_k_corona_multigraph(self):
176
+ core_number = nx.core_number(self.H)
177
+ H = nx.MultiGraph(self.H)
178
+ with pytest.deprecated_call():
179
+ nx.k_corona(H, k=0, core_number=core_number)
180
+
181
+ def test_k_truss(self):
182
+ # k=-1
183
+ k_truss_subgraph = nx.k_truss(self.G, -1)
184
+ assert sorted(k_truss_subgraph.nodes()) == list(range(1, 21))
185
+ # k=0
186
+ k_truss_subgraph = nx.k_truss(self.G, 0)
187
+ assert sorted(k_truss_subgraph.nodes()) == list(range(1, 21))
188
+ # k=1
189
+ k_truss_subgraph = nx.k_truss(self.G, 1)
190
+ assert sorted(k_truss_subgraph.nodes()) == list(range(1, 21))
191
+ # k=2
192
+ k_truss_subgraph = nx.k_truss(self.G, 2)
193
+ assert sorted(k_truss_subgraph.nodes()) == list(range(1, 21))
194
+ # k=3
195
+ k_truss_subgraph = nx.k_truss(self.G, 3)
196
+ assert sorted(k_truss_subgraph.nodes()) == list(range(1, 13))
197
+
198
+ k_truss_subgraph = nx.k_truss(self.G, 4)
199
+ assert sorted(k_truss_subgraph.nodes()) == list(range(1, 9))
200
+
201
+ k_truss_subgraph = nx.k_truss(self.G, 5)
202
+ assert sorted(k_truss_subgraph.nodes()) == []
203
+
204
+ def test_k_truss_digraph(self):
205
+ G = nx.complete_graph(3)
206
+ G = nx.DiGraph(G)
207
+ G.add_edge(2, 1)
208
+ with pytest.raises(
209
+ nx.NetworkXNotImplemented, match="not implemented for directed type"
210
+ ):
211
+ nx.k_truss(G, k=1)
212
+
213
+ def test_k_truss_multigraph(self):
214
+ G = nx.complete_graph(3)
215
+ G = nx.MultiGraph(G)
216
+ G.add_edge(1, 2)
217
+ with pytest.raises(
218
+ nx.NetworkXNotImplemented, match="not implemented for multigraph type"
219
+ ):
220
+ nx.k_truss(G, k=1)
221
+
222
+ def test_k_truss_self_loop(self):
223
+ G = nx.cycle_graph(3)
224
+ G.add_edge(0, 0)
225
+ with pytest.raises(
226
+ nx.NetworkXNotImplemented, match="Input graph has self loops"
227
+ ):
228
+ nx.k_truss(G, k=1)
229
+
230
+ def test_onion_layers(self):
231
+ layers = nx.onion_layers(self.G)
232
+ nodes_by_layer = [
233
+ sorted(n for n in layers if layers[n] == val) for val in range(1, 7)
234
+ ]
235
+ assert nodes_equal(nodes_by_layer[0], [21])
236
+ assert nodes_equal(nodes_by_layer[1], [17, 18, 19, 20])
237
+ assert nodes_equal(nodes_by_layer[2], [10, 12, 13, 14, 15, 16])
238
+ assert nodes_equal(nodes_by_layer[3], [9, 11])
239
+ assert nodes_equal(nodes_by_layer[4], [1, 2, 4, 5, 6, 8])
240
+ assert nodes_equal(nodes_by_layer[5], [3, 7])
241
+
242
+ def test_onion_digraph(self):
243
+ G = nx.complete_graph(3)
244
+ G = nx.DiGraph(G)
245
+ G.add_edge(2, 1)
246
+ with pytest.raises(
247
+ nx.NetworkXNotImplemented, match="not implemented for directed type"
248
+ ):
249
+ nx.onion_layers(G)
250
+
251
+ def test_onion_multigraph(self):
252
+ G = nx.complete_graph(3)
253
+ G = nx.MultiGraph(G)
254
+ G.add_edge(1, 2)
255
+ with pytest.raises(
256
+ nx.NetworkXNotImplemented, match="not implemented for multigraph type"
257
+ ):
258
+ nx.onion_layers(G)
259
+
260
+ def test_onion_self_loop(self):
261
+ G = nx.cycle_graph(3)
262
+ G.add_edge(0, 0)
263
+ with pytest.raises(
264
+ nx.NetworkXNotImplemented, match="Input graph contains self loops"
265
+ ):
266
+ nx.onion_layers(G)
minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_covering.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+
5
+
6
+ class TestMinEdgeCover:
7
+ """Tests for :func:`networkx.algorithms.min_edge_cover`"""
8
+
9
+ def test_empty_graph(self):
10
+ G = nx.Graph()
11
+ assert nx.min_edge_cover(G) == set()
12
+
13
+ def test_graph_with_loop(self):
14
+ G = nx.Graph()
15
+ G.add_edge(0, 0)
16
+ assert nx.min_edge_cover(G) == {(0, 0)}
17
+
18
+ def test_graph_with_isolated_v(self):
19
+ G = nx.Graph()
20
+ G.add_node(1)
21
+ with pytest.raises(
22
+ nx.NetworkXException,
23
+ match="Graph has a node with no edge incident on it, so no edge cover exists.",
24
+ ):
25
+ nx.min_edge_cover(G)
26
+
27
+ def test_graph_single_edge(self):
28
+ G = nx.Graph([(0, 1)])
29
+ assert nx.min_edge_cover(G) in ({(0, 1)}, {(1, 0)})
30
+
31
+ def test_graph_two_edge_path(self):
32
+ G = nx.path_graph(3)
33
+ min_cover = nx.min_edge_cover(G)
34
+ assert len(min_cover) == 2
35
+ for u, v in G.edges:
36
+ assert (u, v) in min_cover or (v, u) in min_cover
37
+
38
+ def test_bipartite_explicit(self):
39
+ G = nx.Graph()
40
+ G.add_nodes_from([1, 2, 3, 4], bipartite=0)
41
+ G.add_nodes_from(["a", "b", "c"], bipartite=1)
42
+ G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
43
+ # Use bipartite method by prescribing the algorithm
44
+ min_cover = nx.min_edge_cover(
45
+ G, nx.algorithms.bipartite.matching.eppstein_matching
46
+ )
47
+ assert nx.is_edge_cover(G, min_cover)
48
+ assert len(min_cover) == 8
49
+ # Use the default method which is not specialized for bipartite
50
+ min_cover2 = nx.min_edge_cover(G)
51
+ assert nx.is_edge_cover(G, min_cover2)
52
+ assert len(min_cover2) == 4
53
+
54
+ def test_complete_graph_even(self):
55
+ G = nx.complete_graph(10)
56
+ min_cover = nx.min_edge_cover(G)
57
+ assert nx.is_edge_cover(G, min_cover)
58
+ assert len(min_cover) == 5
59
+
60
+ def test_complete_graph_odd(self):
61
+ G = nx.complete_graph(11)
62
+ min_cover = nx.min_edge_cover(G)
63
+ assert nx.is_edge_cover(G, min_cover)
64
+ assert len(min_cover) == 6
65
+
66
+
67
+ class TestIsEdgeCover:
68
+ """Tests for :func:`networkx.algorithms.is_edge_cover`"""
69
+
70
+ def test_empty_graph(self):
71
+ G = nx.Graph()
72
+ assert nx.is_edge_cover(G, set())
73
+
74
+ def test_graph_with_loop(self):
75
+ G = nx.Graph()
76
+ G.add_edge(1, 1)
77
+ assert nx.is_edge_cover(G, {(1, 1)})
78
+
79
+ def test_graph_single_edge(self):
80
+ G = nx.Graph()
81
+ G.add_edge(0, 1)
82
+ assert nx.is_edge_cover(G, {(0, 0), (1, 1)})
83
+ assert nx.is_edge_cover(G, {(0, 1), (1, 0)})
84
+ assert nx.is_edge_cover(G, {(0, 1)})
85
+ assert not nx.is_edge_cover(G, {(0, 0)})
minigpt2/lib/python3.10/site-packages/networkx/algorithms/tests/test_cuts.py ADDED
@@ -0,0 +1,171 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Unit tests for the :mod:`networkx.algorithms.cuts` module."""
2
+
3
+ import networkx as nx
4
+
5
+
6
class TestCutSize:
    """Unit tests for the :func:`~networkx.cut_size` function."""

    def test_symmetric(self):
        """The cut size does not depend on the order of the two node sets."""
        graph = nx.barbell_graph(3, 0)
        left, right = {0, 1, 4}, {2, 3, 5}
        assert nx.cut_size(graph, left, right) == 4
        assert nx.cut_size(graph, right, left) == 4

    def test_single_edge(self):
        """A cut consisting of exactly one edge."""
        graph = nx.barbell_graph(3, 0)
        left, right = {0, 1, 2}, {3, 4, 5}
        assert nx.cut_size(graph, left, right) == 1
        assert nx.cut_size(graph, right, left) == 1

    def test_directed(self):
        """Each directed edge is counted exactly once in the cut."""
        graph = nx.barbell_graph(3, 0).to_directed()
        left, right = {0, 1, 2}, {3, 4, 5}
        assert nx.cut_size(graph, left, right) == 2
        assert nx.cut_size(graph, right, left) == 2

    def test_directed_symmetric(self):
        """A cut in a directed graph is symmetric in its node sets."""
        graph = nx.barbell_graph(3, 0).to_directed()
        left, right = {0, 1, 4}, {2, 3, 5}
        assert nx.cut_size(graph, left, right) == 8
        assert nx.cut_size(graph, right, left) == 8

    def test_multigraph(self):
        """Parallel edges each contribute to the cut."""
        graph = nx.MultiGraph(["ab", "ab"])
        assert nx.cut_size(graph, {"a"}, {"b"}) == 2
45
+
46
+
47
class TestVolume:
    """Unit tests for the :func:`~networkx.volume` function."""

    def test_graph(self):
        assert nx.volume(nx.cycle_graph(4), {0, 1}) == 4

    def test_digraph(self):
        ring = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 0)])
        assert nx.volume(ring, {0, 1}) == 2

    def test_multigraph(self):
        doubled_edges = list(nx.cycle_graph(4).edges()) * 2
        assert nx.volume(nx.MultiGraph(doubled_edges), {0, 1}) == 8

    def test_multidigraph(self):
        ring_edges = [(0, 1), (1, 2), (2, 3), (3, 0)]
        assert nx.volume(nx.MultiDiGraph(ring_edges * 2), {0, 1}) == 4

    def test_barbell(self):
        graph = nx.barbell_graph(3, 0)
        # Both halves of the barbell have the same total degree.
        assert nx.volume(graph, {0, 1, 2}) == 7
        assert nx.volume(graph, {3, 4, 5}) == 7
72
+
73
+
74
class TestNormalizedCutSize:
    """Unit tests for the :func:`~networkx.normalized_cut_size` function."""

    def test_graph(self):
        graph = nx.path_graph(4)
        inner = {1, 2}
        outer = set(graph) - inner
        # The cut looks like this: o-{-o--o-}-o
        expected = 2 * ((1 / 4) + (1 / 2))
        assert nx.normalized_cut_size(graph, inner, outer) == expected
        # Omitting T should default to the complement of S.
        assert nx.normalized_cut_size(graph, inner) == expected

    def test_directed(self):
        graph = nx.DiGraph([(0, 1), (1, 2), (2, 3)])
        inner = {1, 2}
        outer = set(graph) - inner
        # The cut looks like this: o-{->o-->o-}->o
        expected = 2 * ((1 / 2) + (1 / 1))
        assert nx.normalized_cut_size(graph, inner, outer) == expected
        # Omitting T should default to the complement of S.
        assert nx.normalized_cut_size(graph, inner) == expected
98
+
99
+
100
class TestConductance:
    """Unit tests for the :func:`~networkx.conductance` function."""

    def test_graph(self):
        graph = nx.barbell_graph(5, 0)
        # The singleton sets holding the two "bridge" nodes: one cut edge,
        # and each set has volume five.
        assert nx.conductance(graph, {4}, {5}) == 1 / 5
        # With T omitted it defaults to the complement of S.  There is one
        # cut edge and each side has volume seven.
        small = nx.barbell_graph(3, 0)
        assert nx.conductance(small, {0, 1, 2}) == 1 / 7
117
+
118
+
119
class TestEdgeExpansion:
    """Unit tests for the :func:`~networkx.edge_expansion` function."""

    def test_graph(self):
        graph = nx.barbell_graph(5, 0)
        half = set(range(5))
        rest = set(graph) - half
        # One bridge edge crosses the cut and each side holds five nodes.
        assert nx.edge_expansion(graph, half, rest) == 1 / 5
        # With T omitted it defaults to the complement of S.
        assert nx.edge_expansion(graph, half) == 1 / 5
131
+
132
+
133
class TestNodeExpansion:
    """Unit tests for the :func:`~networkx.node_expansion` function."""

    def test_graph(self):
        graph = nx.path_graph(8)
        subset = {3, 4, 5}
        # The neighborhood of S has cardinality five, and S has
        # cardinality three.
        assert nx.node_expansion(graph, subset) == 5 / 3
144
+
145
+
146
class TestBoundaryExpansion:
    """Unit tests for the :func:`~networkx.boundary_expansion` function."""

    def test_graph(self):
        G = nx.complete_graph(10)
        S = set(range(4))
        expansion = nx.boundary_expansion(G, S)
        # The node boundary of S has cardinality six, and S has
        # cardinality four (``set(range(4))``), matching the 6 / 4 below.
        expected = 6 / 4
        assert expected == expansion
157
+
158
+
159
class TestMixingExpansion:
    """Unit tests for the :func:`~networkx.mixing_expansion` function."""

    def test_graph(self):
        graph = nx.barbell_graph(5, 0)
        half = set(range(5))
        rest = set(graph) - half
        # One edge crosses the cut, and the denominator is twice the total
        # edge count: two five-cliques (5 * 4 / 2 edges each) plus the bridge,
        # i.e. 2 * (10 + 10 + 1) = 2 * (5 * 4 + 1).
        expected = 1 / (2 * (5 * 4 + 1))
        assert nx.mixing_expansion(graph, half, rest) == expected