Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes. See the raw diff.
- .gitattributes +2 -0
- valley/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc +3 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/dominating_set.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/kcomponents.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/vertex_cover.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/__init__.py +2 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/__pycache__/__init__.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/__pycache__/hits_alg.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/__pycache__/pagerank_alg.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/hits_alg.py +337 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/pagerank_alg.py +499 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/__init__.py +0 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/__init__.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/test_hits.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/test_pagerank.cpython-310.pyc +0 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/test_hits.py +78 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/test_pagerank.py +217 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/__init__.py +0 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_asteroidal.py +23 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_boundary.py +154 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_bridges.py +144 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_broadcasting.py +81 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_chordal.py +129 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_clique.py +291 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_cluster.py +549 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_communicability.py +80 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_covering.py +85 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_cuts.py +172 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_d_separation.py +348 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_euler.py +314 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_graph_hashing.py +686 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_hierarchy.py +39 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_hybrid.py +24 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_isolate.py +26 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_link_prediction.py +586 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_lowest_common_ancestors.py +427 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_max_weight_clique.py +181 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_mis.py +62 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_moral.py +15 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_node_classification.py +140 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_non_randomness.py +37 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_planar_drawing.py +274 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_planarity.py +535 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_regular.py +92 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_richclub.py +149 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_similarity.py +946 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_smallworld.py +78 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_smetric.py +36 -0
- valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_sparsifiers.py +137 -0
.gitattributes
CHANGED
|
@@ -593,3 +593,5 @@ wemm/compiler_compat/ld filter=lfs diff=lfs merge=lfs -text
|
|
| 593 |
wemm/bin/sqlite3 filter=lfs diff=lfs merge=lfs -text
|
| 594 |
wemm/bin/x86_64-conda_cos7-linux-gnu-ld filter=lfs diff=lfs merge=lfs -text
|
| 595 |
wemm/bin/bunzip2 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
| 593 |
wemm/bin/sqlite3 filter=lfs diff=lfs merge=lfs -text
|
| 594 |
wemm/bin/x86_64-conda_cos7-linux-gnu-ld filter=lfs diff=lfs merge=lfs -text
|
| 595 |
wemm/bin/bunzip2 filter=lfs diff=lfs merge=lfs -text
|
| 596 |
+
valley/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 597 |
+
wemm/lib/libtinfow.so.6.4 filter=lfs diff=lfs merge=lfs -text
|
valley/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:09ba94d1818971569eb3f7bbcdb1ab531835882544f514eabe870533c6f4c441
|
| 3 |
+
size 152344
|
valley/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (1.34 kB). View file
|
|
|
valley/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/dominating_set.cpython-310.pyc
ADDED
|
Binary file (4.45 kB). View file
|
|
|
valley/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/kcomponents.cpython-310.pyc
ADDED
|
Binary file (13.9 kB). View file
|
|
|
valley/lib/python3.10/site-packages/networkx/algorithms/approximation/__pycache__/vertex_cover.cpython-310.pyc
ADDED
|
Binary file (2.86 kB). View file
|
|
|
valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/__init__.py
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from networkx.algorithms.link_analysis.hits_alg import *
|
| 2 |
+
from networkx.algorithms.link_analysis.pagerank_alg import *
|
valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (302 Bytes). View file
|
|
|
valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/__pycache__/hits_alg.cpython-310.pyc
ADDED
|
Binary file (9.7 kB). View file
|
|
|
valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/__pycache__/pagerank_alg.cpython-310.pyc
ADDED
|
Binary file (16.3 kB). View file
|
|
|
valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/hits_alg.py
ADDED
|
@@ -0,0 +1,337 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Hubs and authorities analysis of graph structure.
|
| 2 |
+
"""
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
__all__ = ["hits"]
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
@nx._dispatchable(preserve_edge_attrs={"G": {"weight": 1}})
def hits(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True):
    """Returns HITS hubs and authorities values for nodes.

    The HITS algorithm computes two numbers for a node.
    Authorities estimates the node value based on the incoming links.
    Hubs estimates the node value based on outgoing links.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    max_iter : integer, optional
      Maximum number of iterations in power method.

    tol : float, optional
      Error tolerance used to check convergence in power method iteration.

    nstart : dictionary, optional
      Starting value of each node for power method iteration.

    normalized : bool (default=True)
      Normalize results by the sum of all of the values.

    Returns
    -------
    (hubs,authorities) : two-tuple of dictionaries
       Two dictionaries keyed by node containing the hub and authority
       values.

    Raises
    ------
    PowerIterationFailedConvergence
        If the algorithm fails to converge to the specified tolerance
        within the specified number of iterations of the power iteration
        method.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> h, a = nx.hits(G)

    Notes
    -----
    The eigenvector calculation is done by the power iteration method
    and has no guarantee of convergence. The iteration will stop
    after max_iter iterations or an error tolerance of
    number_of_nodes(G)*tol has been reached.

    The HITS algorithm was designed for directed graphs but this
    algorithm does not check if the input graph is directed and will
    execute on undirected graphs.

    References
    ----------
    .. [1] A. Langville and C. Meyer,
       "A survey of eigenvector methods of web information retrieval."
       http://citeseer.ist.psu.edu/713792.html
    .. [2] Jon Kleinberg,
       Authoritative sources in a hyperlinked environment
       Journal of the ACM 46 (5): 604-32, 1999.
       doi:10.1145/324133.324140.
       http://www.cs.cornell.edu/home/kleinber/auth.pdf.
    """
    # SciPy/NumPy are optional dependencies of networkx, hence the
    # function-local imports.
    import numpy as np
    import scipy as sp

    if len(G) == 0:
        return {}, {}
    A = nx.adjacency_matrix(G, nodelist=list(G), dtype=float)

    if nstart is not None:
        # svds takes the starting vector as a flat array; values are taken
        # in the dict's insertion order (assumed to match list(G) — the
        # caller is expected to key nstart by the graph's nodes).
        nstart = np.array(list(nstart.values()))
    if max_iter <= 0:
        raise nx.PowerIterationFailedConvergence(max_iter)
    try:
        # Dominant singular triplet of A: the top right singular vector
        # (row of vt) is the authority vector.
        _, _, vt = sp.sparse.linalg.svds(A, k=1, v0=nstart, maxiter=max_iter, tol=tol)
    except sp.sparse.linalg.ArpackNoConvergence as exc:
        raise nx.PowerIterationFailedConvergence(max_iter) from exc

    a = vt.flatten().real
    # Hub scores follow from the authority scores: h = A a.
    h = A @ a
    if normalized:
        h /= h.sum()
        a /= a.sum()
    hubs = dict(zip(G, map(float, h)))
    authorities = dict(zip(G, map(float, a)))
    return hubs, authorities
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def _hits_python(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True):
    """Pure-Python HITS via power iteration.

    Returns a ``(hubs, authorities)`` pair of dicts keyed by node, or raises
    ``nx.PowerIterationFailedConvergence`` if the hub vector has not settled
    to within `tol` (l1 norm) after `max_iter` iterations.
    """
    if isinstance(G, nx.MultiGraph | nx.MultiDiGraph):
        raise Exception("hits() not defined for graphs with multiedges.")
    if len(G) == 0:
        return {}, {}
    # Starting hub vector: uniform unless the caller supplied one.
    if nstart is None:
        h = dict.fromkeys(G, 1.0 / G.number_of_nodes())
    else:
        h = nstart
    # Scale the starting vector in place so its entries sum to one.
    scale = 1.0 / sum(h.values())
    for node in h:
        h[node] *= scale
    for _ in range(max_iter):
        hlast = h
        h = dict.fromkeys(hlast, 0)
        a = dict.fromkeys(hlast, 0)
        # Left multiply a^T = hlast^T * G: each node passes its hub score
        # along its out-edges to its neighbours' authority scores.
        for node in h:
            for nbr in G[node]:
                a[nbr] += hlast[node] * G[node][nbr].get("weight", 1)
        # Then h = G a: each node collects its neighbours' authority scores.
        for node in h:
            for nbr in G[node]:
                h[node] += a[nbr] * G[node][nbr].get("weight", 1)
        # Rescale both vectors by their maximum entry.
        scale = 1.0 / max(h.values())
        for node in h:
            h[node] *= scale
        scale = 1.0 / max(a.values())
        for node in a:
            a[node] *= scale
        # Converged when the l1 change of the hub vector drops below tol.
        if sum(abs(h[node] - hlast[node]) for node in h) < tol:
            break
    else:
        raise nx.PowerIterationFailedConvergence(max_iter)
    if normalized:
        scale = 1.0 / sum(a.values())
        for node in a:
            a[node] *= scale
        scale = 1.0 / sum(h.values())
        for node in h:
            h[node] *= scale
    return h, a
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
def _hits_numpy(G, normalized=True):
    """HITS hubs and authorities computed with dense NumPy eigendecomposition.

    The hub scores are the eigenvector for the largest eigenvalue of the hub
    matrix ``A @ A.T`` and the authority scores that of the authority matrix
    ``A.T @ A``, where ``A = nx.to_numpy_array(G)``. When `normalized` is
    true each vector is divided by its sum, otherwise by its maximum entry.

    Returns a ``(hubs, authorities)`` pair of dicts keyed by node. The input
    graph is used as-is; directedness is not checked.

    Uses NumPy's LAPACK interface (``numpy.linalg.eig``).

    References
    ----------
    .. [1] A. Langville and C. Meyer,
       "A survey of eigenvector methods of web information retrieval."
       http://citeseer.ist.psu.edu/713792.html
    .. [2] Jon Kleinberg,
       Authoritative sources in a hyperlinked environment
       Journal of the ACM 46 (5): 604-32, 1999.
       doi:10.1145/324133.324140.
       http://www.cs.cornell.edu/home/kleinber/auth.pdf.
    """
    import numpy as np

    if len(G) == 0:
        return {}, {}

    def dominant_eigenvector(M):
        # Column of the eigenvector matrix paired with the largest eigenvalue.
        eigvals, eigvecs = np.linalg.eig(M)
        return eigvecs[:, np.argmax(eigvals)]

    adj = nx.to_numpy_array(G)
    h = dominant_eigenvector(adj @ adj.T)  # hub matrix
    a = dominant_eigenvector(adj.T @ adj)  # authority matrix
    if normalized:
        h = h / h.sum()
        a = a / a.sum()
    else:
        h = h / h.max()
        a = a / a.max()
    hubs = dict(zip(G, map(float, h)))
    authorities = dict(zip(G, map(float, a)))
    return hubs, authorities
|
| 232 |
+
|
| 233 |
+
|
| 234 |
+
def _hits_scipy(G, max_iter=100, tol=1.0e-6, nstart=None, normalized=True):
    """Returns HITS hubs and authorities values for nodes.


    The HITS algorithm computes two numbers for a node.
    Authorities estimates the node value based on the incoming links.
    Hubs estimates the node value based on outgoing links.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    max_iter : integer, optional
      Maximum number of iterations in power method.

    tol : float, optional
      Error tolerance used to check convergence in power method iteration.

    nstart : dictionary, optional
      Starting value of each node for power method iteration.

    normalized : bool (default=True)
       Normalize results by the sum of all of the values.

    Returns
    -------
    (hubs,authorities) : two-tuple of dictionaries
       Two dictionaries keyed by node containing the hub and authority
       values.

    Examples
    --------
    >>> from networkx.algorithms.link_analysis.hits_alg import _hits_scipy
    >>> G = nx.path_graph(4)
    >>> h, a = _hits_scipy(G)

    Notes
    -----
    This implementation uses SciPy sparse matrices.

    The eigenvector calculation is done by the power iteration method
    and has no guarantee of convergence. The iteration will stop
    after max_iter iterations or an error tolerance of
    number_of_nodes(G)*tol has been reached.

    The HITS algorithm was designed for directed graphs but this
    algorithm does not check if the input graph is directed and will
    execute on undirected graphs.

    Raises
    ------
    PowerIterationFailedConvergence
        If the algorithm fails to converge to the specified tolerance
        within the specified number of iterations of the power iteration
        method.

    References
    ----------
    .. [1] A. Langville and C. Meyer,
       "A survey of eigenvector methods of web information retrieval."
       http://citeseer.ist.psu.edu/713792.html
    .. [2] Jon Kleinberg,
       Authoritative sources in a hyperlinked environment
       Journal of the ACM 46 (5): 604-632, 1999.
       doi:10.1145/324133.324140.
       http://www.cs.cornell.edu/home/kleinber/auth.pdf.
    """
    import numpy as np

    if len(G) == 0:
        return {}, {}
    A = nx.to_scipy_sparse_array(G, nodelist=list(G))
    (n, _) = A.shape  # should be square
    ATA = A.T @ A  # authority matrix
    # choose fixed starting vector if not given
    if nstart is None:
        x = np.ones((n, 1)) / n
    else:
        # The comprehension variable `n` is scoped to the comprehension and
        # does not clobber the matrix dimension `n` above.
        x = np.array([nstart.get(n, 0) for n in list(G)], dtype=float)
        x /= x.sum()

    # power iteration on authority matrix
    i = 0
    while True:
        xlast = x
        x = ATA @ x
        # Rescale by the maximum entry to keep the iterate bounded.
        x /= x.max()
        # check convergence, l1 norm
        err = np.absolute(x - xlast).sum()
        if err < tol:
            break
        # NOTE(review): the `i > max_iter` check after the convergence test
        # permits slightly more than max_iter multiplications before giving
        # up — looks intentional upstream; preserved as-is.
        if i > max_iter:
            raise nx.PowerIterationFailedConvergence(max_iter)
        i += 1

    a = x.flatten()
    # Hub scores follow from the authority scores: h = A a.
    h = A @ a
    if normalized:
        h /= h.sum()
        a /= a.sum()
    hubs = dict(zip(G, map(float, h)))
    authorities = dict(zip(G, map(float, a)))
    return hubs, authorities
|
valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/pagerank_alg.py
ADDED
|
@@ -0,0 +1,499 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""PageRank analysis of graph structure. """
|
| 2 |
+
from warnings import warn
|
| 3 |
+
|
| 4 |
+
import networkx as nx
|
| 5 |
+
|
| 6 |
+
__all__ = ["pagerank", "google_matrix"]
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@nx._dispatchable(edge_attrs="weight")
def pagerank(
    G,
    alpha=0.85,
    personalization=None,
    max_iter=100,
    tol=1.0e-6,
    nstart=None,
    weight="weight",
    dangling=None,
):
    """Returns the PageRank of the nodes in the graph.

    PageRank computes a ranking of the nodes in the graph G based on
    the structure of the incoming links. It was originally designed as
    an algorithm to rank web pages.

    Parameters
    ----------
    G : graph
      A NetworkX graph. Undirected graphs will be converted to a directed
      graph with two directed edges for each undirected edge.

    alpha : float, optional
      Damping parameter for PageRank, default=0.85.

    personalization: dict, optional
      The "personalization vector" consisting of a dictionary with a
      key some subset of graph nodes and personalization value each of those.
      At least one personalization value must be non-zero.
      If not specified, a nodes personalization value will be zero.
      By default, a uniform distribution is used.

    max_iter : integer, optional
      Maximum number of iterations in power method eigenvalue solver.

    tol : float, optional
      Error tolerance used to check convergence in power method solver.
      The iteration will stop after a tolerance of ``len(G) * tol`` is reached.

    nstart : dictionary, optional
      Starting value of PageRank iteration for each node.

    weight : key, optional
      Edge data key to use as weight. If None weights are set to 1.

    dangling: dict, optional
      The outedges to be assigned to any "dangling" nodes, i.e., nodes without
      any outedges. The dict key is the node the outedge points to and the dict
      value is the weight of that outedge. By default, dangling nodes are given
      outedges according to the personalization vector (uniform if not
      specified). This must be selected to result in an irreducible transition
      matrix (see notes under google_matrix). It may be common to have the
      dangling dict to be the same as the personalization dict.


    Returns
    -------
    pagerank : dictionary
       Dictionary of nodes with PageRank as value

    Examples
    --------
    >>> G = nx.DiGraph(nx.path_graph(4))
    >>> pr = nx.pagerank(G, alpha=0.9)

    Notes
    -----
    The eigenvector calculation is done by the power iteration method
    and has no guarantee of convergence. The iteration will stop after
    an error tolerance of ``len(G) * tol`` has been reached. If the
    number of iterations exceed `max_iter`, a
    :exc:`networkx.exception.PowerIterationFailedConvergence` exception
    is raised.

    The PageRank algorithm was designed for directed graphs but this
    algorithm does not check if the input graph is directed and will
    execute on undirected graphs by converting each edge in the
    directed graph to two edges.

    See Also
    --------
    google_matrix

    Raises
    ------
    PowerIterationFailedConvergence
        If the algorithm fails to converge to the specified tolerance
        within the specified number of iterations of the power iteration
        method.

    References
    ----------
    .. [1] A. Langville and C. Meyer,
       "A survey of eigenvector methods of web information retrieval."
       http://citeseer.ist.psu.edu/713792.html
    .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry,
       The PageRank citation ranking: Bringing order to the Web. 1999
       http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf

    """
    # Delegate to the SciPy sparse implementation (module-level helper).
    return _pagerank_scipy(
        G, alpha, personalization, max_iter, tol, nstart, weight, dangling
    )
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def _pagerank_python(
    G,
    alpha=0.85,
    personalization=None,
    max_iter=100,
    tol=1.0e-6,
    nstart=None,
    weight="weight",
    dangling=None,
):
    """Pure-Python PageRank via power iteration on a stochastic copy of G.

    Returns a dict of node -> PageRank value, or raises
    ``nx.PowerIterationFailedConvergence`` when the l1 change between
    iterates does not drop below ``len(G) * tol`` within `max_iter` steps.
    """
    if len(G) == 0:
        return {}

    # Work on a right-stochastic directed copy of G.
    W = nx.stochastic_graph(G.to_directed(), weight=weight)
    N = W.number_of_nodes()

    def unit_sum(d):
        # New dict with the same keys, values scaled to sum to one.
        total = sum(d.values())
        return {node: value / total for node, value in d.items()}

    # Starting vector, personalization vector, and dangling-node weights —
    # uniform over W unless the caller supplied an explicit dict.
    x = dict.fromkeys(W, 1.0 / N) if nstart is None else unit_sum(nstart)
    p = dict.fromkeys(W, 1.0 / N) if personalization is None else unit_sum(personalization)
    dangling_weights = p if dangling is None else unit_sum(dangling)
    dangling_nodes = [node for node in W if W.out_degree(node, weight=weight) == 0.0]

    # Power iteration: up to max_iter steps.
    for _ in range(max_iter):
        xlast = x
        x = dict.fromkeys(xlast, 0)
        # Mass sitting on dangling nodes is redistributed via dangling_weights.
        danglesum = alpha * sum(xlast[node] for node in dangling_nodes)
        for node in x:
            # Left multiply x^T = xlast^T * W: push this node's score along
            # its out-edges.
            for _, nbr, wt in W.edges(node, data=weight):
                x[nbr] += alpha * xlast[node] * wt
            x[node] += danglesum * dangling_weights.get(node, 0) + (1.0 - alpha) * p.get(node, 0)
        # check convergence, l1 norm
        if sum(abs(x[node] - xlast[node]) for node in x) < N * tol:
            return x
    raise nx.PowerIterationFailedConvergence(max_iter)
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
@nx._dispatchable(edge_attrs="weight")
def google_matrix(
    G, alpha=0.85, personalization=None, nodelist=None, weight="weight", dangling=None
):
    """Returns the Google matrix of the graph.

    The Google matrix is the dense, row-stochastic transition matrix of the
    Markov chain underlying PageRank: ``alpha`` times the degree-normalized
    adjacency matrix plus ``(1 - alpha)`` times the personalization
    distribution.

    Parameters
    ----------
    G : graph
        A NetworkX graph. Undirected graphs will be converted to a directed
        graph with two directed edges for each undirected edge.

    alpha : float
        The damping factor.

    personalization: dict, optional
        The "personalization vector": a dictionary keyed by some subset of
        graph nodes with a personalization value for each. At least one
        value must be non-zero; nodes not listed get value zero. By
        default, a uniform distribution is used.

    nodelist : list, optional
        The rows and columns are ordered according to the nodes in nodelist.
        If nodelist is None, then the ordering is produced by G.nodes().

    weight : key, optional
        Edge data key to use as weight. If None weights are set to 1.

    dangling: dict, optional
        The outedges to be assigned to any "dangling" nodes, i.e., nodes
        without any outedges. The dict key is the node the outedge points to
        and the dict value is the weight of that outedge. By default,
        dangling nodes are given outedges according to the personalization
        vector (uniform if not specified). This must be selected to result
        in an irreducible transition matrix (see notes below).

    Returns
    -------
    A : 2D NumPy ndarray
        Google matrix of the graph

    Notes
    -----
    The array returned represents the transition matrix of the Markov chain
    used in PageRank. For PageRank to converge to a unique solution (a
    unique stationary distribution), this matrix must be irreducible —
    there must be a path between every pair of nodes — or "rank sinks" are
    possible.

    This implementation works with Multi(Di)Graphs. For multigraphs the
    weight between two nodes is the sum of all edge weights between those
    nodes.

    See Also
    --------
    pagerank
    """
    import numpy as np

    if nodelist is None:
        nodelist = list(G)

    A = nx.to_numpy_array(G, nodelist=nodelist, weight=weight)
    n_nodes = len(G)
    if n_nodes == 0:
        # Empty graph: the 0 x 0 array is already the answer.
        return A

    # Restart (personalization) distribution, normalized to sum to 1.
    if personalization is None:
        p = np.repeat(1.0 / n_nodes, n_nodes)
    else:
        p = np.array([personalization.get(node, 0) for node in nodelist], dtype=float)
        if p.sum() == 0:
            raise ZeroDivisionError
        p /= p.sum()

    # Distribution handed out by dangling (zero out-degree) nodes.
    if dangling is None:
        dangling_weights = p
    else:
        # Convert the dangling dictionary into an array in nodelist order.
        dangling_weights = np.array(
            [dangling.get(node, 0) for node in nodelist], dtype=float
        )
        dangling_weights /= dangling_weights.sum()

    # Rows with zero total out-weight receive the dangling distribution.
    dangling_rows = np.where(A.sum(axis=1) == 0)[0]
    A[dangling_rows] = dangling_weights

    # Normalize every row so it is a probability distribution.
    A /= A.sum(axis=1)[:, np.newaxis]

    return alpha * A + (1 - alpha) * p
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
def _pagerank_numpy(
    G, alpha=0.85, personalization=None, weight="weight", dangling=None
):
    """Returns the PageRank of the nodes in the graph via a dense eigensolver.

    PageRank computes a ranking of the nodes in the graph G based on the
    structure of the incoming links. This variant builds the dense Google
    matrix and extracts the principal eigenvector with NumPy's LAPACK
    interface, which is the fastest and most accurate choice for small
    graphs.

    Parameters
    ----------
    G : graph
        A NetworkX graph. Undirected graphs will be converted to a directed
        graph with two directed edges for each undirected edge.

    alpha : float, optional
        Damping parameter for PageRank, default=0.85.

    personalization: dict, optional
        The "personalization vector": a dictionary keyed by some subset of
        graph nodes with a personalization value for each. At least one
        value must be non-zero; nodes not listed get value zero. By
        default, a uniform distribution is used.

    weight : key, optional
        Edge data key to use as weight. If None weights are set to 1.

    dangling: dict, optional
        The outedges to be assigned to any "dangling" nodes (nodes without
        outedges), keyed by target node with the outedge weight as value.
        Defaults to the personalization vector. Must be selected to give an
        irreducible transition matrix (see notes under google_matrix).

    Returns
    -------
    pagerank : dictionary
        Dictionary of nodes with PageRank as value.

    Examples
    --------
    >>> from networkx.algorithms.link_analysis.pagerank_alg import _pagerank_numpy
    >>> G = nx.DiGraph(nx.path_graph(4))
    >>> pr = _pagerank_numpy(G, alpha=0.9)

    Notes
    -----
    This implementation works with Multi(Di)Graphs. For multigraphs the
    weight between two nodes is the sum of all edge weights between those
    nodes.

    See Also
    --------
    pagerank, google_matrix

    References
    ----------
    .. [1] A. Langville and C. Meyer,
       "A survey of eigenvector methods of web information retrieval."
       http://citeseer.ist.psu.edu/713792.html
    .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry,
       The PageRank citation ranking: Bringing order to the Web. 1999
       http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf
    """
    import numpy as np

    if len(G) == 0:
        return {}
    google = google_matrix(
        G, alpha, personalization=personalization, weight=weight, dangling=dangling
    )
    # The PageRank vector is the left eigenvector of the Google matrix for
    # its dominant eigenvalue, i.e. the right eigenvector of its transpose;
    # use NumPy's LAPACK solver.
    eigvals, eigvecs = np.linalg.eig(google.T)
    dominant = np.argmax(eigvals)
    # Eigenvector of the largest eigenvalue, made real and normalized.
    principal = np.array(eigvecs[:, dominant]).flatten().real
    return dict(zip(G, map(float, principal / principal.sum())))
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
def _pagerank_scipy(
    G,
    alpha=0.85,
    personalization=None,
    max_iter=100,
    tol=1.0e-6,
    nstart=None,
    weight="weight",
    dangling=None,
):
    """Returns the PageRank of the nodes in the graph via sparse power iteration.

    PageRank computes a ranking of the nodes in the graph G based on the
    structure of the incoming links. This variant iterates a SciPy sparse
    matrix representation of the transition matrix.

    Parameters
    ----------
    G : graph
        A NetworkX graph. Undirected graphs will be converted to a directed
        graph with two directed edges for each undirected edge.

    alpha : float, optional
        Damping parameter for PageRank, default=0.85.

    personalization: dict, optional
        The "personalization vector": a dictionary keyed by some subset of
        graph nodes with a personalization value for each. At least one
        value must be non-zero; nodes not listed get value zero. By
        default, a uniform distribution is used.

    max_iter : integer, optional
        Maximum number of iterations in power method eigenvalue solver.

    tol : float, optional
        Error tolerance used to check convergence in power method solver.
        The iteration will stop after a tolerance of ``len(G) * tol`` is
        reached.

    nstart : dictionary, optional
        Starting value of PageRank iteration for each node.

    weight : key, optional
        Edge data key to use as weight. If None weights are set to 1.

    dangling: dict, optional
        The outedges to be assigned to any "dangling" nodes (nodes without
        outedges), keyed by target node with the outedge weight as value.
        Defaults to the personalization vector. Must be selected to give an
        irreducible transition matrix (see notes under google_matrix).

    Returns
    -------
    pagerank : dictionary
        Dictionary of nodes with PageRank as value

    Examples
    --------
    >>> from networkx.algorithms.link_analysis.pagerank_alg import _pagerank_scipy
    >>> G = nx.DiGraph(nx.path_graph(4))
    >>> pr = _pagerank_scipy(G, alpha=0.9)

    Notes
    -----
    This implementation works with Multi(Di)Graphs. For multigraphs the
    weight between two nodes is the sum of all edge weights between those
    nodes.

    See Also
    --------
    pagerank

    Raises
    ------
    PowerIterationFailedConvergence
        If the algorithm fails to converge to the specified tolerance
        within the specified number of iterations of the power iteration
        method.

    References
    ----------
    .. [1] A. Langville and C. Meyer,
       "A survey of eigenvector methods of web information retrieval."
       http://citeseer.ist.psu.edu/713792.html
    .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry,
       The PageRank citation ranking: Bringing order to the Web. 1999
       http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf
    """
    import numpy as np
    import scipy as sp

    n_nodes = len(G)
    if n_nodes == 0:
        return {}

    nodelist = list(G)
    A = nx.to_scipy_sparse_array(G, nodelist=nodelist, weight=weight, dtype=float)

    # Make A row-stochastic by scaling each row with the reciprocal of its
    # out-strength. Zero rows (dangling nodes) are left as zeros here and
    # redistributed explicitly inside the iteration below.
    row_sums = A.sum(axis=1)
    row_sums[row_sums != 0] = 1.0 / row_sums[row_sums != 0]
    # TODO: csr_array
    scaler = sp.sparse.csr_array(sp.sparse.spdiags(row_sums.T, 0, *A.shape))
    A = scaler @ A

    # Starting vector: uniform, or the normalized nstart values.
    if nstart is None:
        x = np.repeat(1.0 / n_nodes, n_nodes)
    else:
        x = np.array([nstart.get(node, 0) for node in nodelist], dtype=float)
        x /= x.sum()

    # Restart (personalization) distribution.
    if personalization is None:
        p = np.repeat(1.0 / n_nodes, n_nodes)
    else:
        p = np.array([personalization.get(node, 0) for node in nodelist], dtype=float)
        if p.sum() == 0:
            raise ZeroDivisionError
        p /= p.sum()

    # Distribution handed out by dangling nodes (zero out-strength rows).
    if dangling is None:
        dangling_weights = p
    else:
        # Convert the dangling dictionary into an array in nodelist order.
        dangling_weights = np.array(
            [dangling.get(node, 0) for node in nodelist], dtype=float
        )
        dangling_weights /= dangling_weights.sum()
    is_dangling = np.where(row_sums == 0)[0]

    # Power iteration: make up to max_iter iterations.
    for _ in range(max_iter):
        xlast = x
        x = alpha * (x @ A + sum(x[is_dangling]) * dangling_weights) + (1 - alpha) * p
        # Check convergence via the l1 norm of the change.
        if np.absolute(x - xlast).sum() < n_nodes * tol:
            return dict(zip(nodelist, map(float, x)))
    raise nx.PowerIterationFailedConvergence(max_iter)
|
valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/__init__.py
ADDED
|
File without changes
|
valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (191 Bytes). View file
|
|
|
valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/test_hits.cpython-310.pyc
ADDED
|
Binary file (2.96 kB). View file
|
|
|
valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/test_pagerank.cpython-310.pyc
ADDED
|
Binary file (7.75 kB). View file
|
|
|
valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/test_hits.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
np = pytest.importorskip("numpy")
|
| 6 |
+
sp = pytest.importorskip("scipy")
|
| 7 |
+
|
| 8 |
+
from networkx.algorithms.link_analysis.hits_alg import (
|
| 9 |
+
_hits_numpy,
|
| 10 |
+
_hits_python,
|
| 11 |
+
_hits_scipy,
|
| 12 |
+
)
|
| 13 |
+
|
| 14 |
+
# Example from
|
| 15 |
+
# A. Langville and C. Meyer, "A survey of eigenvector methods of web
|
| 16 |
+
# information retrieval." http://citeseer.ist.psu.edu/713792.html
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class TestHITS:
    """HITS hubs and authorities checked against the worked example from
    A. Langville and C. Meyer, "A survey of eigenvector methods of web
    information retrieval." http://citeseer.ist.psu.edu/713792.html
    """

    @classmethod
    def setup_class(cls):
        G = nx.DiGraph()
        G.add_edges_from(
            [(1, 3), (1, 5), (2, 1), (3, 5), (5, 4), (5, 3), (6, 5)], weight=1
        )
        cls.G = G
        # Expected authority and hub scores, in sorted-node order.
        cls.G.a = dict(
            zip(sorted(G), [0.000000, 0.000000, 0.366025, 0.133975, 0.500000, 0.000000])
        )
        cls.G.h = dict(
            zip(sorted(G), [0.366025, 0.000000, 0.211325, 0.000000, 0.211325, 0.211325])
        )

    def test_hits_numpy(self):
        hubs, auths = _hits_numpy(self.G)
        for node in self.G:
            assert hubs[node] == pytest.approx(self.G.h[node], abs=1e-4)
            assert auths[node] == pytest.approx(self.G.a[node], abs=1e-4)

    @pytest.mark.parametrize("hits_alg", (nx.hits, _hits_python, _hits_scipy))
    def test_hits(self, hits_alg):
        G = self.G
        # A tight tolerance and a uniform nstart must both reach the
        # reference scores.
        uniform_start = {node: 1.0 / 2 for node in G}
        for kwargs in ({"tol": 1.0e-08}, {"nstart": uniform_start}):
            hubs, auths = hits_alg(G, **kwargs)
            for node in G:
                assert hubs[node] == pytest.approx(G.h[node], abs=1e-4)
                assert auths[node] == pytest.approx(G.a[node], abs=1e-4)

    def test_empty(self):
        G = nx.Graph()
        for alg in (nx.hits, _hits_numpy, _hits_python, _hits_scipy):
            assert alg(G) == ({}, {})

    def test_hits_not_convergent(self):
        # A 50-node path cannot converge within 0 or 1 iterations.
        G = nx.path_graph(50)
        for alg in (_hits_scipy, _hits_python, nx.hits):
            for bad_max_iter in (1, 0):
                with pytest.raises(nx.PowerIterationFailedConvergence):
                    alg(G, max_iter=bad_max_iter)
|
valley/lib/python3.10/site-packages/networkx/algorithms/link_analysis/tests/test_pagerank.py
ADDED
|
@@ -0,0 +1,217 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import random
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.classes.tests import dispatch_interface
|
| 7 |
+
|
| 8 |
+
np = pytest.importorskip("numpy")
|
| 9 |
+
pytest.importorskip("scipy")
|
| 10 |
+
|
| 11 |
+
from networkx.algorithms.link_analysis.pagerank_alg import (
|
| 12 |
+
_pagerank_numpy,
|
| 13 |
+
_pagerank_python,
|
| 14 |
+
_pagerank_scipy,
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
# Example from
|
| 18 |
+
# A. Langville and C. Meyer, "A survey of eigenvector methods of web
|
| 19 |
+
# information retrieval." http://citeseer.ist.psu.edu/713792.html
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class TestPageRank:
    """PageRank implementations checked against the worked example from
    A. Langville and C. Meyer, "A survey of eigenvector methods of web
    information retrieval." http://citeseer.ist.psu.edu/713792.html
    """

    @classmethod
    def setup_class(cls):
        G = nx.DiGraph()
        # Node 2 has no out-edges, so it is a dangling node.
        G.add_edges_from(
            [
                (1, 2),
                (1, 3),
                (3, 1),
                (3, 2),
                (3, 5),
                (4, 5),
                (4, 6),
                (5, 4),
                (5, 6),
                (6, 4),
            ]
        )
        cls.G = G
        # Reference ranking in sorted-node order.
        cls.G.pagerank = dict(
            zip(
                sorted(G),
                [
                    0.03721197,
                    0.05395735,
                    0.04150565,
                    0.37508082,
                    0.20599833,
                    0.28624589,
                ],
            )
        )
        cls.dangling_node_index = 1
        cls.dangling_edges = {1: 2, 2: 3, 3: 0, 4: 0, 5: 0, 6: 0}
        cls.G.dangling_pagerank = dict(
            zip(
                sorted(G),
                [0.10844518, 0.18618601, 0.0710892, 0.2683668, 0.15919783, 0.20671497],
            )
        )

    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python))
    def test_pagerank(self, alg):
        G = self.G
        result = alg(G, alpha=0.9, tol=1.0e-08)
        for node in G:
            assert result[node] == pytest.approx(G.pagerank[node], abs=1e-4)

        # A random starting vector must converge to the same ranking.
        nstart = {node: random.random() for node in G}
        result = alg(G, alpha=0.9, tol=1.0e-08, nstart=nstart)
        for node in G:
            assert result[node] == pytest.approx(G.pagerank[node], abs=1e-4)

    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python))
    def test_pagerank_max_iter(self, alg):
        with pytest.raises(nx.PowerIterationFailedConvergence):
            alg(self.G, max_iter=0)

    def test_numpy_pagerank(self):
        result = _pagerank_numpy(self.G, alpha=0.9)
        for node in self.G:
            assert result[node] == pytest.approx(self.G.pagerank[node], abs=1e-4)

    # This additionally tests the @nx._dispatchable mechanism, treating
    # nx.google_matrix as if it were a re-implementation from another package
    @pytest.mark.parametrize("wrapper", [lambda x: x, dispatch_interface.convert])
    def test_google_matrix(self, wrapper):
        G = wrapper(self.G)
        M = nx.google_matrix(G, alpha=0.9, nodelist=sorted(G))
        _, ev = np.linalg.eig(M.T)
        stationary = ev[:, 0] / ev[:, 0].sum()
        for computed, expected in zip(stationary, self.G.pagerank.values()):
            assert computed == pytest.approx(expected, abs=1e-7)

    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python, _pagerank_numpy))
    def test_personalization(self, alg):
        G = nx.complete_graph(4)
        personalize = {0: 1, 1: 1, 2: 4, 3: 4}
        answer = {
            0: 0.23246732615667579,
            1: 0.23246732615667579,
            2: 0.267532673843324,
            3: 0.2675326738433241,
        }
        result = alg(G, alpha=0.85, personalization=personalize)
        for node in G:
            assert result[node] == pytest.approx(answer[node], abs=1e-4)

    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python, nx.google_matrix))
    def test_zero_personalization_vector(self, alg):
        G = nx.complete_graph(4)
        personalize = {0: 0, 1: 0, 2: 0, 3: 0}
        pytest.raises(ZeroDivisionError, alg, G, personalization=personalize)

    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python))
    def test_one_nonzero_personalization_value(self, alg):
        G = nx.complete_graph(4)
        personalize = {0: 0, 1: 0, 2: 0, 3: 1}
        answer = {
            0: 0.22077931820379187,
            1: 0.22077931820379187,
            2: 0.22077931820379187,
            3: 0.3376620453886241,
        }
        result = alg(G, alpha=0.85, personalization=personalize)
        for node in G:
            assert result[node] == pytest.approx(answer[node], abs=1e-4)

    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python))
    def test_incomplete_personalization(self, alg):
        # Missing nodes get personalization weight zero, so {3: 1} is
        # equivalent to {0: 0, 1: 0, 2: 0, 3: 1}.
        G = nx.complete_graph(4)
        personalize = {3: 1}
        answer = {
            0: 0.22077931820379187,
            1: 0.22077931820379187,
            2: 0.22077931820379187,
            3: 0.3376620453886241,
        }
        result = alg(G, alpha=0.85, personalization=personalize)
        for node in G:
            assert result[node] == pytest.approx(answer[node], abs=1e-4)

    def test_dangling_matrix(self):
        """
        Tests that the google_matrix doesn't change except for the dangling
        nodes.
        """
        G = self.G
        dangling = self.dangling_edges
        dangling_sum = sum(dangling.values())
        M1 = nx.google_matrix(G, personalization=dangling)
        M2 = nx.google_matrix(G, personalization=dangling, dangling=dangling)
        for i in range(len(G)):
            for j in range(len(G)):
                if i == self.dangling_node_index and (j + 1) in dangling:
                    expected = dangling[j + 1] / dangling_sum
                else:
                    expected = M1[i, j]
                assert M2[i, j] == pytest.approx(expected, abs=1e-4)

    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python, _pagerank_numpy))
    def test_dangling_pagerank(self, alg):
        pr = alg(self.G, dangling=self.dangling_edges)
        for node in self.G:
            assert pr[node] == pytest.approx(self.G.dangling_pagerank[node], abs=1e-4)

    def test_empty(self):
        G = nx.Graph()
        for alg in (nx.pagerank, _pagerank_python, _pagerank_numpy):
            assert alg(G) == {}
        assert nx.google_matrix(G).shape == (0, 0)

    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python))
    def test_multigraph(self, alg):
        # Parallel edge weights are summed; note the distinct nodes 3 and "3".
        G = nx.MultiGraph()
        G.add_edges_from([(1, 2), (1, 2), (1, 2), (2, 3), (2, 3), ("3", 3), ("3", 3)])
        answer = {
            1: 0.21066048614468322,
            2: 0.3395308825985378,
            3: 0.28933951385531687,
            "3": 0.16046911740146227,
        }
        result = alg(G)
        for node in G:
            assert result[node] == pytest.approx(answer[node], abs=1e-4)
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
class TestPageRankScipy(TestPageRank):
    """Runs every TestPageRank case plus scipy-specific variants."""

    def test_scipy_pagerank(self):
        G = self.G
        p = _pagerank_scipy(G, alpha=0.9, tol=1.0e-08)
        for n in G:
            assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4)

        # BUG FIX: the result of the personalized run was previously
        # computed and never checked. With a random personalization the
        # exact ranking is unknown, but it must still be a probability
        # distribution over the nodes.
        personalize = {n: random.random() for n in G}
        p = _pagerank_scipy(G, alpha=0.9, tol=1.0e-08, personalization=personalize)
        assert sum(p.values()) == pytest.approx(1, abs=1e-4)
        assert set(p) == set(G)

        # A random starting vector must converge to the same ranking.
        nstart = {n: random.random() for n in G}
        p = _pagerank_scipy(G, alpha=0.9, tol=1.0e-08, nstart=nstart)
        for n in G:
            assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4)

    def test_scipy_pagerank_max_iter(self):
        with pytest.raises(nx.PowerIterationFailedConvergence):
            _pagerank_scipy(self.G, max_iter=0)

    def test_dangling_scipy_pagerank(self):
        pr = _pagerank_scipy(self.G, dangling=self.dangling_edges)
        for n in self.G:
            assert pr[n] == pytest.approx(self.G.dangling_pagerank[n], abs=1e-4)

    def test_empty_scipy(self):
        assert _pagerank_scipy(nx.Graph()) == {}
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/__init__.py
ADDED
|
File without changes
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_asteroidal.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import networkx as nx
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def test_is_at_free():
    """Spot-check ``is_at_free`` on graphs with known AT-free status."""
    is_at_free = nx.asteroidal.is_at_free

    # Graphs without an asteroidal triple.
    assert is_at_free(nx.path_graph(6))
    assert is_at_free(nx.complete_graph(2))
    assert is_at_free(nx.complete_graph(6))

    # Graphs containing an asteroidal triple.
    assert not is_at_free(nx.cycle_graph(6))
    assert not is_at_free(nx.petersen_graph())
    assert not is_at_free(nx.line_graph(nx.complete_graph(6)))
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_boundary.py
ADDED
|
@@ -0,0 +1,154 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Unit tests for the :mod:`networkx.algorithms.boundary` module."""
|
| 2 |
+
|
| 3 |
+
from itertools import combinations
|
| 4 |
+
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
from networkx import convert_node_labels_to_integers as cnlti
|
| 9 |
+
from networkx.utils import edges_equal
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class TestNodeBoundary:
    """Unit tests for the :func:`~networkx.node_boundary` function."""

    def test_null_graph(self):
        """The null graph has an empty node boundary for any arguments."""
        null = nx.null_graph()
        for nbunch1, nbunch2 in (
            ([], None),
            ([], []),
            ([1, 2, 3], None),
            ([1, 2, 3], [4, 5, 6]),
            ([1, 2, 3], [3, 4, 5]),
        ):
            assert nx.node_boundary(null, nbunch1, nbunch2) == set()

    def test_path_graph(self):
        P10 = cnlti(nx.path_graph(10), first_label=1)
        assert nx.node_boundary(P10, []) == set()
        assert nx.node_boundary(P10, [], []) == set()
        # A run of path nodes is bounded by its outside neighbors.
        for interior, expected in (
            ([1, 2, 3], {4}),
            ([4, 5, 6], {3, 7}),
            ([3, 4, 5, 6, 7], {2, 8}),
            ([8, 9, 10], {7}),
        ):
            assert nx.node_boundary(P10, interior) == expected
        assert nx.node_boundary(P10, [4, 5, 6], [9, 10]) == set()

    def test_complete_graph(self):
        K10 = cnlti(nx.complete_graph(10), first_label=1)
        assert nx.node_boundary(K10, []) == set()
        assert nx.node_boundary(K10, [], []) == set()
        # In a complete graph the boundary is everything outside the set.
        for interior, expected in (
            ([1, 2, 3], {4, 5, 6, 7, 8, 9, 10}),
            ([4, 5, 6], {1, 2, 3, 7, 8, 9, 10}),
            ([3, 4, 5, 6, 7], {1, 2, 8, 9, 10}),
        ):
            assert nx.node_boundary(K10, interior) == expected
        assert nx.node_boundary(K10, [4, 5, 6], []) == set()
        assert nx.node_boundary(K10, K10) == set()
        assert nx.node_boundary(K10, [1, 2, 3], [3, 4, 5]) == {4, 5}

    def test_petersen(self):
        """Check boundaries in the petersen graph

        cheeger(G,k)=min(|bdy(S)|/|S| for |S|=k, 0<k<=|V(G)|/2)

        """

        def cheeger(G, k):
            return min(len(nx.node_boundary(G, nn)) / k for nn in combinations(G, k))

        P = nx.petersen_graph()
        for k, ratio in ((1, 3.00), (2, 2.00), (3, 1.67), (4, 1.00), (5, 0.80)):
            assert cheeger(P, k) == pytest.approx(ratio, abs=1e-2)

    def test_directed(self):
        """Node boundary of a directed cycle follows out-edges only."""
        G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)])
        assert nx.node_boundary(G, {0, 1}) == {2}

    def test_multigraph(self):
        """Parallel edges do not add extra nodes to the boundary."""
        G = nx.MultiGraph(list(nx.cycle_graph(5).edges()) * 2)
        assert nx.node_boundary(G, {0, 1}) == {2, 4}

    def test_multidigraph(self):
        """Parallel directed edges do not add extra nodes to the boundary."""
        edges = [(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)]
        G = nx.MultiDiGraph(edges * 2)
        assert nx.node_boundary(G, {0, 1}) == {2}
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
class TestEdgeBoundary:
|
| 89 |
+
"""Unit tests for the :func:`~networkx.edge_boundary` function."""
|
| 90 |
+
|
| 91 |
+
def test_null_graph(self):
|
| 92 |
+
null = nx.null_graph()
|
| 93 |
+
assert list(nx.edge_boundary(null, [])) == []
|
| 94 |
+
assert list(nx.edge_boundary(null, [], [])) == []
|
| 95 |
+
assert list(nx.edge_boundary(null, [1, 2, 3])) == []
|
| 96 |
+
assert list(nx.edge_boundary(null, [1, 2, 3], [4, 5, 6])) == []
|
| 97 |
+
assert list(nx.edge_boundary(null, [1, 2, 3], [3, 4, 5])) == []
|
| 98 |
+
|
| 99 |
+
def test_path_graph(self):
|
| 100 |
+
P10 = cnlti(nx.path_graph(10), first_label=1)
|
| 101 |
+
assert list(nx.edge_boundary(P10, [])) == []
|
| 102 |
+
assert list(nx.edge_boundary(P10, [], [])) == []
|
| 103 |
+
assert list(nx.edge_boundary(P10, [1, 2, 3])) == [(3, 4)]
|
| 104 |
+
assert sorted(nx.edge_boundary(P10, [4, 5, 6])) == [(4, 3), (6, 7)]
|
| 105 |
+
assert sorted(nx.edge_boundary(P10, [3, 4, 5, 6, 7])) == [(3, 2), (7, 8)]
|
| 106 |
+
assert list(nx.edge_boundary(P10, [8, 9, 10])) == [(8, 7)]
|
| 107 |
+
assert sorted(nx.edge_boundary(P10, [4, 5, 6], [9, 10])) == []
|
| 108 |
+
assert list(nx.edge_boundary(P10, [1, 2, 3], [3, 4, 5])) == [(2, 3), (3, 4)]
|
| 109 |
+
|
| 110 |
+
def test_complete_graph(self):
|
| 111 |
+
K10 = cnlti(nx.complete_graph(10), first_label=1)
|
| 112 |
+
|
| 113 |
+
def ilen(iterable):
|
| 114 |
+
return sum(1 for i in iterable)
|
| 115 |
+
|
| 116 |
+
assert list(nx.edge_boundary(K10, [])) == []
|
| 117 |
+
assert list(nx.edge_boundary(K10, [], [])) == []
|
| 118 |
+
assert ilen(nx.edge_boundary(K10, [1, 2, 3])) == 21
|
| 119 |
+
assert ilen(nx.edge_boundary(K10, [4, 5, 6, 7])) == 24
|
| 120 |
+
assert ilen(nx.edge_boundary(K10, [3, 4, 5, 6, 7])) == 25
|
| 121 |
+
assert ilen(nx.edge_boundary(K10, [8, 9, 10])) == 21
|
| 122 |
+
assert edges_equal(
|
| 123 |
+
nx.edge_boundary(K10, [4, 5, 6], [9, 10]),
|
| 124 |
+
[(4, 9), (4, 10), (5, 9), (5, 10), (6, 9), (6, 10)],
|
| 125 |
+
)
|
| 126 |
+
assert edges_equal(
|
| 127 |
+
nx.edge_boundary(K10, [1, 2, 3], [3, 4, 5]),
|
| 128 |
+
[(1, 3), (1, 4), (1, 5), (2, 3), (2, 4), (2, 5), (3, 4), (3, 5)],
|
| 129 |
+
)
|
| 130 |
+
|
| 131 |
+
def test_directed(self):
|
| 132 |
+
"""Tests the edge boundary of a directed graph."""
|
| 133 |
+
G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)])
|
| 134 |
+
S = {0, 1}
|
| 135 |
+
boundary = list(nx.edge_boundary(G, S))
|
| 136 |
+
expected = [(1, 2)]
|
| 137 |
+
assert boundary == expected
|
| 138 |
+
|
| 139 |
+
def test_multigraph(self):
|
| 140 |
+
"""Tests the edge boundary of a multigraph."""
|
| 141 |
+
G = nx.MultiGraph(list(nx.cycle_graph(5).edges()) * 2)
|
| 142 |
+
S = {0, 1}
|
| 143 |
+
boundary = list(nx.edge_boundary(G, S))
|
| 144 |
+
expected = [(0, 4), (0, 4), (1, 2), (1, 2)]
|
| 145 |
+
assert boundary == expected
|
| 146 |
+
|
| 147 |
+
def test_multidigraph(self):
|
| 148 |
+
"""Tests the edge boundary of a multidigraph."""
|
| 149 |
+
edges = [(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)]
|
| 150 |
+
G = nx.MultiDiGraph(edges * 2)
|
| 151 |
+
S = {0, 1}
|
| 152 |
+
boundary = list(nx.edge_boundary(G, S))
|
| 153 |
+
expected = [(1, 2), (1, 2)]
|
| 154 |
+
assert boundary == expected
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_bridges.py
ADDED
|
@@ -0,0 +1,144 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Unit tests for bridge-finding algorithms."""
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class TestBridges:
|
| 9 |
+
"""Unit tests for the bridge-finding function."""
|
| 10 |
+
|
| 11 |
+
def test_single_bridge(self):
|
| 12 |
+
edges = [
|
| 13 |
+
# DFS tree edges.
|
| 14 |
+
(1, 2),
|
| 15 |
+
(2, 3),
|
| 16 |
+
(3, 4),
|
| 17 |
+
(3, 5),
|
| 18 |
+
(5, 6),
|
| 19 |
+
(6, 7),
|
| 20 |
+
(7, 8),
|
| 21 |
+
(5, 9),
|
| 22 |
+
(9, 10),
|
| 23 |
+
# Nontree edges.
|
| 24 |
+
(1, 3),
|
| 25 |
+
(1, 4),
|
| 26 |
+
(2, 5),
|
| 27 |
+
(5, 10),
|
| 28 |
+
(6, 8),
|
| 29 |
+
]
|
| 30 |
+
G = nx.Graph(edges)
|
| 31 |
+
source = 1
|
| 32 |
+
bridges = list(nx.bridges(G, source))
|
| 33 |
+
assert bridges == [(5, 6)]
|
| 34 |
+
|
| 35 |
+
def test_barbell_graph(self):
|
| 36 |
+
# The (3, 0) barbell graph has two triangles joined by a single edge.
|
| 37 |
+
G = nx.barbell_graph(3, 0)
|
| 38 |
+
source = 0
|
| 39 |
+
bridges = list(nx.bridges(G, source))
|
| 40 |
+
assert bridges == [(2, 3)]
|
| 41 |
+
|
| 42 |
+
def test_multiedge_bridge(self):
|
| 43 |
+
edges = [
|
| 44 |
+
(0, 1),
|
| 45 |
+
(0, 2),
|
| 46 |
+
(1, 2),
|
| 47 |
+
(1, 2),
|
| 48 |
+
(2, 3),
|
| 49 |
+
(3, 4),
|
| 50 |
+
(3, 4),
|
| 51 |
+
]
|
| 52 |
+
G = nx.MultiGraph(edges)
|
| 53 |
+
assert list(nx.bridges(G)) == [(2, 3)]
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
class TestHasBridges:
|
| 57 |
+
"""Unit tests for the has bridges function."""
|
| 58 |
+
|
| 59 |
+
def test_single_bridge(self):
|
| 60 |
+
edges = [
|
| 61 |
+
# DFS tree edges.
|
| 62 |
+
(1, 2),
|
| 63 |
+
(2, 3),
|
| 64 |
+
(3, 4),
|
| 65 |
+
(3, 5),
|
| 66 |
+
(5, 6), # The only bridge edge
|
| 67 |
+
(6, 7),
|
| 68 |
+
(7, 8),
|
| 69 |
+
(5, 9),
|
| 70 |
+
(9, 10),
|
| 71 |
+
# Nontree edges.
|
| 72 |
+
(1, 3),
|
| 73 |
+
(1, 4),
|
| 74 |
+
(2, 5),
|
| 75 |
+
(5, 10),
|
| 76 |
+
(6, 8),
|
| 77 |
+
]
|
| 78 |
+
G = nx.Graph(edges)
|
| 79 |
+
assert nx.has_bridges(G) # Default root
|
| 80 |
+
assert nx.has_bridges(G, root=1) # arbitrary root in G
|
| 81 |
+
|
| 82 |
+
def test_has_bridges_raises_root_not_in_G(self):
|
| 83 |
+
G = nx.Graph()
|
| 84 |
+
G.add_nodes_from([1, 2, 3])
|
| 85 |
+
with pytest.raises(nx.NodeNotFound):
|
| 86 |
+
nx.has_bridges(G, root=6)
|
| 87 |
+
|
| 88 |
+
def test_multiedge_bridge(self):
|
| 89 |
+
edges = [
|
| 90 |
+
(0, 1),
|
| 91 |
+
(0, 2),
|
| 92 |
+
(1, 2),
|
| 93 |
+
(1, 2),
|
| 94 |
+
(2, 3),
|
| 95 |
+
(3, 4),
|
| 96 |
+
(3, 4),
|
| 97 |
+
]
|
| 98 |
+
G = nx.MultiGraph(edges)
|
| 99 |
+
assert nx.has_bridges(G)
|
| 100 |
+
# Make every edge a multiedge
|
| 101 |
+
G.add_edges_from([(0, 1), (0, 2), (2, 3)])
|
| 102 |
+
assert not nx.has_bridges(G)
|
| 103 |
+
|
| 104 |
+
def test_bridges_multiple_components(self):
|
| 105 |
+
G = nx.Graph()
|
| 106 |
+
nx.add_path(G, [0, 1, 2]) # One connected component
|
| 107 |
+
nx.add_path(G, [4, 5, 6]) # Another connected component
|
| 108 |
+
assert list(nx.bridges(G, root=4)) == [(4, 5), (5, 6)]
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
class TestLocalBridges:
|
| 112 |
+
"""Unit tests for the local_bridge function."""
|
| 113 |
+
|
| 114 |
+
@classmethod
|
| 115 |
+
def setup_class(cls):
|
| 116 |
+
cls.BB = nx.barbell_graph(4, 0)
|
| 117 |
+
cls.square = nx.cycle_graph(4)
|
| 118 |
+
cls.tri = nx.cycle_graph(3)
|
| 119 |
+
|
| 120 |
+
def test_nospan(self):
|
| 121 |
+
expected = {(3, 4), (4, 3)}
|
| 122 |
+
assert next(nx.local_bridges(self.BB, with_span=False)) in expected
|
| 123 |
+
assert set(nx.local_bridges(self.square, with_span=False)) == self.square.edges
|
| 124 |
+
assert list(nx.local_bridges(self.tri, with_span=False)) == []
|
| 125 |
+
|
| 126 |
+
def test_no_weight(self):
|
| 127 |
+
inf = float("inf")
|
| 128 |
+
expected = {(3, 4, inf), (4, 3, inf)}
|
| 129 |
+
assert next(nx.local_bridges(self.BB)) in expected
|
| 130 |
+
expected = {(u, v, 3) for u, v in self.square.edges}
|
| 131 |
+
assert set(nx.local_bridges(self.square)) == expected
|
| 132 |
+
assert list(nx.local_bridges(self.tri)) == []
|
| 133 |
+
|
| 134 |
+
def test_weight(self):
|
| 135 |
+
inf = float("inf")
|
| 136 |
+
G = self.square.copy()
|
| 137 |
+
|
| 138 |
+
G.edges[1, 2]["weight"] = 2
|
| 139 |
+
expected = {(u, v, 5 - wt) for u, v, wt in G.edges(data="weight", default=1)}
|
| 140 |
+
assert set(nx.local_bridges(G, weight="weight")) == expected
|
| 141 |
+
|
| 142 |
+
expected = {(u, v, 6) for u, v in G.edges}
|
| 143 |
+
lb = nx.local_bridges(G, weight=lambda u, v, d: 2)
|
| 144 |
+
assert set(lb) == expected
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_broadcasting.py
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Unit tests for the broadcasting module."""
|
| 2 |
+
import math
|
| 3 |
+
|
| 4 |
+
import networkx as nx
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def test_example_tree_broadcast():
|
| 8 |
+
"""
|
| 9 |
+
Test the BROADCAST algorithm on the example in the paper titled: "Information Dissemination in Trees"
|
| 10 |
+
"""
|
| 11 |
+
edge_list = [
|
| 12 |
+
(0, 1),
|
| 13 |
+
(1, 2),
|
| 14 |
+
(2, 7),
|
| 15 |
+
(3, 4),
|
| 16 |
+
(5, 4),
|
| 17 |
+
(4, 7),
|
| 18 |
+
(6, 7),
|
| 19 |
+
(7, 9),
|
| 20 |
+
(8, 9),
|
| 21 |
+
(9, 13),
|
| 22 |
+
(13, 14),
|
| 23 |
+
(14, 15),
|
| 24 |
+
(14, 16),
|
| 25 |
+
(14, 17),
|
| 26 |
+
(13, 11),
|
| 27 |
+
(11, 10),
|
| 28 |
+
(11, 12),
|
| 29 |
+
(13, 18),
|
| 30 |
+
(18, 19),
|
| 31 |
+
(18, 20),
|
| 32 |
+
]
|
| 33 |
+
G = nx.Graph(edge_list)
|
| 34 |
+
b_T, b_C = nx.tree_broadcast_center(G)
|
| 35 |
+
assert b_T == 6
|
| 36 |
+
assert b_C == {13, 9}
|
| 37 |
+
# test broadcast time from specific vertex
|
| 38 |
+
assert nx.tree_broadcast_time(G, 17) == 8
|
| 39 |
+
assert nx.tree_broadcast_time(G, 3) == 9
|
| 40 |
+
# test broadcast time of entire tree
|
| 41 |
+
assert nx.tree_broadcast_time(G) == 10
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def test_path_broadcast():
|
| 45 |
+
for i in range(2, 12):
|
| 46 |
+
G = nx.path_graph(i)
|
| 47 |
+
b_T, b_C = nx.tree_broadcast_center(G)
|
| 48 |
+
assert b_T == math.ceil(i / 2)
|
| 49 |
+
assert b_C == {
|
| 50 |
+
math.ceil(i / 2),
|
| 51 |
+
math.floor(i / 2),
|
| 52 |
+
math.ceil(i / 2 - 1),
|
| 53 |
+
math.floor(i / 2 - 1),
|
| 54 |
+
}
|
| 55 |
+
assert nx.tree_broadcast_time(G) == i - 1
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def test_empty_graph_broadcast():
|
| 59 |
+
H = nx.empty_graph(1)
|
| 60 |
+
b_T, b_C = nx.tree_broadcast_center(H)
|
| 61 |
+
assert b_T == 0
|
| 62 |
+
assert b_C == {0}
|
| 63 |
+
assert nx.tree_broadcast_time(H) == 0
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
def test_star_broadcast():
|
| 67 |
+
for i in range(4, 12):
|
| 68 |
+
G = nx.star_graph(i)
|
| 69 |
+
b_T, b_C = nx.tree_broadcast_center(G)
|
| 70 |
+
assert b_T == i
|
| 71 |
+
assert b_C == set(G.nodes())
|
| 72 |
+
assert nx.tree_broadcast_time(G) == b_T
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def test_binomial_tree_broadcast():
|
| 76 |
+
for i in range(2, 8):
|
| 77 |
+
G = nx.binomial_tree(i)
|
| 78 |
+
b_T, b_C = nx.tree_broadcast_center(G)
|
| 79 |
+
assert b_T == i
|
| 80 |
+
assert b_C == {0, 2 ** (i - 1)}
|
| 81 |
+
assert nx.tree_broadcast_time(G) == 2 * i - 1
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_chordal.py
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class TestMCS:
|
| 7 |
+
@classmethod
|
| 8 |
+
def setup_class(cls):
|
| 9 |
+
# simple graph
|
| 10 |
+
connected_chordal_G = nx.Graph()
|
| 11 |
+
connected_chordal_G.add_edges_from(
|
| 12 |
+
[
|
| 13 |
+
(1, 2),
|
| 14 |
+
(1, 3),
|
| 15 |
+
(2, 3),
|
| 16 |
+
(2, 4),
|
| 17 |
+
(3, 4),
|
| 18 |
+
(3, 5),
|
| 19 |
+
(3, 6),
|
| 20 |
+
(4, 5),
|
| 21 |
+
(4, 6),
|
| 22 |
+
(5, 6),
|
| 23 |
+
]
|
| 24 |
+
)
|
| 25 |
+
cls.connected_chordal_G = connected_chordal_G
|
| 26 |
+
|
| 27 |
+
chordal_G = nx.Graph()
|
| 28 |
+
chordal_G.add_edges_from(
|
| 29 |
+
[
|
| 30 |
+
(1, 2),
|
| 31 |
+
(1, 3),
|
| 32 |
+
(2, 3),
|
| 33 |
+
(2, 4),
|
| 34 |
+
(3, 4),
|
| 35 |
+
(3, 5),
|
| 36 |
+
(3, 6),
|
| 37 |
+
(4, 5),
|
| 38 |
+
(4, 6),
|
| 39 |
+
(5, 6),
|
| 40 |
+
(7, 8),
|
| 41 |
+
]
|
| 42 |
+
)
|
| 43 |
+
chordal_G.add_node(9)
|
| 44 |
+
cls.chordal_G = chordal_G
|
| 45 |
+
|
| 46 |
+
non_chordal_G = nx.Graph()
|
| 47 |
+
non_chordal_G.add_edges_from([(1, 2), (1, 3), (2, 4), (2, 5), (3, 4), (3, 5)])
|
| 48 |
+
cls.non_chordal_G = non_chordal_G
|
| 49 |
+
|
| 50 |
+
self_loop_G = nx.Graph()
|
| 51 |
+
self_loop_G.add_edges_from([(1, 1)])
|
| 52 |
+
cls.self_loop_G = self_loop_G
|
| 53 |
+
|
| 54 |
+
@pytest.mark.parametrize("G", (nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()))
|
| 55 |
+
def test_is_chordal_not_implemented(self, G):
|
| 56 |
+
with pytest.raises(nx.NetworkXNotImplemented):
|
| 57 |
+
nx.is_chordal(G)
|
| 58 |
+
|
| 59 |
+
def test_is_chordal(self):
|
| 60 |
+
assert not nx.is_chordal(self.non_chordal_G)
|
| 61 |
+
assert nx.is_chordal(self.chordal_G)
|
| 62 |
+
assert nx.is_chordal(self.connected_chordal_G)
|
| 63 |
+
assert nx.is_chordal(nx.Graph())
|
| 64 |
+
assert nx.is_chordal(nx.complete_graph(3))
|
| 65 |
+
assert nx.is_chordal(nx.cycle_graph(3))
|
| 66 |
+
assert not nx.is_chordal(nx.cycle_graph(5))
|
| 67 |
+
assert nx.is_chordal(self.self_loop_G)
|
| 68 |
+
|
| 69 |
+
def test_induced_nodes(self):
|
| 70 |
+
G = nx.generators.classic.path_graph(10)
|
| 71 |
+
Induced_nodes = nx.find_induced_nodes(G, 1, 9, 2)
|
| 72 |
+
assert Induced_nodes == {1, 2, 3, 4, 5, 6, 7, 8, 9}
|
| 73 |
+
pytest.raises(
|
| 74 |
+
nx.NetworkXTreewidthBoundExceeded, nx.find_induced_nodes, G, 1, 9, 1
|
| 75 |
+
)
|
| 76 |
+
Induced_nodes = nx.find_induced_nodes(self.chordal_G, 1, 6)
|
| 77 |
+
assert Induced_nodes == {1, 2, 4, 6}
|
| 78 |
+
pytest.raises(nx.NetworkXError, nx.find_induced_nodes, self.non_chordal_G, 1, 5)
|
| 79 |
+
|
| 80 |
+
def test_graph_treewidth(self):
|
| 81 |
+
with pytest.raises(nx.NetworkXError, match="Input graph is not chordal"):
|
| 82 |
+
nx.chordal_graph_treewidth(self.non_chordal_G)
|
| 83 |
+
|
| 84 |
+
def test_chordal_find_cliques(self):
|
| 85 |
+
cliques = {
|
| 86 |
+
frozenset([9]),
|
| 87 |
+
frozenset([7, 8]),
|
| 88 |
+
frozenset([1, 2, 3]),
|
| 89 |
+
frozenset([2, 3, 4]),
|
| 90 |
+
frozenset([3, 4, 5, 6]),
|
| 91 |
+
}
|
| 92 |
+
assert set(nx.chordal_graph_cliques(self.chordal_G)) == cliques
|
| 93 |
+
with pytest.raises(nx.NetworkXError, match="Input graph is not chordal"):
|
| 94 |
+
set(nx.chordal_graph_cliques(self.non_chordal_G))
|
| 95 |
+
with pytest.raises(nx.NetworkXError, match="Input graph is not chordal"):
|
| 96 |
+
set(nx.chordal_graph_cliques(self.self_loop_G))
|
| 97 |
+
|
| 98 |
+
def test_chordal_find_cliques_path(self):
|
| 99 |
+
G = nx.path_graph(10)
|
| 100 |
+
cliqueset = nx.chordal_graph_cliques(G)
|
| 101 |
+
for u, v in G.edges():
|
| 102 |
+
assert frozenset([u, v]) in cliqueset or frozenset([v, u]) in cliqueset
|
| 103 |
+
|
| 104 |
+
def test_chordal_find_cliquesCC(self):
|
| 105 |
+
cliques = {frozenset([1, 2, 3]), frozenset([2, 3, 4]), frozenset([3, 4, 5, 6])}
|
| 106 |
+
cgc = nx.chordal_graph_cliques
|
| 107 |
+
assert set(cgc(self.connected_chordal_G)) == cliques
|
| 108 |
+
|
| 109 |
+
def test_complete_to_chordal_graph(self):
|
| 110 |
+
fgrg = nx.fast_gnp_random_graph
|
| 111 |
+
test_graphs = [
|
| 112 |
+
nx.barbell_graph(6, 2),
|
| 113 |
+
nx.cycle_graph(15),
|
| 114 |
+
nx.wheel_graph(20),
|
| 115 |
+
nx.grid_graph([10, 4]),
|
| 116 |
+
nx.ladder_graph(15),
|
| 117 |
+
nx.star_graph(5),
|
| 118 |
+
nx.bull_graph(),
|
| 119 |
+
fgrg(20, 0.3, seed=1),
|
| 120 |
+
]
|
| 121 |
+
for G in test_graphs:
|
| 122 |
+
H, a = nx.complete_to_chordal_graph(G)
|
| 123 |
+
assert nx.is_chordal(H)
|
| 124 |
+
assert len(a) == H.number_of_nodes()
|
| 125 |
+
if nx.is_chordal(G):
|
| 126 |
+
assert G.number_of_edges() == H.number_of_edges()
|
| 127 |
+
assert set(a.values()) == {0}
|
| 128 |
+
else:
|
| 129 |
+
assert len(set(a.values())) == H.number_of_nodes()
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_clique.py
ADDED
|
@@ -0,0 +1,291 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx import convert_node_labels_to_integers as cnlti
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class TestCliques:
|
| 8 |
+
def setup_method(self):
|
| 9 |
+
z = [3, 4, 3, 4, 2, 4, 2, 1, 1, 1, 1]
|
| 10 |
+
self.G = cnlti(nx.generators.havel_hakimi_graph(z), first_label=1)
|
| 11 |
+
self.cl = list(nx.find_cliques(self.G))
|
| 12 |
+
H = nx.complete_graph(6)
|
| 13 |
+
H = nx.relabel_nodes(H, {i: i + 1 for i in range(6)})
|
| 14 |
+
H.remove_edges_from([(2, 6), (2, 5), (2, 4), (1, 3), (5, 3)])
|
| 15 |
+
self.H = H
|
| 16 |
+
|
| 17 |
+
def test_find_cliques1(self):
|
| 18 |
+
cl = list(nx.find_cliques(self.G))
|
| 19 |
+
rcl = nx.find_cliques_recursive(self.G)
|
| 20 |
+
expected = [[2, 6, 1, 3], [2, 6, 4], [5, 4, 7], [8, 9], [10, 11]]
|
| 21 |
+
assert sorted(map(sorted, cl)) == sorted(map(sorted, rcl))
|
| 22 |
+
assert sorted(map(sorted, cl)) == sorted(map(sorted, expected))
|
| 23 |
+
|
| 24 |
+
def test_selfloops(self):
|
| 25 |
+
self.G.add_edge(1, 1)
|
| 26 |
+
cl = list(nx.find_cliques(self.G))
|
| 27 |
+
rcl = list(nx.find_cliques_recursive(self.G))
|
| 28 |
+
assert set(map(frozenset, cl)) == set(map(frozenset, rcl))
|
| 29 |
+
answer = [{2, 6, 1, 3}, {2, 6, 4}, {5, 4, 7}, {8, 9}, {10, 11}]
|
| 30 |
+
assert len(answer) == len(cl)
|
| 31 |
+
assert all(set(c) in answer for c in cl)
|
| 32 |
+
|
| 33 |
+
def test_find_cliques2(self):
|
| 34 |
+
hcl = list(nx.find_cliques(self.H))
|
| 35 |
+
assert sorted(map(sorted, hcl)) == [[1, 2], [1, 4, 5, 6], [2, 3], [3, 4, 6]]
|
| 36 |
+
|
| 37 |
+
def test_find_cliques3(self):
|
| 38 |
+
# all cliques are [[2, 6, 1, 3], [2, 6, 4], [5, 4, 7], [8, 9], [10, 11]]
|
| 39 |
+
|
| 40 |
+
cl = list(nx.find_cliques(self.G, [2]))
|
| 41 |
+
rcl = nx.find_cliques_recursive(self.G, [2])
|
| 42 |
+
expected = [[2, 6, 1, 3], [2, 6, 4]]
|
| 43 |
+
assert sorted(map(sorted, rcl)) == sorted(map(sorted, expected))
|
| 44 |
+
assert sorted(map(sorted, cl)) == sorted(map(sorted, expected))
|
| 45 |
+
|
| 46 |
+
cl = list(nx.find_cliques(self.G, [2, 3]))
|
| 47 |
+
rcl = nx.find_cliques_recursive(self.G, [2, 3])
|
| 48 |
+
expected = [[2, 6, 1, 3]]
|
| 49 |
+
assert sorted(map(sorted, rcl)) == sorted(map(sorted, expected))
|
| 50 |
+
assert sorted(map(sorted, cl)) == sorted(map(sorted, expected))
|
| 51 |
+
|
| 52 |
+
cl = list(nx.find_cliques(self.G, [2, 6, 4]))
|
| 53 |
+
rcl = nx.find_cliques_recursive(self.G, [2, 6, 4])
|
| 54 |
+
expected = [[2, 6, 4]]
|
| 55 |
+
assert sorted(map(sorted, rcl)) == sorted(map(sorted, expected))
|
| 56 |
+
assert sorted(map(sorted, cl)) == sorted(map(sorted, expected))
|
| 57 |
+
|
| 58 |
+
cl = list(nx.find_cliques(self.G, [2, 6, 4]))
|
| 59 |
+
rcl = nx.find_cliques_recursive(self.G, [2, 6, 4])
|
| 60 |
+
expected = [[2, 6, 4]]
|
| 61 |
+
assert sorted(map(sorted, rcl)) == sorted(map(sorted, expected))
|
| 62 |
+
assert sorted(map(sorted, cl)) == sorted(map(sorted, expected))
|
| 63 |
+
|
| 64 |
+
with pytest.raises(ValueError):
|
| 65 |
+
list(nx.find_cliques(self.G, [2, 6, 4, 1]))
|
| 66 |
+
|
| 67 |
+
with pytest.raises(ValueError):
|
| 68 |
+
list(nx.find_cliques_recursive(self.G, [2, 6, 4, 1]))
|
| 69 |
+
|
| 70 |
+
def test_number_of_cliques(self):
|
| 71 |
+
G = self.G
|
| 72 |
+
assert nx.number_of_cliques(G, 1) == 1
|
| 73 |
+
assert list(nx.number_of_cliques(G, [1]).values()) == [1]
|
| 74 |
+
assert list(nx.number_of_cliques(G, [1, 2]).values()) == [1, 2]
|
| 75 |
+
assert nx.number_of_cliques(G, [1, 2]) == {1: 1, 2: 2}
|
| 76 |
+
assert nx.number_of_cliques(G, 2) == 2
|
| 77 |
+
assert nx.number_of_cliques(G) == {
|
| 78 |
+
1: 1,
|
| 79 |
+
2: 2,
|
| 80 |
+
3: 1,
|
| 81 |
+
4: 2,
|
| 82 |
+
5: 1,
|
| 83 |
+
6: 2,
|
| 84 |
+
7: 1,
|
| 85 |
+
8: 1,
|
| 86 |
+
9: 1,
|
| 87 |
+
10: 1,
|
| 88 |
+
11: 1,
|
| 89 |
+
}
|
| 90 |
+
assert nx.number_of_cliques(G, nodes=list(G)) == {
|
| 91 |
+
1: 1,
|
| 92 |
+
2: 2,
|
| 93 |
+
3: 1,
|
| 94 |
+
4: 2,
|
| 95 |
+
5: 1,
|
| 96 |
+
6: 2,
|
| 97 |
+
7: 1,
|
| 98 |
+
8: 1,
|
| 99 |
+
9: 1,
|
| 100 |
+
10: 1,
|
| 101 |
+
11: 1,
|
| 102 |
+
}
|
| 103 |
+
assert nx.number_of_cliques(G, nodes=[2, 3, 4]) == {2: 2, 3: 1, 4: 2}
|
| 104 |
+
assert nx.number_of_cliques(G, cliques=self.cl) == {
|
| 105 |
+
1: 1,
|
| 106 |
+
2: 2,
|
| 107 |
+
3: 1,
|
| 108 |
+
4: 2,
|
| 109 |
+
5: 1,
|
| 110 |
+
6: 2,
|
| 111 |
+
7: 1,
|
| 112 |
+
8: 1,
|
| 113 |
+
9: 1,
|
| 114 |
+
10: 1,
|
| 115 |
+
11: 1,
|
| 116 |
+
}
|
| 117 |
+
assert nx.number_of_cliques(G, list(G), cliques=self.cl) == {
|
| 118 |
+
1: 1,
|
| 119 |
+
2: 2,
|
| 120 |
+
3: 1,
|
| 121 |
+
4: 2,
|
| 122 |
+
5: 1,
|
| 123 |
+
6: 2,
|
| 124 |
+
7: 1,
|
| 125 |
+
8: 1,
|
| 126 |
+
9: 1,
|
| 127 |
+
10: 1,
|
| 128 |
+
11: 1,
|
| 129 |
+
}
|
| 130 |
+
|
| 131 |
+
def test_node_clique_number(self):
|
| 132 |
+
G = self.G
|
| 133 |
+
assert nx.node_clique_number(G, 1) == 4
|
| 134 |
+
assert list(nx.node_clique_number(G, [1]).values()) == [4]
|
| 135 |
+
assert list(nx.node_clique_number(G, [1, 2]).values()) == [4, 4]
|
| 136 |
+
assert nx.node_clique_number(G, [1, 2]) == {1: 4, 2: 4}
|
| 137 |
+
assert nx.node_clique_number(G, 1) == 4
|
| 138 |
+
assert nx.node_clique_number(G) == {
|
| 139 |
+
1: 4,
|
| 140 |
+
2: 4,
|
| 141 |
+
3: 4,
|
| 142 |
+
4: 3,
|
| 143 |
+
5: 3,
|
| 144 |
+
6: 4,
|
| 145 |
+
7: 3,
|
| 146 |
+
8: 2,
|
| 147 |
+
9: 2,
|
| 148 |
+
10: 2,
|
| 149 |
+
11: 2,
|
| 150 |
+
}
|
| 151 |
+
assert nx.node_clique_number(G, cliques=self.cl) == {
|
| 152 |
+
1: 4,
|
| 153 |
+
2: 4,
|
| 154 |
+
3: 4,
|
| 155 |
+
4: 3,
|
| 156 |
+
5: 3,
|
| 157 |
+
6: 4,
|
| 158 |
+
7: 3,
|
| 159 |
+
8: 2,
|
| 160 |
+
9: 2,
|
| 161 |
+
10: 2,
|
| 162 |
+
11: 2,
|
| 163 |
+
}
|
| 164 |
+
assert nx.node_clique_number(G, [1, 2], cliques=self.cl) == {1: 4, 2: 4}
|
| 165 |
+
assert nx.node_clique_number(G, 1, cliques=self.cl) == 4
|
| 166 |
+
|
| 167 |
+
def test_make_clique_bipartite(self):
|
| 168 |
+
G = self.G
|
| 169 |
+
B = nx.make_clique_bipartite(G)
|
| 170 |
+
assert sorted(B) == [-5, -4, -3, -2, -1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
|
| 171 |
+
# Project onto the nodes of the original graph.
|
| 172 |
+
H = nx.projected_graph(B, range(1, 12))
|
| 173 |
+
assert H.adj == G.adj
|
| 174 |
+
# Project onto the nodes representing the cliques.
|
| 175 |
+
H1 = nx.projected_graph(B, range(-5, 0))
|
| 176 |
+
# Relabel the negative numbers as positive ones.
|
| 177 |
+
H1 = nx.relabel_nodes(H1, {-v: v for v in range(1, 6)})
|
| 178 |
+
assert sorted(H1) == [1, 2, 3, 4, 5]
|
| 179 |
+
|
| 180 |
+
def test_make_max_clique_graph(self):
|
| 181 |
+
"""Tests that the maximal clique graph is the same as the bipartite
|
| 182 |
+
clique graph after being projected onto the nodes representing the
|
| 183 |
+
cliques.
|
| 184 |
+
|
| 185 |
+
"""
|
| 186 |
+
G = self.G
|
| 187 |
+
B = nx.make_clique_bipartite(G)
|
| 188 |
+
# Project onto the nodes representing the cliques.
|
| 189 |
+
H1 = nx.projected_graph(B, range(-5, 0))
|
| 190 |
+
# Relabel the negative numbers as nonnegative ones, starting at
|
| 191 |
+
# 0.
|
| 192 |
+
H1 = nx.relabel_nodes(H1, {-v: v - 1 for v in range(1, 6)})
|
| 193 |
+
H2 = nx.make_max_clique_graph(G)
|
| 194 |
+
assert H1.adj == H2.adj
|
| 195 |
+
|
| 196 |
+
def test_directed(self):
|
| 197 |
+
with pytest.raises(nx.NetworkXNotImplemented):
|
| 198 |
+
next(nx.find_cliques(nx.DiGraph()))
|
| 199 |
+
|
| 200 |
+
def test_find_cliques_trivial(self):
|
| 201 |
+
G = nx.Graph()
|
| 202 |
+
assert sorted(nx.find_cliques(G)) == []
|
| 203 |
+
assert sorted(nx.find_cliques_recursive(G)) == []
|
| 204 |
+
|
| 205 |
+
def test_make_max_clique_graph_create_using(self):
|
| 206 |
+
G = nx.Graph([(1, 2), (3, 1), (4, 1), (5, 6)])
|
| 207 |
+
E = nx.Graph([(0, 1), (0, 2), (1, 2)])
|
| 208 |
+
E.add_node(3)
|
| 209 |
+
assert nx.is_isomorphic(nx.make_max_clique_graph(G, create_using=nx.Graph), E)
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
class TestEnumerateAllCliques:
|
| 213 |
+
def test_paper_figure_4(self):
|
| 214 |
+
# Same graph as given in Fig. 4 of paper enumerate_all_cliques is
|
| 215 |
+
# based on.
|
| 216 |
+
# http://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=1559964&isnumber=33129
|
| 217 |
+
G = nx.Graph()
|
| 218 |
+
edges_fig_4 = [
|
| 219 |
+
("a", "b"),
|
| 220 |
+
("a", "c"),
|
| 221 |
+
("a", "d"),
|
| 222 |
+
("a", "e"),
|
| 223 |
+
("b", "c"),
|
| 224 |
+
("b", "d"),
|
| 225 |
+
("b", "e"),
|
| 226 |
+
("c", "d"),
|
| 227 |
+
("c", "e"),
|
| 228 |
+
("d", "e"),
|
| 229 |
+
("f", "b"),
|
| 230 |
+
("f", "c"),
|
| 231 |
+
("f", "g"),
|
| 232 |
+
("g", "f"),
|
| 233 |
+
("g", "c"),
|
| 234 |
+
("g", "d"),
|
| 235 |
+
("g", "e"),
|
| 236 |
+
]
|
| 237 |
+
G.add_edges_from(edges_fig_4)
|
| 238 |
+
|
| 239 |
+
cliques = list(nx.enumerate_all_cliques(G))
|
| 240 |
+
clique_sizes = list(map(len, cliques))
|
| 241 |
+
assert sorted(clique_sizes) == clique_sizes
|
| 242 |
+
|
| 243 |
+
expected_cliques = [
|
| 244 |
+
["a"],
|
| 245 |
+
["b"],
|
| 246 |
+
["c"],
|
| 247 |
+
["d"],
|
| 248 |
+
["e"],
|
| 249 |
+
["f"],
|
| 250 |
+
["g"],
|
| 251 |
+
["a", "b"],
|
| 252 |
+
["a", "b", "d"],
|
| 253 |
+
["a", "b", "d", "e"],
|
| 254 |
+
["a", "b", "e"],
|
| 255 |
+
["a", "c"],
|
| 256 |
+
["a", "c", "d"],
|
| 257 |
+
["a", "c", "d", "e"],
|
| 258 |
+
["a", "c", "e"],
|
| 259 |
+
["a", "d"],
|
| 260 |
+
["a", "d", "e"],
|
| 261 |
+
["a", "e"],
|
| 262 |
+
["b", "c"],
|
| 263 |
+
["b", "c", "d"],
|
| 264 |
+
["b", "c", "d", "e"],
|
| 265 |
+
["b", "c", "e"],
|
| 266 |
+
["b", "c", "f"],
|
| 267 |
+
["b", "d"],
|
| 268 |
+
["b", "d", "e"],
|
| 269 |
+
["b", "e"],
|
| 270 |
+
["b", "f"],
|
| 271 |
+
["c", "d"],
|
| 272 |
+
["c", "d", "e"],
|
| 273 |
+
["c", "d", "e", "g"],
|
| 274 |
+
["c", "d", "g"],
|
| 275 |
+
["c", "e"],
|
| 276 |
+
["c", "e", "g"],
|
| 277 |
+
["c", "f"],
|
| 278 |
+
["c", "f", "g"],
|
| 279 |
+
["c", "g"],
|
| 280 |
+
["d", "e"],
|
| 281 |
+
["d", "e", "g"],
|
| 282 |
+
["d", "g"],
|
| 283 |
+
["e", "g"],
|
| 284 |
+
["f", "g"],
|
| 285 |
+
["a", "b", "c"],
|
| 286 |
+
["a", "b", "c", "d"],
|
| 287 |
+
["a", "b", "c", "d", "e"],
|
| 288 |
+
["a", "b", "c", "e"],
|
| 289 |
+
]
|
| 290 |
+
|
| 291 |
+
assert sorted(map(sorted, cliques)) == sorted(map(sorted, expected_cliques))
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_cluster.py
ADDED
|
@@ -0,0 +1,549 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class TestTriangles:
|
| 7 |
+
def test_empty(self):
|
| 8 |
+
G = nx.Graph()
|
| 9 |
+
assert list(nx.triangles(G).values()) == []
|
| 10 |
+
|
| 11 |
+
def test_path(self):
|
| 12 |
+
G = nx.path_graph(10)
|
| 13 |
+
assert list(nx.triangles(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
|
| 14 |
+
assert nx.triangles(G) == {
|
| 15 |
+
0: 0,
|
| 16 |
+
1: 0,
|
| 17 |
+
2: 0,
|
| 18 |
+
3: 0,
|
| 19 |
+
4: 0,
|
| 20 |
+
5: 0,
|
| 21 |
+
6: 0,
|
| 22 |
+
7: 0,
|
| 23 |
+
8: 0,
|
| 24 |
+
9: 0,
|
| 25 |
+
}
|
| 26 |
+
|
| 27 |
+
def test_cubical(self):
|
| 28 |
+
G = nx.cubical_graph()
|
| 29 |
+
assert list(nx.triangles(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0]
|
| 30 |
+
assert nx.triangles(G, 1) == 0
|
| 31 |
+
assert list(nx.triangles(G, [1, 2]).values()) == [0, 0]
|
| 32 |
+
assert nx.triangles(G, 1) == 0
|
| 33 |
+
assert nx.triangles(G, [1, 2]) == {1: 0, 2: 0}
|
| 34 |
+
|
| 35 |
+
def test_k5(self):
|
| 36 |
+
G = nx.complete_graph(5)
|
| 37 |
+
assert list(nx.triangles(G).values()) == [6, 6, 6, 6, 6]
|
| 38 |
+
assert sum(nx.triangles(G).values()) / 3 == 10
|
| 39 |
+
assert nx.triangles(G, 1) == 6
|
| 40 |
+
G.remove_edge(1, 2)
|
| 41 |
+
assert list(nx.triangles(G).values()) == [5, 3, 3, 5, 5]
|
| 42 |
+
assert nx.triangles(G, 1) == 3
|
| 43 |
+
G.add_edge(3, 3) # ignore self-edges
|
| 44 |
+
assert list(nx.triangles(G).values()) == [5, 3, 3, 5, 5]
|
| 45 |
+
assert nx.triangles(G, 3) == 5
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class TestDirectedClustering:
|
| 49 |
+
def test_clustering(self):
|
| 50 |
+
G = nx.DiGraph()
|
| 51 |
+
assert list(nx.clustering(G).values()) == []
|
| 52 |
+
assert nx.clustering(G) == {}
|
| 53 |
+
|
| 54 |
+
def test_path(self):
|
| 55 |
+
G = nx.path_graph(10, create_using=nx.DiGraph())
|
| 56 |
+
assert list(nx.clustering(G).values()) == [
|
| 57 |
+
0,
|
| 58 |
+
0,
|
| 59 |
+
0,
|
| 60 |
+
0,
|
| 61 |
+
0,
|
| 62 |
+
0,
|
| 63 |
+
0,
|
| 64 |
+
0,
|
| 65 |
+
0,
|
| 66 |
+
0,
|
| 67 |
+
]
|
| 68 |
+
assert nx.clustering(G) == {
|
| 69 |
+
0: 0,
|
| 70 |
+
1: 0,
|
| 71 |
+
2: 0,
|
| 72 |
+
3: 0,
|
| 73 |
+
4: 0,
|
| 74 |
+
5: 0,
|
| 75 |
+
6: 0,
|
| 76 |
+
7: 0,
|
| 77 |
+
8: 0,
|
| 78 |
+
9: 0,
|
| 79 |
+
}
|
| 80 |
+
assert nx.clustering(G, 0) == 0
|
| 81 |
+
|
| 82 |
+
def test_k5(self):
|
| 83 |
+
G = nx.complete_graph(5, create_using=nx.DiGraph())
|
| 84 |
+
assert list(nx.clustering(G).values()) == [1, 1, 1, 1, 1]
|
| 85 |
+
assert nx.average_clustering(G) == 1
|
| 86 |
+
G.remove_edge(1, 2)
|
| 87 |
+
assert list(nx.clustering(G).values()) == [
|
| 88 |
+
11 / 12,
|
| 89 |
+
1,
|
| 90 |
+
1,
|
| 91 |
+
11 / 12,
|
| 92 |
+
11 / 12,
|
| 93 |
+
]
|
| 94 |
+
assert nx.clustering(G, [1, 4]) == {1: 1, 4: 11 / 12}
|
| 95 |
+
G.remove_edge(2, 1)
|
| 96 |
+
assert list(nx.clustering(G).values()) == [
|
| 97 |
+
5 / 6,
|
| 98 |
+
1,
|
| 99 |
+
1,
|
| 100 |
+
5 / 6,
|
| 101 |
+
5 / 6,
|
| 102 |
+
]
|
| 103 |
+
assert nx.clustering(G, [1, 4]) == {1: 1, 4: 0.83333333333333337}
|
| 104 |
+
assert nx.clustering(G, 4) == 5 / 6
|
| 105 |
+
|
| 106 |
+
def test_triangle_and_edge(self):
|
| 107 |
+
G = nx.cycle_graph(3, create_using=nx.DiGraph())
|
| 108 |
+
G.add_edge(0, 4)
|
| 109 |
+
assert nx.clustering(G)[0] == 1 / 6
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
class TestDirectedWeightedClustering:
|
| 113 |
+
@classmethod
|
| 114 |
+
def setup_class(cls):
|
| 115 |
+
global np
|
| 116 |
+
np = pytest.importorskip("numpy")
|
| 117 |
+
|
| 118 |
+
def test_clustering(self):
|
| 119 |
+
G = nx.DiGraph()
|
| 120 |
+
assert list(nx.clustering(G, weight="weight").values()) == []
|
| 121 |
+
assert nx.clustering(G) == {}
|
| 122 |
+
|
| 123 |
+
def test_path(self):
|
| 124 |
+
G = nx.path_graph(10, create_using=nx.DiGraph())
|
| 125 |
+
assert list(nx.clustering(G, weight="weight").values()) == [
|
| 126 |
+
0,
|
| 127 |
+
0,
|
| 128 |
+
0,
|
| 129 |
+
0,
|
| 130 |
+
0,
|
| 131 |
+
0,
|
| 132 |
+
0,
|
| 133 |
+
0,
|
| 134 |
+
0,
|
| 135 |
+
0,
|
| 136 |
+
]
|
| 137 |
+
assert nx.clustering(G, weight="weight") == {
|
| 138 |
+
0: 0,
|
| 139 |
+
1: 0,
|
| 140 |
+
2: 0,
|
| 141 |
+
3: 0,
|
| 142 |
+
4: 0,
|
| 143 |
+
5: 0,
|
| 144 |
+
6: 0,
|
| 145 |
+
7: 0,
|
| 146 |
+
8: 0,
|
| 147 |
+
9: 0,
|
| 148 |
+
}
|
| 149 |
+
|
| 150 |
+
def test_k5(self):
|
| 151 |
+
G = nx.complete_graph(5, create_using=nx.DiGraph())
|
| 152 |
+
assert list(nx.clustering(G, weight="weight").values()) == [1, 1, 1, 1, 1]
|
| 153 |
+
assert nx.average_clustering(G, weight="weight") == 1
|
| 154 |
+
G.remove_edge(1, 2)
|
| 155 |
+
assert list(nx.clustering(G, weight="weight").values()) == [
|
| 156 |
+
11 / 12,
|
| 157 |
+
1,
|
| 158 |
+
1,
|
| 159 |
+
11 / 12,
|
| 160 |
+
11 / 12,
|
| 161 |
+
]
|
| 162 |
+
assert nx.clustering(G, [1, 4], weight="weight") == {1: 1, 4: 11 / 12}
|
| 163 |
+
G.remove_edge(2, 1)
|
| 164 |
+
assert list(nx.clustering(G, weight="weight").values()) == [
|
| 165 |
+
5 / 6,
|
| 166 |
+
1,
|
| 167 |
+
1,
|
| 168 |
+
5 / 6,
|
| 169 |
+
5 / 6,
|
| 170 |
+
]
|
| 171 |
+
assert nx.clustering(G, [1, 4], weight="weight") == {
|
| 172 |
+
1: 1,
|
| 173 |
+
4: 0.83333333333333337,
|
| 174 |
+
}
|
| 175 |
+
|
| 176 |
+
def test_triangle_and_edge(self):
|
| 177 |
+
G = nx.cycle_graph(3, create_using=nx.DiGraph())
|
| 178 |
+
G.add_edge(0, 4, weight=2)
|
| 179 |
+
assert nx.clustering(G)[0] == 1 / 6
|
| 180 |
+
# Relaxed comparisons to allow graphblas-algorithms to pass tests
|
| 181 |
+
np.testing.assert_allclose(nx.clustering(G, weight="weight")[0], 1 / 12)
|
| 182 |
+
np.testing.assert_allclose(nx.clustering(G, 0, weight="weight"), 1 / 12)
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
class TestWeightedClustering:
|
| 186 |
+
@classmethod
|
| 187 |
+
def setup_class(cls):
|
| 188 |
+
global np
|
| 189 |
+
np = pytest.importorskip("numpy")
|
| 190 |
+
|
| 191 |
+
def test_clustering(self):
|
| 192 |
+
G = nx.Graph()
|
| 193 |
+
assert list(nx.clustering(G, weight="weight").values()) == []
|
| 194 |
+
assert nx.clustering(G) == {}
|
| 195 |
+
|
| 196 |
+
def test_path(self):
|
| 197 |
+
G = nx.path_graph(10)
|
| 198 |
+
assert list(nx.clustering(G, weight="weight").values()) == [
|
| 199 |
+
0,
|
| 200 |
+
0,
|
| 201 |
+
0,
|
| 202 |
+
0,
|
| 203 |
+
0,
|
| 204 |
+
0,
|
| 205 |
+
0,
|
| 206 |
+
0,
|
| 207 |
+
0,
|
| 208 |
+
0,
|
| 209 |
+
]
|
| 210 |
+
assert nx.clustering(G, weight="weight") == {
|
| 211 |
+
0: 0,
|
| 212 |
+
1: 0,
|
| 213 |
+
2: 0,
|
| 214 |
+
3: 0,
|
| 215 |
+
4: 0,
|
| 216 |
+
5: 0,
|
| 217 |
+
6: 0,
|
| 218 |
+
7: 0,
|
| 219 |
+
8: 0,
|
| 220 |
+
9: 0,
|
| 221 |
+
}
|
| 222 |
+
|
| 223 |
+
def test_cubical(self):
|
| 224 |
+
G = nx.cubical_graph()
|
| 225 |
+
assert list(nx.clustering(G, weight="weight").values()) == [
|
| 226 |
+
0,
|
| 227 |
+
0,
|
| 228 |
+
0,
|
| 229 |
+
0,
|
| 230 |
+
0,
|
| 231 |
+
0,
|
| 232 |
+
0,
|
| 233 |
+
0,
|
| 234 |
+
]
|
| 235 |
+
assert nx.clustering(G, 1) == 0
|
| 236 |
+
assert list(nx.clustering(G, [1, 2], weight="weight").values()) == [0, 0]
|
| 237 |
+
assert nx.clustering(G, 1, weight="weight") == 0
|
| 238 |
+
assert nx.clustering(G, [1, 2], weight="weight") == {1: 0, 2: 0}
|
| 239 |
+
|
| 240 |
+
def test_k5(self):
|
| 241 |
+
G = nx.complete_graph(5)
|
| 242 |
+
assert list(nx.clustering(G, weight="weight").values()) == [1, 1, 1, 1, 1]
|
| 243 |
+
assert nx.average_clustering(G, weight="weight") == 1
|
| 244 |
+
G.remove_edge(1, 2)
|
| 245 |
+
assert list(nx.clustering(G, weight="weight").values()) == [
|
| 246 |
+
5 / 6,
|
| 247 |
+
1,
|
| 248 |
+
1,
|
| 249 |
+
5 / 6,
|
| 250 |
+
5 / 6,
|
| 251 |
+
]
|
| 252 |
+
assert nx.clustering(G, [1, 4], weight="weight") == {
|
| 253 |
+
1: 1,
|
| 254 |
+
4: 0.83333333333333337,
|
| 255 |
+
}
|
| 256 |
+
|
| 257 |
+
def test_triangle_and_edge(self):
|
| 258 |
+
G = nx.cycle_graph(3)
|
| 259 |
+
G.add_edge(0, 4, weight=2)
|
| 260 |
+
assert nx.clustering(G)[0] == 1 / 3
|
| 261 |
+
np.testing.assert_allclose(nx.clustering(G, weight="weight")[0], 1 / 6)
|
| 262 |
+
np.testing.assert_allclose(nx.clustering(G, 0, weight="weight"), 1 / 6)
|
| 263 |
+
|
| 264 |
+
def test_triangle_and_signed_edge(self):
|
| 265 |
+
G = nx.cycle_graph(3)
|
| 266 |
+
G.add_edge(0, 1, weight=-1)
|
| 267 |
+
G.add_edge(3, 0, weight=0)
|
| 268 |
+
assert nx.clustering(G)[0] == 1 / 3
|
| 269 |
+
assert nx.clustering(G, weight="weight")[0] == -1 / 3
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
class TestClustering:
|
| 273 |
+
@classmethod
|
| 274 |
+
def setup_class(cls):
|
| 275 |
+
pytest.importorskip("numpy")
|
| 276 |
+
|
| 277 |
+
def test_clustering(self):
|
| 278 |
+
G = nx.Graph()
|
| 279 |
+
assert list(nx.clustering(G).values()) == []
|
| 280 |
+
assert nx.clustering(G) == {}
|
| 281 |
+
|
| 282 |
+
def test_path(self):
|
| 283 |
+
G = nx.path_graph(10)
|
| 284 |
+
assert list(nx.clustering(G).values()) == [
|
| 285 |
+
0,
|
| 286 |
+
0,
|
| 287 |
+
0,
|
| 288 |
+
0,
|
| 289 |
+
0,
|
| 290 |
+
0,
|
| 291 |
+
0,
|
| 292 |
+
0,
|
| 293 |
+
0,
|
| 294 |
+
0,
|
| 295 |
+
]
|
| 296 |
+
assert nx.clustering(G) == {
|
| 297 |
+
0: 0,
|
| 298 |
+
1: 0,
|
| 299 |
+
2: 0,
|
| 300 |
+
3: 0,
|
| 301 |
+
4: 0,
|
| 302 |
+
5: 0,
|
| 303 |
+
6: 0,
|
| 304 |
+
7: 0,
|
| 305 |
+
8: 0,
|
| 306 |
+
9: 0,
|
| 307 |
+
}
|
| 308 |
+
|
| 309 |
+
def test_cubical(self):
|
| 310 |
+
G = nx.cubical_graph()
|
| 311 |
+
assert list(nx.clustering(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0]
|
| 312 |
+
assert nx.clustering(G, 1) == 0
|
| 313 |
+
assert list(nx.clustering(G, [1, 2]).values()) == [0, 0]
|
| 314 |
+
assert nx.clustering(G, 1) == 0
|
| 315 |
+
assert nx.clustering(G, [1, 2]) == {1: 0, 2: 0}
|
| 316 |
+
|
| 317 |
+
def test_k5(self):
|
| 318 |
+
G = nx.complete_graph(5)
|
| 319 |
+
assert list(nx.clustering(G).values()) == [1, 1, 1, 1, 1]
|
| 320 |
+
assert nx.average_clustering(G) == 1
|
| 321 |
+
G.remove_edge(1, 2)
|
| 322 |
+
assert list(nx.clustering(G).values()) == [
|
| 323 |
+
5 / 6,
|
| 324 |
+
1,
|
| 325 |
+
1,
|
| 326 |
+
5 / 6,
|
| 327 |
+
5 / 6,
|
| 328 |
+
]
|
| 329 |
+
assert nx.clustering(G, [1, 4]) == {1: 1, 4: 0.83333333333333337}
|
| 330 |
+
|
| 331 |
+
def test_k5_signed(self):
|
| 332 |
+
G = nx.complete_graph(5)
|
| 333 |
+
assert list(nx.clustering(G).values()) == [1, 1, 1, 1, 1]
|
| 334 |
+
assert nx.average_clustering(G) == 1
|
| 335 |
+
G.remove_edge(1, 2)
|
| 336 |
+
G.add_edge(0, 1, weight=-1)
|
| 337 |
+
assert list(nx.clustering(G, weight="weight").values()) == [
|
| 338 |
+
1 / 6,
|
| 339 |
+
-1 / 3,
|
| 340 |
+
1,
|
| 341 |
+
3 / 6,
|
| 342 |
+
3 / 6,
|
| 343 |
+
]
|
| 344 |
+
|
| 345 |
+
|
| 346 |
+
class TestTransitivity:
|
| 347 |
+
def test_transitivity(self):
|
| 348 |
+
G = nx.Graph()
|
| 349 |
+
assert nx.transitivity(G) == 0
|
| 350 |
+
|
| 351 |
+
def test_path(self):
|
| 352 |
+
G = nx.path_graph(10)
|
| 353 |
+
assert nx.transitivity(G) == 0
|
| 354 |
+
|
| 355 |
+
def test_cubical(self):
|
| 356 |
+
G = nx.cubical_graph()
|
| 357 |
+
assert nx.transitivity(G) == 0
|
| 358 |
+
|
| 359 |
+
def test_k5(self):
|
| 360 |
+
G = nx.complete_graph(5)
|
| 361 |
+
assert nx.transitivity(G) == 1
|
| 362 |
+
G.remove_edge(1, 2)
|
| 363 |
+
assert nx.transitivity(G) == 0.875
|
| 364 |
+
|
| 365 |
+
|
| 366 |
+
class TestSquareClustering:
|
| 367 |
+
def test_clustering(self):
|
| 368 |
+
G = nx.Graph()
|
| 369 |
+
assert list(nx.square_clustering(G).values()) == []
|
| 370 |
+
assert nx.square_clustering(G) == {}
|
| 371 |
+
|
| 372 |
+
def test_path(self):
|
| 373 |
+
G = nx.path_graph(10)
|
| 374 |
+
assert list(nx.square_clustering(G).values()) == [
|
| 375 |
+
0,
|
| 376 |
+
0,
|
| 377 |
+
0,
|
| 378 |
+
0,
|
| 379 |
+
0,
|
| 380 |
+
0,
|
| 381 |
+
0,
|
| 382 |
+
0,
|
| 383 |
+
0,
|
| 384 |
+
0,
|
| 385 |
+
]
|
| 386 |
+
assert nx.square_clustering(G) == {
|
| 387 |
+
0: 0,
|
| 388 |
+
1: 0,
|
| 389 |
+
2: 0,
|
| 390 |
+
3: 0,
|
| 391 |
+
4: 0,
|
| 392 |
+
5: 0,
|
| 393 |
+
6: 0,
|
| 394 |
+
7: 0,
|
| 395 |
+
8: 0,
|
| 396 |
+
9: 0,
|
| 397 |
+
}
|
| 398 |
+
|
| 399 |
+
def test_cubical(self):
|
| 400 |
+
G = nx.cubical_graph()
|
| 401 |
+
assert list(nx.square_clustering(G).values()) == [
|
| 402 |
+
1 / 3,
|
| 403 |
+
1 / 3,
|
| 404 |
+
1 / 3,
|
| 405 |
+
1 / 3,
|
| 406 |
+
1 / 3,
|
| 407 |
+
1 / 3,
|
| 408 |
+
1 / 3,
|
| 409 |
+
1 / 3,
|
| 410 |
+
]
|
| 411 |
+
assert list(nx.square_clustering(G, [1, 2]).values()) == [1 / 3, 1 / 3]
|
| 412 |
+
assert nx.square_clustering(G, [1])[1] == 1 / 3
|
| 413 |
+
assert nx.square_clustering(G, 1) == 1 / 3
|
| 414 |
+
assert nx.square_clustering(G, [1, 2]) == {1: 1 / 3, 2: 1 / 3}
|
| 415 |
+
|
| 416 |
+
def test_k5(self):
|
| 417 |
+
G = nx.complete_graph(5)
|
| 418 |
+
assert list(nx.square_clustering(G).values()) == [1, 1, 1, 1, 1]
|
| 419 |
+
|
| 420 |
+
def test_bipartite_k5(self):
|
| 421 |
+
G = nx.complete_bipartite_graph(5, 5)
|
| 422 |
+
assert list(nx.square_clustering(G).values()) == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
|
| 423 |
+
|
| 424 |
+
def test_lind_square_clustering(self):
|
| 425 |
+
"""Test C4 for figure 1 Lind et al (2005)"""
|
| 426 |
+
G = nx.Graph(
|
| 427 |
+
[
|
| 428 |
+
(1, 2),
|
| 429 |
+
(1, 3),
|
| 430 |
+
(1, 6),
|
| 431 |
+
(1, 7),
|
| 432 |
+
(2, 4),
|
| 433 |
+
(2, 5),
|
| 434 |
+
(3, 4),
|
| 435 |
+
(3, 5),
|
| 436 |
+
(6, 7),
|
| 437 |
+
(7, 8),
|
| 438 |
+
(6, 8),
|
| 439 |
+
(7, 9),
|
| 440 |
+
(7, 10),
|
| 441 |
+
(6, 11),
|
| 442 |
+
(6, 12),
|
| 443 |
+
(2, 13),
|
| 444 |
+
(2, 14),
|
| 445 |
+
(3, 15),
|
| 446 |
+
(3, 16),
|
| 447 |
+
]
|
| 448 |
+
)
|
| 449 |
+
G1 = G.subgraph([1, 2, 3, 4, 5, 13, 14, 15, 16])
|
| 450 |
+
G2 = G.subgraph([1, 6, 7, 8, 9, 10, 11, 12])
|
| 451 |
+
assert nx.square_clustering(G, [1])[1] == 3 / 43
|
| 452 |
+
assert nx.square_clustering(G1, [1])[1] == 2 / 6
|
| 453 |
+
assert nx.square_clustering(G2, [1])[1] == 1 / 5
|
| 454 |
+
|
| 455 |
+
def test_peng_square_clustering(self):
|
| 456 |
+
"""Test eq2 for figure 1 Peng et al (2008)"""
|
| 457 |
+
G = nx.Graph([(1, 2), (1, 3), (2, 4), (3, 4), (3, 5), (3, 6)])
|
| 458 |
+
assert nx.square_clustering(G, [1])[1] == 1 / 3
|
| 459 |
+
|
| 460 |
+
def test_self_loops_square_clustering(self):
|
| 461 |
+
G = nx.path_graph(5)
|
| 462 |
+
assert nx.square_clustering(G) == {0: 0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0}
|
| 463 |
+
G.add_edges_from([(0, 0), (1, 1), (2, 2)])
|
| 464 |
+
assert nx.square_clustering(G) == {0: 1, 1: 0.5, 2: 0.2, 3: 0.0, 4: 0}
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
class TestAverageClustering:
|
| 468 |
+
@classmethod
|
| 469 |
+
def setup_class(cls):
|
| 470 |
+
pytest.importorskip("numpy")
|
| 471 |
+
|
| 472 |
+
def test_empty(self):
|
| 473 |
+
G = nx.Graph()
|
| 474 |
+
with pytest.raises(ZeroDivisionError):
|
| 475 |
+
nx.average_clustering(G)
|
| 476 |
+
|
| 477 |
+
def test_average_clustering(self):
|
| 478 |
+
G = nx.cycle_graph(3)
|
| 479 |
+
G.add_edge(2, 3)
|
| 480 |
+
assert nx.average_clustering(G) == (1 + 1 + 1 / 3) / 4
|
| 481 |
+
assert nx.average_clustering(G, count_zeros=True) == (1 + 1 + 1 / 3) / 4
|
| 482 |
+
assert nx.average_clustering(G, count_zeros=False) == (1 + 1 + 1 / 3) / 3
|
| 483 |
+
assert nx.average_clustering(G, [1, 2, 3]) == (1 + 1 / 3) / 3
|
| 484 |
+
assert nx.average_clustering(G, [1, 2, 3], count_zeros=True) == (1 + 1 / 3) / 3
|
| 485 |
+
assert nx.average_clustering(G, [1, 2, 3], count_zeros=False) == (1 + 1 / 3) / 2
|
| 486 |
+
|
| 487 |
+
def test_average_clustering_signed(self):
|
| 488 |
+
G = nx.cycle_graph(3)
|
| 489 |
+
G.add_edge(2, 3)
|
| 490 |
+
G.add_edge(0, 1, weight=-1)
|
| 491 |
+
assert nx.average_clustering(G, weight="weight") == (-1 - 1 - 1 / 3) / 4
|
| 492 |
+
assert (
|
| 493 |
+
nx.average_clustering(G, weight="weight", count_zeros=True)
|
| 494 |
+
== (-1 - 1 - 1 / 3) / 4
|
| 495 |
+
)
|
| 496 |
+
assert (
|
| 497 |
+
nx.average_clustering(G, weight="weight", count_zeros=False)
|
| 498 |
+
== (-1 - 1 - 1 / 3) / 3
|
| 499 |
+
)
|
| 500 |
+
|
| 501 |
+
|
| 502 |
+
class TestDirectedAverageClustering:
|
| 503 |
+
@classmethod
|
| 504 |
+
def setup_class(cls):
|
| 505 |
+
pytest.importorskip("numpy")
|
| 506 |
+
|
| 507 |
+
def test_empty(self):
|
| 508 |
+
G = nx.DiGraph()
|
| 509 |
+
with pytest.raises(ZeroDivisionError):
|
| 510 |
+
nx.average_clustering(G)
|
| 511 |
+
|
| 512 |
+
def test_average_clustering(self):
|
| 513 |
+
G = nx.cycle_graph(3, create_using=nx.DiGraph())
|
| 514 |
+
G.add_edge(2, 3)
|
| 515 |
+
assert nx.average_clustering(G) == (1 + 1 + 1 / 3) / 8
|
| 516 |
+
assert nx.average_clustering(G, count_zeros=True) == (1 + 1 + 1 / 3) / 8
|
| 517 |
+
assert nx.average_clustering(G, count_zeros=False) == (1 + 1 + 1 / 3) / 6
|
| 518 |
+
assert nx.average_clustering(G, [1, 2, 3]) == (1 + 1 / 3) / 6
|
| 519 |
+
assert nx.average_clustering(G, [1, 2, 3], count_zeros=True) == (1 + 1 / 3) / 6
|
| 520 |
+
assert nx.average_clustering(G, [1, 2, 3], count_zeros=False) == (1 + 1 / 3) / 4
|
| 521 |
+
|
| 522 |
+
|
| 523 |
+
class TestGeneralizedDegree:
|
| 524 |
+
def test_generalized_degree(self):
|
| 525 |
+
G = nx.Graph()
|
| 526 |
+
assert nx.generalized_degree(G) == {}
|
| 527 |
+
|
| 528 |
+
def test_path(self):
|
| 529 |
+
G = nx.path_graph(5)
|
| 530 |
+
assert nx.generalized_degree(G, 0) == {0: 1}
|
| 531 |
+
assert nx.generalized_degree(G, 1) == {0: 2}
|
| 532 |
+
|
| 533 |
+
def test_cubical(self):
|
| 534 |
+
G = nx.cubical_graph()
|
| 535 |
+
assert nx.generalized_degree(G, 0) == {0: 3}
|
| 536 |
+
|
| 537 |
+
def test_k5(self):
|
| 538 |
+
G = nx.complete_graph(5)
|
| 539 |
+
assert nx.generalized_degree(G, 0) == {3: 4}
|
| 540 |
+
G.remove_edge(0, 1)
|
| 541 |
+
assert nx.generalized_degree(G, 0) == {2: 3}
|
| 542 |
+
assert nx.generalized_degree(G, [1, 2]) == {1: {2: 3}, 2: {2: 2, 3: 2}}
|
| 543 |
+
assert nx.generalized_degree(G) == {
|
| 544 |
+
0: {2: 3},
|
| 545 |
+
1: {2: 3},
|
| 546 |
+
2: {2: 2, 3: 2},
|
| 547 |
+
3: {2: 2, 3: 2},
|
| 548 |
+
4: {2: 2, 3: 2},
|
| 549 |
+
}
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_communicability.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from collections import defaultdict
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
pytest.importorskip("numpy")
|
| 6 |
+
pytest.importorskip("scipy")
|
| 7 |
+
|
| 8 |
+
import networkx as nx
|
| 9 |
+
from networkx.algorithms.communicability_alg import communicability, communicability_exp
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class TestCommunicability:
|
| 13 |
+
def test_communicability(self):
|
| 14 |
+
answer = {
|
| 15 |
+
0: {0: 1.5430806348152435, 1: 1.1752011936438012},
|
| 16 |
+
1: {0: 1.1752011936438012, 1: 1.5430806348152435},
|
| 17 |
+
}
|
| 18 |
+
# answer={(0, 0): 1.5430806348152435,
|
| 19 |
+
# (0, 1): 1.1752011936438012,
|
| 20 |
+
# (1, 0): 1.1752011936438012,
|
| 21 |
+
# (1, 1): 1.5430806348152435}
|
| 22 |
+
|
| 23 |
+
result = communicability(nx.path_graph(2))
|
| 24 |
+
for k1, val in result.items():
|
| 25 |
+
for k2 in val:
|
| 26 |
+
assert answer[k1][k2] == pytest.approx(result[k1][k2], abs=1e-7)
|
| 27 |
+
|
| 28 |
+
def test_communicability2(self):
|
| 29 |
+
answer_orig = {
|
| 30 |
+
("1", "1"): 1.6445956054135658,
|
| 31 |
+
("1", "Albert"): 0.7430186221096251,
|
| 32 |
+
("1", "Aric"): 0.7430186221096251,
|
| 33 |
+
("1", "Dan"): 1.6208126320442937,
|
| 34 |
+
("1", "Franck"): 0.42639707170035257,
|
| 35 |
+
("Albert", "1"): 0.7430186221096251,
|
| 36 |
+
("Albert", "Albert"): 2.4368257358712189,
|
| 37 |
+
("Albert", "Aric"): 1.4368257358712191,
|
| 38 |
+
("Albert", "Dan"): 2.0472097037446453,
|
| 39 |
+
("Albert", "Franck"): 1.8340111678944691,
|
| 40 |
+
("Aric", "1"): 0.7430186221096251,
|
| 41 |
+
("Aric", "Albert"): 1.4368257358712191,
|
| 42 |
+
("Aric", "Aric"): 2.4368257358712193,
|
| 43 |
+
("Aric", "Dan"): 2.0472097037446457,
|
| 44 |
+
("Aric", "Franck"): 1.8340111678944691,
|
| 45 |
+
("Dan", "1"): 1.6208126320442937,
|
| 46 |
+
("Dan", "Albert"): 2.0472097037446453,
|
| 47 |
+
("Dan", "Aric"): 2.0472097037446457,
|
| 48 |
+
("Dan", "Dan"): 3.1306328496328168,
|
| 49 |
+
("Dan", "Franck"): 1.4860372442192515,
|
| 50 |
+
("Franck", "1"): 0.42639707170035257,
|
| 51 |
+
("Franck", "Albert"): 1.8340111678944691,
|
| 52 |
+
("Franck", "Aric"): 1.8340111678944691,
|
| 53 |
+
("Franck", "Dan"): 1.4860372442192515,
|
| 54 |
+
("Franck", "Franck"): 2.3876142275231915,
|
| 55 |
+
}
|
| 56 |
+
|
| 57 |
+
answer = defaultdict(dict)
|
| 58 |
+
for (k1, k2), v in answer_orig.items():
|
| 59 |
+
answer[k1][k2] = v
|
| 60 |
+
|
| 61 |
+
G1 = nx.Graph(
|
| 62 |
+
[
|
| 63 |
+
("Franck", "Aric"),
|
| 64 |
+
("Aric", "Dan"),
|
| 65 |
+
("Dan", "Albert"),
|
| 66 |
+
("Albert", "Franck"),
|
| 67 |
+
("Dan", "1"),
|
| 68 |
+
("Franck", "Albert"),
|
| 69 |
+
]
|
| 70 |
+
)
|
| 71 |
+
|
| 72 |
+
result = communicability(G1)
|
| 73 |
+
for k1, val in result.items():
|
| 74 |
+
for k2 in val:
|
| 75 |
+
assert answer[k1][k2] == pytest.approx(result[k1][k2], abs=1e-7)
|
| 76 |
+
|
| 77 |
+
result = communicability_exp(G1)
|
| 78 |
+
for k1, val in result.items():
|
| 79 |
+
for k2 in val:
|
| 80 |
+
assert answer[k1][k2] == pytest.approx(result[k1][k2], abs=1e-7)
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_covering.py
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class TestMinEdgeCover:
|
| 7 |
+
"""Tests for :func:`networkx.algorithms.min_edge_cover`"""
|
| 8 |
+
|
| 9 |
+
def test_empty_graph(self):
|
| 10 |
+
G = nx.Graph()
|
| 11 |
+
assert nx.min_edge_cover(G) == set()
|
| 12 |
+
|
| 13 |
+
def test_graph_with_loop(self):
|
| 14 |
+
G = nx.Graph()
|
| 15 |
+
G.add_edge(0, 0)
|
| 16 |
+
assert nx.min_edge_cover(G) == {(0, 0)}
|
| 17 |
+
|
| 18 |
+
def test_graph_with_isolated_v(self):
|
| 19 |
+
G = nx.Graph()
|
| 20 |
+
G.add_node(1)
|
| 21 |
+
with pytest.raises(
|
| 22 |
+
nx.NetworkXException,
|
| 23 |
+
match="Graph has a node with no edge incident on it, so no edge cover exists.",
|
| 24 |
+
):
|
| 25 |
+
nx.min_edge_cover(G)
|
| 26 |
+
|
| 27 |
+
def test_graph_single_edge(self):
|
| 28 |
+
G = nx.Graph([(0, 1)])
|
| 29 |
+
assert nx.min_edge_cover(G) in ({(0, 1)}, {(1, 0)})
|
| 30 |
+
|
| 31 |
+
def test_graph_two_edge_path(self):
|
| 32 |
+
G = nx.path_graph(3)
|
| 33 |
+
min_cover = nx.min_edge_cover(G)
|
| 34 |
+
assert len(min_cover) == 2
|
| 35 |
+
for u, v in G.edges:
|
| 36 |
+
assert (u, v) in min_cover or (v, u) in min_cover
|
| 37 |
+
|
| 38 |
+
def test_bipartite_explicit(self):
|
| 39 |
+
G = nx.Graph()
|
| 40 |
+
G.add_nodes_from([1, 2, 3, 4], bipartite=0)
|
| 41 |
+
G.add_nodes_from(["a", "b", "c"], bipartite=1)
|
| 42 |
+
G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
|
| 43 |
+
# Use bipartite method by prescribing the algorithm
|
| 44 |
+
min_cover = nx.min_edge_cover(
|
| 45 |
+
G, nx.algorithms.bipartite.matching.eppstein_matching
|
| 46 |
+
)
|
| 47 |
+
assert nx.is_edge_cover(G, min_cover)
|
| 48 |
+
assert len(min_cover) == 8
|
| 49 |
+
# Use the default method which is not specialized for bipartite
|
| 50 |
+
min_cover2 = nx.min_edge_cover(G)
|
| 51 |
+
assert nx.is_edge_cover(G, min_cover2)
|
| 52 |
+
assert len(min_cover2) == 4
|
| 53 |
+
|
| 54 |
+
def test_complete_graph_even(self):
|
| 55 |
+
G = nx.complete_graph(10)
|
| 56 |
+
min_cover = nx.min_edge_cover(G)
|
| 57 |
+
assert nx.is_edge_cover(G, min_cover)
|
| 58 |
+
assert len(min_cover) == 5
|
| 59 |
+
|
| 60 |
+
def test_complete_graph_odd(self):
|
| 61 |
+
G = nx.complete_graph(11)
|
| 62 |
+
min_cover = nx.min_edge_cover(G)
|
| 63 |
+
assert nx.is_edge_cover(G, min_cover)
|
| 64 |
+
assert len(min_cover) == 6
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
class TestIsEdgeCover:
|
| 68 |
+
"""Tests for :func:`networkx.algorithms.is_edge_cover`"""
|
| 69 |
+
|
| 70 |
+
def test_empty_graph(self):
|
| 71 |
+
G = nx.Graph()
|
| 72 |
+
assert nx.is_edge_cover(G, set())
|
| 73 |
+
|
| 74 |
+
def test_graph_with_loop(self):
|
| 75 |
+
G = nx.Graph()
|
| 76 |
+
G.add_edge(1, 1)
|
| 77 |
+
assert nx.is_edge_cover(G, {(1, 1)})
|
| 78 |
+
|
| 79 |
+
def test_graph_single_edge(self):
|
| 80 |
+
G = nx.Graph()
|
| 81 |
+
G.add_edge(0, 1)
|
| 82 |
+
assert nx.is_edge_cover(G, {(0, 0), (1, 1)})
|
| 83 |
+
assert nx.is_edge_cover(G, {(0, 1), (1, 0)})
|
| 84 |
+
assert nx.is_edge_cover(G, {(0, 1)})
|
| 85 |
+
assert not nx.is_edge_cover(G, {(0, 0)})
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_cuts.py
ADDED
|
@@ -0,0 +1,172 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Unit tests for the :mod:`networkx.algorithms.cuts` module."""
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
import networkx as nx
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class TestCutSize:
|
| 8 |
+
"""Unit tests for the :func:`~networkx.cut_size` function."""
|
| 9 |
+
|
| 10 |
+
def test_symmetric(self):
|
| 11 |
+
"""Tests that the cut size is symmetric."""
|
| 12 |
+
G = nx.barbell_graph(3, 0)
|
| 13 |
+
S = {0, 1, 4}
|
| 14 |
+
T = {2, 3, 5}
|
| 15 |
+
assert nx.cut_size(G, S, T) == 4
|
| 16 |
+
assert nx.cut_size(G, T, S) == 4
|
| 17 |
+
|
| 18 |
+
def test_single_edge(self):
|
| 19 |
+
"""Tests for a cut of a single edge."""
|
| 20 |
+
G = nx.barbell_graph(3, 0)
|
| 21 |
+
S = {0, 1, 2}
|
| 22 |
+
T = {3, 4, 5}
|
| 23 |
+
assert nx.cut_size(G, S, T) == 1
|
| 24 |
+
assert nx.cut_size(G, T, S) == 1
|
| 25 |
+
|
| 26 |
+
def test_directed(self):
|
| 27 |
+
"""Tests that each directed edge is counted once in the cut."""
|
| 28 |
+
G = nx.barbell_graph(3, 0).to_directed()
|
| 29 |
+
S = {0, 1, 2}
|
| 30 |
+
T = {3, 4, 5}
|
| 31 |
+
assert nx.cut_size(G, S, T) == 2
|
| 32 |
+
assert nx.cut_size(G, T, S) == 2
|
| 33 |
+
|
| 34 |
+
def test_directed_symmetric(self):
|
| 35 |
+
"""Tests that a cut in a directed graph is symmetric."""
|
| 36 |
+
G = nx.barbell_graph(3, 0).to_directed()
|
| 37 |
+
S = {0, 1, 4}
|
| 38 |
+
T = {2, 3, 5}
|
| 39 |
+
assert nx.cut_size(G, S, T) == 8
|
| 40 |
+
assert nx.cut_size(G, T, S) == 8
|
| 41 |
+
|
| 42 |
+
def test_multigraph(self):
|
| 43 |
+
"""Tests that parallel edges are each counted for a cut."""
|
| 44 |
+
G = nx.MultiGraph(["ab", "ab"])
|
| 45 |
+
assert nx.cut_size(G, {"a"}, {"b"}) == 2
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class TestVolume:
    """Unit tests for the :func:`~networkx.volume` function."""

    def test_graph(self):
        # Each of the two nodes in the set has degree two.
        assert nx.volume(nx.cycle_graph(4), {0, 1}) == 4

    def test_digraph(self):
        # Only out-degree contributes in a directed graph.
        digraph = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 0)])
        assert nx.volume(digraph, {0, 1}) == 2

    def test_multigraph(self):
        # Doubling every edge doubles the volume.
        doubled = nx.MultiGraph(list(nx.cycle_graph(4).edges()) * 2)
        assert nx.volume(doubled, {0, 1}) == 8

    def test_multidigraph(self):
        ring = [(0, 1), (1, 2), (2, 3), (3, 0)]
        assert nx.volume(nx.MultiDiGraph(ring * 2), {0, 1}) == 4

    def test_barbell(self):
        # Each bell contributes 2 + 2 + 3 = 7 degree units.
        barbell = nx.barbell_graph(3, 0)
        for bell in ({0, 1, 2}, {3, 4, 5}):
            assert nx.volume(barbell, bell) == 7
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class TestNormalizedCutSize:
    """Unit tests for the :func:`~networkx.normalized_cut_size` function."""

    def test_graph(self):
        graph = nx.path_graph(4)
        inner = {1, 2}
        outer = set(graph) - inner
        # The cut looks like this: o-{-o--o-}-o
        expected = 2 * ((1 / 4) + (1 / 2))
        assert nx.normalized_cut_size(graph, inner, outer) == expected
        # Omitting T must give the same result (T defaults to the complement).
        assert nx.normalized_cut_size(graph, inner) == expected

    def test_directed(self):
        digraph = nx.DiGraph([(0, 1), (1, 2), (2, 3)])
        inner = {1, 2}
        outer = set(digraph) - inner
        # The cut looks like this: o-{->o-->o-}->o
        expected = 2 * ((1 / 2) + (1 / 1))
        assert nx.normalized_cut_size(digraph, inner, outer) == expected
        # Omitting T must give the same result (T defaults to the complement).
        assert nx.normalized_cut_size(digraph, inner) == expected
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
class TestConductance:
    """Unit tests for the :func:`~networkx.conductance` function."""

    def test_graph(self):
        graph = nx.barbell_graph(5, 0)
        # Singleton sets holding the two "bridge" nodes: exactly one cut
        # edge, and each set has volume five.
        assert nx.conductance(graph, {4}, {5}) == 1 / 5
        # With T omitted it defaults to the complement of S.  In the
        # smaller barbell there is one cut edge and each side has
        # volume seven.
        small = nx.barbell_graph(3, 0)
        assert nx.conductance(small, {0, 1, 2}) == 1 / 7
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
class TestEdgeExpansion:
    """Unit tests for the :func:`~networkx.edge_expansion` function."""

    def test_graph(self):
        graph = nx.barbell_graph(5, 0)
        bell = set(range(5))
        rest = set(graph) - bell
        # One bridge edge divided by the five nodes of the smaller side.
        assert nx.edge_expansion(graph, bell, rest) == 1 / 5
        # Omitting T (it defaults to the complement) gives the same value.
        assert nx.edge_expansion(graph, bell) == 1 / 5
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
class TestNodeExpansion:
    """Unit tests for the :func:`~networkx.node_expansion` function."""

    def test_graph(self):
        graph = nx.path_graph(8)
        nodes = {3, 4, 5}
        # The neighborhood of the set has cardinality five, while the
        # set itself has cardinality three.
        assert nx.node_expansion(graph, nodes) == 5 / 3
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
class TestBoundaryExpansion:
    """Unit tests for the :func:`~networkx.boundary_expansion` function."""

    def test_graph(self):
        G = nx.complete_graph(10)
        S = set(range(4))
        expansion = nx.boundary_expansion(G, S)
        # The node boundary of S has cardinality six, and S has
        # cardinality four.
        expected = 6 / 4
        assert expected == expansion
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
class TestMixingExpansion:
    """Unit tests for the :func:`~networkx.mixing_expansion` function."""

    def test_graph(self):
        graph = nx.barbell_graph(5, 0)
        bell = set(range(5))
        rest = set(graph) - bell
        # Exactly one edge crosses the cut.  The denominator is twice
        # the total edge count: two five-node cliques (5 * 4 / 2 edges
        # each, i.e. 5 * 4 in total) plus the single bridge.
        expected = 1 / (2 * (5 * 4 + 1))
        assert nx.mixing_expansion(graph, bell, rest) == expected
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_d_separation.py
ADDED
|
@@ -0,0 +1,348 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from itertools import combinations
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def path_graph():
    """Return a directed path graph on three nodes (0 -> 1 -> 2)."""
    graph = nx.path_graph(3, create_using=nx.DiGraph)
    graph.graph["name"] = "path"
    # Freeze so the shared module-level instance cannot be mutated by tests.
    nx.freeze(graph)
    return graph
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def fork_graph():
    """Return a three node fork graph."""
    # Node 0 is a common parent of nodes 1 and 2.
    graph = nx.DiGraph([(0, 1), (0, 2)], name="fork")
    nx.freeze(graph)
    return graph
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def collider_graph():
    """Return a collider/v-structure graph with three nodes."""
    # Both 0 and 1 point into the collider node 2.
    graph = nx.DiGraph([(0, 2), (1, 2)], name="collider")
    nx.freeze(graph)
    return graph
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def naive_bayes_graph():
    """Return a simple Naive Bayes PGM graph."""
    graph = nx.DiGraph(name="naive_bayes")
    # Class node 0 is the sole parent of every feature node 1..4.
    graph.add_edges_from((0, child) for child in range(1, 5))
    nx.freeze(graph)
    return graph
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def asia_graph():
    """Return the 'Asia' PGM graph."""
    edges = [
        ("asia", "tuberculosis"),
        ("smoking", "cancer"),
        ("smoking", "bronchitis"),
        ("tuberculosis", "either"),
        ("cancer", "either"),
        ("either", "xray"),
        ("either", "dyspnea"),
        ("bronchitis", "dyspnea"),
    ]
    graph = nx.DiGraph(edges, name="asia")
    nx.freeze(graph)
    return graph
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
# Fixture wrappers: expose the frozen module-level graph factories to the
# tests under the factory's own name via pytest's `name=` argument.
@pytest.fixture(name="path_graph")
def path_graph_fixture():
    return path_graph()


@pytest.fixture(name="fork_graph")
def fork_graph_fixture():
    return fork_graph()


@pytest.fixture(name="collider_graph")
def collider_graph_fixture():
    return collider_graph()


@pytest.fixture(name="naive_bayes_graph")
def naive_bayes_graph_fixture():
    return naive_bayes_graph()


@pytest.fixture(name="asia_graph")
def asia_graph_fixture():
    return asia_graph()
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
@pytest.fixture()
def large_collider_graph():
    # Collider A -> B <- C, plus chains B -> D -> E, B -> F and G -> E.
    edge_list = [("A", "B"), ("C", "B"), ("B", "D"), ("D", "E"), ("B", "F"), ("G", "E")]
    G = nx.DiGraph(edge_list)
    return G


@pytest.fixture()
def chain_and_fork_graph():
    # Chain A -> B -> C combined with the detour B -> D -> C.
    edge_list = [("A", "B"), ("B", "C"), ("B", "D"), ("D", "C")]
    G = nx.DiGraph(edge_list)
    return G


@pytest.fixture()
def no_separating_set_graph():
    # Direct edge A -> B: no d-separating set can exist.
    edge_list = [("A", "B")]
    G = nx.DiGraph(edge_list)
    return G


@pytest.fixture()
def large_no_separating_set_graph():
    # A -> B with common parent C: again no d-separating set exists.
    edge_list = [("A", "B"), ("C", "A"), ("C", "B")]
    G = nx.DiGraph(edge_list)
    return G


@pytest.fixture()
def collider_trek_graph():
    # Trek A -> B <- C -> D.
    edge_list = [("A", "B"), ("C", "B"), ("C", "D")]
    G = nx.DiGraph(edge_list)
    return G
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
@pytest.mark.parametrize(
    "graph",
    [path_graph(), fork_graph(), collider_graph(), naive_bayes_graph(), asia_graph()],
)
def test_markov_condition(graph):
    """Every node is d-separated from its non-descendants given its parents."""
    for node in graph.nodes:
        node_parents = set(graph.predecessors(node))
        others = graph.nodes - nx.descendants(graph, node) - {node} - node_parents
        assert nx.is_d_separator(graph, {node}, others, node_parents)
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
def test_path_graph_dsep(path_graph):
    """Example-based test of d-separation for path_graph."""
    # 0 -> 1 -> 2: conditioning on the middle node blocks the chain.
    assert nx.is_d_separator(path_graph, {0}, {2}, {1})
    assert not nx.is_d_separator(path_graph, {0}, {2}, set())


def test_fork_graph_dsep(fork_graph):
    """Example-based test of d-separation for fork_graph."""
    # 1 <- 0 -> 2: conditioning on the common parent blocks the fork.
    assert nx.is_d_separator(fork_graph, {1}, {2}, {0})
    assert not nx.is_d_separator(fork_graph, {1}, {2}, set())


def test_collider_graph_dsep(collider_graph):
    """Example-based test of d-separation for collider_graph."""
    # 0 -> 2 <- 1: the parents are separated unless the collider is observed.
    assert nx.is_d_separator(collider_graph, {0}, {1}, set())
    assert not nx.is_d_separator(collider_graph, {0}, {1}, {2})


def test_naive_bayes_dsep(naive_bayes_graph):
    """Example-based test of d-separation for naive_bayes_graph."""
    # Features are pairwise d-separated given the class node 0 only.
    for u, v in combinations(range(1, 5), 2):
        assert nx.is_d_separator(naive_bayes_graph, {u}, {v}, {0})
        assert not nx.is_d_separator(naive_bayes_graph, {u}, {v}, set())


def test_asia_graph_dsep(asia_graph):
    """Example-based test of d-separation for asia_graph."""
    assert nx.is_d_separator(
        asia_graph, {"asia", "smoking"}, {"dyspnea", "xray"}, {"bronchitis", "either"}
    )
    assert nx.is_d_separator(
        asia_graph, {"tuberculosis", "cancer"}, {"bronchitis"}, {"smoking", "xray"}
    )
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
def test_undirected_graphs_are_not_supported():
    """
    Test that undirected graphs are rejected.

    d-separation and its related algorithms do not apply in
    the case of undirected graphs.
    """
    undirected = nx.path_graph(3, nx.Graph)
    unsupported_calls = (
        lambda: nx.is_d_separator(undirected, {0}, {1}, {2}),
        lambda: nx.is_minimal_d_separator(undirected, {0}, {1}, {2}),
        lambda: nx.find_minimal_d_separator(undirected, {0}, {1}),
    )
    for call in unsupported_calls:
        with pytest.raises(nx.NetworkXNotImplemented):
            call()
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
def test_cyclic_graphs_raise_error():
    """
    Test that cyclic digraphs cause an error.

    This is because PGMs assume a directed acyclic graph.
    """
    cyclic = nx.cycle_graph(3, nx.DiGraph)
    rejected_calls = (
        lambda: nx.is_d_separator(cyclic, {0}, {1}, {2}),
        lambda: nx.find_minimal_d_separator(cyclic, {0}, {1}),
        lambda: nx.is_minimal_d_separator(cyclic, {0}, {1}, {2}),
    )
    for call in rejected_calls:
        with pytest.raises(nx.NetworkXError):
            call()
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
def test_invalid_nodes_raise_error(asia_graph):
    """
    Test that graphs that have invalid nodes passed in raise errors.
    """
    # Exercise each API with both set arguments and bare node arguments.
    bad_node_calls = (
        lambda: nx.is_d_separator(asia_graph, {0}, {1}, {2}),
        lambda: nx.is_d_separator(asia_graph, 0, 1, 2),
        lambda: nx.is_minimal_d_separator(asia_graph, {0}, {1}, {2}),
        lambda: nx.is_minimal_d_separator(asia_graph, 0, 1, 2),
        lambda: nx.find_minimal_d_separator(asia_graph, {0}, {1}),
        lambda: nx.find_minimal_d_separator(asia_graph, 0, 1),
    )
    for call in bad_node_calls:
        with pytest.raises(nx.NodeNotFound):
            call()
|
| 214 |
+
|
| 215 |
+
|
| 216 |
+
def test_nondisjoint_node_sets_raise_error(collider_graph):
    """
    Test that error is raised when node sets aren't disjoint.
    """
    overlapping_calls = (
        lambda: nx.is_d_separator(collider_graph, 0, 1, 0),
        lambda: nx.is_d_separator(collider_graph, 0, 2, 0),
        lambda: nx.is_d_separator(collider_graph, 0, 0, 1),
        lambda: nx.is_d_separator(collider_graph, 1, 0, 0),
        lambda: nx.find_minimal_d_separator(collider_graph, 0, 0),
        lambda: nx.find_minimal_d_separator(collider_graph, 0, 1, included=0),
        lambda: nx.find_minimal_d_separator(collider_graph, 1, 0, included=0),
        lambda: nx.is_minimal_d_separator(collider_graph, 0, 0, set()),
        lambda: nx.is_minimal_d_separator(collider_graph, 0, 1, set(), included=0),
        lambda: nx.is_minimal_d_separator(collider_graph, 1, 0, set(), included=0),
    )
    for call in overlapping_calls:
        with pytest.raises(nx.NetworkXError):
            call()
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
def test_is_minimal_d_separator(
    large_collider_graph,
    chain_and_fork_graph,
    no_separating_set_graph,
    large_no_separating_set_graph,
    collider_trek_graph,
):
    """End-to-end checks of find/is_minimal_d_separator on five fixtures."""
    # Case 1:
    # create a graph A -> B <- C
    # B -> D -> E;
    # B -> F;
    # G -> E;
    assert not nx.is_d_separator(large_collider_graph, {"B"}, {"E"}, set())

    # minimal set of the corresponding graph
    # for B and E should be (D,)
    Zmin = nx.find_minimal_d_separator(large_collider_graph, "B", "E")
    # check that the minimal d-separator is a d-separating set
    assert nx.is_d_separator(large_collider_graph, "B", "E", Zmin)
    # the minimal separating set should also pass the test for minimality
    assert nx.is_minimal_d_separator(large_collider_graph, "B", "E", Zmin)
    # function should also work with set arguments
    assert nx.is_minimal_d_separator(large_collider_graph, {"A", "B"}, {"G", "E"}, Zmin)
    assert Zmin == {"D"}

    # Case 2:
    # create a graph A -> B -> C
    # B -> D -> C;
    assert not nx.is_d_separator(chain_and_fork_graph, {"A"}, {"C"}, set())
    Zmin = nx.find_minimal_d_separator(chain_and_fork_graph, "A", "C")

    # the minimal separating set should pass the test for minimality
    assert nx.is_minimal_d_separator(chain_and_fork_graph, "A", "C", Zmin)
    assert Zmin == {"B"}
    # adding any node to a minimal separator makes it non-minimal
    Znotmin = Zmin.union({"D"})
    assert not nx.is_minimal_d_separator(chain_and_fork_graph, "A", "C", Znotmin)

    # Case 3:
    # create a graph A -> B

    # there is no m-separating set between A and B at all, so
    # no minimal m-separating set can exist
    assert not nx.is_d_separator(no_separating_set_graph, {"A"}, {"B"}, set())
    assert nx.find_minimal_d_separator(no_separating_set_graph, "A", "B") is None

    # Case 4:
    # create a graph A -> B with A <- C -> B

    # there is no m-separating set between A and B at all, so
    # no minimal m-separating set can exist
    # however, the algorithm will initially propose C as a
    # minimal (but invalid) separating set
    assert not nx.is_d_separator(large_no_separating_set_graph, {"A"}, {"B"}, {"C"})
    assert nx.find_minimal_d_separator(large_no_separating_set_graph, "A", "B") is None

    # Test `included` and `excluded` args
    # create graph A -> B <- C -> D
    assert nx.find_minimal_d_separator(collider_trek_graph, "A", "D", included="B") == {
        "B",
        "C",
    }
    assert (
        nx.find_minimal_d_separator(
            collider_trek_graph, "A", "D", included="B", restricted="B"
        )
        is None
    )
|
| 309 |
+
|
| 310 |
+
|
| 311 |
+
def test_is_minimal_d_separator_checks_dsep():
    """Test that is_minimal_d_separator checks for d-separation as well."""
    graph = nx.DiGraph(
        [
            ("A", "B"),
            ("A", "E"),
            ("B", "C"),
            ("B", "D"),
            ("D", "C"),
            ("D", "F"),
            ("E", "D"),
            ("E", "F"),
        ]
    )

    assert not nx.is_d_separator(graph, {"C"}, {"F"}, {"D"})

    # Neither {'D'} nor the empty set d-separates C from F, so neither
    # can be a *minimal* d-separator.
    assert not nx.is_minimal_d_separator(graph, "C", "F", {"D"})
    assert not nx.is_minimal_d_separator(graph, "C", "F", set())
|
| 332 |
+
|
| 333 |
+
|
| 334 |
+
def test__reachable(large_collider_graph):
    # Exercise the private helper used by the d-separation algorithms.
    reachable = nx.algorithms.d_separation._reachable
    g = large_collider_graph
    x = {"F", "D"}
    ancestors = {"A", "B", "C", "D", "F"}
    assert reachable(g, x, ancestors, {"B"}) == {"B", "F", "D"}
    # With an empty conditioning set, every ancestor is reachable.
    assert reachable(g, x, ancestors, set()) == ancestors
|
| 341 |
+
|
| 342 |
+
|
| 343 |
+
def test_deprecations():
    """Both deprecated entry points must emit a deprecation warning."""
    G = nx.DiGraph([(0, 1), (1, 2)])
    with pytest.deprecated_call():
        nx.d_separated(G, 0, 2, {1})
    with pytest.deprecated_call():
        # Return value intentionally discarded: only the warning matters
        # here (the original bound it to an unused local `z`).
        nx.minimal_d_separator(G, 0, 2)
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_euler.py
ADDED
|
@@ -0,0 +1,314 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
@pytest.mark.parametrize("f", (nx.is_eulerian, nx.is_semieulerian))
def test_empty_graph_raises(f):
    """Both predicates reject the null graph, whose connectivity is undefined."""
    with pytest.raises(nx.NetworkXPointlessConcept, match="Connectivity is undefined"):
        f(nx.Graph())
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class TestIsEulerian:
    def test_is_eulerian(self):
        # Connected graphs in which every node has even degree are Eulerian.
        for eulerian in (
            nx.complete_graph(5),
            nx.complete_graph(7),
            nx.hypercube_graph(4),
            nx.hypercube_graph(6),
        ):
            assert nx.is_eulerian(eulerian)

        # Graphs containing odd-degree nodes are not.
        for non_eulerian in (
            nx.complete_graph(4),
            nx.complete_graph(6),
            nx.hypercube_graph(3),
            nx.hypercube_graph(5),
            nx.petersen_graph(),
            nx.path_graph(4),
        ):
            assert not nx.is_eulerian(non_eulerian)

    def test_is_eulerian2(self):
        # A graph with isolated nodes is not connected, hence not Eulerian.
        G = nx.Graph()
        G.add_nodes_from([1, 2, 3])
        assert not nx.is_eulerian(G)
        # Likewise a digraph that is not strongly connected.
        D = nx.DiGraph()
        D.add_nodes_from([1, 2, 3])
        assert not nx.is_eulerian(D)
        # Unbalanced in/out degrees (parallel edge 2->3) also disqualify.
        M = nx.MultiDiGraph([(1, 2), (2, 3), (2, 3), (3, 1)])
        assert not nx.is_eulerian(M)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class TestEulerianCircuit:
    """Tests for :func:`~networkx.eulerian_circuit`."""

    def test_eulerian_circuit_cycle(self):
        G = nx.cycle_graph(4)

        # The circuit starts from `source` and visits every edge once.
        edges = list(nx.eulerian_circuit(G, source=0))
        nodes = [u for u, v in edges]
        assert nodes == [0, 3, 2, 1]
        assert edges == [(0, 3), (3, 2), (2, 1), (1, 0)]

        edges = list(nx.eulerian_circuit(G, source=1))
        nodes = [u for u, v in edges]
        assert nodes == [1, 2, 3, 0]
        assert edges == [(1, 2), (2, 3), (3, 0), (0, 1)]

        G = nx.complete_graph(3)

        edges = list(nx.eulerian_circuit(G, source=0))
        nodes = [u for u, v in edges]
        assert nodes == [0, 2, 1]
        assert edges == [(0, 2), (2, 1), (1, 0)]

        edges = list(nx.eulerian_circuit(G, source=1))
        nodes = [u for u, v in edges]
        assert nodes == [1, 2, 0]
        assert edges == [(1, 2), (2, 0), (0, 1)]

    def test_eulerian_circuit_digraph(self):
        G = nx.DiGraph()
        nx.add_cycle(G, [0, 1, 2, 3])

        # In a digraph the circuit must follow edge directions.
        edges = list(nx.eulerian_circuit(G, source=0))
        nodes = [u for u, v in edges]
        assert nodes == [0, 1, 2, 3]
        assert edges == [(0, 1), (1, 2), (2, 3), (3, 0)]

        edges = list(nx.eulerian_circuit(G, source=1))
        nodes = [u for u, v in edges]
        assert nodes == [1, 2, 3, 0]
        assert edges == [(1, 2), (2, 3), (3, 0), (0, 1)]

    def test_multigraph(self):
        G = nx.MultiGraph()
        nx.add_cycle(G, [0, 1, 2, 3])
        G.add_edge(1, 2)
        G.add_edge(1, 2)
        # Each of the three parallel 1-2 edges is traversed exactly once.
        edges = list(nx.eulerian_circuit(G, source=0))
        nodes = [u for u, v in edges]
        assert nodes == [0, 3, 2, 1, 2, 1]
        assert edges == [(0, 3), (3, 2), (2, 1), (1, 2), (2, 1), (1, 0)]

    def test_multigraph_with_keys(self):
        G = nx.MultiGraph()
        nx.add_cycle(G, [0, 1, 2, 3])
        G.add_edge(1, 2)
        G.add_edge(1, 2)
        edges = list(nx.eulerian_circuit(G, source=0, keys=True))
        nodes = [u for u, v, k in edges]
        assert nodes == [0, 3, 2, 1, 2, 1]
        assert edges[:2] == [(0, 3, 0), (3, 2, 0)]
        # The three parallel 1-2 traversals may consume keys in any order.
        assert collections.Counter(edges[2:5]) == collections.Counter(
            [(2, 1, 0), (1, 2, 1), (2, 1, 2)]
        )
        assert edges[5:] == [(1, 0, 0)]

    def test_not_eulerian(self):
        # K4 has odd-degree nodes, so requesting a circuit must raise.
        # (The original bound the result to an unused local `f`.)
        with pytest.raises(nx.NetworkXError):
            list(nx.eulerian_circuit(nx.complete_graph(4)))
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
class TestIsSemiEulerian:
    def test_is_semieulerian(self):
        # Graphs with an Eulerian path but no Eulerian circuit qualify.
        assert nx.is_semieulerian(nx.path_graph(4))
        assert nx.is_semieulerian(nx.path_graph(6, create_using=nx.DiGraph))

        # Graphs with an Eulerian circuit are not semi-Eulerian.
        for circuit_graph in (
            nx.complete_graph(5),
            nx.complete_graph(7),
            nx.hypercube_graph(4),
            nx.hypercube_graph(6),
        ):
            assert not nx.is_semieulerian(circuit_graph)
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
class TestHasEulerianPath:
    def test_has_eulerian_path_cyclic(self):
        # Any graph with an Eulerian circuit trivially has an Eulerian path.
        for circuit_graph in (
            nx.complete_graph(5),
            nx.complete_graph(7),
            nx.hypercube_graph(4),
            nx.hypercube_graph(6),
        ):
            assert nx.has_eulerian_path(circuit_graph)

    def test_has_eulerian_path_non_cyclic(self):
        # Graphs with an Eulerian path but no circuit also qualify.
        assert nx.has_eulerian_path(nx.path_graph(4))
        assert nx.has_eulerian_path(nx.path_graph(6, create_using=nx.DiGraph))

    def test_has_eulerian_path_directed_graph(self):
        # Unbalanced in/out degrees: no Eulerian path.
        G = nx.DiGraph([(0, 1), (1, 2), (0, 2)])
        assert not nx.has_eulerian_path(G)

        # A directed 3-cycle has an Eulerian path.
        G = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
        assert nx.has_eulerian_path(G)

        # Adding an isolated node breaks connectivity.
        G.add_node(3)
        assert not nx.has_eulerian_path(G)

    @pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph()))
    def test_has_eulerian_path_not_weakly_connected(self, G):
        # Two disconnected components: no single path covers all edges.
        G.add_edges_from([(0, 1), (2, 3), (3, 2)])
        assert not nx.has_eulerian_path(G)

    @pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph()))
    def test_has_eulerian_path_unbalancedins_more_than_one(self, G):
        # Two disjoint edges: more than one component, so no path.
        G.add_edges_from([(0, 1), (2, 3)])
        assert not nx.has_eulerian_path(G)
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
class TestFindPathStart:
    def testfind_path_start(self):
        find_path_start = nx.algorithms.euler._find_path_start
        # Digraphs with an Eulerian path: the correct start node is returned.
        assert find_path_start(nx.path_graph(6, create_using=nx.DiGraph)) == 0
        assert find_path_start(nx.DiGraph([(0, 1), (1, 2), (2, 0), (4, 0)])) == 4
        # A digraph with no Eulerian path yields None.
        assert find_path_start(nx.DiGraph([(0, 1), (1, 2), (2, 3), (2, 4)])) is None
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
class TestEulerianPath:
    """Tests for :func:`~networkx.eulerian_path`."""

    def test_eulerian_path(self):
        # The path must reproduce the edge list of this digraph in order.
        x = [(4, 0), (0, 1), (1, 2), (2, 0)]
        for e1, e2 in zip(x, nx.eulerian_path(nx.DiGraph(x))):
            assert e1 == e2

    def test_eulerian_path_straight_link(self):
        G = nx.DiGraph()
        result = [(1, 2), (2, 3), (3, 4), (4, 5)]
        G.add_edges_from(result)
        assert result == list(nx.eulerian_path(G))
        assert result == list(nx.eulerian_path(G, source=1))
        # Only the true start node (1) is a valid source on a straight path.
        with pytest.raises(nx.NetworkXError):
            list(nx.eulerian_path(G, source=3))
        with pytest.raises(nx.NetworkXError):
            list(nx.eulerian_path(G, source=4))
        with pytest.raises(nx.NetworkXError):
            list(nx.eulerian_path(G, source=5))

    def test_eulerian_path_multigraph(self):
        G = nx.MultiDiGraph()
        result = [(2, 1), (1, 2), (2, 1), (1, 2), (2, 3), (3, 4), (4, 3)]
        G.add_edges_from(result)
        assert result == list(nx.eulerian_path(G))
        assert result == list(nx.eulerian_path(G, source=2))
        # Nodes other than the start node 2 are rejected as sources.
        with pytest.raises(nx.NetworkXError):
            list(nx.eulerian_path(G, source=3))
        with pytest.raises(nx.NetworkXError):
            list(nx.eulerian_path(G, source=4))

    def test_eulerian_path_eulerian_circuit(self):
        # On an Eulerian circuit, any node works as a source and the
        # returned path is the circuit rotated to start there.
        G = nx.DiGraph()
        result = [(1, 2), (2, 3), (3, 4), (4, 1)]
        result2 = [(2, 3), (3, 4), (4, 1), (1, 2)]
        result3 = [(3, 4), (4, 1), (1, 2), (2, 3)]
        G.add_edges_from(result)
        assert result == list(nx.eulerian_path(G))
        assert result == list(nx.eulerian_path(G, source=1))
        assert result2 == list(nx.eulerian_path(G, source=2))
        assert result3 == list(nx.eulerian_path(G, source=3))

    def test_eulerian_path_undirected(self):
        # An undirected path may be walked from either endpoint.
        G = nx.Graph()
        result = [(1, 2), (2, 3), (3, 4), (4, 5)]
        result2 = [(5, 4), (4, 3), (3, 2), (2, 1)]
        G.add_edges_from(result)
        assert list(nx.eulerian_path(G)) in (result, result2)
        assert result == list(nx.eulerian_path(G, source=1))
        assert result2 == list(nx.eulerian_path(G, source=5))
        # Interior nodes are not valid sources.
        with pytest.raises(nx.NetworkXError):
            list(nx.eulerian_path(G, source=3))
        with pytest.raises(nx.NetworkXError):
            list(nx.eulerian_path(G, source=2))

    def test_eulerian_path_multigraph_undirected(self):
        G = nx.MultiGraph()
        result = [(2, 1), (1, 2), (2, 1), (1, 2), (2, 3), (3, 4)]
        G.add_edges_from(result)
        assert result == list(nx.eulerian_path(G))
        assert result == list(nx.eulerian_path(G, source=2))
        with pytest.raises(nx.NetworkXError):
            list(nx.eulerian_path(G, source=3))
        with pytest.raises(nx.NetworkXError):
            list(nx.eulerian_path(G, source=1))

    @pytest.mark.parametrize(
        ("graph_type", "result"),
        (
            (nx.MultiGraph, [(0, 1, 0), (1, 0, 1)]),
            (nx.MultiDiGraph, [(0, 1, 0), (1, 0, 0)]),
        ),
    )
    def test_eulerian_with_keys(self, graph_type, result):
        # With keys=True the edge keys are included in each reported edge.
        G = graph_type([(0, 1), (1, 0)])
        answer = nx.eulerian_path(G, keys=True)
        assert list(answer) == result
|
| 260 |
+
|
| 261 |
+
|
| 262 |
+
class TestEulerize:
|
| 263 |
+
def test_disconnected(self):
|
| 264 |
+
with pytest.raises(nx.NetworkXError):
|
| 265 |
+
G = nx.from_edgelist([(0, 1), (2, 3)])
|
| 266 |
+
nx.eulerize(G)
|
| 267 |
+
|
| 268 |
+
def test_null_graph(self):
|
| 269 |
+
with pytest.raises(nx.NetworkXPointlessConcept):
|
| 270 |
+
nx.eulerize(nx.Graph())
|
| 271 |
+
|
| 272 |
+
def test_null_multigraph(self):
|
| 273 |
+
with pytest.raises(nx.NetworkXPointlessConcept):
|
| 274 |
+
nx.eulerize(nx.MultiGraph())
|
| 275 |
+
|
| 276 |
+
def test_on_empty_graph(self):
|
| 277 |
+
with pytest.raises(nx.NetworkXError):
|
| 278 |
+
nx.eulerize(nx.empty_graph(3))
|
| 279 |
+
|
| 280 |
+
def test_on_eulerian(self):
|
| 281 |
+
G = nx.cycle_graph(3)
|
| 282 |
+
H = nx.eulerize(G)
|
| 283 |
+
assert nx.is_isomorphic(G, H)
|
| 284 |
+
|
| 285 |
+
def test_on_eulerian_multigraph(self):
|
| 286 |
+
G = nx.MultiGraph(nx.cycle_graph(3))
|
| 287 |
+
G.add_edge(0, 1)
|
| 288 |
+
H = nx.eulerize(G)
|
| 289 |
+
assert nx.is_eulerian(H)
|
| 290 |
+
|
| 291 |
+
def test_on_complete_graph(self):
|
| 292 |
+
G = nx.complete_graph(4)
|
| 293 |
+
assert nx.is_eulerian(nx.eulerize(G))
|
| 294 |
+
assert nx.is_eulerian(nx.eulerize(nx.MultiGraph(G)))
|
| 295 |
+
|
| 296 |
+
def test_on_non_eulerian_graph(self):
|
| 297 |
+
G = nx.cycle_graph(18)
|
| 298 |
+
G.add_edge(0, 18)
|
| 299 |
+
G.add_edge(18, 19)
|
| 300 |
+
G.add_edge(17, 19)
|
| 301 |
+
G.add_edge(4, 20)
|
| 302 |
+
G.add_edge(20, 21)
|
| 303 |
+
G.add_edge(21, 22)
|
| 304 |
+
G.add_edge(22, 23)
|
| 305 |
+
G.add_edge(23, 24)
|
| 306 |
+
G.add_edge(24, 25)
|
| 307 |
+
G.add_edge(25, 26)
|
| 308 |
+
G.add_edge(26, 27)
|
| 309 |
+
G.add_edge(27, 28)
|
| 310 |
+
G.add_edge(28, 13)
|
| 311 |
+
assert not nx.is_eulerian(G)
|
| 312 |
+
G = nx.eulerize(G)
|
| 313 |
+
assert nx.is_eulerian(G)
|
| 314 |
+
assert nx.number_of_edges(G) == 39
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_graph_hashing.py
ADDED
|
@@ -0,0 +1,686 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.generators import directed
|
| 5 |
+
|
| 6 |
+
# Unit tests for the :func:`~networkx.weisfeiler_lehman_graph_hash` function
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def test_empty_graph_hash():
|
| 10 |
+
"""
|
| 11 |
+
empty graphs should give hashes regardless of other params
|
| 12 |
+
"""
|
| 13 |
+
G1 = nx.empty_graph()
|
| 14 |
+
G2 = nx.empty_graph()
|
| 15 |
+
|
| 16 |
+
h1 = nx.weisfeiler_lehman_graph_hash(G1)
|
| 17 |
+
h2 = nx.weisfeiler_lehman_graph_hash(G2)
|
| 18 |
+
h3 = nx.weisfeiler_lehman_graph_hash(G2, edge_attr="edge_attr1")
|
| 19 |
+
h4 = nx.weisfeiler_lehman_graph_hash(G2, node_attr="node_attr1")
|
| 20 |
+
h5 = nx.weisfeiler_lehman_graph_hash(
|
| 21 |
+
G2, edge_attr="edge_attr1", node_attr="node_attr1"
|
| 22 |
+
)
|
| 23 |
+
h6 = nx.weisfeiler_lehman_graph_hash(G2, iterations=10)
|
| 24 |
+
|
| 25 |
+
assert h1 == h2
|
| 26 |
+
assert h1 == h3
|
| 27 |
+
assert h1 == h4
|
| 28 |
+
assert h1 == h5
|
| 29 |
+
assert h1 == h6
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def test_directed():
|
| 33 |
+
"""
|
| 34 |
+
A directed graph with no bi-directional edges should yield different a graph hash
|
| 35 |
+
to the same graph taken as undirected if there are no hash collisions.
|
| 36 |
+
"""
|
| 37 |
+
r = 10
|
| 38 |
+
for i in range(r):
|
| 39 |
+
G_directed = nx.gn_graph(10 + r, seed=100 + i)
|
| 40 |
+
G_undirected = nx.to_undirected(G_directed)
|
| 41 |
+
|
| 42 |
+
h_directed = nx.weisfeiler_lehman_graph_hash(G_directed)
|
| 43 |
+
h_undirected = nx.weisfeiler_lehman_graph_hash(G_undirected)
|
| 44 |
+
|
| 45 |
+
assert h_directed != h_undirected
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def test_reversed():
|
| 49 |
+
"""
|
| 50 |
+
A directed graph with no bi-directional edges should yield different a graph hash
|
| 51 |
+
to the same graph taken with edge directions reversed if there are no hash collisions.
|
| 52 |
+
Here we test a cycle graph which is the minimal counterexample
|
| 53 |
+
"""
|
| 54 |
+
G = nx.cycle_graph(5, create_using=nx.DiGraph)
|
| 55 |
+
nx.set_node_attributes(G, {n: str(n) for n in G.nodes()}, name="label")
|
| 56 |
+
|
| 57 |
+
G_reversed = G.reverse()
|
| 58 |
+
|
| 59 |
+
h = nx.weisfeiler_lehman_graph_hash(G, node_attr="label")
|
| 60 |
+
h_reversed = nx.weisfeiler_lehman_graph_hash(G_reversed, node_attr="label")
|
| 61 |
+
|
| 62 |
+
assert h != h_reversed
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def test_isomorphic():
|
| 66 |
+
"""
|
| 67 |
+
graph hashes should be invariant to node-relabeling (when the output is reindexed
|
| 68 |
+
by the same mapping)
|
| 69 |
+
"""
|
| 70 |
+
n, r = 100, 10
|
| 71 |
+
p = 1.0 / r
|
| 72 |
+
for i in range(1, r + 1):
|
| 73 |
+
G1 = nx.erdos_renyi_graph(n, p * i, seed=200 + i)
|
| 74 |
+
G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()})
|
| 75 |
+
|
| 76 |
+
g1_hash = nx.weisfeiler_lehman_graph_hash(G1)
|
| 77 |
+
g2_hash = nx.weisfeiler_lehman_graph_hash(G2)
|
| 78 |
+
|
| 79 |
+
assert g1_hash == g2_hash
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def test_isomorphic_edge_attr():
|
| 83 |
+
"""
|
| 84 |
+
Isomorphic graphs with differing edge attributes should yield different graph
|
| 85 |
+
hashes if the 'edge_attr' argument is supplied and populated in the graph,
|
| 86 |
+
and there are no hash collisions.
|
| 87 |
+
The output should still be invariant to node-relabeling
|
| 88 |
+
"""
|
| 89 |
+
n, r = 100, 10
|
| 90 |
+
p = 1.0 / r
|
| 91 |
+
for i in range(1, r + 1):
|
| 92 |
+
G1 = nx.erdos_renyi_graph(n, p * i, seed=300 + i)
|
| 93 |
+
|
| 94 |
+
for a, b in G1.edges:
|
| 95 |
+
G1[a][b]["edge_attr1"] = f"{a}-{b}-1"
|
| 96 |
+
G1[a][b]["edge_attr2"] = f"{a}-{b}-2"
|
| 97 |
+
|
| 98 |
+
g1_hash_with_edge_attr1 = nx.weisfeiler_lehman_graph_hash(
|
| 99 |
+
G1, edge_attr="edge_attr1"
|
| 100 |
+
)
|
| 101 |
+
g1_hash_with_edge_attr2 = nx.weisfeiler_lehman_graph_hash(
|
| 102 |
+
G1, edge_attr="edge_attr2"
|
| 103 |
+
)
|
| 104 |
+
g1_hash_no_edge_attr = nx.weisfeiler_lehman_graph_hash(G1, edge_attr=None)
|
| 105 |
+
|
| 106 |
+
assert g1_hash_with_edge_attr1 != g1_hash_no_edge_attr
|
| 107 |
+
assert g1_hash_with_edge_attr2 != g1_hash_no_edge_attr
|
| 108 |
+
assert g1_hash_with_edge_attr1 != g1_hash_with_edge_attr2
|
| 109 |
+
|
| 110 |
+
G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()})
|
| 111 |
+
|
| 112 |
+
g2_hash_with_edge_attr1 = nx.weisfeiler_lehman_graph_hash(
|
| 113 |
+
G2, edge_attr="edge_attr1"
|
| 114 |
+
)
|
| 115 |
+
g2_hash_with_edge_attr2 = nx.weisfeiler_lehman_graph_hash(
|
| 116 |
+
G2, edge_attr="edge_attr2"
|
| 117 |
+
)
|
| 118 |
+
|
| 119 |
+
assert g1_hash_with_edge_attr1 == g2_hash_with_edge_attr1
|
| 120 |
+
assert g1_hash_with_edge_attr2 == g2_hash_with_edge_attr2
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
def test_missing_edge_attr():
|
| 124 |
+
"""
|
| 125 |
+
If the 'edge_attr' argument is supplied but is missing from an edge in the graph,
|
| 126 |
+
we should raise a KeyError
|
| 127 |
+
"""
|
| 128 |
+
G = nx.Graph()
|
| 129 |
+
G.add_edges_from([(1, 2, {"edge_attr1": "a"}), (1, 3, {})])
|
| 130 |
+
pytest.raises(KeyError, nx.weisfeiler_lehman_graph_hash, G, edge_attr="edge_attr1")
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
def test_isomorphic_node_attr():
|
| 134 |
+
"""
|
| 135 |
+
Isomorphic graphs with differing node attributes should yield different graph
|
| 136 |
+
hashes if the 'node_attr' argument is supplied and populated in the graph, and
|
| 137 |
+
there are no hash collisions.
|
| 138 |
+
The output should still be invariant to node-relabeling
|
| 139 |
+
"""
|
| 140 |
+
n, r = 100, 10
|
| 141 |
+
p = 1.0 / r
|
| 142 |
+
for i in range(1, r + 1):
|
| 143 |
+
G1 = nx.erdos_renyi_graph(n, p * i, seed=400 + i)
|
| 144 |
+
|
| 145 |
+
for u in G1.nodes():
|
| 146 |
+
G1.nodes[u]["node_attr1"] = f"{u}-1"
|
| 147 |
+
G1.nodes[u]["node_attr2"] = f"{u}-2"
|
| 148 |
+
|
| 149 |
+
g1_hash_with_node_attr1 = nx.weisfeiler_lehman_graph_hash(
|
| 150 |
+
G1, node_attr="node_attr1"
|
| 151 |
+
)
|
| 152 |
+
g1_hash_with_node_attr2 = nx.weisfeiler_lehman_graph_hash(
|
| 153 |
+
G1, node_attr="node_attr2"
|
| 154 |
+
)
|
| 155 |
+
g1_hash_no_node_attr = nx.weisfeiler_lehman_graph_hash(G1, node_attr=None)
|
| 156 |
+
|
| 157 |
+
assert g1_hash_with_node_attr1 != g1_hash_no_node_attr
|
| 158 |
+
assert g1_hash_with_node_attr2 != g1_hash_no_node_attr
|
| 159 |
+
assert g1_hash_with_node_attr1 != g1_hash_with_node_attr2
|
| 160 |
+
|
| 161 |
+
G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()})
|
| 162 |
+
|
| 163 |
+
g2_hash_with_node_attr1 = nx.weisfeiler_lehman_graph_hash(
|
| 164 |
+
G2, node_attr="node_attr1"
|
| 165 |
+
)
|
| 166 |
+
g2_hash_with_node_attr2 = nx.weisfeiler_lehman_graph_hash(
|
| 167 |
+
G2, node_attr="node_attr2"
|
| 168 |
+
)
|
| 169 |
+
|
| 170 |
+
assert g1_hash_with_node_attr1 == g2_hash_with_node_attr1
|
| 171 |
+
assert g1_hash_with_node_attr2 == g2_hash_with_node_attr2
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
def test_missing_node_attr():
|
| 175 |
+
"""
|
| 176 |
+
If the 'node_attr' argument is supplied but is missing from a node in the graph,
|
| 177 |
+
we should raise a KeyError
|
| 178 |
+
"""
|
| 179 |
+
G = nx.Graph()
|
| 180 |
+
G.add_nodes_from([(1, {"node_attr1": "a"}), (2, {})])
|
| 181 |
+
G.add_edges_from([(1, 2), (2, 3), (3, 1), (1, 4)])
|
| 182 |
+
pytest.raises(KeyError, nx.weisfeiler_lehman_graph_hash, G, node_attr="node_attr1")
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
def test_isomorphic_edge_attr_and_node_attr():
|
| 186 |
+
"""
|
| 187 |
+
Isomorphic graphs with differing node attributes should yield different graph
|
| 188 |
+
hashes if the 'node_attr' and 'edge_attr' argument is supplied and populated in
|
| 189 |
+
the graph, and there are no hash collisions.
|
| 190 |
+
The output should still be invariant to node-relabeling
|
| 191 |
+
"""
|
| 192 |
+
n, r = 100, 10
|
| 193 |
+
p = 1.0 / r
|
| 194 |
+
for i in range(1, r + 1):
|
| 195 |
+
G1 = nx.erdos_renyi_graph(n, p * i, seed=500 + i)
|
| 196 |
+
|
| 197 |
+
for u in G1.nodes():
|
| 198 |
+
G1.nodes[u]["node_attr1"] = f"{u}-1"
|
| 199 |
+
G1.nodes[u]["node_attr2"] = f"{u}-2"
|
| 200 |
+
|
| 201 |
+
for a, b in G1.edges:
|
| 202 |
+
G1[a][b]["edge_attr1"] = f"{a}-{b}-1"
|
| 203 |
+
G1[a][b]["edge_attr2"] = f"{a}-{b}-2"
|
| 204 |
+
|
| 205 |
+
g1_hash_edge1_node1 = nx.weisfeiler_lehman_graph_hash(
|
| 206 |
+
G1, edge_attr="edge_attr1", node_attr="node_attr1"
|
| 207 |
+
)
|
| 208 |
+
g1_hash_edge2_node2 = nx.weisfeiler_lehman_graph_hash(
|
| 209 |
+
G1, edge_attr="edge_attr2", node_attr="node_attr2"
|
| 210 |
+
)
|
| 211 |
+
g1_hash_edge1_node2 = nx.weisfeiler_lehman_graph_hash(
|
| 212 |
+
G1, edge_attr="edge_attr1", node_attr="node_attr2"
|
| 213 |
+
)
|
| 214 |
+
g1_hash_no_attr = nx.weisfeiler_lehman_graph_hash(G1)
|
| 215 |
+
|
| 216 |
+
assert g1_hash_edge1_node1 != g1_hash_no_attr
|
| 217 |
+
assert g1_hash_edge2_node2 != g1_hash_no_attr
|
| 218 |
+
assert g1_hash_edge1_node1 != g1_hash_edge2_node2
|
| 219 |
+
assert g1_hash_edge1_node2 != g1_hash_edge2_node2
|
| 220 |
+
assert g1_hash_edge1_node2 != g1_hash_edge1_node1
|
| 221 |
+
|
| 222 |
+
G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()})
|
| 223 |
+
|
| 224 |
+
g2_hash_edge1_node1 = nx.weisfeiler_lehman_graph_hash(
|
| 225 |
+
G2, edge_attr="edge_attr1", node_attr="node_attr1"
|
| 226 |
+
)
|
| 227 |
+
g2_hash_edge2_node2 = nx.weisfeiler_lehman_graph_hash(
|
| 228 |
+
G2, edge_attr="edge_attr2", node_attr="node_attr2"
|
| 229 |
+
)
|
| 230 |
+
|
| 231 |
+
assert g1_hash_edge1_node1 == g2_hash_edge1_node1
|
| 232 |
+
assert g1_hash_edge2_node2 == g2_hash_edge2_node2
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
def test_digest_size():
|
| 236 |
+
"""
|
| 237 |
+
The hash string lengths should be as expected for a variety of graphs and
|
| 238 |
+
digest sizes
|
| 239 |
+
"""
|
| 240 |
+
n, r = 100, 10
|
| 241 |
+
p = 1.0 / r
|
| 242 |
+
for i in range(1, r + 1):
|
| 243 |
+
G = nx.erdos_renyi_graph(n, p * i, seed=1000 + i)
|
| 244 |
+
|
| 245 |
+
h16 = nx.weisfeiler_lehman_graph_hash(G)
|
| 246 |
+
h32 = nx.weisfeiler_lehman_graph_hash(G, digest_size=32)
|
| 247 |
+
|
| 248 |
+
assert h16 != h32
|
| 249 |
+
assert len(h16) == 16 * 2
|
| 250 |
+
assert len(h32) == 32 * 2
|
| 251 |
+
|
| 252 |
+
|
| 253 |
+
# Unit tests for the :func:`~networkx.weisfeiler_lehman_hash_subgraphs` function
|
| 254 |
+
|
| 255 |
+
|
| 256 |
+
def is_subiteration(a, b):
|
| 257 |
+
"""
|
| 258 |
+
returns True if that each hash sequence in 'a' is a prefix for
|
| 259 |
+
the corresponding sequence indexed by the same node in 'b'.
|
| 260 |
+
"""
|
| 261 |
+
return all(b[node][: len(hashes)] == hashes for node, hashes in a.items())
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
def hexdigest_sizes_correct(a, digest_size):
|
| 265 |
+
"""
|
| 266 |
+
returns True if all hex digest sizes are the expected length in a node:subgraph-hashes
|
| 267 |
+
dictionary. Hex digest string length == 2 * bytes digest length since each pair of hex
|
| 268 |
+
digits encodes 1 byte (https://docs.python.org/3/library/hashlib.html)
|
| 269 |
+
"""
|
| 270 |
+
hexdigest_size = digest_size * 2
|
| 271 |
+
list_digest_sizes_correct = lambda l: all(len(x) == hexdigest_size for x in l)
|
| 272 |
+
return all(list_digest_sizes_correct(hashes) for hashes in a.values())
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
def test_empty_graph_subgraph_hash():
|
| 276 |
+
""" "
|
| 277 |
+
empty graphs should give empty dict subgraph hashes regardless of other params
|
| 278 |
+
"""
|
| 279 |
+
G = nx.empty_graph()
|
| 280 |
+
|
| 281 |
+
subgraph_hashes1 = nx.weisfeiler_lehman_subgraph_hashes(G)
|
| 282 |
+
subgraph_hashes2 = nx.weisfeiler_lehman_subgraph_hashes(G, edge_attr="edge_attr")
|
| 283 |
+
subgraph_hashes3 = nx.weisfeiler_lehman_subgraph_hashes(G, node_attr="edge_attr")
|
| 284 |
+
subgraph_hashes4 = nx.weisfeiler_lehman_subgraph_hashes(G, iterations=2)
|
| 285 |
+
subgraph_hashes5 = nx.weisfeiler_lehman_subgraph_hashes(G, digest_size=64)
|
| 286 |
+
|
| 287 |
+
assert subgraph_hashes1 == {}
|
| 288 |
+
assert subgraph_hashes2 == {}
|
| 289 |
+
assert subgraph_hashes3 == {}
|
| 290 |
+
assert subgraph_hashes4 == {}
|
| 291 |
+
assert subgraph_hashes5 == {}
|
| 292 |
+
|
| 293 |
+
|
| 294 |
+
def test_directed_subgraph_hash():
|
| 295 |
+
"""
|
| 296 |
+
A directed graph with no bi-directional edges should yield different subgraph hashes
|
| 297 |
+
to the same graph taken as undirected, if all hashes don't collide.
|
| 298 |
+
"""
|
| 299 |
+
r = 10
|
| 300 |
+
for i in range(r):
|
| 301 |
+
G_directed = nx.gn_graph(10 + r, seed=100 + i)
|
| 302 |
+
G_undirected = nx.to_undirected(G_directed)
|
| 303 |
+
|
| 304 |
+
directed_subgraph_hashes = nx.weisfeiler_lehman_subgraph_hashes(G_directed)
|
| 305 |
+
undirected_subgraph_hashes = nx.weisfeiler_lehman_subgraph_hashes(G_undirected)
|
| 306 |
+
|
| 307 |
+
assert directed_subgraph_hashes != undirected_subgraph_hashes
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
def test_reversed_subgraph_hash():
|
| 311 |
+
"""
|
| 312 |
+
A directed graph with no bi-directional edges should yield different subgraph hashes
|
| 313 |
+
to the same graph taken with edge directions reversed if there are no hash collisions.
|
| 314 |
+
Here we test a cycle graph which is the minimal counterexample
|
| 315 |
+
"""
|
| 316 |
+
G = nx.cycle_graph(5, create_using=nx.DiGraph)
|
| 317 |
+
nx.set_node_attributes(G, {n: str(n) for n in G.nodes()}, name="label")
|
| 318 |
+
|
| 319 |
+
G_reversed = G.reverse()
|
| 320 |
+
|
| 321 |
+
h = nx.weisfeiler_lehman_subgraph_hashes(G, node_attr="label")
|
| 322 |
+
h_reversed = nx.weisfeiler_lehman_subgraph_hashes(G_reversed, node_attr="label")
|
| 323 |
+
|
| 324 |
+
assert h != h_reversed
|
| 325 |
+
|
| 326 |
+
|
| 327 |
+
def test_isomorphic_subgraph_hash():
|
| 328 |
+
"""
|
| 329 |
+
the subgraph hashes should be invariant to node-relabeling when the output is reindexed
|
| 330 |
+
by the same mapping and all hashes don't collide.
|
| 331 |
+
"""
|
| 332 |
+
n, r = 100, 10
|
| 333 |
+
p = 1.0 / r
|
| 334 |
+
for i in range(1, r + 1):
|
| 335 |
+
G1 = nx.erdos_renyi_graph(n, p * i, seed=200 + i)
|
| 336 |
+
G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()})
|
| 337 |
+
|
| 338 |
+
g1_subgraph_hashes = nx.weisfeiler_lehman_subgraph_hashes(G1)
|
| 339 |
+
g2_subgraph_hashes = nx.weisfeiler_lehman_subgraph_hashes(G2)
|
| 340 |
+
|
| 341 |
+
assert g1_subgraph_hashes == {-1 * k: v for k, v in g2_subgraph_hashes.items()}
|
| 342 |
+
|
| 343 |
+
|
| 344 |
+
def test_isomorphic_edge_attr_subgraph_hash():
|
| 345 |
+
"""
|
| 346 |
+
Isomorphic graphs with differing edge attributes should yield different subgraph
|
| 347 |
+
hashes if the 'edge_attr' argument is supplied and populated in the graph, and
|
| 348 |
+
all hashes don't collide.
|
| 349 |
+
The output should still be invariant to node-relabeling
|
| 350 |
+
"""
|
| 351 |
+
n, r = 100, 10
|
| 352 |
+
p = 1.0 / r
|
| 353 |
+
for i in range(1, r + 1):
|
| 354 |
+
G1 = nx.erdos_renyi_graph(n, p * i, seed=300 + i)
|
| 355 |
+
|
| 356 |
+
for a, b in G1.edges:
|
| 357 |
+
G1[a][b]["edge_attr1"] = f"{a}-{b}-1"
|
| 358 |
+
G1[a][b]["edge_attr2"] = f"{a}-{b}-2"
|
| 359 |
+
|
| 360 |
+
g1_hash_with_edge_attr1 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 361 |
+
G1, edge_attr="edge_attr1"
|
| 362 |
+
)
|
| 363 |
+
g1_hash_with_edge_attr2 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 364 |
+
G1, edge_attr="edge_attr2"
|
| 365 |
+
)
|
| 366 |
+
g1_hash_no_edge_attr = nx.weisfeiler_lehman_subgraph_hashes(G1, edge_attr=None)
|
| 367 |
+
|
| 368 |
+
assert g1_hash_with_edge_attr1 != g1_hash_no_edge_attr
|
| 369 |
+
assert g1_hash_with_edge_attr2 != g1_hash_no_edge_attr
|
| 370 |
+
assert g1_hash_with_edge_attr1 != g1_hash_with_edge_attr2
|
| 371 |
+
|
| 372 |
+
G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()})
|
| 373 |
+
|
| 374 |
+
g2_hash_with_edge_attr1 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 375 |
+
G2, edge_attr="edge_attr1"
|
| 376 |
+
)
|
| 377 |
+
g2_hash_with_edge_attr2 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 378 |
+
G2, edge_attr="edge_attr2"
|
| 379 |
+
)
|
| 380 |
+
|
| 381 |
+
assert g1_hash_with_edge_attr1 == {
|
| 382 |
+
-1 * k: v for k, v in g2_hash_with_edge_attr1.items()
|
| 383 |
+
}
|
| 384 |
+
assert g1_hash_with_edge_attr2 == {
|
| 385 |
+
-1 * k: v for k, v in g2_hash_with_edge_attr2.items()
|
| 386 |
+
}
|
| 387 |
+
|
| 388 |
+
|
| 389 |
+
def test_missing_edge_attr_subgraph_hash():
|
| 390 |
+
"""
|
| 391 |
+
If the 'edge_attr' argument is supplied but is missing from an edge in the graph,
|
| 392 |
+
we should raise a KeyError
|
| 393 |
+
"""
|
| 394 |
+
G = nx.Graph()
|
| 395 |
+
G.add_edges_from([(1, 2, {"edge_attr1": "a"}), (1, 3, {})])
|
| 396 |
+
pytest.raises(
|
| 397 |
+
KeyError, nx.weisfeiler_lehman_subgraph_hashes, G, edge_attr="edge_attr1"
|
| 398 |
+
)
|
| 399 |
+
|
| 400 |
+
|
| 401 |
+
def test_isomorphic_node_attr_subgraph_hash():
|
| 402 |
+
"""
|
| 403 |
+
Isomorphic graphs with differing node attributes should yield different subgraph
|
| 404 |
+
hashes if the 'node_attr' argument is supplied and populated in the graph, and
|
| 405 |
+
all hashes don't collide.
|
| 406 |
+
The output should still be invariant to node-relabeling
|
| 407 |
+
"""
|
| 408 |
+
n, r = 100, 10
|
| 409 |
+
p = 1.0 / r
|
| 410 |
+
for i in range(1, r + 1):
|
| 411 |
+
G1 = nx.erdos_renyi_graph(n, p * i, seed=400 + i)
|
| 412 |
+
|
| 413 |
+
for u in G1.nodes():
|
| 414 |
+
G1.nodes[u]["node_attr1"] = f"{u}-1"
|
| 415 |
+
G1.nodes[u]["node_attr2"] = f"{u}-2"
|
| 416 |
+
|
| 417 |
+
g1_hash_with_node_attr1 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 418 |
+
G1, node_attr="node_attr1"
|
| 419 |
+
)
|
| 420 |
+
g1_hash_with_node_attr2 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 421 |
+
G1, node_attr="node_attr2"
|
| 422 |
+
)
|
| 423 |
+
g1_hash_no_node_attr = nx.weisfeiler_lehman_subgraph_hashes(G1, node_attr=None)
|
| 424 |
+
|
| 425 |
+
assert g1_hash_with_node_attr1 != g1_hash_no_node_attr
|
| 426 |
+
assert g1_hash_with_node_attr2 != g1_hash_no_node_attr
|
| 427 |
+
assert g1_hash_with_node_attr1 != g1_hash_with_node_attr2
|
| 428 |
+
|
| 429 |
+
G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()})
|
| 430 |
+
|
| 431 |
+
g2_hash_with_node_attr1 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 432 |
+
G2, node_attr="node_attr1"
|
| 433 |
+
)
|
| 434 |
+
g2_hash_with_node_attr2 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 435 |
+
G2, node_attr="node_attr2"
|
| 436 |
+
)
|
| 437 |
+
|
| 438 |
+
assert g1_hash_with_node_attr1 == {
|
| 439 |
+
-1 * k: v for k, v in g2_hash_with_node_attr1.items()
|
| 440 |
+
}
|
| 441 |
+
assert g1_hash_with_node_attr2 == {
|
| 442 |
+
-1 * k: v for k, v in g2_hash_with_node_attr2.items()
|
| 443 |
+
}
|
| 444 |
+
|
| 445 |
+
|
| 446 |
+
def test_missing_node_attr_subgraph_hash():
|
| 447 |
+
"""
|
| 448 |
+
If the 'node_attr' argument is supplied but is missing from a node in the graph,
|
| 449 |
+
we should raise a KeyError
|
| 450 |
+
"""
|
| 451 |
+
G = nx.Graph()
|
| 452 |
+
G.add_nodes_from([(1, {"node_attr1": "a"}), (2, {})])
|
| 453 |
+
G.add_edges_from([(1, 2), (2, 3), (3, 1), (1, 4)])
|
| 454 |
+
pytest.raises(
|
| 455 |
+
KeyError, nx.weisfeiler_lehman_subgraph_hashes, G, node_attr="node_attr1"
|
| 456 |
+
)
|
| 457 |
+
|
| 458 |
+
|
| 459 |
+
def test_isomorphic_edge_attr_and_node_attr_subgraph_hash():
|
| 460 |
+
"""
|
| 461 |
+
Isomorphic graphs with differing node attributes should yield different subgraph
|
| 462 |
+
hashes if the 'node_attr' and 'edge_attr' argument is supplied and populated in
|
| 463 |
+
the graph, and all hashes don't collide
|
| 464 |
+
The output should still be invariant to node-relabeling
|
| 465 |
+
"""
|
| 466 |
+
n, r = 100, 10
|
| 467 |
+
p = 1.0 / r
|
| 468 |
+
for i in range(1, r + 1):
|
| 469 |
+
G1 = nx.erdos_renyi_graph(n, p * i, seed=500 + i)
|
| 470 |
+
|
| 471 |
+
for u in G1.nodes():
|
| 472 |
+
G1.nodes[u]["node_attr1"] = f"{u}-1"
|
| 473 |
+
G1.nodes[u]["node_attr2"] = f"{u}-2"
|
| 474 |
+
|
| 475 |
+
for a, b in G1.edges:
|
| 476 |
+
G1[a][b]["edge_attr1"] = f"{a}-{b}-1"
|
| 477 |
+
G1[a][b]["edge_attr2"] = f"{a}-{b}-2"
|
| 478 |
+
|
| 479 |
+
g1_hash_edge1_node1 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 480 |
+
G1, edge_attr="edge_attr1", node_attr="node_attr1"
|
| 481 |
+
)
|
| 482 |
+
g1_hash_edge2_node2 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 483 |
+
G1, edge_attr="edge_attr2", node_attr="node_attr2"
|
| 484 |
+
)
|
| 485 |
+
g1_hash_edge1_node2 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 486 |
+
G1, edge_attr="edge_attr1", node_attr="node_attr2"
|
| 487 |
+
)
|
| 488 |
+
g1_hash_no_attr = nx.weisfeiler_lehman_subgraph_hashes(G1)
|
| 489 |
+
|
| 490 |
+
assert g1_hash_edge1_node1 != g1_hash_no_attr
|
| 491 |
+
assert g1_hash_edge2_node2 != g1_hash_no_attr
|
| 492 |
+
assert g1_hash_edge1_node1 != g1_hash_edge2_node2
|
| 493 |
+
assert g1_hash_edge1_node2 != g1_hash_edge2_node2
|
| 494 |
+
assert g1_hash_edge1_node2 != g1_hash_edge1_node1
|
| 495 |
+
|
| 496 |
+
G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()})
|
| 497 |
+
|
| 498 |
+
g2_hash_edge1_node1 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 499 |
+
G2, edge_attr="edge_attr1", node_attr="node_attr1"
|
| 500 |
+
)
|
| 501 |
+
g2_hash_edge2_node2 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 502 |
+
G2, edge_attr="edge_attr2", node_attr="node_attr2"
|
| 503 |
+
)
|
| 504 |
+
|
| 505 |
+
assert g1_hash_edge1_node1 == {
|
| 506 |
+
-1 * k: v for k, v in g2_hash_edge1_node1.items()
|
| 507 |
+
}
|
| 508 |
+
assert g1_hash_edge2_node2 == {
|
| 509 |
+
-1 * k: v for k, v in g2_hash_edge2_node2.items()
|
| 510 |
+
}
|
| 511 |
+
|
| 512 |
+
|
| 513 |
+
def test_iteration_depth():
|
| 514 |
+
"""
|
| 515 |
+
All nodes should have the correct number of subgraph hashes in the output when
|
| 516 |
+
using degree as initial node labels
|
| 517 |
+
Subsequent iteration depths for the same graph should be additive for each node
|
| 518 |
+
"""
|
| 519 |
+
n, r = 100, 10
|
| 520 |
+
p = 1.0 / r
|
| 521 |
+
for i in range(1, r + 1):
|
| 522 |
+
G = nx.erdos_renyi_graph(n, p * i, seed=600 + i)
|
| 523 |
+
|
| 524 |
+
depth3 = nx.weisfeiler_lehman_subgraph_hashes(G, iterations=3)
|
| 525 |
+
depth4 = nx.weisfeiler_lehman_subgraph_hashes(G, iterations=4)
|
| 526 |
+
depth5 = nx.weisfeiler_lehman_subgraph_hashes(G, iterations=5)
|
| 527 |
+
|
| 528 |
+
assert all(len(hashes) == 3 for hashes in depth3.values())
|
| 529 |
+
assert all(len(hashes) == 4 for hashes in depth4.values())
|
| 530 |
+
assert all(len(hashes) == 5 for hashes in depth5.values())
|
| 531 |
+
|
| 532 |
+
assert is_subiteration(depth3, depth4)
|
| 533 |
+
assert is_subiteration(depth4, depth5)
|
| 534 |
+
assert is_subiteration(depth3, depth5)
|
| 535 |
+
|
| 536 |
+
|
| 537 |
+
def test_iteration_depth_edge_attr():
|
| 538 |
+
"""
|
| 539 |
+
All nodes should have the correct number of subgraph hashes in the output when
|
| 540 |
+
setting initial node labels empty and using an edge attribute when aggregating
|
| 541 |
+
neighborhoods.
|
| 542 |
+
Subsequent iteration depths for the same graph should be additive for each node
|
| 543 |
+
"""
|
| 544 |
+
n, r = 100, 10
|
| 545 |
+
p = 1.0 / r
|
| 546 |
+
for i in range(1, r + 1):
|
| 547 |
+
G = nx.erdos_renyi_graph(n, p * i, seed=700 + i)
|
| 548 |
+
|
| 549 |
+
for a, b in G.edges:
|
| 550 |
+
G[a][b]["edge_attr1"] = f"{a}-{b}-1"
|
| 551 |
+
|
| 552 |
+
depth3 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 553 |
+
G, edge_attr="edge_attr1", iterations=3
|
| 554 |
+
)
|
| 555 |
+
depth4 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 556 |
+
G, edge_attr="edge_attr1", iterations=4
|
| 557 |
+
)
|
| 558 |
+
depth5 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 559 |
+
G, edge_attr="edge_attr1", iterations=5
|
| 560 |
+
)
|
| 561 |
+
|
| 562 |
+
assert all(len(hashes) == 3 for hashes in depth3.values())
|
| 563 |
+
assert all(len(hashes) == 4 for hashes in depth4.values())
|
| 564 |
+
assert all(len(hashes) == 5 for hashes in depth5.values())
|
| 565 |
+
|
| 566 |
+
assert is_subiteration(depth3, depth4)
|
| 567 |
+
assert is_subiteration(depth4, depth5)
|
| 568 |
+
assert is_subiteration(depth3, depth5)
|
| 569 |
+
|
| 570 |
+
|
| 571 |
+
def test_iteration_depth_node_attr():
|
| 572 |
+
"""
|
| 573 |
+
All nodes should have the correct number of subgraph hashes in the output when
|
| 574 |
+
setting initial node labels to an attribute.
|
| 575 |
+
Subsequent iteration depths for the same graph should be additive for each node
|
| 576 |
+
"""
|
| 577 |
+
n, r = 100, 10
|
| 578 |
+
p = 1.0 / r
|
| 579 |
+
for i in range(1, r + 1):
|
| 580 |
+
G = nx.erdos_renyi_graph(n, p * i, seed=800 + i)
|
| 581 |
+
|
| 582 |
+
for u in G.nodes():
|
| 583 |
+
G.nodes[u]["node_attr1"] = f"{u}-1"
|
| 584 |
+
|
| 585 |
+
depth3 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 586 |
+
G, node_attr="node_attr1", iterations=3
|
| 587 |
+
)
|
| 588 |
+
depth4 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 589 |
+
G, node_attr="node_attr1", iterations=4
|
| 590 |
+
)
|
| 591 |
+
depth5 = nx.weisfeiler_lehman_subgraph_hashes(
|
| 592 |
+
G, node_attr="node_attr1", iterations=5
|
| 593 |
+
)
|
| 594 |
+
|
| 595 |
+
assert all(len(hashes) == 3 for hashes in depth3.values())
|
| 596 |
+
assert all(len(hashes) == 4 for hashes in depth4.values())
|
| 597 |
+
assert all(len(hashes) == 5 for hashes in depth5.values())
|
| 598 |
+
|
| 599 |
+
assert is_subiteration(depth3, depth4)
|
| 600 |
+
assert is_subiteration(depth4, depth5)
|
| 601 |
+
assert is_subiteration(depth3, depth5)
|
| 602 |
+
|
| 603 |
+
|
| 604 |
+
def test_iteration_depth_node_edge_attr():
    """
    All nodes should have the correct number of subgraph hashes in the output when
    setting initial node labels to an attribute and also using an edge attribute when
    aggregating neighborhoods.
    Subsequent iteration depths for the same graph should be additive for each node
    """
    n, r = 100, 10
    p = 1.0 / r
    for i in range(1, r + 1):
        G = nx.erdos_renyi_graph(n, p * i, seed=900 + i)

        # Give every node and every edge its own initial attribute value.
        for node in G.nodes():
            G.nodes[node]["node_attr1"] = f"{node}-1"
        for u, v in G.edges:
            G[u][v]["edge_attr1"] = f"{u}-{v}-1"

        # Hash the same graph at three successive iteration depths,
        # using both node and edge attributes.
        by_depth = {
            depth: nx.weisfeiler_lehman_subgraph_hashes(
                G, edge_attr="edge_attr1", node_attr="node_attr1", iterations=depth
            )
            for depth in (3, 4, 5)
        }

        # Each node collects exactly one hash per iteration performed.
        for depth, hashes_by_node in by_depth.items():
            assert all(len(hashes) == depth for hashes in hashes_by_node.values())

        # Deeper runs must extend (not alter) the shallower hash lists.
        assert is_subiteration(by_depth[3], by_depth[4])
        assert is_subiteration(by_depth[4], by_depth[5])
        assert is_subiteration(by_depth[3], by_depth[5])
|
| 641 |
+
def test_digest_size_subgraph_hash():
    """
    The hash string lengths should be as expected for a variety of graphs and
    digest sizes
    """
    n, r = 100, 10
    p = 1.0 / r
    for i in range(1, r + 1):
        G = nx.erdos_renyi_graph(n, p * i, seed=1000 + i)

        # Default digest size is 16 bytes; request 32 bytes explicitly too.
        hashes_default = nx.weisfeiler_lehman_subgraph_hashes(G)
        hashes_wide = nx.weisfeiler_lehman_subgraph_hashes(G, digest_size=32)

        # Different digest sizes must not collide into identical output.
        assert hashes_default != hashes_wide

        assert hexdigest_sizes_correct(hashes_default, 16)
        assert hexdigest_sizes_correct(hashes_wide, 32)
| 660 |
+
def test_initial_node_labels_subgraph_hash():
    """
    Including the hashed initial label prepends an extra hash to the lists
    """
    G = nx.path_graph(5)
    # initial node labels: 0--1--1--1--0
    nx.set_node_attributes(G, {i: int(0 < i < 4) for i in G}, "label")

    excluded = nx.weisfeiler_lehman_subgraph_hashes(G, node_attr="label")
    assert all(len(hashes) == 3 for hashes in excluded.values())
    # 3 different 1 hop nhds
    assert len({hashes[0] for hashes in excluded.values()}) == 3

    included = nx.weisfeiler_lehman_subgraph_hashes(
        G, node_attr="label", include_initial_labels=True
    )
    assert all(len(hashes) == 4 for hashes in included.values())
    # 2 different initial labels
    assert len({hashes[0] for hashes in included.values()}) == 2

    # Apart from the prepended initial-label hash, both lists must agree.
    for node in G:
        for left, right in zip(included[node][1:], excluded[node], strict=True):
            assert left == right
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_hierarchy.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def test_hierarchy_exception():
    """flow_hierarchy is undefined for undirected graphs and must raise."""
    undirected = nx.cycle_graph(5)
    with pytest.raises(nx.NetworkXError):
        nx.flow_hierarchy(undirected)
+
|
| 11 |
+
def test_hierarchy_cycle():
    """Every edge of a directed cycle lies on a cycle, so hierarchy is 0."""
    digraph = nx.cycle_graph(5, create_using=nx.DiGraph())
    assert nx.flow_hierarchy(digraph) == 0.0
+
|
| 16 |
+
def test_hierarchy_tree():
    """A directed tree has no cycles at all, so hierarchy is 1."""
    tree = nx.full_rary_tree(2, 16, create_using=nx.DiGraph())
    assert nx.flow_hierarchy(tree) == 1.0
+
|
| 21 |
+
def test_hierarchy_1():
    """Three of the six edges lie on the 1->2->3->1 cycle, so hierarchy is 0.5."""
    G = nx.DiGraph()
    edge_list = [(0, 1), (1, 2), (2, 3), (3, 1), (3, 4), (0, 4)]
    G.add_edges_from(edge_list)
    assert nx.flow_hierarchy(G) == 0.5
|
| 26 |
+
|
| 27 |
+
def test_hierarchy_weight():
    """Weighted hierarchy: 0.9 of the total edge weight 1.2 is acyclic -> 0.75."""
    G = nx.DiGraph()
    # (u, v, weight) triples; the 1->2->3->1 cycle carries weight 0.3 in total.
    weighted_edges = [
        (0, 1, 0.3),
        (1, 2, 0.1),
        (2, 3, 0.1),
        (3, 1, 0.1),
        (3, 4, 0.3),
        (0, 4, 0.3),
    ]
    G.add_weighted_edges_from(weighted_edges)
    assert nx.flow_hierarchy(G, weight="weight") == 0.75
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_hybrid.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import networkx as nx
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def test_2d_grid_graph():
    """Periodic 8x8 grid: (3,3)-connected but not (5,9)-connected."""
    # FC article claims 2d grid graph of size n is (3,3)-connected
    # and (5,9)-connected, but I don't think it is (5,9)-connected
    grid = nx.grid_2d_graph(8, 8, periodic=True)
    assert nx.is_kl_connected(grid, 3, 3)
    assert not nx.is_kl_connected(grid, 5, 9)
    subgraph, matches_graph = nx.kl_connected_subgraph(grid, 5, 9, same_as_graph=True)
    assert not matches_graph
|
| 13 |
+
|
| 14 |
+
def test_small_graph():
    """A triangle is (2,2)-connected and equals its own (2,2)-subgraph."""
    triangle = nx.Graph()
    triangle.add_edges_from([(1, 2), (1, 3), (2, 3)])
    assert nx.is_kl_connected(triangle, 2, 2)
    # Plain call (result unused) exercises the default return path.
    H = nx.kl_connected_subgraph(triangle, 2, 2)
    H, matches_graph = nx.kl_connected_subgraph(
        triangle, 2, 2, low_memory=True, same_as_graph=True
    )
    assert matches_graph
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_isolate.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Unit tests for the :mod:`networkx.algorithms.isolates` module."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def test_is_isolate():
    """Only degree-0 nodes count as isolates."""
    G = nx.Graph()
    G.add_edge(0, 1)
    G.add_node(2)
    # Endpoints of the edge are not isolates; the lone node is.
    assert not nx.is_isolate(G, 0)
    assert not nx.is_isolate(G, 1)
    assert nx.is_isolate(G, 2)
|
| 15 |
+
def test_isolates():
    """``nx.isolates`` yields exactly the degree-0 nodes."""
    G = nx.Graph()
    G.add_edge(0, 1)
    G.add_nodes_from([2, 3])
    assert sorted(nx.isolates(G)) == [2, 3]
+
|
| 22 |
+
def test_number_of_isolates():
    """``nx.number_of_isolates`` counts the degree-0 nodes."""
    G = nx.Graph()
    G.add_edge(0, 1)
    G.add_nodes_from([2, 3])
    assert nx.number_of_isolates(G) == 2
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_link_prediction.py
ADDED
|
@@ -0,0 +1,586 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import math
|
| 2 |
+
from functools import partial
|
| 3 |
+
|
| 4 |
+
import pytest
|
| 5 |
+
|
| 6 |
+
import networkx as nx
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def _test_func(G, ebunch, expected, predict_func, **kwargs):
    """Run *predict_func* on (G, ebunch) and compare its (u, v, score)
    triples against *expected*, ignoring pair orientation and ordering."""

    def by_pair(triples):
        # Normalize each triple to a canonical (min, max) node-pair key.
        return {tuple(sorted([u, v])): score for u, v, score in triples}

    expected_scores = by_pair(expected)
    actual_scores = by_pair(predict_func(G, ebunch, **kwargs))

    assert len(expected_scores) == len(actual_scores)
    for pair, score in expected_scores.items():
        assert score == pytest.approx(actual_scores[pair], abs=1e-7)
| 18 |
+
|
| 19 |
+
class TestResourceAllocationIndex:
    """Tests for ``nx.resource_allocation_index``."""

    @classmethod
    def setup_class(cls):
        # Bind the predictor once; ``cls.test`` compares its output against
        # expected (u, v, score) triples via the shared ``_test_func`` helper.
        cls.func = staticmethod(nx.resource_allocation_index)
        cls.test = partial(_test_func, predict_func=cls.func)

    def test_K5(self):
        G = nx.complete_graph(5)
        # 3 common neighbors, each of degree 4 -> 3 * (1/4) = 0.75
        self.test(G, [(0, 1)], [(0, 1, 0.75)])

    def test_P3(self):
        G = nx.path_graph(3)
        self.test(G, [(0, 2)], [(0, 2, 0.5)])

    def test_S4(self):
        G = nx.star_graph(4)
        self.test(G, [(1, 2)], [(1, 2, 0.25)])

    @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph))
    def test_notimplemented(self, graph_type):
        # Only plain undirected graphs are supported.
        assert pytest.raises(
            nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)]
        )

    def test_node_not_found(self):
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)])

    def test_no_common_neighbor(self):
        G = nx.Graph()
        G.add_nodes_from([0, 1])
        self.test(G, [(0, 1)], [(0, 1, 0)])

    def test_equal_nodes(self):
        # Self-pair: common neighbors of (0, 0) are all neighbors of 0.
        G = nx.complete_graph(4)
        self.test(G, [(0, 0)], [(0, 0, 1)])

    def test_all_nonexistent_edges(self):
        # ebunch=None means "score every non-edge in the graph".
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        self.test(G, None, [(0, 3, 0.5), (1, 2, 0.5), (1, 3, 0)])
|
| 62 |
+
|
| 63 |
+
class TestJaccardCoefficient:
    """Tests for ``nx.jaccard_coefficient``."""

    @classmethod
    def setup_class(cls):
        # Bind the predictor once; ``cls.test`` compares its output against
        # expected (u, v, score) triples via the shared ``_test_func`` helper.
        cls.func = staticmethod(nx.jaccard_coefficient)
        cls.test = partial(_test_func, predict_func=cls.func)

    def test_K5(self):
        G = nx.complete_graph(5)
        # |intersection| / |union| = 3 / 5 = 0.6
        self.test(G, [(0, 1)], [(0, 1, 0.6)])

    def test_P4(self):
        G = nx.path_graph(4)
        self.test(G, [(0, 2)], [(0, 2, 0.5)])

    @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph))
    def test_notimplemented(self, graph_type):
        # Only plain undirected graphs are supported.
        assert pytest.raises(
            nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)]
        )

    def test_node_not_found(self):
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)])

    def test_no_common_neighbor(self):
        G = nx.Graph()
        G.add_edges_from([(0, 1), (2, 3)])
        self.test(G, [(0, 2)], [(0, 2, 0)])

    def test_isolated_nodes(self):
        # Empty union of neighborhoods must not divide by zero.
        G = nx.Graph()
        G.add_nodes_from([0, 1])
        self.test(G, [(0, 1)], [(0, 1, 0)])

    def test_all_nonexistent_edges(self):
        # ebunch=None means "score every non-edge in the graph".
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        self.test(G, None, [(0, 3, 0.5), (1, 2, 0.5), (1, 3, 0)])
|
| 103 |
+
|
| 104 |
+
class TestAdamicAdarIndex:
    """Tests for ``nx.adamic_adar_index``."""

    @classmethod
    def setup_class(cls):
        # Bind the predictor once; ``cls.test`` compares its output against
        # expected (u, v, score) triples via the shared ``_test_func`` helper.
        cls.func = staticmethod(nx.adamic_adar_index)
        cls.test = partial(_test_func, predict_func=cls.func)

    def test_K5(self):
        G = nx.complete_graph(5)
        # 3 common neighbors of degree 4 -> 3 / log(4)
        self.test(G, [(0, 1)], [(0, 1, 3 / math.log(4))])

    def test_P3(self):
        G = nx.path_graph(3)
        self.test(G, [(0, 2)], [(0, 2, 1 / math.log(2))])

    def test_S4(self):
        G = nx.star_graph(4)
        self.test(G, [(1, 2)], [(1, 2, 1 / math.log(4))])

    @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph))
    def test_notimplemented(self, graph_type):
        # Only plain undirected graphs are supported.
        assert pytest.raises(
            nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)]
        )

    def test_node_not_found(self):
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)])

    def test_no_common_neighbor(self):
        G = nx.Graph()
        G.add_nodes_from([0, 1])
        self.test(G, [(0, 1)], [(0, 1, 0)])

    def test_equal_nodes(self):
        # Self-pair: common neighbors of (0, 0) are all neighbors of 0.
        G = nx.complete_graph(4)
        self.test(G, [(0, 0)], [(0, 0, 3 / math.log(3))])

    def test_all_nonexistent_edges(self):
        # ebunch=None means "score every non-edge in the graph".
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        self.test(
            G, None, [(0, 3, 1 / math.log(2)), (1, 2, 1 / math.log(2)), (1, 3, 0)]
        )
|
| 149 |
+
|
| 150 |
+
class TestCommonNeighborCentrality:
    """Tests for ``nx.common_neighbor_centrality`` (CCPA score)."""

    @classmethod
    def setup_class(cls):
        # Bind the predictor once; ``cls.test`` compares its output against
        # expected (u, v, score) triples via the shared ``_test_func`` helper.
        cls.func = staticmethod(nx.common_neighbor_centrality)
        cls.test = partial(_test_func, predict_func=cls.func)

    def test_K5(self):
        G = nx.complete_graph(5)
        # alpha=1 weighs only common neighbors; alpha=0 only path distance.
        self.test(G, [(0, 1)], [(0, 1, 3.0)], alpha=1)
        self.test(G, [(0, 1)], [(0, 1, 5.0)], alpha=0)

    def test_P3(self):
        G = nx.path_graph(3)
        self.test(G, [(0, 2)], [(0, 2, 1.25)], alpha=0.5)

    def test_S4(self):
        G = nx.star_graph(4)
        self.test(G, [(1, 2)], [(1, 2, 1.75)], alpha=0.5)

    @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph))
    def test_notimplemented(self, graph_type):
        # Only plain undirected graphs are supported.
        assert pytest.raises(
            nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)]
        )

    def test_node_u_not_found(self):
        G = nx.Graph()
        G.add_edges_from([(1, 3), (2, 3)])
        assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 1)])

    def test_node_v_not_found(self):
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)])

    def test_no_common_neighbor(self):
        G = nx.Graph()
        G.add_nodes_from([0, 1])
        self.test(G, [(0, 1)], [(0, 1, 0)])

    def test_equal_nodes(self):
        # Self-pairs are rejected: distance term is undefined for u == v.
        G = nx.complete_graph(4)
        assert pytest.raises(nx.NetworkXAlgorithmError, self.test, G, [(0, 0)], [])

    def test_equal_nodes_with_alpha_one_raises_error(self):
        # Rejected even when alpha=1 would make the distance term irrelevant.
        G = nx.complete_graph(4)
        assert pytest.raises(
            nx.NetworkXAlgorithmError, self.test, G, [(0, 0)], [], alpha=1.0
        )

    def test_all_nonexistent_edges(self):
        # ebunch=None means "score every non-edge in the graph".
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        self.test(G, None, [(0, 3, 1.5), (1, 2, 1.5), (1, 3, 2 / 3)], alpha=0.5)
|
| 205 |
+
|
| 206 |
+
class TestPreferentialAttachment:
    """Tests for ``nx.preferential_attachment``."""

    @classmethod
    def setup_class(cls):
        # Bind the predictor once; ``cls.test`` compares its output against
        # expected (u, v, score) triples via the shared ``_test_func`` helper.
        cls.func = staticmethod(nx.preferential_attachment)
        cls.test = partial(_test_func, predict_func=cls.func)

    def test_K5(self):
        G = nx.complete_graph(5)
        # score = deg(u) * deg(v) = 4 * 4
        self.test(G, [(0, 1)], [(0, 1, 16)])

    def test_P3(self):
        G = nx.path_graph(3)
        self.test(G, [(0, 1)], [(0, 1, 2)])

    def test_S4(self):
        G = nx.star_graph(4)
        self.test(G, [(0, 2)], [(0, 2, 4)])

    @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph))
    def test_notimplemented(self, graph_type):
        # Only plain undirected graphs are supported.
        assert pytest.raises(
            nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)]
        )

    def test_node_not_found(self):
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)])

    def test_zero_degrees(self):
        # Product of two zero degrees is zero, not an error.
        G = nx.Graph()
        G.add_nodes_from([0, 1])
        self.test(G, [(0, 1)], [(0, 1, 0)])

    def test_all_nonexistent_edges(self):
        # ebunch=None means "score every non-edge in the graph".
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        self.test(G, None, [(0, 3, 2), (1, 2, 2), (1, 3, 1)])
|
| 245 |
+
|
| 246 |
+
class TestCNSoundarajanHopcroft:
    """Tests for ``nx.cn_soundarajan_hopcroft`` (community-aware common
    neighbors); every node needs a ``community`` attribute."""

    @classmethod
    def setup_class(cls):
        # Bind the predictor once; ``cls.test`` compares its output against
        # expected (u, v, score) triples via the shared ``_test_func`` helper.
        cls.func = staticmethod(nx.cn_soundarajan_hopcroft)
        cls.test = partial(_test_func, predict_func=cls.func, community="community")

    def test_K5(self):
        G = nx.complete_graph(5)
        G.nodes[0]["community"] = 0
        G.nodes[1]["community"] = 0
        G.nodes[2]["community"] = 0
        G.nodes[3]["community"] = 0
        G.nodes[4]["community"] = 1
        # 3 common neighbors + 2 of them in the same community as 0 and 1.
        self.test(G, [(0, 1)], [(0, 1, 5)])

    def test_P3(self):
        G = nx.path_graph(3)
        G.nodes[0]["community"] = 0
        G.nodes[1]["community"] = 1
        G.nodes[2]["community"] = 0
        self.test(G, [(0, 2)], [(0, 2, 1)])

    def test_S4(self):
        G = nx.star_graph(4)
        G.nodes[0]["community"] = 1
        G.nodes[1]["community"] = 1
        G.nodes[2]["community"] = 1
        G.nodes[3]["community"] = 0
        G.nodes[4]["community"] = 0
        self.test(G, [(1, 2)], [(1, 2, 2)])

    @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph))
    def test_notimplemented(self, graph_type):
        # Only plain undirected graphs are supported.
        G = graph_type([(0, 1), (1, 2)])
        G.add_nodes_from([0, 1, 2], community=0)
        assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])

    def test_node_not_found(self):
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        G.nodes[0]["community"] = 0
        G.nodes[1]["community"] = 1
        G.nodes[2]["community"] = 0
        G.nodes[3]["community"] = 0
        assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)])

    def test_no_common_neighbor(self):
        G = nx.Graph()
        G.add_nodes_from([0, 1])
        G.nodes[0]["community"] = 0
        G.nodes[1]["community"] = 0
        self.test(G, [(0, 1)], [(0, 1, 0)])

    def test_equal_nodes(self):
        G = nx.complete_graph(3)
        G.nodes[0]["community"] = 0
        G.nodes[1]["community"] = 0
        G.nodes[2]["community"] = 0
        self.test(G, [(0, 0)], [(0, 0, 4)])

    def test_different_community(self):
        # Endpoints in different communities get no community bonus.
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
        G.nodes[0]["community"] = 0
        G.nodes[1]["community"] = 0
        G.nodes[2]["community"] = 0
        G.nodes[3]["community"] = 1
        self.test(G, [(0, 3)], [(0, 3, 2)])

    def test_no_community_information(self):
        # Missing the community attribute entirely raises on evaluation.
        G = nx.complete_graph(5)
        assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 1)]))

    def test_insufficient_community_information(self):
        # Community missing on a relevant node (2) also raises on evaluation.
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
        G.nodes[0]["community"] = 0
        G.nodes[1]["community"] = 0
        G.nodes[3]["community"] = 0
        assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)]))

    def test_sufficient_community_information(self):
        # Node 0's community is irrelevant to the (1, 4) pair, so no error.
        G = nx.Graph()
        G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)])
        G.nodes[1]["community"] = 0
        G.nodes[2]["community"] = 0
        G.nodes[3]["community"] = 0
        G.nodes[4]["community"] = 0
        self.test(G, [(1, 4)], [(1, 4, 4)])

    def test_custom_community_attribute_name(self):
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
        G.nodes[0]["cmty"] = 0
        G.nodes[1]["cmty"] = 0
        G.nodes[2]["cmty"] = 0
        G.nodes[3]["cmty"] = 1
        self.test(G, [(0, 3)], [(0, 3, 2)], community="cmty")

    def test_all_nonexistent_edges(self):
        # ebunch=None means "score every non-edge in the graph".
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        G.nodes[0]["community"] = 0
        G.nodes[1]["community"] = 1
        G.nodes[2]["community"] = 0
        G.nodes[3]["community"] = 0
        self.test(G, None, [(0, 3, 2), (1, 2, 1), (1, 3, 0)])
|
| 354 |
+
|
| 355 |
+
class TestRAIndexSoundarajanHopcroft:
    """Tests for ``nx.ra_index_soundarajan_hopcroft`` (community-aware
    resource allocation); every node needs a ``community`` attribute."""

    @classmethod
    def setup_class(cls):
        # Bind the predictor once; ``cls.test`` compares its output against
        # expected (u, v, score) triples via the shared ``_test_func`` helper.
        cls.func = staticmethod(nx.ra_index_soundarajan_hopcroft)
        cls.test = partial(_test_func, predict_func=cls.func, community="community")

    def test_K5(self):
        G = nx.complete_graph(5)
        G.nodes[0]["community"] = 0
        G.nodes[1]["community"] = 0
        G.nodes[2]["community"] = 0
        G.nodes[3]["community"] = 0
        G.nodes[4]["community"] = 1
        # Two same-community common neighbors of degree 4 -> 2 * (1/4)
        self.test(G, [(0, 1)], [(0, 1, 0.5)])

    def test_P3(self):
        G = nx.path_graph(3)
        G.nodes[0]["community"] = 0
        G.nodes[1]["community"] = 1
        G.nodes[2]["community"] = 0
        # The only common neighbor is in a different community -> 0.
        self.test(G, [(0, 2)], [(0, 2, 0)])

    def test_S4(self):
        G = nx.star_graph(4)
        G.nodes[0]["community"] = 1
        G.nodes[1]["community"] = 1
        G.nodes[2]["community"] = 1
        G.nodes[3]["community"] = 0
        G.nodes[4]["community"] = 0
        self.test(G, [(1, 2)], [(1, 2, 0.25)])

    @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph))
    def test_notimplemented(self, graph_type):
        # Only plain undirected graphs are supported.
        G = graph_type([(0, 1), (1, 2)])
        G.add_nodes_from([0, 1, 2], community=0)
        assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])

    def test_node_not_found(self):
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        G.nodes[0]["community"] = 0
        G.nodes[1]["community"] = 1
        G.nodes[2]["community"] = 0
        G.nodes[3]["community"] = 0
        assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)])

    def test_no_common_neighbor(self):
        G = nx.Graph()
        G.add_nodes_from([0, 1])
        G.nodes[0]["community"] = 0
        G.nodes[1]["community"] = 0
        self.test(G, [(0, 1)], [(0, 1, 0)])

    def test_equal_nodes(self):
        G = nx.complete_graph(3)
        G.nodes[0]["community"] = 0
        G.nodes[1]["community"] = 0
        G.nodes[2]["community"] = 0
        self.test(G, [(0, 0)], [(0, 0, 1)])

    def test_different_community(self):
        # Endpoints in different communities score zero regardless of neighbors.
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
        G.nodes[0]["community"] = 0
        G.nodes[1]["community"] = 0
        G.nodes[2]["community"] = 0
        G.nodes[3]["community"] = 1
        self.test(G, [(0, 3)], [(0, 3, 0)])

    def test_no_community_information(self):
        # Missing the community attribute entirely raises on evaluation.
        G = nx.complete_graph(5)
        assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 1)]))

    def test_insufficient_community_information(self):
        # Community missing on a relevant node (2) also raises on evaluation.
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
        G.nodes[0]["community"] = 0
        G.nodes[1]["community"] = 0
        G.nodes[3]["community"] = 0
        assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)]))

    def test_sufficient_community_information(self):
        # Node 0's community is irrelevant to the (1, 4) pair, so no error.
        G = nx.Graph()
        G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)])
        G.nodes[1]["community"] = 0
        G.nodes[2]["community"] = 0
        G.nodes[3]["community"] = 0
        G.nodes[4]["community"] = 0
        self.test(G, [(1, 4)], [(1, 4, 1)])

    def test_custom_community_attribute_name(self):
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
        G.nodes[0]["cmty"] = 0
        G.nodes[1]["cmty"] = 0
        G.nodes[2]["cmty"] = 0
        G.nodes[3]["cmty"] = 1
        self.test(G, [(0, 3)], [(0, 3, 0)], community="cmty")

    def test_all_nonexistent_edges(self):
        # ebunch=None means "score every non-edge in the graph".
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (2, 3)])
        G.nodes[0]["community"] = 0
        G.nodes[1]["community"] = 1
        G.nodes[2]["community"] = 0
        G.nodes[3]["community"] = 0
        self.test(G, None, [(0, 3, 0.5), (1, 2, 0), (1, 3, 0)])
| 462 |
+
|
| 463 |
+
|
| 464 |
+
class TestWithinInterCluster:
|
| 465 |
+
@classmethod
|
| 466 |
+
def setup_class(cls):
|
| 467 |
+
cls.delta = 0.001
|
| 468 |
+
cls.func = staticmethod(nx.within_inter_cluster)
|
| 469 |
+
cls.test = partial(
|
| 470 |
+
_test_func, predict_func=cls.func, delta=cls.delta, community="community"
|
| 471 |
+
)
|
| 472 |
+
|
| 473 |
+
def test_K5(self):
|
| 474 |
+
G = nx.complete_graph(5)
|
| 475 |
+
G.nodes[0]["community"] = 0
|
| 476 |
+
G.nodes[1]["community"] = 0
|
| 477 |
+
G.nodes[2]["community"] = 0
|
| 478 |
+
G.nodes[3]["community"] = 0
|
| 479 |
+
G.nodes[4]["community"] = 1
|
| 480 |
+
self.test(G, [(0, 1)], [(0, 1, 2 / (1 + self.delta))])
|
| 481 |
+
|
| 482 |
+
def test_P3(self):
|
| 483 |
+
G = nx.path_graph(3)
|
| 484 |
+
G.nodes[0]["community"] = 0
|
| 485 |
+
G.nodes[1]["community"] = 1
|
| 486 |
+
G.nodes[2]["community"] = 0
|
| 487 |
+
self.test(G, [(0, 2)], [(0, 2, 0)])
|
| 488 |
+
|
| 489 |
+
def test_S4(self):
|
| 490 |
+
G = nx.star_graph(4)
|
| 491 |
+
G.nodes[0]["community"] = 1
|
| 492 |
+
G.nodes[1]["community"] = 1
|
| 493 |
+
G.nodes[2]["community"] = 1
|
| 494 |
+
G.nodes[3]["community"] = 0
|
| 495 |
+
G.nodes[4]["community"] = 0
|
| 496 |
+
self.test(G, [(1, 2)], [(1, 2, 1 / self.delta)])
|
| 497 |
+
|
| 498 |
+
@pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph))
|
| 499 |
+
def test_notimplemented(self, graph_type):
|
| 500 |
+
G = graph_type([(0, 1), (1, 2)])
|
| 501 |
+
G.add_nodes_from([0, 1, 2], community=0)
|
| 502 |
+
assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])
|
| 503 |
+
|
| 504 |
+
def test_node_not_found(self):
|
| 505 |
+
G = nx.Graph()
|
| 506 |
+
G.add_edges_from([(0, 1), (0, 2), (2, 3)])
|
| 507 |
+
G.nodes[0]["community"] = 0
|
| 508 |
+
G.nodes[1]["community"] = 1
|
| 509 |
+
G.nodes[2]["community"] = 0
|
| 510 |
+
G.nodes[3]["community"] = 0
|
| 511 |
+
assert pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)])
|
| 512 |
+
|
| 513 |
+
def test_no_common_neighbor(self):
|
| 514 |
+
G = nx.Graph()
|
| 515 |
+
G.add_nodes_from([0, 1])
|
| 516 |
+
G.nodes[0]["community"] = 0
|
| 517 |
+
G.nodes[1]["community"] = 0
|
| 518 |
+
self.test(G, [(0, 1)], [(0, 1, 0)])
|
| 519 |
+
|
| 520 |
+
def test_equal_nodes(self):
|
| 521 |
+
G = nx.complete_graph(3)
|
| 522 |
+
G.nodes[0]["community"] = 0
|
| 523 |
+
G.nodes[1]["community"] = 0
|
| 524 |
+
G.nodes[2]["community"] = 0
|
| 525 |
+
self.test(G, [(0, 0)], [(0, 0, 2 / self.delta)])
|
| 526 |
+
|
| 527 |
+
def test_different_community(self):
|
| 528 |
+
G = nx.Graph()
|
| 529 |
+
G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
|
| 530 |
+
G.nodes[0]["community"] = 0
|
| 531 |
+
G.nodes[1]["community"] = 0
|
| 532 |
+
G.nodes[2]["community"] = 0
|
| 533 |
+
G.nodes[3]["community"] = 1
|
| 534 |
+
self.test(G, [(0, 3)], [(0, 3, 0)])
|
| 535 |
+
|
| 536 |
+
def test_no_inter_cluster_common_neighbor(self):
|
| 537 |
+
G = nx.complete_graph(4)
|
| 538 |
+
G.nodes[0]["community"] = 0
|
| 539 |
+
G.nodes[1]["community"] = 0
|
| 540 |
+
G.nodes[2]["community"] = 0
|
| 541 |
+
G.nodes[3]["community"] = 0
|
| 542 |
+
self.test(G, [(0, 3)], [(0, 3, 2 / self.delta)])
|
| 543 |
+
|
| 544 |
+
def test_no_community_information(self):
|
| 545 |
+
G = nx.complete_graph(5)
|
| 546 |
+
assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 1)]))
|
| 547 |
+
|
| 548 |
+
def test_insufficient_community_information(self):
|
| 549 |
+
G = nx.Graph()
|
| 550 |
+
G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
|
| 551 |
+
G.nodes[0]["community"] = 0
|
| 552 |
+
G.nodes[1]["community"] = 0
|
| 553 |
+
G.nodes[3]["community"] = 0
|
| 554 |
+
assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)]))
|
| 555 |
+
|
| 556 |
+
def test_sufficient_community_information(self):
|
| 557 |
+
G = nx.Graph()
|
| 558 |
+
G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)])
|
| 559 |
+
G.nodes[1]["community"] = 0
|
| 560 |
+
G.nodes[2]["community"] = 0
|
| 561 |
+
G.nodes[3]["community"] = 0
|
| 562 |
+
G.nodes[4]["community"] = 0
|
| 563 |
+
self.test(G, [(1, 4)], [(1, 4, 2 / self.delta)])
|
| 564 |
+
|
| 565 |
+
def test_invalid_delta(self):
|
| 566 |
+
G = nx.complete_graph(3)
|
| 567 |
+
G.add_nodes_from([0, 1, 2], community=0)
|
| 568 |
+
assert pytest.raises(nx.NetworkXAlgorithmError, self.func, G, [(0, 1)], 0)
|
| 569 |
+
assert pytest.raises(nx.NetworkXAlgorithmError, self.func, G, [(0, 1)], -0.5)
|
| 570 |
+
|
| 571 |
+
def test_custom_community_attribute_name(self):
|
| 572 |
+
G = nx.complete_graph(4)
|
| 573 |
+
G.nodes[0]["cmty"] = 0
|
| 574 |
+
G.nodes[1]["cmty"] = 0
|
| 575 |
+
G.nodes[2]["cmty"] = 0
|
| 576 |
+
G.nodes[3]["cmty"] = 0
|
| 577 |
+
self.test(G, [(0, 3)], [(0, 3, 2 / self.delta)], community="cmty")
|
| 578 |
+
|
| 579 |
+
def test_all_nonexistent_edges(self):
|
| 580 |
+
G = nx.Graph()
|
| 581 |
+
G.add_edges_from([(0, 1), (0, 2), (2, 3)])
|
| 582 |
+
G.nodes[0]["community"] = 0
|
| 583 |
+
G.nodes[1]["community"] = 1
|
| 584 |
+
G.nodes[2]["community"] = 0
|
| 585 |
+
G.nodes[3]["community"] = 0
|
| 586 |
+
self.test(G, None, [(0, 3, 1 / self.delta), (1, 2, 0), (1, 3, 0)])
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_lowest_common_ancestors.py
ADDED
|
@@ -0,0 +1,427 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from itertools import chain, combinations, product
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
tree_all_pairs_lca = nx.tree_all_pairs_lowest_common_ancestor
|
| 8 |
+
all_pairs_lca = nx.all_pairs_lowest_common_ancestor
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def get_pair(dictionary, n1, n2):
    """Look up an LCA result for a node pair regardless of key orientation.

    Returns ``dictionary[n1, n2]`` when that key exists, otherwise
    ``dictionary[n2, n1]`` (raising ``KeyError`` if neither is present).
    """
    key = (n1, n2)
    if key not in dictionary:
        key = (n2, n1)
    return dictionary[key]
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class TestTreeLCA:
|
| 19 |
+
@classmethod
|
| 20 |
+
def setup_class(cls):
|
| 21 |
+
cls.DG = nx.DiGraph()
|
| 22 |
+
edges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)]
|
| 23 |
+
cls.DG.add_edges_from(edges)
|
| 24 |
+
cls.ans = dict(tree_all_pairs_lca(cls.DG, 0))
|
| 25 |
+
gold = {(n, n): n for n in cls.DG}
|
| 26 |
+
gold.update({(0, i): 0 for i in range(1, 7)})
|
| 27 |
+
gold.update(
|
| 28 |
+
{
|
| 29 |
+
(1, 2): 0,
|
| 30 |
+
(1, 3): 1,
|
| 31 |
+
(1, 4): 1,
|
| 32 |
+
(1, 5): 0,
|
| 33 |
+
(1, 6): 0,
|
| 34 |
+
(2, 3): 0,
|
| 35 |
+
(2, 4): 0,
|
| 36 |
+
(2, 5): 2,
|
| 37 |
+
(2, 6): 2,
|
| 38 |
+
(3, 4): 1,
|
| 39 |
+
(3, 5): 0,
|
| 40 |
+
(3, 6): 0,
|
| 41 |
+
(4, 5): 0,
|
| 42 |
+
(4, 6): 0,
|
| 43 |
+
(5, 6): 2,
|
| 44 |
+
}
|
| 45 |
+
)
|
| 46 |
+
|
| 47 |
+
cls.gold = gold
|
| 48 |
+
|
| 49 |
+
@staticmethod
|
| 50 |
+
def assert_has_same_pairs(d1, d2):
|
| 51 |
+
for a, b in ((min(pair), max(pair)) for pair in chain(d1, d2)):
|
| 52 |
+
assert get_pair(d1, a, b) == get_pair(d2, a, b)
|
| 53 |
+
|
| 54 |
+
def test_tree_all_pairs_lca_default_root(self):
|
| 55 |
+
assert dict(tree_all_pairs_lca(self.DG)) == self.ans
|
| 56 |
+
|
| 57 |
+
def test_tree_all_pairs_lca_return_subset(self):
|
| 58 |
+
test_pairs = [(0, 1), (0, 1), (1, 0)]
|
| 59 |
+
ans = dict(tree_all_pairs_lca(self.DG, 0, test_pairs))
|
| 60 |
+
assert (0, 1) in ans and (1, 0) in ans
|
| 61 |
+
assert len(ans) == 2
|
| 62 |
+
|
| 63 |
+
def test_tree_all_pairs_lca(self):
|
| 64 |
+
all_pairs = chain(combinations(self.DG, 2), ((node, node) for node in self.DG))
|
| 65 |
+
|
| 66 |
+
ans = dict(tree_all_pairs_lca(self.DG, 0, all_pairs))
|
| 67 |
+
self.assert_has_same_pairs(ans, self.ans)
|
| 68 |
+
|
| 69 |
+
def test_tree_all_pairs_gold_example(self):
|
| 70 |
+
ans = dict(tree_all_pairs_lca(self.DG))
|
| 71 |
+
self.assert_has_same_pairs(self.gold, ans)
|
| 72 |
+
|
| 73 |
+
def test_tree_all_pairs_lca_invalid_input(self):
|
| 74 |
+
empty_digraph = tree_all_pairs_lca(nx.DiGraph())
|
| 75 |
+
pytest.raises(nx.NetworkXPointlessConcept, list, empty_digraph)
|
| 76 |
+
|
| 77 |
+
bad_pairs_digraph = tree_all_pairs_lca(self.DG, pairs=[(-1, -2)])
|
| 78 |
+
pytest.raises(nx.NodeNotFound, list, bad_pairs_digraph)
|
| 79 |
+
|
| 80 |
+
def test_tree_all_pairs_lca_subtrees(self):
|
| 81 |
+
ans = dict(tree_all_pairs_lca(self.DG, 1))
|
| 82 |
+
gold = {
|
| 83 |
+
pair: lca
|
| 84 |
+
for (pair, lca) in self.gold.items()
|
| 85 |
+
if all(n in (1, 3, 4) for n in pair)
|
| 86 |
+
}
|
| 87 |
+
self.assert_has_same_pairs(gold, ans)
|
| 88 |
+
|
| 89 |
+
def test_tree_all_pairs_lca_disconnected_nodes(self):
|
| 90 |
+
G = nx.DiGraph()
|
| 91 |
+
G.add_node(1)
|
| 92 |
+
assert {(1, 1): 1} == dict(tree_all_pairs_lca(G))
|
| 93 |
+
|
| 94 |
+
G.add_node(0)
|
| 95 |
+
assert {(1, 1): 1} == dict(tree_all_pairs_lca(G, 1))
|
| 96 |
+
assert {(0, 0): 0} == dict(tree_all_pairs_lca(G, 0))
|
| 97 |
+
|
| 98 |
+
pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G))
|
| 99 |
+
|
| 100 |
+
def test_tree_all_pairs_lca_error_if_input_not_tree(self):
|
| 101 |
+
# Cycle
|
| 102 |
+
G = nx.DiGraph([(1, 2), (2, 1)])
|
| 103 |
+
pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G))
|
| 104 |
+
# DAG
|
| 105 |
+
G = nx.DiGraph([(0, 2), (1, 2)])
|
| 106 |
+
pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G))
|
| 107 |
+
|
| 108 |
+
def test_tree_all_pairs_lca_generator(self):
|
| 109 |
+
pairs = iter([(0, 1), (0, 1), (1, 0)])
|
| 110 |
+
some_pairs = dict(tree_all_pairs_lca(self.DG, 0, pairs))
|
| 111 |
+
assert (0, 1) in some_pairs and (1, 0) in some_pairs
|
| 112 |
+
assert len(some_pairs) == 2
|
| 113 |
+
|
| 114 |
+
def test_tree_all_pairs_lca_nonexisting_pairs_exception(self):
|
| 115 |
+
lca = tree_all_pairs_lca(self.DG, 0, [(-1, -1)])
|
| 116 |
+
pytest.raises(nx.NodeNotFound, list, lca)
|
| 117 |
+
# check if node is None
|
| 118 |
+
lca = tree_all_pairs_lca(self.DG, None, [(-1, -1)])
|
| 119 |
+
pytest.raises(nx.NodeNotFound, list, lca)
|
| 120 |
+
|
| 121 |
+
def test_tree_all_pairs_lca_routine_bails_on_DAGs(self):
|
| 122 |
+
G = nx.DiGraph([(3, 4), (5, 4)])
|
| 123 |
+
pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G))
|
| 124 |
+
|
| 125 |
+
def test_tree_all_pairs_lca_not_implemented(self):
|
| 126 |
+
NNI = nx.NetworkXNotImplemented
|
| 127 |
+
G = nx.Graph([(0, 1)])
|
| 128 |
+
with pytest.raises(NNI):
|
| 129 |
+
next(tree_all_pairs_lca(G))
|
| 130 |
+
with pytest.raises(NNI):
|
| 131 |
+
next(all_pairs_lca(G))
|
| 132 |
+
pytest.raises(NNI, nx.lowest_common_ancestor, G, 0, 1)
|
| 133 |
+
G = nx.MultiGraph([(0, 1)])
|
| 134 |
+
with pytest.raises(NNI):
|
| 135 |
+
next(tree_all_pairs_lca(G))
|
| 136 |
+
with pytest.raises(NNI):
|
| 137 |
+
next(all_pairs_lca(G))
|
| 138 |
+
pytest.raises(NNI, nx.lowest_common_ancestor, G, 0, 1)
|
| 139 |
+
|
| 140 |
+
def test_tree_all_pairs_lca_trees_without_LCAs(self):
|
| 141 |
+
G = nx.DiGraph()
|
| 142 |
+
G.add_node(3)
|
| 143 |
+
ans = list(tree_all_pairs_lca(G))
|
| 144 |
+
assert ans == [((3, 3), 3)]
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
class TestMultiTreeLCA(TestTreeLCA):
    """Re-run the TestTreeLCA suite on a MultiDiGraph with parallel edges.

    Parallel edges must not change any lowest-common-ancestor result, so the
    inherited tests are executed unchanged against this fixture.
    """

    @classmethod
    def setup_class(cls):
        cls.DG = nx.MultiDiGraph()
        edges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)]
        cls.DG.add_edges_from(edges)
        # Reference answers are computed while the tree still has simple
        # edges; the duplicates added below must leave them unchanged.
        cls.ans = dict(tree_all_pairs_lca(cls.DG, 0))
        # add multiedges
        cls.DG.add_edges_from(edges)

        # Hand-computed LCA for every unordered pair of the tree
        #   0 -> {1, 2}, 1 -> {3, 4}, 2 -> {5, 6}.
        gold = {(n, n): n for n in cls.DG}
        gold.update({(0, i): 0 for i in range(1, 7)})
        gold.update(
            {
                (1, 2): 0,
                (1, 3): 1,
                (1, 4): 1,
                (1, 5): 0,
                (1, 6): 0,
                (2, 3): 0,
                (2, 4): 0,
                (2, 5): 2,
                (2, 6): 2,
                (3, 4): 1,
                (3, 5): 0,
                (3, 6): 0,
                (4, 5): 0,
                (4, 6): 0,
                (5, 6): 2,
            }
        )

        cls.gold = gold
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
class TestDAGLCA:
|
| 183 |
+
@classmethod
|
| 184 |
+
def setup_class(cls):
|
| 185 |
+
cls.DG = nx.DiGraph()
|
| 186 |
+
nx.add_path(cls.DG, (0, 1, 2, 3))
|
| 187 |
+
nx.add_path(cls.DG, (0, 4, 3))
|
| 188 |
+
nx.add_path(cls.DG, (0, 5, 6, 8, 3))
|
| 189 |
+
nx.add_path(cls.DG, (5, 7, 8))
|
| 190 |
+
cls.DG.add_edge(6, 2)
|
| 191 |
+
cls.DG.add_edge(7, 2)
|
| 192 |
+
|
| 193 |
+
cls.root_distance = nx.shortest_path_length(cls.DG, source=0)
|
| 194 |
+
|
| 195 |
+
cls.gold = {
|
| 196 |
+
(1, 1): 1,
|
| 197 |
+
(1, 2): 1,
|
| 198 |
+
(1, 3): 1,
|
| 199 |
+
(1, 4): 0,
|
| 200 |
+
(1, 5): 0,
|
| 201 |
+
(1, 6): 0,
|
| 202 |
+
(1, 7): 0,
|
| 203 |
+
(1, 8): 0,
|
| 204 |
+
(2, 2): 2,
|
| 205 |
+
(2, 3): 2,
|
| 206 |
+
(2, 4): 0,
|
| 207 |
+
(2, 5): 5,
|
| 208 |
+
(2, 6): 6,
|
| 209 |
+
(2, 7): 7,
|
| 210 |
+
(2, 8): 7,
|
| 211 |
+
(3, 3): 3,
|
| 212 |
+
(3, 4): 4,
|
| 213 |
+
(3, 5): 5,
|
| 214 |
+
(3, 6): 6,
|
| 215 |
+
(3, 7): 7,
|
| 216 |
+
(3, 8): 8,
|
| 217 |
+
(4, 4): 4,
|
| 218 |
+
(4, 5): 0,
|
| 219 |
+
(4, 6): 0,
|
| 220 |
+
(4, 7): 0,
|
| 221 |
+
(4, 8): 0,
|
| 222 |
+
(5, 5): 5,
|
| 223 |
+
(5, 6): 5,
|
| 224 |
+
(5, 7): 5,
|
| 225 |
+
(5, 8): 5,
|
| 226 |
+
(6, 6): 6,
|
| 227 |
+
(6, 7): 5,
|
| 228 |
+
(6, 8): 6,
|
| 229 |
+
(7, 7): 7,
|
| 230 |
+
(7, 8): 7,
|
| 231 |
+
(8, 8): 8,
|
| 232 |
+
}
|
| 233 |
+
cls.gold.update(((0, n), 0) for n in cls.DG)
|
| 234 |
+
|
| 235 |
+
def assert_lca_dicts_same(self, d1, d2, G=None):
|
| 236 |
+
"""Checks if d1 and d2 contain the same pairs and
|
| 237 |
+
have a node at the same distance from root for each.
|
| 238 |
+
If G is None use self.DG."""
|
| 239 |
+
if G is None:
|
| 240 |
+
G = self.DG
|
| 241 |
+
root_distance = self.root_distance
|
| 242 |
+
else:
|
| 243 |
+
roots = [n for n, deg in G.in_degree if deg == 0]
|
| 244 |
+
assert len(roots) == 1
|
| 245 |
+
root_distance = nx.shortest_path_length(G, source=roots[0])
|
| 246 |
+
|
| 247 |
+
for a, b in ((min(pair), max(pair)) for pair in chain(d1, d2)):
|
| 248 |
+
assert (
|
| 249 |
+
root_distance[get_pair(d1, a, b)] == root_distance[get_pair(d2, a, b)]
|
| 250 |
+
)
|
| 251 |
+
|
| 252 |
+
def test_all_pairs_lca_gold_example(self):
|
| 253 |
+
self.assert_lca_dicts_same(dict(all_pairs_lca(self.DG)), self.gold)
|
| 254 |
+
|
| 255 |
+
def test_all_pairs_lca_all_pairs_given(self):
|
| 256 |
+
all_pairs = list(product(self.DG.nodes(), self.DG.nodes()))
|
| 257 |
+
ans = all_pairs_lca(self.DG, pairs=all_pairs)
|
| 258 |
+
self.assert_lca_dicts_same(dict(ans), self.gold)
|
| 259 |
+
|
| 260 |
+
def test_all_pairs_lca_generator(self):
|
| 261 |
+
all_pairs = product(self.DG.nodes(), self.DG.nodes())
|
| 262 |
+
ans = all_pairs_lca(self.DG, pairs=all_pairs)
|
| 263 |
+
self.assert_lca_dicts_same(dict(ans), self.gold)
|
| 264 |
+
|
| 265 |
+
def test_all_pairs_lca_input_graph_with_two_roots(self):
|
| 266 |
+
G = self.DG.copy()
|
| 267 |
+
G.add_edge(9, 10)
|
| 268 |
+
G.add_edge(9, 4)
|
| 269 |
+
gold = self.gold.copy()
|
| 270 |
+
gold[9, 9] = 9
|
| 271 |
+
gold[9, 10] = 9
|
| 272 |
+
gold[9, 4] = 9
|
| 273 |
+
gold[9, 3] = 9
|
| 274 |
+
gold[10, 4] = 9
|
| 275 |
+
gold[10, 3] = 9
|
| 276 |
+
gold[10, 10] = 10
|
| 277 |
+
|
| 278 |
+
testing = dict(all_pairs_lca(G))
|
| 279 |
+
|
| 280 |
+
G.add_edge(-1, 9)
|
| 281 |
+
G.add_edge(-1, 0)
|
| 282 |
+
self.assert_lca_dicts_same(testing, gold, G)
|
| 283 |
+
|
| 284 |
+
def test_all_pairs_lca_nonexisting_pairs_exception(self):
|
| 285 |
+
pytest.raises(nx.NodeNotFound, all_pairs_lca, self.DG, [(-1, -1)])
|
| 286 |
+
|
| 287 |
+
def test_all_pairs_lca_pairs_without_lca(self):
|
| 288 |
+
G = self.DG.copy()
|
| 289 |
+
G.add_node(-1)
|
| 290 |
+
gen = all_pairs_lca(G, [(-1, -1), (-1, 0)])
|
| 291 |
+
assert dict(gen) == {(-1, -1): -1}
|
| 292 |
+
|
| 293 |
+
def test_all_pairs_lca_null_graph(self):
|
| 294 |
+
pytest.raises(nx.NetworkXPointlessConcept, all_pairs_lca, nx.DiGraph())
|
| 295 |
+
|
| 296 |
+
def test_all_pairs_lca_non_dags(self):
|
| 297 |
+
pytest.raises(nx.NetworkXError, all_pairs_lca, nx.DiGraph([(3, 4), (4, 3)]))
|
| 298 |
+
|
| 299 |
+
def test_all_pairs_lca_nonempty_graph_without_lca(self):
|
| 300 |
+
G = nx.DiGraph()
|
| 301 |
+
G.add_node(3)
|
| 302 |
+
ans = list(all_pairs_lca(G))
|
| 303 |
+
assert ans == [((3, 3), 3)]
|
| 304 |
+
|
| 305 |
+
def test_all_pairs_lca_bug_gh4942(self):
|
| 306 |
+
G = nx.DiGraph([(0, 2), (1, 2), (2, 3)])
|
| 307 |
+
ans = list(all_pairs_lca(G))
|
| 308 |
+
assert len(ans) == 9
|
| 309 |
+
|
| 310 |
+
def test_all_pairs_lca_default_kwarg(self):
|
| 311 |
+
G = nx.DiGraph([(0, 1), (2, 1)])
|
| 312 |
+
sentinel = object()
|
| 313 |
+
assert nx.lowest_common_ancestor(G, 0, 2, default=sentinel) is sentinel
|
| 314 |
+
|
| 315 |
+
def test_all_pairs_lca_identity(self):
|
| 316 |
+
G = nx.DiGraph()
|
| 317 |
+
G.add_node(3)
|
| 318 |
+
assert nx.lowest_common_ancestor(G, 3, 3) == 3
|
| 319 |
+
|
| 320 |
+
def test_all_pairs_lca_issue_4574(self):
|
| 321 |
+
G = nx.DiGraph()
|
| 322 |
+
G.add_nodes_from(range(17))
|
| 323 |
+
G.add_edges_from(
|
| 324 |
+
[
|
| 325 |
+
(2, 0),
|
| 326 |
+
(1, 2),
|
| 327 |
+
(3, 2),
|
| 328 |
+
(5, 2),
|
| 329 |
+
(8, 2),
|
| 330 |
+
(11, 2),
|
| 331 |
+
(4, 5),
|
| 332 |
+
(6, 5),
|
| 333 |
+
(7, 8),
|
| 334 |
+
(10, 8),
|
| 335 |
+
(13, 11),
|
| 336 |
+
(14, 11),
|
| 337 |
+
(15, 11),
|
| 338 |
+
(9, 10),
|
| 339 |
+
(12, 13),
|
| 340 |
+
(16, 15),
|
| 341 |
+
]
|
| 342 |
+
)
|
| 343 |
+
|
| 344 |
+
assert nx.lowest_common_ancestor(G, 7, 9) == None
|
| 345 |
+
|
| 346 |
+
def test_all_pairs_lca_one_pair_gh4942(self):
|
| 347 |
+
G = nx.DiGraph()
|
| 348 |
+
# Note: order edge addition is critical to the test
|
| 349 |
+
G.add_edge(0, 1)
|
| 350 |
+
G.add_edge(2, 0)
|
| 351 |
+
G.add_edge(2, 3)
|
| 352 |
+
G.add_edge(4, 0)
|
| 353 |
+
G.add_edge(5, 2)
|
| 354 |
+
|
| 355 |
+
assert nx.lowest_common_ancestor(G, 1, 3) == 2
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
class TestMultiDiGraph_DAGLCA(TestDAGLCA):
|
| 359 |
+
@classmethod
|
| 360 |
+
def setup_class(cls):
|
| 361 |
+
cls.DG = nx.MultiDiGraph()
|
| 362 |
+
nx.add_path(cls.DG, (0, 1, 2, 3))
|
| 363 |
+
# add multiedges
|
| 364 |
+
nx.add_path(cls.DG, (0, 1, 2, 3))
|
| 365 |
+
nx.add_path(cls.DG, (0, 4, 3))
|
| 366 |
+
nx.add_path(cls.DG, (0, 5, 6, 8, 3))
|
| 367 |
+
nx.add_path(cls.DG, (5, 7, 8))
|
| 368 |
+
cls.DG.add_edge(6, 2)
|
| 369 |
+
cls.DG.add_edge(7, 2)
|
| 370 |
+
|
| 371 |
+
cls.root_distance = nx.shortest_path_length(cls.DG, source=0)
|
| 372 |
+
|
| 373 |
+
cls.gold = {
|
| 374 |
+
(1, 1): 1,
|
| 375 |
+
(1, 2): 1,
|
| 376 |
+
(1, 3): 1,
|
| 377 |
+
(1, 4): 0,
|
| 378 |
+
(1, 5): 0,
|
| 379 |
+
(1, 6): 0,
|
| 380 |
+
(1, 7): 0,
|
| 381 |
+
(1, 8): 0,
|
| 382 |
+
(2, 2): 2,
|
| 383 |
+
(2, 3): 2,
|
| 384 |
+
(2, 4): 0,
|
| 385 |
+
(2, 5): 5,
|
| 386 |
+
(2, 6): 6,
|
| 387 |
+
(2, 7): 7,
|
| 388 |
+
(2, 8): 7,
|
| 389 |
+
(3, 3): 3,
|
| 390 |
+
(3, 4): 4,
|
| 391 |
+
(3, 5): 5,
|
| 392 |
+
(3, 6): 6,
|
| 393 |
+
(3, 7): 7,
|
| 394 |
+
(3, 8): 8,
|
| 395 |
+
(4, 4): 4,
|
| 396 |
+
(4, 5): 0,
|
| 397 |
+
(4, 6): 0,
|
| 398 |
+
(4, 7): 0,
|
| 399 |
+
(4, 8): 0,
|
| 400 |
+
(5, 5): 5,
|
| 401 |
+
(5, 6): 5,
|
| 402 |
+
(5, 7): 5,
|
| 403 |
+
(5, 8): 5,
|
| 404 |
+
(6, 6): 6,
|
| 405 |
+
(6, 7): 5,
|
| 406 |
+
(6, 8): 6,
|
| 407 |
+
(7, 7): 7,
|
| 408 |
+
(7, 8): 7,
|
| 409 |
+
(8, 8): 8,
|
| 410 |
+
}
|
| 411 |
+
cls.gold.update(((0, n), 0) for n in cls.DG)
|
| 412 |
+
|
| 413 |
+
|
| 414 |
+
def test_all_pairs_lca_self_ancestors():
    """Self-ancestors should always be the node itself, i.e. lca of (0, 0) is 0.
    See gh-4458."""
    # DAG for test - note order of node/edge addition is relevant
    G = nx.DiGraph()
    G.add_nodes_from(range(5))
    G.add_edges_from([(1, 0), (2, 0), (3, 2), (4, 1), (4, 3)])

    def diagonal_is_identity(graph):
        # Every (u, u) pair reported must name u itself as the LCA.
        return all(
            u == v == lca
            for (u, v), lca in nx.all_pairs_lowest_common_ancestor(graph)
            if u == v
        )

    assert diagonal_is_identity(G)
    MG = nx.MultiDiGraph(G)
    assert diagonal_is_identity(MG)
    MG.add_edges_from([(1, 0), (2, 0)])  # duplicates -> true multi-edges
    assert diagonal_is_identity(MG)
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_max_weight_clique.py
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Maximum weight clique test suite.
|
| 2 |
+
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class TestMaximumWeightClique:
|
| 11 |
+
def test_basic_cases(self):
|
| 12 |
+
def check_basic_case(graph_func, expected_weight, weight_accessor):
|
| 13 |
+
graph = graph_func()
|
| 14 |
+
clique, weight = nx.algorithms.max_weight_clique(graph, weight_accessor)
|
| 15 |
+
assert verify_clique(
|
| 16 |
+
graph, clique, weight, expected_weight, weight_accessor
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
for graph_func, (expected_weight, expected_size) in TEST_CASES.items():
|
| 20 |
+
check_basic_case(graph_func, expected_weight, "weight")
|
| 21 |
+
check_basic_case(graph_func, expected_size, None)
|
| 22 |
+
|
| 23 |
+
def test_key_error(self):
|
| 24 |
+
graph = two_node_graph()
|
| 25 |
+
with pytest.raises(KeyError):
|
| 26 |
+
nx.algorithms.max_weight_clique(graph, "nonexistent-key")
|
| 27 |
+
|
| 28 |
+
def test_error_on_non_integer_weight(self):
|
| 29 |
+
graph = two_node_graph()
|
| 30 |
+
graph.nodes[2]["weight"] = 1.5
|
| 31 |
+
with pytest.raises(ValueError):
|
| 32 |
+
nx.algorithms.max_weight_clique(graph)
|
| 33 |
+
|
| 34 |
+
def test_unaffected_by_self_loops(self):
|
| 35 |
+
graph = two_node_graph()
|
| 36 |
+
graph.add_edge(1, 1)
|
| 37 |
+
graph.add_edge(2, 2)
|
| 38 |
+
clique, weight = nx.algorithms.max_weight_clique(graph, "weight")
|
| 39 |
+
assert verify_clique(graph, clique, weight, 30, "weight")
|
| 40 |
+
graph = three_node_independent_set()
|
| 41 |
+
graph.add_edge(1, 1)
|
| 42 |
+
clique, weight = nx.algorithms.max_weight_clique(graph, "weight")
|
| 43 |
+
assert verify_clique(graph, clique, weight, 20, "weight")
|
| 44 |
+
|
| 45 |
+
def test_30_node_prob(self):
|
| 46 |
+
G = nx.Graph()
|
| 47 |
+
G.add_nodes_from(range(1, 31))
|
| 48 |
+
for i in range(1, 31):
|
| 49 |
+
G.nodes[i]["weight"] = i + 1
|
| 50 |
+
# fmt: off
|
| 51 |
+
G.add_edges_from(
|
| 52 |
+
[
|
| 53 |
+
(1, 12), (1, 13), (1, 15), (1, 16), (1, 18), (1, 19), (1, 20),
|
| 54 |
+
(1, 23), (1, 26), (1, 28), (1, 29), (1, 30), (2, 3), (2, 4),
|
| 55 |
+
(2, 5), (2, 8), (2, 9), (2, 10), (2, 14), (2, 17), (2, 18),
|
| 56 |
+
(2, 21), (2, 22), (2, 23), (2, 27), (3, 9), (3, 15), (3, 21),
|
| 57 |
+
(3, 22), (3, 23), (3, 24), (3, 27), (3, 28), (3, 29), (4, 5),
|
| 58 |
+
(4, 6), (4, 8), (4, 21), (4, 22), (4, 23), (4, 26), (4, 28),
|
| 59 |
+
(4, 30), (5, 6), (5, 8), (5, 9), (5, 13), (5, 14), (5, 15),
|
| 60 |
+
(5, 16), (5, 20), (5, 21), (5, 22), (5, 25), (5, 28), (5, 29),
|
| 61 |
+
(6, 7), (6, 8), (6, 13), (6, 17), (6, 18), (6, 19), (6, 24),
|
| 62 |
+
(6, 26), (6, 27), (6, 28), (6, 29), (7, 12), (7, 14), (7, 15),
|
| 63 |
+
(7, 16), (7, 17), (7, 20), (7, 25), (7, 27), (7, 29), (7, 30),
|
| 64 |
+
(8, 10), (8, 15), (8, 16), (8, 18), (8, 20), (8, 22), (8, 24),
|
| 65 |
+
(8, 26), (8, 27), (8, 28), (8, 30), (9, 11), (9, 12), (9, 13),
|
| 66 |
+
(9, 14), (9, 15), (9, 16), (9, 19), (9, 20), (9, 21), (9, 24),
|
| 67 |
+
(9, 30), (10, 12), (10, 15), (10, 18), (10, 19), (10, 20),
|
| 68 |
+
(10, 22), (10, 23), (10, 24), (10, 26), (10, 27), (10, 29),
|
| 69 |
+
(10, 30), (11, 13), (11, 15), (11, 16), (11, 17), (11, 18),
|
| 70 |
+
(11, 19), (11, 20), (11, 22), (11, 29), (11, 30), (12, 14),
|
| 71 |
+
(12, 17), (12, 18), (12, 19), (12, 20), (12, 21), (12, 23),
|
| 72 |
+
(12, 25), (12, 26), (12, 30), (13, 20), (13, 22), (13, 23),
|
| 73 |
+
(13, 24), (13, 30), (14, 16), (14, 20), (14, 21), (14, 22),
|
| 74 |
+
(14, 23), (14, 25), (14, 26), (14, 27), (14, 29), (14, 30),
|
| 75 |
+
(15, 17), (15, 18), (15, 20), (15, 21), (15, 26), (15, 27),
|
| 76 |
+
(15, 28), (16, 17), (16, 18), (16, 19), (16, 20), (16, 21),
|
| 77 |
+
(16, 29), (16, 30), (17, 18), (17, 21), (17, 22), (17, 25),
|
| 78 |
+
(17, 27), (17, 28), (17, 30), (18, 19), (18, 20), (18, 21),
|
| 79 |
+
(18, 22), (18, 23), (18, 24), (19, 20), (19, 22), (19, 23),
|
| 80 |
+
(19, 24), (19, 25), (19, 27), (19, 30), (20, 21), (20, 23),
|
| 81 |
+
(20, 24), (20, 26), (20, 28), (20, 29), (21, 23), (21, 26),
|
| 82 |
+
(21, 27), (21, 29), (22, 24), (22, 25), (22, 26), (22, 29),
|
| 83 |
+
(23, 25), (23, 30), (24, 25), (24, 26), (25, 27), (25, 29),
|
| 84 |
+
(26, 27), (26, 28), (26, 30), (28, 29), (29, 30),
|
| 85 |
+
]
|
| 86 |
+
)
|
| 87 |
+
# fmt: on
|
| 88 |
+
clique, weight = nx.algorithms.max_weight_clique(G)
|
| 89 |
+
assert verify_clique(G, clique, weight, 111, "weight")
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
# ############################ Utility functions ############################
|
| 93 |
+
def verify_clique(
    graph, clique, reported_clique_weight, expected_clique_weight, weight_accessor
):
    """Return True iff *clique* is a clique of *graph* with the right weight.

    Parameters
    ----------
    graph : graph object queried via ``has_edge`` and ``nodes``
    clique : list of nodes claimed to form a clique
    reported_clique_weight : weight returned by the algorithm under test
    expected_clique_weight : weight the test expects
    weight_accessor : node-attribute key holding each node's weight, or None
        to weigh every node as 1 (so the clique weight is its size)
    """
    # Every pair of distinct members must be adjacent, else it is no clique.
    for node1 in clique:
        for node2 in clique:
            if node1 == node2:
                continue
            if not graph.has_edge(node1, node2):
                return False

    if weight_accessor is None:
        clique_weight = len(clique)
    else:
        # Fix: honor the caller-supplied attribute key instead of the
        # hard-coded "weight" string, so cliques found under a custom key
        # are actually verified against that key.
        clique_weight = sum(graph.nodes[v][weight_accessor] for v in clique)

    # The computed weight must match both the expectation and the report.
    if clique_weight != expected_clique_weight:
        return False
    if clique_weight != reported_clique_weight:
        return False

    return True
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
# ############################ Graph Generation ############################
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def empty_graph():
|
| 120 |
+
return nx.Graph()
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
def one_node_graph():
|
| 124 |
+
graph = nx.Graph()
|
| 125 |
+
graph.add_nodes_from([1])
|
| 126 |
+
graph.nodes[1]["weight"] = 10
|
| 127 |
+
return graph
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def two_node_graph():
    """Two weighted nodes (10 and 20) joined by a single edge."""
    graph = nx.Graph()
    graph.add_edge(1, 2)  # implicitly creates nodes 1 then 2
    for node, weight in ((1, 10), (2, 20)):
        graph.nodes[node]["weight"] = weight
    return graph
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
def three_node_clique():
|
| 140 |
+
graph = nx.Graph()
|
| 141 |
+
graph.add_nodes_from([1, 2, 3])
|
| 142 |
+
graph.add_edges_from([(1, 2), (1, 3), (2, 3)])
|
| 143 |
+
graph.nodes[1]["weight"] = 10
|
| 144 |
+
graph.nodes[2]["weight"] = 20
|
| 145 |
+
graph.nodes[3]["weight"] = 5
|
| 146 |
+
return graph
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
def three_node_independent_set():
|
| 150 |
+
graph = nx.Graph()
|
| 151 |
+
graph.add_nodes_from([1, 2, 3])
|
| 152 |
+
graph.nodes[1]["weight"] = 10
|
| 153 |
+
graph.nodes[2]["weight"] = 20
|
| 154 |
+
graph.nodes[3]["weight"] = 5
|
| 155 |
+
return graph
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
def disconnected():
|
| 159 |
+
graph = nx.Graph()
|
| 160 |
+
graph.add_edges_from([(1, 2), (2, 3), (4, 5), (5, 6)])
|
| 161 |
+
graph.nodes[1]["weight"] = 10
|
| 162 |
+
graph.nodes[2]["weight"] = 20
|
| 163 |
+
graph.nodes[3]["weight"] = 5
|
| 164 |
+
graph.nodes[4]["weight"] = 100
|
| 165 |
+
graph.nodes[5]["weight"] = 200
|
| 166 |
+
graph.nodes[6]["weight"] = 50
|
| 167 |
+
return graph
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
# --------------------------------------------------------------------------
|
| 171 |
+
# Basic tests for all strategies
|
| 172 |
+
# For each basic graph function, specify expected weight of max weight clique
|
| 173 |
+
# and expected size of maximum clique
|
| 174 |
+
TEST_CASES = {
|
| 175 |
+
empty_graph: (0, 0),
|
| 176 |
+
one_node_graph: (10, 1),
|
| 177 |
+
two_node_graph: (30, 2),
|
| 178 |
+
three_node_clique: (35, 3),
|
| 179 |
+
three_node_independent_set: (20, 1),
|
| 180 |
+
disconnected: (300, 2),
|
| 181 |
+
}
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_mis.py
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for maximal (not maximum) independent sets.
|
| 3 |
+
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import random
|
| 7 |
+
|
| 8 |
+
import pytest
|
| 9 |
+
|
| 10 |
+
import networkx as nx
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def test_random_seed():
    # With a fixed seed the randomized selection order is deterministic, so
    # the exact returned set (all nodes of an edgeless graph) can be pinned.
    G = nx.empty_graph(5)
    assert nx.maximal_independent_set(G, seed=1) == [1, 0, 3, 2, 4]
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
@pytest.mark.parametrize("graph", [nx.complete_graph(5), nx.complete_graph(55)])
def test_K5(graph):
    """Maximal independent set for complete graphs"""
    # In a complete graph every pair of nodes is adjacent, so any single
    # seed node already forms a maximal independent set by itself.
    assert all(nx.maximal_independent_set(graph, [n]) == [n] for n in graph)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def test_exceptions():
    """Bad input should raise exception."""
    G = nx.florentine_families_graph()
    # "Smith" is not a node of the Florentine-families graph, so the seed
    # set is rejected as infeasible.
    pytest.raises(nx.NetworkXUnfeasible, nx.maximal_independent_set, G, ["Smith"])
    # Presumably Salviati and Pazzi are adjacent families, so the seed set
    # is not independent -- TODO confirm against the graph's edge list.
    pytest.raises(
        nx.NetworkXUnfeasible, nx.maximal_independent_set, G, ["Salviati", "Pazzi"]
    )
    # MaximalIndependentSet is not implemented for directed graphs
    pytest.raises(nx.NetworkXNotImplemented, nx.maximal_independent_set, nx.DiGraph(G))
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def test_florentine_family():
|
| 36 |
+
G = nx.florentine_families_graph()
|
| 37 |
+
indep = nx.maximal_independent_set(G, ["Medici", "Bischeri"])
|
| 38 |
+
assert set(indep) == {
|
| 39 |
+
"Medici",
|
| 40 |
+
"Bischeri",
|
| 41 |
+
"Castellani",
|
| 42 |
+
"Pazzi",
|
| 43 |
+
"Ginori",
|
| 44 |
+
"Lamberteschi",
|
| 45 |
+
}
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def test_bipartite():
    # K(12, 34): first partition is nodes 0-11, second is nodes 12-45.
    G = nx.complete_bipartite_graph(12, 34)
    # All seeds (4, 5, 9, 10) lie in the first partition; in a complete
    # bipartite graph the maximal independent set containing them is that
    # entire side.
    indep = nx.maximal_independent_set(G, [4, 5, 9, 10])
    assert sorted(indep) == list(range(12))
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def test_random_graphs():
    """Generate 5 random Erdos-Renyi G(n, p) graphs of increasing size
    (1, 101, 201, 301, 401 nodes) with a random edge probability and make
    sure that the returned sets are independent and maximal."""
    for i in range(0, 50, 10):
        # Unseeded: both the graph and the MIS heuristic are randomized.
        G = nx.erdos_renyi_graph(i * 10 + 1, random.random())
        IS = nx.maximal_independent_set(G)
        # Independence: no edge may join two members of the set.
        assert G.subgraph(IS).number_of_edges() == 0
        # Maximality: every node outside the set must neighbor some member,
        # so no further node could be added.
        nbrs_of_MIS = set.union(*(set(G.neighbors(v)) for v in IS))
        assert all(v in nbrs_of_MIS for v in set(G.nodes()).difference(IS))
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_moral.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import networkx as nx
|
| 2 |
+
from networkx.algorithms.moral import moral_graph
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
def test_get_moral_graph():
    """Moralize a small DAG and check the resulting undirected graph.

    Moralization "marries" every pair of parents that share a child, then
    drops all edge directions.
    """
    graph = nx.DiGraph()
    graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7])
    graph.add_edges_from([(1, 2), (3, 2), (4, 1), (4, 5), (6, 5), (7, 5)])
    H = moral_graph(graph)
    assert not H.is_directed()
    # 1 and 3 are co-parents of 2, so moralization must join them.
    assert H.has_edge(1, 3)
    # 4, 6 and 7 are all parents of 5: every pair must be joined.
    assert H.has_edge(4, 6)
    assert H.has_edge(6, 7)
    assert H.has_edge(4, 7)
    # 1 and 5 share no child and no original edge, so they stay unlinked.
    assert not H.has_edge(1, 5)
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_node_classification.py
ADDED
|
@@ -0,0 +1,140 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
pytest.importorskip("numpy")
|
| 4 |
+
pytest.importorskip("scipy")
|
| 5 |
+
|
| 6 |
+
import networkx as nx
|
| 7 |
+
from networkx.algorithms import node_classification
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class TestHarmonicFunction:
    """Tests for ``node_classification.harmonic_function``."""

    def test_path_graph(self):
        graph = nx.path_graph(4)
        graph.nodes[0]["label"] = "A"
        graph.nodes[3]["label"] = "B"
        pred = node_classification.harmonic_function(graph, label_name="label")
        assert [pred[i] for i in range(4)] == ["A", "A", "B", "B"]

    def test_no_labels(self):
        graph = nx.path_graph(4)
        with pytest.raises(nx.NetworkXError):
            node_classification.harmonic_function(graph)

    def test_no_nodes(self):
        with pytest.raises(nx.NetworkXError):
            node_classification.harmonic_function(nx.Graph())

    def test_no_edges(self):
        graph = nx.Graph()
        graph.add_nodes_from([1, 2])
        with pytest.raises(nx.NetworkXError):
            node_classification.harmonic_function(graph)

    def test_digraph(self):
        graph = nx.DiGraph([(0, 1), (1, 2), (2, 3)])
        graph.nodes[0]["label"] = "A"
        graph.nodes[3]["label"] = "B"
        with pytest.raises(nx.NetworkXNotImplemented):
            node_classification.harmonic_function(graph)

    def test_one_labeled_node(self):
        graph = nx.path_graph(4)
        graph.nodes[0]["label"] = "A"
        pred = node_classification.harmonic_function(graph, label_name="label")
        assert [pred[i] for i in range(4)] == ["A"] * 4

    def test_nodes_all_labeled(self):
        graph = nx.karate_club_graph()
        pred = node_classification.harmonic_function(graph, label_name="club")
        for node in range(len(graph)):
            assert pred[node] == graph.nodes[node]["club"]

    def test_labeled_nodes_are_not_changed(self):
        graph = nx.karate_club_graph()
        removed = {0, 1, 2, 3, 4, 5, 6, 7}
        for node in removed:
            del graph.nodes[node]["club"]
        pred = node_classification.harmonic_function(graph, label_name="club")
        # Labels that were kept must survive the propagation unchanged.
        for node in set(range(len(graph))) - removed:
            assert pred[node] == graph.nodes[node]["club"]
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
class TestLocalAndGlobalConsistency:
    """Tests for ``node_classification.local_and_global_consistency``."""

    def test_path_graph(self):
        G = nx.path_graph(4)
        label_name = "label"
        G.nodes[0][label_name] = "A"
        G.nodes[3][label_name] = "B"
        predicted = node_classification.local_and_global_consistency(
            G, label_name=label_name
        )
        assert predicted[0] == "A"
        assert predicted[1] == "A"
        assert predicted[2] == "B"
        assert predicted[3] == "B"

    def test_no_labels(self):
        with pytest.raises(nx.NetworkXError):
            G = nx.path_graph(4)
            node_classification.local_and_global_consistency(G)

    def test_no_nodes(self):
        with pytest.raises(nx.NetworkXError):
            G = nx.Graph()
            node_classification.local_and_global_consistency(G)

    def test_no_edges(self):
        with pytest.raises(nx.NetworkXError):
            G = nx.Graph()
            G.add_node(1)
            G.add_node(2)
            node_classification.local_and_global_consistency(G)

    def test_digraph(self):
        with pytest.raises(nx.NetworkXNotImplemented):
            G = nx.DiGraph()
            G.add_edge(0, 1)
            G.add_edge(1, 2)
            G.add_edge(2, 3)
            label_name = "label"
            G.nodes[0][label_name] = "A"
            G.nodes[3][label_name] = "B"
            # BUG FIX: this previously called harmonic_function (copy-paste
            # from TestHarmonicFunction), so the digraph rejection of
            # local_and_global_consistency was never actually tested.
            node_classification.local_and_global_consistency(G)

    def test_one_labeled_node(self):
        G = nx.path_graph(4)
        label_name = "label"
        G.nodes[0][label_name] = "A"
        predicted = node_classification.local_and_global_consistency(
            G, label_name=label_name
        )
        assert predicted[0] == "A"
        assert predicted[1] == "A"
        assert predicted[2] == "A"
        assert predicted[3] == "A"

    def test_nodes_all_labeled(self):
        G = nx.karate_club_graph()
        label_name = "club"
        # alpha=0 means labeled nodes keep their labels exactly.
        predicted = node_classification.local_and_global_consistency(
            G, alpha=0, label_name=label_name
        )
        for i in range(len(G)):
            assert predicted[i] == G.nodes[i][label_name]
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_non_randomness.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
np = pytest.importorskip("numpy")
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
@pytest.mark.parametrize(
    "k, weight, expected",
    [
        (None, None, 7.21),  # infers 3 communities
        (2, None, 11.7),
        (None, "weight", 25.45),
        (2, "weight", 38.8),
    ],
)
def test_non_randomness(k, weight, expected):
    """Non-randomness of the karate club graph matches the published values."""
    G = nx.karate_club_graph()
    observed = nx.non_randomness(G, k, weight)[0]
    np.testing.assert_almost_equal(observed, expected, decimal=2)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def test_non_connected():
    """A disconnected graph must be rejected."""
    G = nx.Graph([(1, 2)])
    G.add_node(3)
    with pytest.raises(nx.NetworkXException):
        nx.non_randomness(G)
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def test_self_loops():
    """A graph containing a self-loop must be rejected."""
    G = nx.Graph([(1, 2), (1, 1)])
    with pytest.raises(nx.NetworkXError):
        nx.non_randomness(G)
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_planar_drawing.py
ADDED
|
@@ -0,0 +1,274 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import math
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.algorithms.planar_drawing import triangulate_embedding
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def test_graph1():
    """A small planar graph given by its rotation system."""
    check_embedding_data({0: [1, 2, 3], 1: [2, 0], 2: [3, 0, 1], 3: [2, 0]})
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def test_graph2():
    """A larger planar graph given by its rotation system."""
    data = {
        0: [8, 6], 1: [2, 6, 9], 2: [8, 1, 7, 9, 6, 4], 3: [9], 4: [2],
        5: [6, 8], 6: [9, 1, 0, 5, 2], 7: [9, 2], 8: [0, 2, 5],
        9: [1, 6, 2, 7, 3],
    }
    check_embedding_data(data)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def test_circle_graph():
    """A 10-cycle; each rotation lists the two ring neighbors."""
    data = {
        0: [1, 9], 1: [0, 2], 2: [1, 3], 3: [2, 4], 4: [3, 5],
        5: [4, 6], 6: [5, 7], 7: [6, 8], 8: [7, 9], 9: [8, 0],
    }
    check_embedding_data(data)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def test_grid_graph():
    """A 3x3 grid graph with tuple-valued nodes."""
    data = {
        (0, 1): [(0, 0), (1, 1), (0, 2)],
        (1, 2): [(1, 1), (2, 2), (0, 2)],
        (0, 0): [(0, 1), (1, 0)],
        (2, 1): [(2, 0), (2, 2), (1, 1)],
        (1, 1): [(2, 1), (1, 2), (0, 1), (1, 0)],
        (2, 0): [(1, 0), (2, 1)],
        (2, 2): [(1, 2), (2, 1)],
        (1, 0): [(0, 0), (2, 0), (1, 1)],
        (0, 2): [(1, 2), (0, 1)],
    }
    check_embedding_data(data)
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def test_one_node_graph():
    """Degenerate case: a single isolated node."""
    check_embedding_data({0: []})
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
def test_two_node_graph():
    """Degenerate case: a single edge."""
    check_embedding_data({0: [1], 1: [0]})
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def test_three_node_graph():
    """Degenerate case: a triangle."""
    check_embedding_data({0: [1, 2], 1: [0, 2], 2: [0, 1]})
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def test_multiple_component_graph1():
    """Two isolated nodes (two trivial components)."""
    check_embedding_data({0: [], 1: []})
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def test_multiple_component_graph2():
    """Two disjoint triangles."""
    data = {0: [1, 2], 1: [0, 2], 2: [0, 1], 3: [4, 5], 4: [3, 5], 5: [3, 4]}
    check_embedding_data(data)
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def test_invalid_half_edge():
    """An inconsistent rotation system (K4 data) must be rejected."""
    with pytest.raises(nx.NetworkXException):
        embedding = nx.PlanarEmbedding()
        embedding.set_data({1: [2, 3, 4], 2: [1, 3, 4], 3: [1, 2, 4], 4: [1, 2, 3]})
        nx.combinatorial_embedding_to_pos(embedding)
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def test_triangulate_embedding1():
    """Triangulating a single-node embedding leaves it unchanged."""
    embedding = nx.PlanarEmbedding()
    embedding.add_node(1)
    check_triangulation(embedding, {1: []})
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def test_triangulate_embedding2():
    """Triangulating a single-edge embedding leaves it unchanged."""
    embedding = nx.PlanarEmbedding()
    embedding.connect_components(1, 2)
    check_triangulation(embedding, {1: [2], 2: [1]})
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def check_triangulation(embedding, expected_embedding):
    """Assert that triangulation (full and internal) yields the expected data."""
    for fully_triangulate in (True, False):
        result, _ = triangulate_embedding(embedding, fully_triangulate)
        assert result.get_data() == expected_embedding, "Expected embedding incorrect"
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def check_embedding_data(embedding_data):
    """Checks that the planar embedding of the input is correct"""
    embedding = nx.PlanarEmbedding()
    embedding.set_data(embedding_data)
    # Verify the drawing under both triangulation strategies.
    for internal_only, kind in ((False, "fully"), (True, "internal")):
        pos = nx.combinatorial_embedding_to_pos(embedding, internal_only)
        msg = f"Planar drawing does not conform to the embedding ({kind} triangulation)"
        assert planar_drawing_conforms_to_embedding(embedding, pos), msg
        check_edge_intersections(embedding, pos)
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
def is_close(a, b, rel_tol=1e-09, abs_tol=0.0):
    """Return True if floats *a* and *b* are approximately equal.

    Delegates to :func:`math.isclose` (Python >= 3.5), which implements
    exactly the ``abs(a - b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)``
    test that this helper previously hand-rolled.
    """
    return math.isclose(a, b, rel_tol=rel_tol, abs_tol=abs_tol)
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
def point_in_between(a, b, p):
    """Return True if point *p* lies on the straight segment from *a* to *b*."""
    ax, ay = a
    bx, by = b
    px, py = p
    seg_len = math.sqrt((ax - bx) ** 2 + (ay - by) ** 2)
    a_to_p = math.sqrt((ax - px) ** 2 + (ay - py) ** 2)
    p_to_b = math.sqrt((bx - px) ** 2 + (by - py) ** 2)
    # p is on the segment iff the detour through p adds no length.
    return is_close(a_to_p + p_to_b, seg_len)
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
def check_edge_intersections(G, pos):
    """Check all edges in G for intersections.

    Raises an exception if an intersection is found, or if a node lies on
    an edge connecting two other nodes.

    Parameters
    ----------
    G : NetworkX graph
    pos : dict
        Maps every node to a tuple (x, y) representing its position

    """
    for a, b in G.edges():
        for c, d in G.edges():
            # Only compare edge pairs with four distinct endpoints.
            if a != c and b != d and b != c and a != d:
                x1, y1 = pos[a]
                x2, y2 = pos[b]
                x3, y3 = pos[c]
                x4, y4 = pos[d]
                determinant = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4)
                if determinant != 0:  # the lines are not parallel
                    # Intersection point of the two infinite lines, see:
                    # https://en.wikipedia.org/wiki/Line%E2%80%93line_intersection
                    # BUG FIX: the whole numerator must be divided by the
                    # determinant; operator precedence previously divided
                    # only the last product.
                    px = (
                        (x1 * y2 - y1 * x2) * (x3 - x4)
                        - (x1 - x2) * (x3 * y4 - y3 * x4)
                    ) / determinant
                    py = (
                        (x1 * y2 - y1 * x2) * (y3 - y4)
                        - (y1 - y2) * (x3 * y4 - y3 * x4)
                    ) / determinant

                    # Check if intersection lies between the points
                    if point_in_between(pos[a], pos[b], (px, py)) and point_in_between(
                        pos[c], pos[d], (px, py)
                    ):
                        msg = f"There is an intersection at {px},{py}"
                        raise nx.NetworkXException(msg)

                # Check overlap
                msg = "A node lies on a edge connecting two other nodes"
                if (
                    point_in_between(pos[a], pos[b], pos[c])
                    or point_in_between(pos[a], pos[b], pos[d])
                    or point_in_between(pos[c], pos[d], pos[a])
                    or point_in_between(pos[c], pos[d], pos[b])
                ):
                    raise nx.NetworkXException(msg)
    # No edge intersection found
| 199 |
+
|
| 200 |
+
class Vector:
    """Compare vectors by their angle without loss of precision

    All vectors in direction [0, 1] are the smallest.
    The vectors grow in clockwise direction.
    """

    __slots__ = ["x", "y", "node", "quadrant"]

    def __init__(self, x, y, node):
        self.x = x
        self.y = y
        self.node = node
        # Quadrants are numbered clockwise starting just after "north";
        # ordering by (quadrant, cross-product sign) sorts by angle exactly.
        if self.x >= 0 and self.y > 0:
            self.quadrant = 1
        elif self.x > 0 and self.y <= 0:
            self.quadrant = 2
        elif self.x <= 0 and self.y < 0:
            self.quadrant = 3
        else:
            self.quadrant = 4

    def __eq__(self, other):
        # Same angle: same quadrant and zero cross product.
        return self.quadrant == other.quadrant and self.x * other.y == self.y * other.x

    def __lt__(self, other):
        if self.quadrant < other.quadrant:
            return True
        elif self.quadrant > other.quadrant:
            return False
        else:
            # Same quadrant: compare via cross product (exact for ints).
            return self.x * other.y < self.y * other.x

    def __ne__(self, other):
        # BUG FIX: was ``return self != other``, which calls __ne__ on
        # itself and recurses infinitely (RecursionError).
        return not self == other

    def __le__(self, other):
        return not other < self

    def __gt__(self, other):
        return other < self

    def __ge__(self, other):
        return not self < other
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
def planar_drawing_conforms_to_embedding(embedding, pos):
    """Checks if pos conforms to the planar embedding

    Returns true iff the neighbors are actually oriented in the orientation
    specified of the embedding
    """
    for v in embedding:
        nbr_vectors = []
        v_pos = pos[v]
        # Build a direction vector from v to each embedded neighbor.
        for nbr in embedding[v]:
            new_vector = Vector(pos[nbr][0] - v_pos[0], pos[nbr][1] - v_pos[1], nbr)
            nbr_vectors.append(new_vector)
        # Sort neighbors according to their phi angle
        nbr_vectors.sort()
        for idx, nbr_vector in enumerate(nbr_vectors):
            # In the angular sort, the next entry is the clockwise successor
            # and the previous entry the counterclockwise one (cyclically).
            cw_vector = nbr_vectors[(idx + 1) % len(nbr_vectors)]
            ccw_vector = nbr_vectors[idx - 1]
            # The geometric order must match the embedding's stored rotation.
            if (
                embedding[v][nbr_vector.node]["cw"] != cw_vector.node
                or embedding[v][nbr_vector.node]["ccw"] != ccw_vector.node
            ):
                return False
            # Distinct neighbors at an identical angle mean the drawn edges
            # overlap, which is not a valid planar drawing.
            if cw_vector.node != nbr_vector.node and cw_vector == nbr_vector:
                # Lines overlap
                return False
            if ccw_vector.node != nbr_vector.node and ccw_vector == nbr_vector:
                # Lines overlap
                return False
    return True
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_planarity.py
ADDED
|
@@ -0,0 +1,535 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.algorithms.planarity import (
|
| 5 |
+
check_planarity_recursive,
|
| 6 |
+
get_counterexample,
|
| 7 |
+
get_counterexample_recursive,
|
| 8 |
+
)
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class TestLRPlanarity:
    """Unit tests for the :mod:`networkx.algorithms.planarity` module.

    Tests three things:
    1. Check that the result is correct
        (returns planar if and only if the graph is actually planar)
    2. In case a counter example is returned: Check if it is correct
    3. In case an embedding is returned: Check if its actually an embedding
    """

    @staticmethod
    def check_graph(G, is_planar=None):
        """Raises an exception if the lr_planarity check returns a wrong result

        Parameters
        ----------
        G : NetworkX graph
        is_planar : bool
            The expected result of the planarity check.
            If set to None only counter example or embedding are verified.

        """
        # Run both the iterative and the recursive implementation.
        is_planar_lr, result = nx.check_planarity(G, True)
        is_planar_lr_rec, result_rec = check_planarity_recursive(G, True)

        if is_planar is not None:
            if is_planar:
                msg = "Wrong planarity check result. Should be planar."
            else:
                msg = "Wrong planarity check result. Should be non-planar."
            assert is_planar == is_planar_lr, msg
            assert is_planar == is_planar_lr_rec, msg

        if is_planar_lr:
            # Planar: both returned embeddings must be valid.
            check_embedding(G, result)
            check_embedding(G, result_rec)
        else:
            # Non-planar: both counterexamples must be valid.
            check_counterexample(G, result)
            check_counterexample(G, result_rec)

    def test_simple_planar_graph(self):
        edges = [
            (1, 2), (2, 3), (3, 4), (4, 6), (6, 7), (7, 1),
            (1, 5), (5, 2), (2, 4), (4, 5), (5, 7),
        ]
        self.check_graph(nx.Graph(edges), is_planar=True)

    def test_planar_with_selfloop(self):
        edges = [
            (1, 1), (2, 2), (3, 3), (4, 4), (5, 5),
            (1, 2), (1, 3), (1, 5), (2, 5), (2, 4), (3, 4), (3, 5), (4, 5),
        ]
        self.check_graph(nx.Graph(edges), is_planar=True)

    def test_k3_3(self):
        self.check_graph(nx.complete_bipartite_graph(3, 3), is_planar=False)

    def test_k5(self):
        self.check_graph(nx.complete_graph(5), is_planar=False)

    def test_multiple_components_planar(self):
        edges = [(1, 2), (2, 3), (3, 1), (4, 5), (5, 6), (6, 4)]
        self.check_graph(nx.Graph(edges), is_planar=True)

    def test_multiple_components_non_planar(self):
        G = nx.complete_graph(5)
        # Adding a planar component to the non-planar component
        # keeps G non-planar.
        G.add_edges_from([(6, 7), (7, 8), (8, 6)])
        self.check_graph(G, is_planar=False)

    def test_non_planar_with_selfloop(self):
        G = nx.complete_graph(5)
        # Add a self-loop at every node.
        G.add_edges_from((i, i) for i in range(5))
        self.check_graph(G, is_planar=False)

    def test_non_planar1(self):
        # A non-planar graph with no subgraph directly isomorphic to K5 or K3_3.
        edges = [
            (1, 5), (1, 6), (1, 7), (2, 6), (2, 3),
            (3, 5), (3, 7), (4, 5), (4, 6), (4, 7),
        ]
        self.check_graph(nx.Graph(edges), is_planar=False)

    def test_loop(self):
        # A graph with a self-loop.
        self.check_graph(nx.Graph([(1, 2), (2, 2)]), is_planar=True)

    def test_comp(self):
        # A multiple-component graph after removing one edge.
        G = nx.Graph([(1, 2), (3, 4)])
        G.remove_edge(1, 2)
        self.check_graph(G, is_planar=True)

    def test_goldner_harary(self):
        # The Goldner-Harary graph (a maximal planar graph).
        edges = [
            (1, 2), (1, 3), (1, 4), (1, 5), (1, 7), (1, 8), (1, 10), (1, 11),
            (2, 3), (2, 4), (2, 6), (2, 7), (2, 9), (2, 10), (2, 11),
            (3, 4), (4, 5), (4, 6), (4, 7), (5, 7), (6, 7),
            (7, 8), (7, 9), (7, 10), (8, 10), (9, 10), (10, 11),
        ]
        self.check_graph(nx.Graph(edges), is_planar=True)

    def test_planar_multigraph(self):
        G = nx.MultiGraph([(1, 2), (1, 2), (1, 2), (1, 2), (2, 3), (3, 1)])
        self.check_graph(G, is_planar=True)

    def test_non_planar_multigraph(self):
        G = nx.MultiGraph(nx.complete_graph(5))
        G.add_edges_from([(1, 2)] * 5)
        self.check_graph(G, is_planar=False)

    def test_planar_digraph(self):
        G = nx.DiGraph([(1, 2), (2, 3), (2, 4), (4, 1), (4, 2), (1, 4), (3, 2)])
        self.check_graph(G, is_planar=True)

    def test_non_planar_digraph(self):
        G = nx.DiGraph(nx.complete_graph(5))
        G.remove_edge(1, 2)
        G.remove_edge(4, 1)
        self.check_graph(G, is_planar=False)

    def test_single_component(self):
        # A graph with only a single node.
        G = nx.Graph()
        G.add_node(1)
        self.check_graph(G, is_planar=True)

    def test_graph1(self):
        edges = [
            (3, 10), (2, 13), (1, 13), (7, 11), (0, 8),
            (8, 13), (0, 2), (0, 7), (0, 10), (1, 7),
        ]
        self.check_graph(nx.Graph(edges), is_planar=True)

    def test_graph2(self):
        edges = [
            (1, 2), (4, 13), (0, 13), (4, 5), (7, 10), (1, 7), (0, 3), (2, 6),
            (5, 6), (7, 13), (4, 8), (0, 8), (0, 9), (2, 13), (6, 7), (3, 6),
            (2, 8),
        ]
        self.check_graph(nx.Graph(edges), is_planar=False)

    def test_graph3(self):
        edges = [
            (0, 7), (3, 11), (3, 4), (8, 9), (4, 11), (1, 7), (1, 13),
            (1, 11), (3, 5), (5, 7), (1, 3), (0, 4), (5, 11), (5, 13),
        ]
        self.check_graph(nx.Graph(edges), is_planar=False)

    def test_counterexample_planar(self):
        with pytest.raises(nx.NetworkXException):
            # Try to get a counterexample of a planar graph.
            G = nx.Graph()
            G.add_node(1)
            get_counterexample(G)

    def test_counterexample_planar_recursive(self):
        with pytest.raises(nx.NetworkXException):
            # Try to get a counterexample of a planar graph.
            G = nx.Graph()
            G.add_node(1)
            get_counterexample_recursive(G)

    def test_edge_removal_from_planar_embedding(self):
        # PlanarEmbedding.check_structure() must succeed after edge removal.
        edges = ((0, 1), (1, 2), (2, 3), (3, 4), (4, 0), (0, 2), (0, 3))
        cert, P = nx.check_planarity(nx.Graph(edges))
        assert cert is True
        P.remove_edge(0, 2)
        self.check_graph(P, is_planar=True)
        P.add_half_edge_ccw(1, 3, 2)
        P.add_half_edge_cw(3, 1, 2)
        self.check_graph(P, is_planar=True)
        P.remove_edges_from(((0, 3), (1, 3)))
        self.check_graph(P, is_planar=True)
|
| 293 |
+
|
| 294 |
+
|
| 295 |
+
def check_embedding(G, embedding):
|
| 296 |
+
"""Raises an exception if the combinatorial embedding is not correct
|
| 297 |
+
|
| 298 |
+
Parameters
|
| 299 |
+
----------
|
| 300 |
+
G : NetworkX graph
|
| 301 |
+
embedding : a dict mapping nodes to a list of edges
|
| 302 |
+
This specifies the ordering of the outgoing edges from a node for
|
| 303 |
+
a combinatorial embedding
|
| 304 |
+
|
| 305 |
+
Notes
|
| 306 |
+
-----
|
| 307 |
+
Checks the following things:
|
| 308 |
+
- The type of the embedding is correct
|
| 309 |
+
- The nodes and edges match the original graph
|
| 310 |
+
- Every half edge has its matching opposite half edge
|
| 311 |
+
- No intersections of edges (checked by Euler's formula)
|
| 312 |
+
"""
|
| 313 |
+
|
| 314 |
+
if not isinstance(embedding, nx.PlanarEmbedding):
|
| 315 |
+
raise nx.NetworkXException("Bad embedding. Not of type nx.PlanarEmbedding")
|
| 316 |
+
|
| 317 |
+
# Check structure
|
| 318 |
+
embedding.check_structure()
|
| 319 |
+
|
| 320 |
+
# Check that graphs are equivalent
|
| 321 |
+
|
| 322 |
+
assert set(G.nodes) == set(
|
| 323 |
+
embedding.nodes
|
| 324 |
+
), "Bad embedding. Nodes don't match the original graph."
|
| 325 |
+
|
| 326 |
+
# Check that the edges are equal
|
| 327 |
+
g_edges = set()
|
| 328 |
+
for edge in G.edges:
|
| 329 |
+
if edge[0] != edge[1]:
|
| 330 |
+
g_edges.add((edge[0], edge[1]))
|
| 331 |
+
g_edges.add((edge[1], edge[0]))
|
| 332 |
+
assert g_edges == set(
|
| 333 |
+
embedding.edges
|
| 334 |
+
), "Bad embedding. Edges don't match the original graph."
|
| 335 |
+
|
| 336 |
+
|
| 337 |
+
def check_counterexample(G, sub_graph):
|
| 338 |
+
"""Raises an exception if the counterexample is wrong.
|
| 339 |
+
|
| 340 |
+
Parameters
|
| 341 |
+
----------
|
| 342 |
+
G : NetworkX graph
|
| 343 |
+
subdivision_nodes : set
|
| 344 |
+
A set of nodes inducing a subgraph as a counterexample
|
| 345 |
+
"""
|
| 346 |
+
# 1. Create the sub graph
|
| 347 |
+
sub_graph = nx.Graph(sub_graph)
|
| 348 |
+
|
| 349 |
+
# 2. Remove self loops
|
| 350 |
+
for u in sub_graph:
|
| 351 |
+
if sub_graph.has_edge(u, u):
|
| 352 |
+
sub_graph.remove_edge(u, u)
|
| 353 |
+
|
| 354 |
+
# keep track of nodes we might need to contract
|
| 355 |
+
contract = list(sub_graph)
|
| 356 |
+
|
| 357 |
+
# 3. Contract Edges
|
| 358 |
+
while len(contract) > 0:
|
| 359 |
+
contract_node = contract.pop()
|
| 360 |
+
if contract_node not in sub_graph:
|
| 361 |
+
# Node was already contracted
|
| 362 |
+
continue
|
| 363 |
+
degree = sub_graph.degree[contract_node]
|
| 364 |
+
# Check if we can remove the node
|
| 365 |
+
if degree == 2:
|
| 366 |
+
# Get the two neighbors
|
| 367 |
+
neighbors = iter(sub_graph[contract_node])
|
| 368 |
+
u = next(neighbors)
|
| 369 |
+
v = next(neighbors)
|
| 370 |
+
# Save nodes for later
|
| 371 |
+
contract.append(u)
|
| 372 |
+
contract.append(v)
|
| 373 |
+
# Contract edge
|
| 374 |
+
sub_graph.remove_node(contract_node)
|
| 375 |
+
sub_graph.add_edge(u, v)
|
| 376 |
+
|
| 377 |
+
# 4. Check for isomorphism with K5 or K3_3 graphs
|
| 378 |
+
if len(sub_graph) == 5:
|
| 379 |
+
if not nx.is_isomorphic(nx.complete_graph(5), sub_graph):
|
| 380 |
+
raise nx.NetworkXException("Bad counter example.")
|
| 381 |
+
elif len(sub_graph) == 6:
|
| 382 |
+
if not nx.is_isomorphic(nx.complete_bipartite_graph(3, 3), sub_graph):
|
| 383 |
+
raise nx.NetworkXException("Bad counter example.")
|
| 384 |
+
else:
|
| 385 |
+
raise nx.NetworkXException("Bad counter example.")
|
| 386 |
+
|
| 387 |
+
|
| 388 |
+
class TestPlanarEmbeddingClass:
|
| 389 |
+
def test_add_half_edge(self):
|
| 390 |
+
embedding = nx.PlanarEmbedding()
|
| 391 |
+
embedding.add_half_edge(0, 1)
|
| 392 |
+
with pytest.raises(
|
| 393 |
+
nx.NetworkXException, match="Invalid clockwise reference node."
|
| 394 |
+
):
|
| 395 |
+
embedding.add_half_edge(0, 2, cw=3)
|
| 396 |
+
with pytest.raises(
|
| 397 |
+
nx.NetworkXException, match="Invalid counterclockwise reference node."
|
| 398 |
+
):
|
| 399 |
+
embedding.add_half_edge(0, 2, ccw=3)
|
| 400 |
+
with pytest.raises(
|
| 401 |
+
nx.NetworkXException, match="Only one of cw/ccw can be specified."
|
| 402 |
+
):
|
| 403 |
+
embedding.add_half_edge(0, 2, cw=1, ccw=1)
|
| 404 |
+
with pytest.raises(
|
| 405 |
+
nx.NetworkXException,
|
| 406 |
+
match=(
|
| 407 |
+
r"Node already has out-half-edge\(s\), either"
|
| 408 |
+
" cw or ccw reference node required."
|
| 409 |
+
),
|
| 410 |
+
):
|
| 411 |
+
embedding.add_half_edge(0, 2)
|
| 412 |
+
# these should work
|
| 413 |
+
embedding.add_half_edge(0, 2, cw=1)
|
| 414 |
+
embedding.add_half_edge(0, 3, ccw=1)
|
| 415 |
+
assert sorted(embedding.edges(data=True)) == [
|
| 416 |
+
(0, 1, {"ccw": 2, "cw": 3}),
|
| 417 |
+
(0, 2, {"cw": 1, "ccw": 3}),
|
| 418 |
+
(0, 3, {"cw": 2, "ccw": 1}),
|
| 419 |
+
]
|
| 420 |
+
|
| 421 |
+
def test_get_data(self):
|
| 422 |
+
embedding = self.get_star_embedding(4)
|
| 423 |
+
data = embedding.get_data()
|
| 424 |
+
data_cmp = {0: [3, 2, 1], 1: [0], 2: [0], 3: [0]}
|
| 425 |
+
assert data == data_cmp
|
| 426 |
+
|
| 427 |
+
def test_edge_removal(self):
|
| 428 |
+
embedding = nx.PlanarEmbedding()
|
| 429 |
+
embedding.set_data(
|
| 430 |
+
{
|
| 431 |
+
1: [2, 5, 7],
|
| 432 |
+
2: [1, 3, 4, 5],
|
| 433 |
+
3: [2, 4],
|
| 434 |
+
4: [3, 6, 5, 2],
|
| 435 |
+
5: [7, 1, 2, 4],
|
| 436 |
+
6: [4, 7],
|
| 437 |
+
7: [6, 1, 5],
|
| 438 |
+
}
|
| 439 |
+
)
|
| 440 |
+
# remove_edges_from() calls remove_edge(), so both are tested here
|
| 441 |
+
embedding.remove_edges_from(((5, 4), (1, 5)))
|
| 442 |
+
embedding.check_structure()
|
| 443 |
+
embedding_expected = nx.PlanarEmbedding()
|
| 444 |
+
embedding_expected.set_data(
|
| 445 |
+
{
|
| 446 |
+
1: [2, 7],
|
| 447 |
+
2: [1, 3, 4, 5],
|
| 448 |
+
3: [2, 4],
|
| 449 |
+
4: [3, 6, 2],
|
| 450 |
+
5: [7, 2],
|
| 451 |
+
6: [4, 7],
|
| 452 |
+
7: [6, 1, 5],
|
| 453 |
+
}
|
| 454 |
+
)
|
| 455 |
+
assert nx.utils.graphs_equal(embedding, embedding_expected)
|
| 456 |
+
|
| 457 |
+
def test_missing_edge_orientation(self):
|
| 458 |
+
embedding = nx.PlanarEmbedding({1: {2: {}}, 2: {1: {}}})
|
| 459 |
+
with pytest.raises(nx.NetworkXException):
|
| 460 |
+
# Invalid structure because the orientation of the edge was not set
|
| 461 |
+
embedding.check_structure()
|
| 462 |
+
|
| 463 |
+
def test_invalid_edge_orientation(self):
|
| 464 |
+
embedding = nx.PlanarEmbedding(
|
| 465 |
+
{
|
| 466 |
+
1: {2: {"cw": 2, "ccw": 2}},
|
| 467 |
+
2: {1: {"cw": 1, "ccw": 1}},
|
| 468 |
+
1: {3: {}},
|
| 469 |
+
3: {1: {}},
|
| 470 |
+
}
|
| 471 |
+
)
|
| 472 |
+
with pytest.raises(nx.NetworkXException):
|
| 473 |
+
embedding.check_structure()
|
| 474 |
+
|
| 475 |
+
def test_missing_half_edge(self):
|
| 476 |
+
embedding = nx.PlanarEmbedding()
|
| 477 |
+
embedding.add_half_edge(1, 2)
|
| 478 |
+
with pytest.raises(nx.NetworkXException):
|
| 479 |
+
# Invalid structure because other half edge is missing
|
| 480 |
+
embedding.check_structure()
|
| 481 |
+
|
| 482 |
+
def test_not_fulfilling_euler_formula(self):
|
| 483 |
+
embedding = nx.PlanarEmbedding()
|
| 484 |
+
for i in range(5):
|
| 485 |
+
ref = None
|
| 486 |
+
for j in range(5):
|
| 487 |
+
if i != j:
|
| 488 |
+
embedding.add_half_edge(i, j, cw=ref)
|
| 489 |
+
ref = j
|
| 490 |
+
with pytest.raises(nx.NetworkXException):
|
| 491 |
+
embedding.check_structure()
|
| 492 |
+
|
| 493 |
+
def test_missing_reference(self):
|
| 494 |
+
embedding = nx.PlanarEmbedding()
|
| 495 |
+
with pytest.raises(nx.NetworkXException, match="Invalid reference node."):
|
| 496 |
+
embedding.add_half_edge(1, 2, ccw=3)
|
| 497 |
+
|
| 498 |
+
def test_connect_components(self):
|
| 499 |
+
embedding = nx.PlanarEmbedding()
|
| 500 |
+
embedding.connect_components(1, 2)
|
| 501 |
+
|
| 502 |
+
def test_successful_face_traversal(self):
|
| 503 |
+
embedding = nx.PlanarEmbedding()
|
| 504 |
+
embedding.add_half_edge(1, 2)
|
| 505 |
+
embedding.add_half_edge(2, 1)
|
| 506 |
+
face = embedding.traverse_face(1, 2)
|
| 507 |
+
assert face == [1, 2]
|
| 508 |
+
|
| 509 |
+
def test_unsuccessful_face_traversal(self):
|
| 510 |
+
embedding = nx.PlanarEmbedding(
|
| 511 |
+
{1: {2: {"cw": 3, "ccw": 2}}, 2: {1: {"cw": 3, "ccw": 1}}}
|
| 512 |
+
)
|
| 513 |
+
with pytest.raises(nx.NetworkXException):
|
| 514 |
+
embedding.traverse_face(1, 2)
|
| 515 |
+
|
| 516 |
+
def test_forbidden_methods(self):
|
| 517 |
+
embedding = nx.PlanarEmbedding()
|
| 518 |
+
embedding.add_node(42) # no exception
|
| 519 |
+
embedding.add_nodes_from([(23, 24)]) # no exception
|
| 520 |
+
with pytest.raises(NotImplementedError):
|
| 521 |
+
embedding.add_edge(1, 3)
|
| 522 |
+
with pytest.raises(NotImplementedError):
|
| 523 |
+
embedding.add_edges_from([(0, 2), (1, 4)])
|
| 524 |
+
with pytest.raises(NotImplementedError):
|
| 525 |
+
embedding.add_weighted_edges_from([(0, 2, 350), (1, 4, 125)])
|
| 526 |
+
|
| 527 |
+
@staticmethod
|
| 528 |
+
def get_star_embedding(n):
|
| 529 |
+
embedding = nx.PlanarEmbedding()
|
| 530 |
+
ref = None
|
| 531 |
+
for i in range(1, n):
|
| 532 |
+
embedding.add_half_edge(0, i, cw=ref)
|
| 533 |
+
ref = i
|
| 534 |
+
embedding.add_half_edge(i, 0)
|
| 535 |
+
return embedding
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_regular.py
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx
|
| 4 |
+
import networkx as nx
|
| 5 |
+
import networkx.algorithms.regular as reg
|
| 6 |
+
import networkx.generators as gen
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class TestKFactor:
|
| 10 |
+
def test_k_factor_trivial(self):
|
| 11 |
+
g = gen.cycle_graph(4)
|
| 12 |
+
f = reg.k_factor(g, 2)
|
| 13 |
+
assert g.edges == f.edges
|
| 14 |
+
|
| 15 |
+
def test_k_factor1(self):
|
| 16 |
+
g = gen.grid_2d_graph(4, 4)
|
| 17 |
+
g_kf = reg.k_factor(g, 2)
|
| 18 |
+
for edge in g_kf.edges():
|
| 19 |
+
assert g.has_edge(edge[0], edge[1])
|
| 20 |
+
for _, degree in g_kf.degree():
|
| 21 |
+
assert degree == 2
|
| 22 |
+
|
| 23 |
+
def test_k_factor2(self):
|
| 24 |
+
g = gen.complete_graph(6)
|
| 25 |
+
g_kf = reg.k_factor(g, 3)
|
| 26 |
+
for edge in g_kf.edges():
|
| 27 |
+
assert g.has_edge(edge[0], edge[1])
|
| 28 |
+
for _, degree in g_kf.degree():
|
| 29 |
+
assert degree == 3
|
| 30 |
+
|
| 31 |
+
def test_k_factor3(self):
|
| 32 |
+
g = gen.grid_2d_graph(4, 4)
|
| 33 |
+
with pytest.raises(nx.NetworkXUnfeasible):
|
| 34 |
+
reg.k_factor(g, 3)
|
| 35 |
+
|
| 36 |
+
def test_k_factor4(self):
|
| 37 |
+
g = gen.lattice.hexagonal_lattice_graph(4, 4)
|
| 38 |
+
# Perfect matching doesn't exist for 4,4 hexagonal lattice graph
|
| 39 |
+
with pytest.raises(nx.NetworkXUnfeasible):
|
| 40 |
+
reg.k_factor(g, 2)
|
| 41 |
+
|
| 42 |
+
def test_k_factor5(self):
|
| 43 |
+
g = gen.complete_graph(6)
|
| 44 |
+
# small k to exercise SmallKGadget
|
| 45 |
+
g_kf = reg.k_factor(g, 2)
|
| 46 |
+
for edge in g_kf.edges():
|
| 47 |
+
assert g.has_edge(edge[0], edge[1])
|
| 48 |
+
for _, degree in g_kf.degree():
|
| 49 |
+
assert degree == 2
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class TestIsRegular:
|
| 53 |
+
def test_is_regular1(self):
|
| 54 |
+
g = gen.cycle_graph(4)
|
| 55 |
+
assert reg.is_regular(g)
|
| 56 |
+
|
| 57 |
+
def test_is_regular2(self):
|
| 58 |
+
g = gen.complete_graph(5)
|
| 59 |
+
assert reg.is_regular(g)
|
| 60 |
+
|
| 61 |
+
def test_is_regular3(self):
|
| 62 |
+
g = gen.lollipop_graph(5, 5)
|
| 63 |
+
assert not reg.is_regular(g)
|
| 64 |
+
|
| 65 |
+
def test_is_regular4(self):
|
| 66 |
+
g = nx.DiGraph()
|
| 67 |
+
g.add_edges_from([(0, 1), (1, 2), (2, 0)])
|
| 68 |
+
assert reg.is_regular(g)
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def test_is_regular_empty_graph_raises():
|
| 72 |
+
G = nx.Graph()
|
| 73 |
+
with pytest.raises(nx.NetworkXPointlessConcept, match="Graph has no nodes"):
|
| 74 |
+
nx.is_regular(G)
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
class TestIsKRegular:
|
| 78 |
+
def test_is_k_regular1(self):
|
| 79 |
+
g = gen.cycle_graph(4)
|
| 80 |
+
assert reg.is_k_regular(g, 2)
|
| 81 |
+
assert not reg.is_k_regular(g, 3)
|
| 82 |
+
|
| 83 |
+
def test_is_k_regular2(self):
|
| 84 |
+
g = gen.complete_graph(5)
|
| 85 |
+
assert reg.is_k_regular(g, 4)
|
| 86 |
+
assert not reg.is_k_regular(g, 3)
|
| 87 |
+
assert not reg.is_k_regular(g, 6)
|
| 88 |
+
|
| 89 |
+
def test_is_k_regular3(self):
|
| 90 |
+
g = gen.lollipop_graph(5, 5)
|
| 91 |
+
assert not reg.is_k_regular(g, 5)
|
| 92 |
+
assert not reg.is_k_regular(g, 6)
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_richclub.py
ADDED
|
@@ -0,0 +1,149 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def test_richclub():
|
| 7 |
+
G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
|
| 8 |
+
rc = nx.richclub.rich_club_coefficient(G, normalized=False)
|
| 9 |
+
assert rc == {0: 12.0 / 30, 1: 8.0 / 12}
|
| 10 |
+
|
| 11 |
+
# test single value
|
| 12 |
+
rc0 = nx.richclub.rich_club_coefficient(G, normalized=False)[0]
|
| 13 |
+
assert rc0 == 12.0 / 30.0
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def test_richclub_seed():
|
| 17 |
+
G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
|
| 18 |
+
rcNorm = nx.richclub.rich_club_coefficient(G, Q=2, seed=1)
|
| 19 |
+
assert rcNorm == {0: 1.0, 1: 1.0}
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def test_richclub_normalized():
|
| 23 |
+
G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
|
| 24 |
+
rcNorm = nx.richclub.rich_club_coefficient(G, Q=2, seed=42)
|
| 25 |
+
assert rcNorm == {0: 1.0, 1: 1.0}
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def test_richclub2():
|
| 29 |
+
T = nx.balanced_tree(2, 10)
|
| 30 |
+
rc = nx.richclub.rich_club_coefficient(T, normalized=False)
|
| 31 |
+
assert rc == {
|
| 32 |
+
0: 4092 / (2047 * 2046.0),
|
| 33 |
+
1: (2044.0 / (1023 * 1022)),
|
| 34 |
+
2: (2040.0 / (1022 * 1021)),
|
| 35 |
+
}
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def test_richclub3():
|
| 39 |
+
# tests edgecase
|
| 40 |
+
G = nx.karate_club_graph()
|
| 41 |
+
rc = nx.rich_club_coefficient(G, normalized=False)
|
| 42 |
+
assert rc == {
|
| 43 |
+
0: 156.0 / 1122,
|
| 44 |
+
1: 154.0 / 1056,
|
| 45 |
+
2: 110.0 / 462,
|
| 46 |
+
3: 78.0 / 240,
|
| 47 |
+
4: 44.0 / 90,
|
| 48 |
+
5: 22.0 / 42,
|
| 49 |
+
6: 10.0 / 20,
|
| 50 |
+
7: 10.0 / 20,
|
| 51 |
+
8: 10.0 / 20,
|
| 52 |
+
9: 6.0 / 12,
|
| 53 |
+
10: 2.0 / 6,
|
| 54 |
+
11: 2.0 / 6,
|
| 55 |
+
12: 0.0,
|
| 56 |
+
13: 0.0,
|
| 57 |
+
14: 0.0,
|
| 58 |
+
15: 0.0,
|
| 59 |
+
}
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def test_richclub4():
|
| 63 |
+
G = nx.Graph()
|
| 64 |
+
G.add_edges_from(
|
| 65 |
+
[(0, 1), (0, 2), (0, 3), (0, 4), (4, 5), (5, 9), (6, 9), (7, 9), (8, 9)]
|
| 66 |
+
)
|
| 67 |
+
rc = nx.rich_club_coefficient(G, normalized=False)
|
| 68 |
+
assert rc == {0: 18 / 90.0, 1: 6 / 12.0, 2: 0.0, 3: 0.0}
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def test_richclub_exception():
|
| 72 |
+
with pytest.raises(nx.NetworkXNotImplemented):
|
| 73 |
+
G = nx.DiGraph()
|
| 74 |
+
nx.rich_club_coefficient(G)
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def test_rich_club_exception2():
|
| 78 |
+
with pytest.raises(nx.NetworkXNotImplemented):
|
| 79 |
+
G = nx.MultiGraph()
|
| 80 |
+
nx.rich_club_coefficient(G)
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def test_rich_club_selfloop():
|
| 84 |
+
G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
|
| 85 |
+
G.add_edge(1, 1) # self loop
|
| 86 |
+
G.add_edge(1, 2)
|
| 87 |
+
with pytest.raises(
|
| 88 |
+
Exception,
|
| 89 |
+
match="rich_club_coefficient is not implemented for " "graphs with self loops.",
|
| 90 |
+
):
|
| 91 |
+
nx.rich_club_coefficient(G)
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def test_rich_club_leq_3_nodes_unnormalized():
|
| 95 |
+
# edgeless graphs upto 3 nodes
|
| 96 |
+
G = nx.Graph()
|
| 97 |
+
rc = nx.rich_club_coefficient(G, normalized=False)
|
| 98 |
+
assert rc == {}
|
| 99 |
+
|
| 100 |
+
for i in range(3):
|
| 101 |
+
G.add_node(i)
|
| 102 |
+
rc = nx.rich_club_coefficient(G, normalized=False)
|
| 103 |
+
assert rc == {}
|
| 104 |
+
|
| 105 |
+
# 2 nodes, single edge
|
| 106 |
+
G = nx.Graph()
|
| 107 |
+
G.add_edge(0, 1)
|
| 108 |
+
rc = nx.rich_club_coefficient(G, normalized=False)
|
| 109 |
+
assert rc == {0: 1}
|
| 110 |
+
|
| 111 |
+
# 3 nodes, single edge
|
| 112 |
+
G = nx.Graph()
|
| 113 |
+
G.add_nodes_from([0, 1, 2])
|
| 114 |
+
G.add_edge(0, 1)
|
| 115 |
+
rc = nx.rich_club_coefficient(G, normalized=False)
|
| 116 |
+
assert rc == {0: 1}
|
| 117 |
+
|
| 118 |
+
# 3 nodes, 2 edges
|
| 119 |
+
G.add_edge(1, 2)
|
| 120 |
+
rc = nx.rich_club_coefficient(G, normalized=False)
|
| 121 |
+
assert rc == {0: 2 / 3}
|
| 122 |
+
|
| 123 |
+
# 3 nodes, 3 edges
|
| 124 |
+
G.add_edge(0, 2)
|
| 125 |
+
rc = nx.rich_club_coefficient(G, normalized=False)
|
| 126 |
+
assert rc == {0: 1, 1: 1}
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
def test_rich_club_leq_3_nodes_normalized():
|
| 130 |
+
G = nx.Graph()
|
| 131 |
+
with pytest.raises(
|
| 132 |
+
nx.exception.NetworkXError,
|
| 133 |
+
match="Graph has fewer than four nodes",
|
| 134 |
+
):
|
| 135 |
+
rc = nx.rich_club_coefficient(G, normalized=True)
|
| 136 |
+
|
| 137 |
+
for i in range(3):
|
| 138 |
+
G.add_node(i)
|
| 139 |
+
with pytest.raises(
|
| 140 |
+
nx.exception.NetworkXError,
|
| 141 |
+
match="Graph has fewer than four nodes",
|
| 142 |
+
):
|
| 143 |
+
rc = nx.rich_club_coefficient(G, normalized=True)
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
# def test_richclub2_normalized():
|
| 147 |
+
# T = nx.balanced_tree(2,10)
|
| 148 |
+
# rcNorm = nx.richclub.rich_club_coefficient(T,Q=2)
|
| 149 |
+
# assert_true(rcNorm[0] ==1.0 and rcNorm[1] < 0.9 and rcNorm[2] < 0.9)
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_similarity.py
ADDED
|
@@ -0,0 +1,946 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.algorithms.similarity import (
|
| 5 |
+
graph_edit_distance,
|
| 6 |
+
optimal_edit_paths,
|
| 7 |
+
optimize_graph_edit_distance,
|
| 8 |
+
)
|
| 9 |
+
from networkx.generators.classic import (
|
| 10 |
+
circular_ladder_graph,
|
| 11 |
+
cycle_graph,
|
| 12 |
+
path_graph,
|
| 13 |
+
wheel_graph,
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def nmatch(n1, n2):
|
| 18 |
+
return n1 == n2
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def ematch(e1, e2):
|
| 22 |
+
return e1 == e2
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def getCanonical():
|
| 26 |
+
G = nx.Graph()
|
| 27 |
+
G.add_node("A", label="A")
|
| 28 |
+
G.add_node("B", label="B")
|
| 29 |
+
G.add_node("C", label="C")
|
| 30 |
+
G.add_node("D", label="D")
|
| 31 |
+
G.add_edge("A", "B", label="a-b")
|
| 32 |
+
G.add_edge("B", "C", label="b-c")
|
| 33 |
+
G.add_edge("B", "D", label="b-d")
|
| 34 |
+
return G
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class TestSimilarity:
|
| 38 |
+
@classmethod
|
| 39 |
+
def setup_class(cls):
|
| 40 |
+
global np
|
| 41 |
+
np = pytest.importorskip("numpy")
|
| 42 |
+
pytest.importorskip("scipy")
|
| 43 |
+
|
| 44 |
+
def test_graph_edit_distance_roots_and_timeout(self):
|
| 45 |
+
G0 = nx.star_graph(5)
|
| 46 |
+
G1 = G0.copy()
|
| 47 |
+
pytest.raises(ValueError, graph_edit_distance, G0, G1, roots=[2])
|
| 48 |
+
pytest.raises(ValueError, graph_edit_distance, G0, G1, roots=[2, 3, 4])
|
| 49 |
+
pytest.raises(nx.NodeNotFound, graph_edit_distance, G0, G1, roots=(9, 3))
|
| 50 |
+
pytest.raises(nx.NodeNotFound, graph_edit_distance, G0, G1, roots=(3, 9))
|
| 51 |
+
pytest.raises(nx.NodeNotFound, graph_edit_distance, G0, G1, roots=(9, 9))
|
| 52 |
+
assert graph_edit_distance(G0, G1, roots=(1, 2)) == 0
|
| 53 |
+
assert graph_edit_distance(G0, G1, roots=(0, 1)) == 8
|
| 54 |
+
assert graph_edit_distance(G0, G1, roots=(1, 2), timeout=5) == 0
|
| 55 |
+
assert graph_edit_distance(G0, G1, roots=(0, 1), timeout=5) == 8
|
| 56 |
+
assert graph_edit_distance(G0, G1, roots=(0, 1), timeout=0.0001) is None
|
| 57 |
+
# test raise on 0 timeout
|
| 58 |
+
pytest.raises(nx.NetworkXError, graph_edit_distance, G0, G1, timeout=0)
|
| 59 |
+
|
| 60 |
+
def test_graph_edit_distance(self):
|
| 61 |
+
G0 = nx.Graph()
|
| 62 |
+
G1 = path_graph(6)
|
| 63 |
+
G2 = cycle_graph(6)
|
| 64 |
+
G3 = wheel_graph(7)
|
| 65 |
+
|
| 66 |
+
assert graph_edit_distance(G0, G0) == 0
|
| 67 |
+
assert graph_edit_distance(G0, G1) == 11
|
| 68 |
+
assert graph_edit_distance(G1, G0) == 11
|
| 69 |
+
assert graph_edit_distance(G0, G2) == 12
|
| 70 |
+
assert graph_edit_distance(G2, G0) == 12
|
| 71 |
+
assert graph_edit_distance(G0, G3) == 19
|
| 72 |
+
assert graph_edit_distance(G3, G0) == 19
|
| 73 |
+
|
| 74 |
+
assert graph_edit_distance(G1, G1) == 0
|
| 75 |
+
assert graph_edit_distance(G1, G2) == 1
|
| 76 |
+
assert graph_edit_distance(G2, G1) == 1
|
| 77 |
+
assert graph_edit_distance(G1, G3) == 8
|
| 78 |
+
assert graph_edit_distance(G3, G1) == 8
|
| 79 |
+
|
| 80 |
+
assert graph_edit_distance(G2, G2) == 0
|
| 81 |
+
assert graph_edit_distance(G2, G3) == 7
|
| 82 |
+
assert graph_edit_distance(G3, G2) == 7
|
| 83 |
+
|
| 84 |
+
assert graph_edit_distance(G3, G3) == 0
|
| 85 |
+
|
| 86 |
+
def test_graph_edit_distance_node_match(self):
|
| 87 |
+
G1 = cycle_graph(5)
|
| 88 |
+
G2 = cycle_graph(5)
|
| 89 |
+
for n, attr in G1.nodes.items():
|
| 90 |
+
attr["color"] = "red" if n % 2 == 0 else "blue"
|
| 91 |
+
for n, attr in G2.nodes.items():
|
| 92 |
+
attr["color"] = "red" if n % 2 == 1 else "blue"
|
| 93 |
+
assert graph_edit_distance(G1, G2) == 0
|
| 94 |
+
assert (
|
| 95 |
+
graph_edit_distance(
|
| 96 |
+
G1, G2, node_match=lambda n1, n2: n1["color"] == n2["color"]
|
| 97 |
+
)
|
| 98 |
+
== 1
|
| 99 |
+
)
|
| 100 |
+
|
| 101 |
+
def test_graph_edit_distance_edge_match(self):
|
| 102 |
+
G1 = path_graph(6)
|
| 103 |
+
G2 = path_graph(6)
|
| 104 |
+
for e, attr in G1.edges.items():
|
| 105 |
+
attr["color"] = "red" if min(e) % 2 == 0 else "blue"
|
| 106 |
+
for e, attr in G2.edges.items():
|
| 107 |
+
attr["color"] = "red" if min(e) // 3 == 0 else "blue"
|
| 108 |
+
assert graph_edit_distance(G1, G2) == 0
|
| 109 |
+
assert (
|
| 110 |
+
graph_edit_distance(
|
| 111 |
+
G1, G2, edge_match=lambda e1, e2: e1["color"] == e2["color"]
|
| 112 |
+
)
|
| 113 |
+
== 2
|
| 114 |
+
)
|
| 115 |
+
|
| 116 |
+
def test_graph_edit_distance_node_cost(self):
|
| 117 |
+
G1 = path_graph(6)
|
| 118 |
+
G2 = path_graph(6)
|
| 119 |
+
for n, attr in G1.nodes.items():
|
| 120 |
+
attr["color"] = "red" if n % 2 == 0 else "blue"
|
| 121 |
+
for n, attr in G2.nodes.items():
|
| 122 |
+
attr["color"] = "red" if n % 2 == 1 else "blue"
|
| 123 |
+
|
| 124 |
+
def node_subst_cost(uattr, vattr):
|
| 125 |
+
if uattr["color"] == vattr["color"]:
|
| 126 |
+
return 1
|
| 127 |
+
else:
|
| 128 |
+
return 10
|
| 129 |
+
|
| 130 |
+
def node_del_cost(attr):
|
| 131 |
+
if attr["color"] == "blue":
|
| 132 |
+
return 20
|
| 133 |
+
else:
|
| 134 |
+
return 50
|
| 135 |
+
|
| 136 |
+
def node_ins_cost(attr):
|
| 137 |
+
if attr["color"] == "blue":
|
| 138 |
+
return 40
|
| 139 |
+
else:
|
| 140 |
+
return 100
|
| 141 |
+
|
| 142 |
+
assert (
|
| 143 |
+
graph_edit_distance(
|
| 144 |
+
G1,
|
| 145 |
+
G2,
|
| 146 |
+
node_subst_cost=node_subst_cost,
|
| 147 |
+
node_del_cost=node_del_cost,
|
| 148 |
+
node_ins_cost=node_ins_cost,
|
| 149 |
+
)
|
| 150 |
+
== 6
|
| 151 |
+
)
|
| 152 |
+
|
| 153 |
+
def test_graph_edit_distance_edge_cost(self):
|
| 154 |
+
G1 = path_graph(6)
|
| 155 |
+
G2 = path_graph(6)
|
| 156 |
+
for e, attr in G1.edges.items():
|
| 157 |
+
attr["color"] = "red" if min(e) % 2 == 0 else "blue"
|
| 158 |
+
for e, attr in G2.edges.items():
|
| 159 |
+
attr["color"] = "red" if min(e) // 3 == 0 else "blue"
|
| 160 |
+
|
| 161 |
+
def edge_subst_cost(gattr, hattr):
|
| 162 |
+
if gattr["color"] == hattr["color"]:
|
| 163 |
+
return 0.01
|
| 164 |
+
else:
|
| 165 |
+
return 0.1
|
| 166 |
+
|
| 167 |
+
def edge_del_cost(attr):
|
| 168 |
+
if attr["color"] == "blue":
|
| 169 |
+
return 0.2
|
| 170 |
+
else:
|
| 171 |
+
return 0.5
|
| 172 |
+
|
| 173 |
+
def edge_ins_cost(attr):
|
| 174 |
+
if attr["color"] == "blue":
|
| 175 |
+
return 0.4
|
| 176 |
+
else:
|
| 177 |
+
return 1.0
|
| 178 |
+
|
| 179 |
+
assert (
|
| 180 |
+
graph_edit_distance(
|
| 181 |
+
G1,
|
| 182 |
+
G2,
|
| 183 |
+
edge_subst_cost=edge_subst_cost,
|
| 184 |
+
edge_del_cost=edge_del_cost,
|
| 185 |
+
edge_ins_cost=edge_ins_cost,
|
| 186 |
+
)
|
| 187 |
+
== 0.23
|
| 188 |
+
)
|
| 189 |
+
|
| 190 |
+
def test_graph_edit_distance_upper_bound(self):
|
| 191 |
+
G1 = circular_ladder_graph(2)
|
| 192 |
+
G2 = circular_ladder_graph(6)
|
| 193 |
+
assert graph_edit_distance(G1, G2, upper_bound=5) is None
|
| 194 |
+
assert graph_edit_distance(G1, G2, upper_bound=24) == 22
|
| 195 |
+
assert graph_edit_distance(G1, G2) == 22
|
| 196 |
+
|
| 197 |
+
def test_optimal_edit_paths(self):
    """All six optimal edit paths between P3 and C3 are found, each of cost 1."""
    paths, cost = optimal_edit_paths(path_graph(3), cycle_graph(3))
    assert cost == 1
    assert len(paths) == 6

    def canonical(vertex_path, edge_path):
        # Order-insensitive form of a path so the sets below can be compared.
        return (
            tuple(sorted(vertex_path)),
            tuple(sorted(edge_path, key=lambda x: (None in x, x))),
        )

    # Each entry: (vertex mapping, edge mapping); None marks an insertion.
    expected_paths = [
        (
            [(0, 0), (1, 1), (2, 2)],
            [((0, 1), (0, 1)), ((1, 2), (1, 2)), (None, (0, 2))],
        ),
        (
            [(0, 0), (1, 2), (2, 1)],
            [((0, 1), (0, 2)), ((1, 2), (1, 2)), (None, (0, 1))],
        ),
        (
            [(0, 1), (1, 0), (2, 2)],
            [((0, 1), (0, 1)), ((1, 2), (0, 2)), (None, (1, 2))],
        ),
        (
            [(0, 1), (1, 2), (2, 0)],
            [((0, 1), (1, 2)), ((1, 2), (0, 2)), (None, (0, 1))],
        ),
        (
            [(0, 2), (1, 0), (2, 1)],
            [((0, 1), (0, 2)), ((1, 2), (0, 1)), (None, (1, 2))],
        ),
        (
            [(0, 2), (1, 1), (2, 0)],
            [((0, 1), (1, 2)), ((1, 2), (0, 1)), (None, (0, 2))],
        ),
    ]
    assert {canonical(*p) for p in paths} == {canonical(*p) for p in expected_paths}
|
| 237 |
+
|
| 238 |
+
def test_optimize_graph_edit_distance(self):
    """The generator yields strictly improving costs ending at the optimum."""
    G1 = circular_ladder_graph(2)
    G2 = circular_ladder_graph(6)
    best = 1000
    for cost in optimize_graph_edit_distance(G1, G2):
        # Every yielded approximation must beat the previous one.
        assert cost < best
        best = cost
    assert best == 22
|
| 246 |
+
|
| 247 |
+
# def test_graph_edit_distance_bigger(self):
|
| 248 |
+
# G1 = circular_ladder_graph(12)
|
| 249 |
+
# G2 = circular_ladder_graph(16)
|
| 250 |
+
# assert_equal(graph_edit_distance(G1, G2), 22)
|
| 251 |
+
|
| 252 |
+
def test_selfloops(self):
    """GED on undirected graphs with self-loops."""
    G0 = nx.Graph()
    G1 = nx.Graph()
    G1.add_edges_from([("A", "A"), ("A", "B")])
    G2 = nx.Graph()
    G2.add_edges_from([("A", "B"), ("B", "B")])
    G3 = nx.Graph()
    G3.add_edges_from([("A", "A"), ("A", "B"), ("B", "B")])

    # Distance to/from the empty graph counts every node and edge operation.
    for G, dist in ((G0, 0), (G1, 4), (G2, 4), (G3, 5)):
        assert graph_edit_distance(G0, G) == dist
        assert graph_edit_distance(G, G0) == dist

    assert graph_edit_distance(G1, G1) == 0
    # G1 and G2 differ only by which endpoint carries the self-loop.
    assert graph_edit_distance(G1, G2) == 0
    assert graph_edit_distance(G2, G1) == 0
    assert graph_edit_distance(G1, G3) == 1
    assert graph_edit_distance(G3, G1) == 1

    assert graph_edit_distance(G2, G2) == 0
    assert graph_edit_distance(G2, G3) == 1
    assert graph_edit_distance(G3, G2) == 1

    assert graph_edit_distance(G3, G3) == 0
|
| 280 |
+
|
| 281 |
+
def test_digraph(self):
    """GED respects edge direction on DiGraphs."""
    G0 = nx.DiGraph()
    G1 = nx.DiGraph()
    G1.add_edges_from([("A", "B"), ("B", "C"), ("C", "D"), ("D", "A")])
    G2 = nx.DiGraph()
    G2.add_edges_from([("A", "B"), ("B", "C"), ("C", "D"), ("A", "D")])
    G3 = nx.DiGraph()
    G3.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "D")])

    # Each nonempty graph is 4 node + 4 edge operations away from empty.
    for G, dist in ((G0, 0), (G1, 8), (G2, 8), (G3, 8)):
        assert graph_edit_distance(G0, G) == dist
        assert graph_edit_distance(G, G0) == dist

    assert graph_edit_distance(G1, G1) == 0
    assert graph_edit_distance(G1, G2) == 2
    assert graph_edit_distance(G2, G1) == 2
    assert graph_edit_distance(G1, G3) == 4
    assert graph_edit_distance(G3, G1) == 4

    assert graph_edit_distance(G2, G2) == 0
    assert graph_edit_distance(G2, G3) == 2
    assert graph_edit_distance(G3, G2) == 2

    assert graph_edit_distance(G3, G3) == 0
|
| 309 |
+
|
| 310 |
+
def test_multigraph(self):
    """GED counts parallel edges individually on MultiGraphs."""
    G0 = nx.MultiGraph()
    G1 = nx.MultiGraph()
    G1.add_edges_from([("A", "B"), ("B", "C"), ("A", "C")])
    G2 = nx.MultiGraph()
    G2.add_edges_from([("A", "B"), ("B", "C"), ("B", "C"), ("A", "C")])
    G3 = nx.MultiGraph()
    G3.add_edges_from([("A", "B"), ("B", "C"), ("A", "C"), ("A", "C"), ("A", "C")])

    # Distance to/from the empty graph grows with the edge multiplicities.
    for G, dist in ((G0, 0), (G1, 6), (G2, 7), (G3, 8)):
        assert graph_edit_distance(G0, G) == dist
        assert graph_edit_distance(G, G0) == dist

    assert graph_edit_distance(G1, G1) == 0
    assert graph_edit_distance(G1, G2) == 1
    assert graph_edit_distance(G2, G1) == 1
    assert graph_edit_distance(G1, G3) == 2
    assert graph_edit_distance(G3, G1) == 2

    assert graph_edit_distance(G2, G2) == 0
    assert graph_edit_distance(G2, G3) == 1
    assert graph_edit_distance(G3, G2) == 1

    assert graph_edit_distance(G3, G3) == 0
|
| 338 |
+
|
| 339 |
+
def test_multidigraph(self):
    """GED between two structurally different 4-node MultiDiGraphs."""
    G1 = nx.MultiDiGraph()
    G1.add_edges_from(
        [
            ("hardware", "kernel"),
            ("kernel", "hardware"),
            ("kernel", "userspace"),
            ("userspace", "kernel"),
        ]
    )
    G2 = nx.MultiDiGraph()
    G2.add_edges_from(
        [
            ("winter", "spring"),
            ("spring", "summer"),
            ("summer", "autumn"),
            ("autumn", "winter"),
        ]
    )
    # The distance must be symmetric.
    assert graph_edit_distance(G1, G2) == 5
    assert graph_edit_distance(G2, G1) == 5
|
| 361 |
+
|
| 362 |
+
# by https://github.com/jfbeaumont
|
| 363 |
+
def testCopy(self):
    """A graph is at zero edit distance from its own copy under label matching."""
    G = nx.Graph()
    G.add_nodes_from([("A", {"label": "A"}), ("B", {"label": "B"})])
    G.add_edge("A", "B", label="a-b")
    dist = graph_edit_distance(G, G.copy(), node_match=nmatch, edge_match=ematch)
    assert dist == 0

def testSame(self):
    """Two independently built, identical labeled graphs have distance 0."""
    G1 = nx.Graph()
    G1.add_nodes_from([("A", {"label": "A"}), ("B", {"label": "B"})])
    G1.add_edge("A", "B", label="a-b")
    G2 = nx.Graph()
    G2.add_nodes_from([("A", {"label": "A"}), ("B", {"label": "B"})])
    G2.add_edge("A", "B", label="a-b")
    assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 0
|
| 382 |
+
|
| 383 |
+
def testOneEdgeLabelDiff(self):
    """Changing a single edge label costs one substitution."""
    G1 = nx.Graph()
    G1.add_nodes_from([("A", {"label": "A"}), ("B", {"label": "B"})])
    G1.add_edge("A", "B", label="a-b")
    G2 = nx.Graph()
    G2.add_nodes_from([("A", {"label": "A"}), ("B", {"label": "B"})])
    G2.add_edge("A", "B", label="bad")
    assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1

def testOneNodeLabelDiff(self):
    """Changing a single node label costs one substitution."""
    G1 = nx.Graph()
    G1.add_nodes_from([("A", {"label": "A"}), ("B", {"label": "B"})])
    G1.add_edge("A", "B", label="a-b")
    G2 = nx.Graph()
    G2.add_nodes_from([("A", {"label": "Z"}), ("B", {"label": "B"})])
    G2.add_edge("A", "B", label="a-b")
    assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1

def testOneExtraNode(self):
    """One unmatched extra node costs one insertion."""
    G1 = nx.Graph()
    G1.add_nodes_from([("A", {"label": "A"}), ("B", {"label": "B"})])
    G1.add_edge("A", "B", label="a-b")
    G2 = nx.Graph()
    G2.add_nodes_from([("A", {"label": "A"}), ("B", {"label": "B"})])
    G2.add_edge("A", "B", label="a-b")
    G2.add_node("C", label="C")
    assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1
|
| 416 |
+
|
| 417 |
+
def testOneExtraEdge(self):
    """G2 has one edge that G1 lacks, so the labeled edit distance is 1.

    Fix: the original added node "C" to G1 twice with identical identical
    arguments; ``add_node`` is idempotent, so the duplicate call was dead
    code and has been removed.
    """
    G1 = nx.Graph()
    G1.add_node("A", label="A")
    G1.add_node("B", label="B")
    G1.add_node("C", label="C")
    G1.add_edge("A", "B", label="a-b")
    G2 = nx.Graph()
    G2.add_node("A", label="A")
    G2.add_node("B", label="B")
    G2.add_node("C", label="C")
    G2.add_edge("A", "B", label="a-b")
    G2.add_edge("A", "C", label="a-c")
    assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1
|
| 431 |
+
|
| 432 |
+
def testOneExtraNodeAndEdge(self):
    """One extra node plus one extra edge costs two operations."""
    G1 = nx.Graph()
    G1.add_nodes_from([("A", {"label": "A"}), ("B", {"label": "B"})])
    G1.add_edge("A", "B", label="a-b")
    G2 = nx.Graph()
    G2.add_nodes_from(
        [("A", {"label": "A"}), ("B", {"label": "B"}), ("C", {"label": "C"})]
    )
    G2.add_edge("A", "B", label="a-b")
    G2.add_edge("A", "C", label="a-c")
    assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2
|
| 444 |
+
|
| 445 |
+
def testGraph1(self):
    """Canonical graph vs a 4-node labeled path on A-B-D-E: distance 3."""
    G1 = getCanonical()
    G2 = nx.Graph()
    # Node labels equal the node names; edge labels are lowercased endpoints.
    for node in "ABDE":
        G2.add_node(node, label=node)
    for u, v in (("A", "B"), ("B", "D"), ("D", "E")):
        G2.add_edge(u, v, label=f"{u.lower()}-{v.lower()}")
    assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 3

def testGraph2(self):
    """Canonical graph vs a 5-node labeled tree rooted through C: distance 4."""
    G1 = getCanonical()
    G2 = nx.Graph()
    for node in "ABCDE":
        G2.add_node(node, label=node)
    for u, v in (("A", "B"), ("B", "C"), ("C", "D"), ("C", "E")):
        G2.add_edge(u, v, label=f"{u.lower()}-{v.lower()}")
    assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 4
|
| 470 |
+
|
| 471 |
+
def testGraph3(self):
    """Canonical graph vs a 7-node labeled tree: distance 12."""
    G1 = getCanonical()
    G2 = nx.Graph()
    # Node labels equal the node names; edge labels are lowercased endpoints.
    for node in "ABCDEFG":
        G2.add_node(node, label=node)
    edges = (("A", "C"), ("A", "D"), ("D", "E"), ("D", "F"), ("D", "G"), ("E", "B"))
    for u, v in edges:
        G2.add_edge(u, v, label=f"{u.lower()}-{v.lower()}")
    assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 12
|
| 488 |
+
|
| 489 |
+
def testGraph4(self):
    """Canonical graph vs a labeled 4-node path: distance 2."""
    G1 = getCanonical()
    G2 = nx.Graph()
    for node in "ABCD":
        G2.add_node(node, label=node)
    for u, v in (("A", "B"), ("B", "C"), ("C", "D")):
        G2.add_edge(u, v, label=f"{u.lower()}-{v.lower()}")
    assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2

def testGraph4_a(self):
    """Same as testGraph4 but the last edge hangs off node A: distance 2."""
    G1 = getCanonical()
    G2 = nx.Graph()
    for node in "ABCD":
        G2.add_node(node, label=node)
    for u, v in (("A", "B"), ("B", "C"), ("A", "D")):
        G2.add_edge(u, v, label=f"{u.lower()}-{v.lower()}")
    assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2

def testGraph4_b(self):
    """Same shape as the canonical graph, one edge label differs: distance 1."""
    G1 = getCanonical()
    G2 = nx.Graph()
    for node in "ABCD":
        G2.add_node(node, label=node)
    G2.add_edge("A", "B", label="a-b")
    G2.add_edge("B", "C", label="b-c")
    # Deliberately mislabeled edge ("bad" instead of "b-d").
    G2.add_edge("B", "D", label="bad")
    assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1
|
| 524 |
+
|
| 525 |
+
# Implementations exercised by the parametrized SimRank tests below.
# note: nx.simrank_similarity_numpy not included because returns np.array
simrank_algs = [
    nx.simrank_similarity,
    nx.algorithms.similarity._simrank_similarity_python,
]
|
| 530 |
+
|
| 531 |
+
@pytest.mark.parametrize("simrank_similarity", simrank_algs)
def test_simrank_no_source_no_target(self, simrank_similarity):
    """Without source/target, SimRank returns the full node-to-node dict."""
    G = nx.cycle_graph(5)
    # Dense numeric fixture: expected pairwise similarities on C5.
    expected = {
        0: {
            0: 1,
            1: 0.3951219505902448,
            2: 0.5707317069281646,
            3: 0.5707317069281646,
            4: 0.3951219505902449,
        },
        1: {
            0: 0.3951219505902448,
            1: 1,
            2: 0.3951219505902449,
            3: 0.5707317069281646,
            4: 0.5707317069281646,
        },
        2: {
            0: 0.5707317069281646,
            1: 0.3951219505902449,
            2: 1,
            3: 0.3951219505902449,
            4: 0.5707317069281646,
        },
        3: {
            0: 0.5707317069281646,
            1: 0.5707317069281646,
            2: 0.3951219505902449,
            3: 1,
            4: 0.3951219505902449,
        },
        4: {
            0: 0.3951219505902449,
            1: 0.5707317069281646,
            2: 0.5707317069281646,
            3: 0.3951219505902449,
            4: 1,
        },
    }
    actual = simrank_similarity(G)
    for k, v in expected.items():
        assert v == pytest.approx(actual[k], abs=1e-2)

    # For a DiGraph test, use the first graph from the paper cited in
    # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126
    G = nx.DiGraph()
    G.add_node(0, label="Univ")
    G.add_node(1, label="ProfA")
    G.add_node(2, label="ProfB")
    G.add_node(3, label="StudentA")
    G.add_node(4, label="StudentB")
    G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)])

    expected = {
        0: {0: 1, 1: 0.0, 2: 0.1323363991265798, 3: 0.0, 4: 0.03387811817640443},
        1: {0: 0.0, 1: 1, 2: 0.4135512472705618, 3: 0.0, 4: 0.10586911930126384},
        2: {
            0: 0.1323363991265798,
            1: 0.4135512472705618,
            2: 1,
            3: 0.04234764772050554,
            4: 0.08822426608438655,
        },
        3: {0: 0.0, 1: 0.0, 2: 0.04234764772050554, 3: 1, 4: 0.3308409978164495},
        4: {
            0: 0.03387811817640443,
            1: 0.10586911930126384,
            2: 0.08822426608438655,
            3: 0.3308409978164495,
            4: 1,
        },
    }
    # Use the importance_factor from the paper to get the same numbers.
    actual = simrank_similarity(G, importance_factor=0.8)
    for k, v in expected.items():
        assert v == pytest.approx(actual[k], abs=1e-2)
|
| 608 |
+
|
| 609 |
+
@pytest.mark.parametrize("simrank_similarity", simrank_algs)
def test_simrank_source_no_target(self, simrank_similarity):
    """With only ``source`` given, SimRank returns that node's similarity dict."""
    G = nx.cycle_graph(5)
    expected = {
        0: 1,
        1: 0.3951219505902448,
        2: 0.5707317069281646,
        3: 0.5707317069281646,
        4: 0.3951219505902449,
    }
    actual = simrank_similarity(G, source=0)
    assert expected == pytest.approx(actual, abs=1e-2)

    # For a DiGraph test, use the first graph from the paper cited in
    # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126
    G = nx.DiGraph()
    G.add_node(0, label="Univ")
    G.add_node(1, label="ProfA")
    G.add_node(2, label="ProfB")
    G.add_node(3, label="StudentA")
    G.add_node(4, label="StudentB")
    G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)])

    expected = {0: 1, 1: 0.0, 2: 0.1323363991265798, 3: 0.0, 4: 0.03387811817640443}
    # Use the importance_factor from the paper to get the same numbers.
    actual = simrank_similarity(G, importance_factor=0.8, source=0)
    assert expected == pytest.approx(actual, abs=1e-2)
|
| 636 |
+
|
| 637 |
+
@pytest.mark.parametrize("simrank_similarity", simrank_algs)
def test_simrank_noninteger_nodes(self, simrank_similarity):
    """SimRank works with string node labels, not just integers."""
    G = nx.cycle_graph(5)
    G = nx.relabel_nodes(G, dict(enumerate("abcde")))
    expected = {
        "a": 1,
        "b": 0.3951219505902448,
        "c": 0.5707317069281646,
        "d": 0.5707317069281646,
        "e": 0.3951219505902449,
    }
    actual = simrank_similarity(G, source="a")
    assert expected == pytest.approx(actual, abs=1e-2)

    # For a DiGraph test, use the first graph from the paper cited in
    # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126
    G = nx.DiGraph()
    G.add_node(0, label="Univ")
    G.add_node(1, label="ProfA")
    G.add_node(2, label="ProfB")
    G.add_node(3, label="StudentA")
    G.add_node(4, label="StudentB")
    G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)])
    # Rename each integer node to its "label" attribute.
    node_labels = dict(enumerate(nx.get_node_attributes(G, "label").values()))
    G = nx.relabel_nodes(G, node_labels)

    expected = {
        "Univ": 1,
        "ProfA": 0.0,
        "ProfB": 0.1323363991265798,
        "StudentA": 0.0,
        "StudentB": 0.03387811817640443,
    }
    # Use the importance_factor from the paper to get the same numbers.
    actual = simrank_similarity(G, importance_factor=0.8, source="Univ")
    assert expected == pytest.approx(actual, abs=1e-2)
|
| 673 |
+
|
| 674 |
+
@pytest.mark.parametrize("simrank_similarity", simrank_algs)
def test_simrank_source_and_target(self, simrank_similarity):
    """With both source and target given, SimRank returns a single value."""
    G = nx.cycle_graph(5)
    # A node is maximally similar to itself.
    actual = simrank_similarity(G, source=0, target=0)
    assert 1 == pytest.approx(actual, abs=1e-2)

    # For a DiGraph test, use the first graph from the paper cited in
    # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126
    G = nx.DiGraph()
    for node, label in enumerate(["Univ", "ProfA", "ProfB", "StudentA", "StudentB"]):
        G.add_node(node, label=label)
    G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)])

    # Use the importance_factor from the paper to get the same numbers.
    # Use the pair (0,2) because (0,0) and (0,1) have trivial results.
    actual = simrank_similarity(G, importance_factor=0.8, source=0, target=2)
    assert 0.1323363991265798 == pytest.approx(actual, abs=1e-5)
|
| 696 |
+
|
| 697 |
+
@pytest.mark.parametrize("alg", simrank_algs)
def test_simrank_max_iterations(self, alg):
    """SimRank raises when it fails to converge within max_iterations."""
    G = nx.cycle_graph(5)
    with pytest.raises(nx.ExceededMaxIterations):
        alg(G, max_iterations=10)
|
| 701 |
+
|
| 702 |
+
def test_simrank_source_not_found(self):
    """A source node absent from the graph raises NodeNotFound."""
    with pytest.raises(nx.NodeNotFound, match="Source node 10 not in G"):
        nx.simrank_similarity(nx.cycle_graph(5), source=10)

def test_simrank_target_not_found(self):
    """A target node absent from the graph raises NodeNotFound."""
    with pytest.raises(nx.NodeNotFound, match="Target node 10 not in G"):
        nx.simrank_similarity(nx.cycle_graph(5), target=10)
|
| 711 |
+
|
| 712 |
+
def test_simrank_between_versions(self):
    """Python and numpy SimRank agree to 1e-3 but diverge at the 1e-4 level."""
    G = nx.cycle_graph(5)
    # _python tolerance 1e-4
    expected_python_tol4 = {
        0: 1,
        1: 0.394512499239852,
        2: 0.5703550452791322,
        3: 0.5703550452791323,
        4: 0.394512499239852,
    }
    # _numpy tolerance 1e-4
    expected_numpy_tol4 = {
        0: 1.0,
        1: 0.3947180735764555,
        2: 0.570482097206368,
        3: 0.570482097206368,
        4: 0.3947180735764555,
    }
    actual = nx.simrank_similarity(G, source=0)
    assert expected_numpy_tol4 == pytest.approx(actual, abs=1e-7)
    # versions differ at 1e-4 level but equal at 1e-3
    assert expected_python_tol4 != pytest.approx(actual, abs=1e-4)
    assert expected_python_tol4 == pytest.approx(actual, abs=1e-3)

    actual = nx.similarity._simrank_similarity_python(G, source=0)
    assert expected_python_tol4 == pytest.approx(actual, abs=1e-7)
    # versions differ at 1e-4 level but equal at 1e-3
    assert expected_numpy_tol4 != pytest.approx(actual, abs=1e-4)
    assert expected_numpy_tol4 == pytest.approx(actual, abs=1e-3)
|
| 741 |
+
|
| 742 |
+
def test_simrank_numpy_no_source_no_target(self):
    """Numpy SimRank on C5 returns the full 5x5 similarity matrix."""
    G = nx.cycle_graph(5)
    near, far = 0.3947180735764555, 0.570482097206368
    # On a 5-cycle the expected similarity depends only on the cyclic
    # distance between the two nodes: 0 -> 1.0 (self), 1 -> near, 2 -> far,
    # so the matrix is circulant and can be built from a single row.
    base_row = [1.0, near, far, far, near]
    expected = np.array(
        [[base_row[(col - row) % 5] for col in range(5)] for row in range(5)]
    )
    actual = nx.similarity._simrank_similarity_numpy(G)
    np.testing.assert_allclose(expected, actual, atol=1e-7)
|
| 785 |
+
|
| 786 |
+
def test_simrank_numpy_source_no_target(self):
    """Numpy SimRank with only a source returns that node's similarity row."""
    G = nx.cycle_graph(5)
    near, far = 0.3947180735764555, 0.570482097206368
    # Row for node 0 on C5: self, neighbors, then nodes at distance 2.
    expected = np.array([1.0, near, far, far, near])
    actual = nx.similarity._simrank_similarity_numpy(G, source=0)
    np.testing.assert_allclose(expected, actual, atol=1e-7)
|
| 799 |
+
|
| 800 |
+
def test_simrank_numpy_source_and_target(self):
    """Numpy SimRank with source and target returns a scalar; self-sim is 1."""
    G = nx.cycle_graph(5)
    actual = nx.similarity._simrank_similarity_numpy(G, source=0, target=0)
    np.testing.assert_allclose(1.0, actual, atol=1e-7)
|
| 805 |
+
|
| 806 |
+
def test_panther_similarity_unweighted(self):
    """Panther similarity on an unweighted 5-node graph with a fixed seed."""
    np.random.seed(42)

    G = nx.Graph()
    G.add_edges_from([(0, 1), (0, 2), (0, 3), (1, 2), (2, 4)])
    # Fixture recorded under np.random.seed(42).
    expected = {3: 0.5, 2: 0.5, 1: 0.5, 4: 0.125}
    assert nx.panther_similarity(G, 0, path_length=2) == expected
|
| 818 |
+
|
| 819 |
+
def test_panther_similarity_weighted(self):
    """Panther similarity honors the custom weight attribute name."""
    np.random.seed(42)

    G = nx.Graph()
    # The weight lives under the non-default attribute name "w".
    for u, v, w in [
        ("v1", "v2", 5),
        ("v1", "v3", 1),
        ("v1", "v4", 2),
        ("v2", "v3", 0.1),
        ("v3", "v5", 1),
    ]:
        G.add_edge(u, v, w=w)
    expected = {"v3": 0.75, "v4": 0.5, "v2": 0.5, "v5": 0.25}
    assert nx.panther_similarity(G, "v1", path_length=2, weight="w") == expected
|
| 831 |
+
|
| 832 |
+
def test_panther_similarity_source_not_found(self):
    """A source node absent from the graph raises NodeNotFound."""
    G = nx.Graph()
    G.add_edges_from([(0, 1), (0, 2), (0, 3), (1, 2), (2, 4)])
    with pytest.raises(nx.NodeNotFound, match="Source node 10 not in G"):
        nx.panther_similarity(G, source=10)

def test_panther_similarity_isolated(self):
    """An isolated source node is rejected as unfeasible."""
    G = nx.Graph()
    G.add_nodes_from(range(5))
    expected_msg = "Panther similarity is not defined for the isolated source node 1."
    with pytest.raises(nx.NetworkXUnfeasible, match=expected_msg):
        nx.panther_similarity(G, source=1)
|
| 846 |
+
|
| 847 |
+
def test_generate_random_paths_unweighted(self):
    """Seeded random walks on an unweighted graph match the recorded fixture."""
    index_map = {}
    num_paths = 10
    path_length = 2
    G = nx.Graph()
    G.add_edge(0, 1)
    G.add_edge(0, 2)
    G.add_edge(0, 3)
    G.add_edge(1, 2)
    G.add_edge(2, 4)
    paths = nx.generate_random_paths(
        G, num_paths, path_length=path_length, index_map=index_map, seed=42
    )
    # Fixture recorded from seed=42; each inner list is one walk of length 2.
    expected_paths = [
        [3, 0, 3],
        [4, 2, 1],
        [2, 1, 0],
        [2, 0, 3],
        [3, 0, 1],
        [3, 0, 1],
        [4, 2, 0],
        [2, 1, 0],
        [3, 0, 2],
        [2, 1, 2],
    ]
    # index_map collects, per node, the indices of the walks that visit it.
    expected_map = {
        0: {0, 2, 3, 4, 5, 6, 7, 8},
        1: {1, 2, 4, 5, 7, 9},
        2: {1, 2, 3, 6, 7, 8, 9},
        3: {0, 3, 4, 5, 8},
        4: {1, 6},
    }

    assert expected_paths == list(paths)
    assert expected_map == index_map
|
| 882 |
+
|
| 883 |
+
def test_generate_random_paths_weighted(self):
    """Seeded random walks on a weighted graph match the recorded fixture."""
    np.random.seed(42)

    index_map = {}
    num_paths = 10
    path_length = 6
    G = nx.Graph()
    G.add_edge("a", "b", weight=0.6)
    G.add_edge("a", "c", weight=0.2)
    G.add_edge("c", "d", weight=0.1)
    G.add_edge("c", "e", weight=0.7)
    G.add_edge("c", "f", weight=0.9)
    G.add_edge("a", "d", weight=0.3)
    paths = nx.generate_random_paths(
        G, num_paths, path_length=path_length, index_map=index_map
    )

    # Fixture recorded under np.random.seed(42).
    expected_paths = [
        ["d", "c", "f", "c", "d", "a", "b"],
        ["e", "c", "f", "c", "f", "c", "e"],
        ["d", "a", "b", "a", "b", "a", "c"],
        ["b", "a", "d", "a", "b", "a", "b"],
        ["d", "a", "b", "a", "b", "a", "d"],
        ["d", "a", "b", "a", "b", "a", "c"],
        ["d", "a", "b", "a", "b", "a", "b"],
        ["f", "c", "f", "c", "f", "c", "e"],
        ["d", "a", "d", "a", "b", "a", "b"],
        ["e", "c", "f", "c", "e", "c", "d"],
    ]
    # index_map collects, per node, the indices of the walks that visit it.
    expected_map = {
        "d": {0, 2, 3, 4, 5, 6, 8, 9},
        "c": {0, 1, 2, 5, 7, 9},
        "f": {0, 1, 9, 7},
        "a": {0, 2, 3, 4, 5, 6, 8},
        "b": {0, 2, 3, 4, 5, 6, 8},
        "e": {1, 9, 7},
    }

    assert expected_paths == list(paths)
    assert expected_map == index_map
|
| 923 |
+
|
| 924 |
+
def test_symmetry_with_custom_matching(self):
    """GED must give the same answer in both directions under custom matchers."""
    print("G2 is edge (a,b) and G3 is edge (a,a)")
    print("but node order for G2 is (a,b) while for G3 it is (b,a)")

    a, b = "A", "B"
    G2 = nx.Graph()
    G2.add_nodes_from((a, b))
    G2.add_edges_from([(a, b)])
    G3 = nx.Graph()
    G3.add_nodes_from((b, a))
    G3.add_edges_from([(a, a)])
    # Tag every node and edge with itself so the matchers compare identity.
    for graph in (G2, G3):
        for node in graph:
            graph.nodes[node]["attr"] = node
        for edge in graph.edges:
            graph.edges[edge]["attr"] = edge

    def match(x, y):
        return x == y

    print("Starting G2 to G3 GED calculation")
    assert nx.graph_edit_distance(G2, G3, node_match=match, edge_match=match) == 1

    print("Starting G3 to G2 GED calculation")
    assert nx.graph_edit_distance(G3, G2, node_match=match, edge_match=match) == 1
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_smallworld.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
pytest.importorskip("numpy")
|
| 4 |
+
|
| 5 |
+
import random
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
from networkx import lattice_reference, omega, random_reference, sigma
|
| 9 |
+
|
| 10 |
+
# Seed passed to every seeded call in this module for reproducibility.
rng = 42
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def test_random_reference():
    """random_reference lowers clustering and validates its input graph."""
    G = nx.connected_watts_strogatz_graph(50, 6, 0.1, seed=rng)
    randomized = random_reference(G, niter=1, seed=rng)
    # Rewiring destroys the small-world clustering.
    assert nx.average_clustering(G) > nx.average_clustering(randomized)

    # Too-small and directed graphs are rejected.
    with pytest.raises(nx.NetworkXError):
        next(random_reference(nx.Graph()))
    with pytest.raises(nx.NetworkXNotImplemented):
        next(random_reference(nx.DiGraph()))

    # A disconnected graph with at least two edges is still accepted.
    disconnected = nx.Graph(((0, 1), (2, 3)))
    random_reference(disconnected, niter=1, seed=rng)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def test_lattice_reference():
    """lattice_reference raises average path length and validates its input."""
    G = nx.connected_watts_strogatz_graph(50, 6, 1, seed=rng)
    latticized = lattice_reference(G, niter=1, seed=rng)
    # Latticization lengthens shortest paths relative to the random graph.
    before = nx.average_shortest_path_length(G)
    after = nx.average_shortest_path_length(latticized)
    assert after > before

    # Too-small and directed graphs are rejected.
    with pytest.raises(nx.NetworkXError):
        lattice_reference(nx.Graph())
    with pytest.raises(nx.NetworkXNotImplemented):
        lattice_reference(nx.DiGraph())

    # A disconnected graph with at least two edges is still accepted.
    lattice_reference(nx.Graph(((0, 1), (2, 3))), niter=1)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def test_sigma():
    """sigma is larger for a small-world graph than for a random one."""
    Gs = nx.connected_watts_strogatz_graph(50, 6, 0.1, seed=rng)
    Gr = nx.connected_watts_strogatz_graph(50, 6, 1, seed=rng)
    sigma_small_world = sigma(Gs, niter=1, nrand=2, seed=rng)
    sigma_random = sigma(Gr, niter=1, nrand=2, seed=rng)
    assert sigma_random < sigma_small_world
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def test_omega():
    """omega orders lattice < small-world < random and stays within [-1, 1]."""
    G_lattice = nx.connected_watts_strogatz_graph(50, 6, 0, seed=rng)
    G_random = nx.connected_watts_strogatz_graph(50, 6, 1, seed=rng)
    G_small_world = nx.connected_watts_strogatz_graph(50, 6, 0.1, seed=rng)
    o_lattice = omega(G_lattice, niter=1, nrand=1, seed=rng)
    o_random = omega(G_random, niter=1, nrand=1, seed=rng)
    o_small_world = omega(G_small_world, niter=1, nrand=1, seed=rng)
    assert o_lattice < o_small_world < o_random

    # Test that omega lies within the [-1, 1] bounds on other graphs too.
    o_barbell = nx.omega(nx.barbell_graph(5, 1))
    o_karate = nx.omega(nx.karate_club_graph(), nrand=2)

    for value in (o_lattice, o_random, o_small_world, o_barbell, o_karate):
        assert -1 <= value <= 1
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
@pytest.mark.parametrize("f", (nx.random_reference, nx.lattice_reference))
def test_graph_no_edges(f):
    """Both reference builders reject a graph with fewer than two edges."""
    G = nx.Graph()
    G.add_nodes_from(range(4))
    # NOTE: "fewer that" reproduces the library's actual message verbatim.
    with pytest.raises(nx.NetworkXError, match="Graph has fewer that 2 edges"):
        f(G)
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_smetric.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import warnings
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def test_smetric():
    """s_metric of this 4-node, 4-edge graph equals the known value 19."""
    g = nx.Graph([(1, 2), (2, 3), (2, 4), (1, 4)])
    assert nx.s_metric(g, normalized=False) == 19.0
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
# NOTE: Tests below to be deleted when deprecation of `normalized` kwarg expires
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def test_normalized_deprecation_warning():
    """Test that a deprecation warning is raised when s_metric is called with
    a `normalized` kwarg."""
    G = nx.cycle_graph(7)
    # Future behavior: calling without kwargs must not warn at all, so
    # escalate any warning to an error to fail the test if one appears.
    with warnings.catch_warnings():
        warnings.simplefilter("error")
        assert nx.s_metric(G) == 28

    # The deprecated kwarg still works but emits a DeprecationWarning.
    with pytest.deprecated_call():
        nx.s_metric(G, normalized=True)

    # An unrecognized keyword falls through to standard Python behavior.
    with pytest.raises(TypeError):
        nx.s_metric(G, normalize=True)
|
valley/lib/python3.10/site-packages/networkx/algorithms/tests/test_sparsifiers.py
ADDED
|
@@ -0,0 +1,137 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Unit tests for the sparsifier computation functions."""
|
| 2 |
+
import pytest
|
| 3 |
+
|
| 4 |
+
import networkx as nx
|
| 5 |
+
from networkx.utils import py_random_state
|
| 6 |
+
|
| 7 |
+
_seed = 2
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def _test_spanner(G, spanner, stretch, weight=None):
    """Assert that `spanner` is a valid spanner of `G` for the given stretch.

    Three properties are verified: the spanner has exactly the nodes of G,
    every spanner edge exists in G (with an identical weight when `weight`
    is given), and every finite shortest-path distance in G is stretched by
    at most `stretch` in the spanner.

    Parameters
    ----------
    G : NetworkX graph
        The original graph for which the spanner was constructed.

    spanner : NetworkX graph
        The spanner to be tested.

    stretch : float
        The proclaimed stretch of the spanner.

    weight : object
        The edge attribute to use as distance.
    """
    # Same node set as the original graph.
    assert set(spanner.nodes()) == set(G.nodes())

    # Every spanner edge comes from G and keeps the original weight.
    for u, v in spanner.edges():
        assert G.has_edge(u, v)
        if weight:
            assert spanner[u][v][weight] == G[u][v][weight]

    # Connectivity and stretch: spanner distances may exceed the original
    # distances by at most a factor of `stretch`.
    dist_original = dict(nx.shortest_path_length(G, weight=weight))
    dist_spanner = dict(nx.shortest_path_length(spanner, weight=weight))
    for u in G.nodes():
        for v in G.nodes():
            if u in dist_original and v in dist_original[u]:
                assert dist_spanner[u][v] <= stretch * dist_original[u][v]
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
@py_random_state(1)
def _assign_random_weights(G, seed=None):
    """Give every edge of `G` a random "weight" attribute in [0, 1).

    The graph is modified in place.

    Parameters
    ----------

    G : NetworkX graph
        The original graph for which the spanner was constructed.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    """
    # `seed` arrives as a random.Random-like state via @py_random_state.
    for edge in G.edges():
        G.edges[edge]["weight"] = seed.random()
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def test_spanner_trivial():
    """A stretch-1 spanner of a complete graph must keep every edge."""
    G = nx.complete_graph(20)
    spanner = nx.spanner(G, 1, seed=_seed)

    assert all(spanner.has_edge(u, v) for u, v in G.edges)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def test_spanner_unweighted_complete_graph():
    """Test spanner construction on a complete unweighted graph."""
    G = nx.complete_graph(20)

    for stretch in (4, 10):
        _test_spanner(G, nx.spanner(G, stretch, seed=_seed), stretch)
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def test_spanner_weighted_complete_graph():
    """Test spanner construction on a complete weighted graph."""
    G = nx.complete_graph(20)
    _assign_random_weights(G, seed=_seed)

    for stretch in (4, 10):
        spanner = nx.spanner(G, stretch, weight="weight", seed=_seed)
        _test_spanner(G, spanner, stretch, weight="weight")
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def test_spanner_unweighted_gnp_graph():
    """Test spanner construction on an unweighted G(n, p) random graph."""
    G = nx.gnp_random_graph(20, 0.4, seed=_seed)

    for stretch in (4, 10):
        _test_spanner(G, nx.spanner(G, stretch, seed=_seed), stretch)
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def test_spanner_weighted_gnp_graph():
    """Test spanner construction on a weighted G(n, p) random graph."""
    G = nx.gnp_random_graph(20, 0.4, seed=_seed)
    _assign_random_weights(G, seed=_seed)

    for stretch in (4, 10):
        spanner = nx.spanner(G, stretch, weight="weight", seed=_seed)
        _test_spanner(G, spanner, stretch, weight="weight")
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def test_spanner_unweighted_disconnected_graph():
    """Test spanner construction on a disconnected graph (two 10-cliques)."""
    G = nx.disjoint_union(nx.complete_graph(10), nx.complete_graph(10))

    for stretch in (4, 10):
        _test_spanner(G, nx.spanner(G, stretch, seed=_seed), stretch)
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
def test_spanner_invalid_stretch():
    """A stretch below 1 must be rejected with a ValueError."""
    G = nx.empty_graph()
    with pytest.raises(ValueError):
        nx.spanner(G, 0)
|