Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/connectivity.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/mixing.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/pairs.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__init__.py +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/base_test.py +81 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/test_correlation.py +123 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/test_mixing.py +176 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/test_neighbor_degree.py +108 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/__init__.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/closeness.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/katz.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/laplacian.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/voterank_alg.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_betweenness_centrality.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_closeness_centrality.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_betweenness_centrality.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_closeness.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_degree_centrality.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_dispersion.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_eigenvector_centrality.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_katz_centrality.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_percolation_centrality.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_reaching.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py +197 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py +147 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_degree_centrality.py +144 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_group.py +278 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_katz_centrality.py +345 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_subgraph.py +110 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_trophic.py +302 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_disjoint_paths.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_edge_augmentation.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_edge_kcomponents.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_kcomponents.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/tests/test_stoer_wagner.py +102 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/core.py +545 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/d_separation.py +457 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/dominating.py +94 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/efficiency_measures.py +168 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/euler.py +469 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/flow/__pycache__/gomory_hu.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/flow/preflowpush.py +429 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/matching.py +1151 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/planarity.py +1179 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/reciprocity.py +97 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/regular.py +212 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/traversal/__pycache__/__init__.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/traversal/__pycache__/depth_first_search.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/traversal/breadth_first_search.py +581 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/traversal/depth_first_search.py +469 -0
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/connectivity.cpython-311.pyc
ADDED
|
Binary file (5.73 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/mixing.cpython-311.pyc
ADDED
|
Binary file (8.78 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/pairs.cpython-311.pyc
ADDED
|
Binary file (4.68 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__init__.py
ADDED
|
File without changes
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/base_test.py
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import networkx as nx
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
class BaseTestAttributeMixing:
|
| 5 |
+
@classmethod
|
| 6 |
+
def setup_class(cls):
|
| 7 |
+
G = nx.Graph()
|
| 8 |
+
G.add_nodes_from([0, 1], fish="one")
|
| 9 |
+
G.add_nodes_from([2, 3], fish="two")
|
| 10 |
+
G.add_nodes_from([4], fish="red")
|
| 11 |
+
G.add_nodes_from([5], fish="blue")
|
| 12 |
+
G.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
|
| 13 |
+
cls.G = G
|
| 14 |
+
|
| 15 |
+
D = nx.DiGraph()
|
| 16 |
+
D.add_nodes_from([0, 1], fish="one")
|
| 17 |
+
D.add_nodes_from([2, 3], fish="two")
|
| 18 |
+
D.add_nodes_from([4], fish="red")
|
| 19 |
+
D.add_nodes_from([5], fish="blue")
|
| 20 |
+
D.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
|
| 21 |
+
cls.D = D
|
| 22 |
+
|
| 23 |
+
M = nx.MultiGraph()
|
| 24 |
+
M.add_nodes_from([0, 1], fish="one")
|
| 25 |
+
M.add_nodes_from([2, 3], fish="two")
|
| 26 |
+
M.add_nodes_from([4], fish="red")
|
| 27 |
+
M.add_nodes_from([5], fish="blue")
|
| 28 |
+
M.add_edges_from([(0, 1), (0, 1), (2, 3)])
|
| 29 |
+
cls.M = M
|
| 30 |
+
|
| 31 |
+
S = nx.Graph()
|
| 32 |
+
S.add_nodes_from([0, 1], fish="one")
|
| 33 |
+
S.add_nodes_from([2, 3], fish="two")
|
| 34 |
+
S.add_nodes_from([4], fish="red")
|
| 35 |
+
S.add_nodes_from([5], fish="blue")
|
| 36 |
+
S.add_edge(0, 0)
|
| 37 |
+
S.add_edge(2, 2)
|
| 38 |
+
cls.S = S
|
| 39 |
+
|
| 40 |
+
N = nx.Graph()
|
| 41 |
+
N.add_nodes_from([0, 1], margin=-2)
|
| 42 |
+
N.add_nodes_from([2, 3], margin=-2)
|
| 43 |
+
N.add_nodes_from([4], margin=-3)
|
| 44 |
+
N.add_nodes_from([5], margin=-4)
|
| 45 |
+
N.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
|
| 46 |
+
cls.N = N
|
| 47 |
+
|
| 48 |
+
F = nx.Graph()
|
| 49 |
+
F.add_edges_from([(0, 3), (1, 3), (2, 3)], weight=0.5)
|
| 50 |
+
F.add_edge(0, 2, weight=1)
|
| 51 |
+
nx.set_node_attributes(F, dict(F.degree(weight="weight")), "margin")
|
| 52 |
+
cls.F = F
|
| 53 |
+
|
| 54 |
+
K = nx.Graph()
|
| 55 |
+
K.add_nodes_from([1, 2], margin=-1)
|
| 56 |
+
K.add_nodes_from([3], margin=1)
|
| 57 |
+
K.add_nodes_from([4], margin=2)
|
| 58 |
+
K.add_edges_from([(3, 4), (1, 2), (1, 3)])
|
| 59 |
+
cls.K = K
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
class BaseTestDegreeMixing:
|
| 63 |
+
@classmethod
|
| 64 |
+
def setup_class(cls):
|
| 65 |
+
cls.P4 = nx.path_graph(4)
|
| 66 |
+
cls.D = nx.DiGraph()
|
| 67 |
+
cls.D.add_edges_from([(0, 2), (0, 3), (1, 3), (2, 3)])
|
| 68 |
+
cls.D2 = nx.DiGraph()
|
| 69 |
+
cls.D2.add_edges_from([(0, 3), (1, 0), (1, 2), (2, 4), (4, 1), (4, 3), (4, 2)])
|
| 70 |
+
cls.M = nx.MultiGraph()
|
| 71 |
+
nx.add_path(cls.M, range(4))
|
| 72 |
+
cls.M.add_edge(0, 1)
|
| 73 |
+
cls.S = nx.Graph()
|
| 74 |
+
cls.S.add_edges_from([(0, 0), (1, 1)])
|
| 75 |
+
cls.W = nx.Graph()
|
| 76 |
+
cls.W.add_edges_from([(0, 3), (1, 3), (2, 3)], weight=0.5)
|
| 77 |
+
cls.W.add_edge(0, 2, weight=1)
|
| 78 |
+
S1 = nx.star_graph(4)
|
| 79 |
+
S2 = nx.star_graph(4)
|
| 80 |
+
cls.DS = nx.disjoint_union(S1, S2)
|
| 81 |
+
cls.DS.add_edge(4, 5)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/test_correlation.py
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
np = pytest.importorskip("numpy")
|
| 4 |
+
pytest.importorskip("scipy")
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
from networkx.algorithms.assortativity.correlation import attribute_ac
|
| 9 |
+
|
| 10 |
+
from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class TestDegreeMixingCorrelation(BaseTestDegreeMixing):
|
| 14 |
+
def test_degree_assortativity_undirected(self):
|
| 15 |
+
r = nx.degree_assortativity_coefficient(self.P4)
|
| 16 |
+
np.testing.assert_almost_equal(r, -1.0 / 2, decimal=4)
|
| 17 |
+
|
| 18 |
+
def test_degree_assortativity_node_kwargs(self):
|
| 19 |
+
G = nx.Graph()
|
| 20 |
+
edges = [(0, 1), (0, 3), (1, 2), (1, 3), (1, 4), (5, 9), (9, 0)]
|
| 21 |
+
G.add_edges_from(edges)
|
| 22 |
+
r = nx.degree_assortativity_coefficient(G, nodes=[1, 2, 4])
|
| 23 |
+
np.testing.assert_almost_equal(r, -1.0, decimal=4)
|
| 24 |
+
|
| 25 |
+
def test_degree_assortativity_directed(self):
|
| 26 |
+
r = nx.degree_assortativity_coefficient(self.D)
|
| 27 |
+
np.testing.assert_almost_equal(r, -0.57735, decimal=4)
|
| 28 |
+
|
| 29 |
+
def test_degree_assortativity_directed2(self):
|
| 30 |
+
"""Test degree assortativity for a directed graph where the set of
|
| 31 |
+
in/out degree does not equal the total degree."""
|
| 32 |
+
r = nx.degree_assortativity_coefficient(self.D2)
|
| 33 |
+
np.testing.assert_almost_equal(r, 0.14852, decimal=4)
|
| 34 |
+
|
| 35 |
+
def test_degree_assortativity_multigraph(self):
|
| 36 |
+
r = nx.degree_assortativity_coefficient(self.M)
|
| 37 |
+
np.testing.assert_almost_equal(r, -1.0 / 7.0, decimal=4)
|
| 38 |
+
|
| 39 |
+
def test_degree_pearson_assortativity_undirected(self):
|
| 40 |
+
r = nx.degree_pearson_correlation_coefficient(self.P4)
|
| 41 |
+
np.testing.assert_almost_equal(r, -1.0 / 2, decimal=4)
|
| 42 |
+
|
| 43 |
+
def test_degree_pearson_assortativity_directed(self):
|
| 44 |
+
r = nx.degree_pearson_correlation_coefficient(self.D)
|
| 45 |
+
np.testing.assert_almost_equal(r, -0.57735, decimal=4)
|
| 46 |
+
|
| 47 |
+
def test_degree_pearson_assortativity_directed2(self):
|
| 48 |
+
"""Test degree assortativity with Pearson for a directed graph where
|
| 49 |
+
the set of in/out degree does not equal the total degree."""
|
| 50 |
+
r = nx.degree_pearson_correlation_coefficient(self.D2)
|
| 51 |
+
np.testing.assert_almost_equal(r, 0.14852, decimal=4)
|
| 52 |
+
|
| 53 |
+
def test_degree_pearson_assortativity_multigraph(self):
|
| 54 |
+
r = nx.degree_pearson_correlation_coefficient(self.M)
|
| 55 |
+
np.testing.assert_almost_equal(r, -1.0 / 7.0, decimal=4)
|
| 56 |
+
|
| 57 |
+
def test_degree_assortativity_weighted(self):
|
| 58 |
+
r = nx.degree_assortativity_coefficient(self.W, weight="weight")
|
| 59 |
+
np.testing.assert_almost_equal(r, -0.1429, decimal=4)
|
| 60 |
+
|
| 61 |
+
def test_degree_assortativity_double_star(self):
|
| 62 |
+
r = nx.degree_assortativity_coefficient(self.DS)
|
| 63 |
+
np.testing.assert_almost_equal(r, -0.9339, decimal=4)
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
class TestAttributeMixingCorrelation(BaseTestAttributeMixing):
|
| 67 |
+
def test_attribute_assortativity_undirected(self):
|
| 68 |
+
r = nx.attribute_assortativity_coefficient(self.G, "fish")
|
| 69 |
+
assert r == 6.0 / 22.0
|
| 70 |
+
|
| 71 |
+
def test_attribute_assortativity_directed(self):
|
| 72 |
+
r = nx.attribute_assortativity_coefficient(self.D, "fish")
|
| 73 |
+
assert r == 1.0 / 3.0
|
| 74 |
+
|
| 75 |
+
def test_attribute_assortativity_multigraph(self):
|
| 76 |
+
r = nx.attribute_assortativity_coefficient(self.M, "fish")
|
| 77 |
+
assert r == 1.0
|
| 78 |
+
|
| 79 |
+
def test_attribute_assortativity_coefficient(self):
|
| 80 |
+
# from "Mixing patterns in networks"
|
| 81 |
+
# fmt: off
|
| 82 |
+
a = np.array([[0.258, 0.016, 0.035, 0.013],
|
| 83 |
+
[0.012, 0.157, 0.058, 0.019],
|
| 84 |
+
[0.013, 0.023, 0.306, 0.035],
|
| 85 |
+
[0.005, 0.007, 0.024, 0.016]])
|
| 86 |
+
# fmt: on
|
| 87 |
+
r = attribute_ac(a)
|
| 88 |
+
np.testing.assert_almost_equal(r, 0.623, decimal=3)
|
| 89 |
+
|
| 90 |
+
def test_attribute_assortativity_coefficient2(self):
|
| 91 |
+
# fmt: off
|
| 92 |
+
a = np.array([[0.18, 0.02, 0.01, 0.03],
|
| 93 |
+
[0.02, 0.20, 0.03, 0.02],
|
| 94 |
+
[0.01, 0.03, 0.16, 0.01],
|
| 95 |
+
[0.03, 0.02, 0.01, 0.22]])
|
| 96 |
+
# fmt: on
|
| 97 |
+
r = attribute_ac(a)
|
| 98 |
+
np.testing.assert_almost_equal(r, 0.68, decimal=2)
|
| 99 |
+
|
| 100 |
+
def test_attribute_assortativity(self):
|
| 101 |
+
a = np.array([[50, 50, 0], [50, 50, 0], [0, 0, 2]])
|
| 102 |
+
r = attribute_ac(a)
|
| 103 |
+
np.testing.assert_almost_equal(r, 0.029, decimal=3)
|
| 104 |
+
|
| 105 |
+
def test_attribute_assortativity_negative(self):
|
| 106 |
+
r = nx.numeric_assortativity_coefficient(self.N, "margin")
|
| 107 |
+
np.testing.assert_almost_equal(r, -0.2903, decimal=4)
|
| 108 |
+
|
| 109 |
+
def test_assortativity_node_kwargs(self):
|
| 110 |
+
G = nx.Graph()
|
| 111 |
+
G.add_nodes_from([0, 1], size=2)
|
| 112 |
+
G.add_nodes_from([2, 3], size=3)
|
| 113 |
+
G.add_edges_from([(0, 1), (2, 3)])
|
| 114 |
+
r = nx.numeric_assortativity_coefficient(G, "size", nodes=[0, 3])
|
| 115 |
+
np.testing.assert_almost_equal(r, 1.0, decimal=4)
|
| 116 |
+
|
| 117 |
+
def test_attribute_assortativity_float(self):
|
| 118 |
+
r = nx.numeric_assortativity_coefficient(self.F, "margin")
|
| 119 |
+
np.testing.assert_almost_equal(r, -0.1429, decimal=4)
|
| 120 |
+
|
| 121 |
+
def test_attribute_assortativity_mixed(self):
|
| 122 |
+
r = nx.numeric_assortativity_coefficient(self.K, "margin")
|
| 123 |
+
np.testing.assert_almost_equal(r, 0.4340, decimal=4)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/test_mixing.py
ADDED
|
@@ -0,0 +1,176 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
np = pytest.importorskip("numpy")
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
import networkx as nx
|
| 7 |
+
|
| 8 |
+
from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class TestDegreeMixingDict(BaseTestDegreeMixing):
|
| 12 |
+
def test_degree_mixing_dict_undirected(self):
|
| 13 |
+
d = nx.degree_mixing_dict(self.P4)
|
| 14 |
+
d_result = {1: {2: 2}, 2: {1: 2, 2: 2}}
|
| 15 |
+
assert d == d_result
|
| 16 |
+
|
| 17 |
+
def test_degree_mixing_dict_undirected_normalized(self):
|
| 18 |
+
d = nx.degree_mixing_dict(self.P4, normalized=True)
|
| 19 |
+
d_result = {1: {2: 1.0 / 3}, 2: {1: 1.0 / 3, 2: 1.0 / 3}}
|
| 20 |
+
assert d == d_result
|
| 21 |
+
|
| 22 |
+
def test_degree_mixing_dict_directed(self):
|
| 23 |
+
d = nx.degree_mixing_dict(self.D)
|
| 24 |
+
print(d)
|
| 25 |
+
d_result = {1: {3: 2}, 2: {1: 1, 3: 1}, 3: {}}
|
| 26 |
+
assert d == d_result
|
| 27 |
+
|
| 28 |
+
def test_degree_mixing_dict_multigraph(self):
|
| 29 |
+
d = nx.degree_mixing_dict(self.M)
|
| 30 |
+
d_result = {1: {2: 1}, 2: {1: 1, 3: 3}, 3: {2: 3}}
|
| 31 |
+
assert d == d_result
|
| 32 |
+
|
| 33 |
+
def test_degree_mixing_dict_weighted(self):
|
| 34 |
+
d = nx.degree_mixing_dict(self.W, weight="weight")
|
| 35 |
+
d_result = {0.5: {1.5: 1}, 1.5: {1.5: 6, 0.5: 1}}
|
| 36 |
+
assert d == d_result
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class TestDegreeMixingMatrix(BaseTestDegreeMixing):
|
| 40 |
+
def test_degree_mixing_matrix_undirected(self):
|
| 41 |
+
# fmt: off
|
| 42 |
+
a_result = np.array([[0, 2],
|
| 43 |
+
[2, 2]]
|
| 44 |
+
)
|
| 45 |
+
# fmt: on
|
| 46 |
+
a = nx.degree_mixing_matrix(self.P4, normalized=False)
|
| 47 |
+
np.testing.assert_equal(a, a_result)
|
| 48 |
+
a = nx.degree_mixing_matrix(self.P4)
|
| 49 |
+
np.testing.assert_equal(a, a_result / a_result.sum())
|
| 50 |
+
|
| 51 |
+
def test_degree_mixing_matrix_directed(self):
|
| 52 |
+
# fmt: off
|
| 53 |
+
a_result = np.array([[0, 0, 2],
|
| 54 |
+
[1, 0, 1],
|
| 55 |
+
[0, 0, 0]]
|
| 56 |
+
)
|
| 57 |
+
# fmt: on
|
| 58 |
+
a = nx.degree_mixing_matrix(self.D, normalized=False)
|
| 59 |
+
np.testing.assert_equal(a, a_result)
|
| 60 |
+
a = nx.degree_mixing_matrix(self.D)
|
| 61 |
+
np.testing.assert_equal(a, a_result / a_result.sum())
|
| 62 |
+
|
| 63 |
+
def test_degree_mixing_matrix_multigraph(self):
|
| 64 |
+
# fmt: off
|
| 65 |
+
a_result = np.array([[0, 1, 0],
|
| 66 |
+
[1, 0, 3],
|
| 67 |
+
[0, 3, 0]]
|
| 68 |
+
)
|
| 69 |
+
# fmt: on
|
| 70 |
+
a = nx.degree_mixing_matrix(self.M, normalized=False)
|
| 71 |
+
np.testing.assert_equal(a, a_result)
|
| 72 |
+
a = nx.degree_mixing_matrix(self.M)
|
| 73 |
+
np.testing.assert_equal(a, a_result / a_result.sum())
|
| 74 |
+
|
| 75 |
+
def test_degree_mixing_matrix_selfloop(self):
|
| 76 |
+
# fmt: off
|
| 77 |
+
a_result = np.array([[2]])
|
| 78 |
+
# fmt: on
|
| 79 |
+
a = nx.degree_mixing_matrix(self.S, normalized=False)
|
| 80 |
+
np.testing.assert_equal(a, a_result)
|
| 81 |
+
a = nx.degree_mixing_matrix(self.S)
|
| 82 |
+
np.testing.assert_equal(a, a_result / a_result.sum())
|
| 83 |
+
|
| 84 |
+
def test_degree_mixing_matrix_weighted(self):
|
| 85 |
+
a_result = np.array([[0.0, 1.0], [1.0, 6.0]])
|
| 86 |
+
a = nx.degree_mixing_matrix(self.W, weight="weight", normalized=False)
|
| 87 |
+
np.testing.assert_equal(a, a_result)
|
| 88 |
+
a = nx.degree_mixing_matrix(self.W, weight="weight")
|
| 89 |
+
np.testing.assert_equal(a, a_result / float(a_result.sum()))
|
| 90 |
+
|
| 91 |
+
def test_degree_mixing_matrix_mapping(self):
|
| 92 |
+
a_result = np.array([[6.0, 1.0], [1.0, 0.0]])
|
| 93 |
+
mapping = {0.5: 1, 1.5: 0}
|
| 94 |
+
a = nx.degree_mixing_matrix(
|
| 95 |
+
self.W, weight="weight", normalized=False, mapping=mapping
|
| 96 |
+
)
|
| 97 |
+
np.testing.assert_equal(a, a_result)
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
class TestAttributeMixingDict(BaseTestAttributeMixing):
|
| 101 |
+
def test_attribute_mixing_dict_undirected(self):
|
| 102 |
+
d = nx.attribute_mixing_dict(self.G, "fish")
|
| 103 |
+
d_result = {
|
| 104 |
+
"one": {"one": 2, "red": 1},
|
| 105 |
+
"two": {"two": 2, "blue": 1},
|
| 106 |
+
"red": {"one": 1},
|
| 107 |
+
"blue": {"two": 1},
|
| 108 |
+
}
|
| 109 |
+
assert d == d_result
|
| 110 |
+
|
| 111 |
+
def test_attribute_mixing_dict_directed(self):
|
| 112 |
+
d = nx.attribute_mixing_dict(self.D, "fish")
|
| 113 |
+
d_result = {
|
| 114 |
+
"one": {"one": 1, "red": 1},
|
| 115 |
+
"two": {"two": 1, "blue": 1},
|
| 116 |
+
"red": {},
|
| 117 |
+
"blue": {},
|
| 118 |
+
}
|
| 119 |
+
assert d == d_result
|
| 120 |
+
|
| 121 |
+
def test_attribute_mixing_dict_multigraph(self):
|
| 122 |
+
d = nx.attribute_mixing_dict(self.M, "fish")
|
| 123 |
+
d_result = {"one": {"one": 4}, "two": {"two": 2}}
|
| 124 |
+
assert d == d_result
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
class TestAttributeMixingMatrix(BaseTestAttributeMixing):
|
| 128 |
+
def test_attribute_mixing_matrix_undirected(self):
|
| 129 |
+
mapping = {"one": 0, "two": 1, "red": 2, "blue": 3}
|
| 130 |
+
a_result = np.array([[2, 0, 1, 0], [0, 2, 0, 1], [1, 0, 0, 0], [0, 1, 0, 0]])
|
| 131 |
+
a = nx.attribute_mixing_matrix(
|
| 132 |
+
self.G, "fish", mapping=mapping, normalized=False
|
| 133 |
+
)
|
| 134 |
+
np.testing.assert_equal(a, a_result)
|
| 135 |
+
a = nx.attribute_mixing_matrix(self.G, "fish", mapping=mapping)
|
| 136 |
+
np.testing.assert_equal(a, a_result / a_result.sum())
|
| 137 |
+
|
| 138 |
+
def test_attribute_mixing_matrix_directed(self):
|
| 139 |
+
mapping = {"one": 0, "two": 1, "red": 2, "blue": 3}
|
| 140 |
+
a_result = np.array([[1, 0, 1, 0], [0, 1, 0, 1], [0, 0, 0, 0], [0, 0, 0, 0]])
|
| 141 |
+
a = nx.attribute_mixing_matrix(
|
| 142 |
+
self.D, "fish", mapping=mapping, normalized=False
|
| 143 |
+
)
|
| 144 |
+
np.testing.assert_equal(a, a_result)
|
| 145 |
+
a = nx.attribute_mixing_matrix(self.D, "fish", mapping=mapping)
|
| 146 |
+
np.testing.assert_equal(a, a_result / a_result.sum())
|
| 147 |
+
|
| 148 |
+
def test_attribute_mixing_matrix_multigraph(self):
|
| 149 |
+
mapping = {"one": 0, "two": 1, "red": 2, "blue": 3}
|
| 150 |
+
a_result = np.array([[4, 0, 0, 0], [0, 2, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]])
|
| 151 |
+
a = nx.attribute_mixing_matrix(
|
| 152 |
+
self.M, "fish", mapping=mapping, normalized=False
|
| 153 |
+
)
|
| 154 |
+
np.testing.assert_equal(a, a_result)
|
| 155 |
+
a = nx.attribute_mixing_matrix(self.M, "fish", mapping=mapping)
|
| 156 |
+
np.testing.assert_equal(a, a_result / a_result.sum())
|
| 157 |
+
|
| 158 |
+
def test_attribute_mixing_matrix_negative(self):
|
| 159 |
+
mapping = {-2: 0, -3: 1, -4: 2}
|
| 160 |
+
a_result = np.array([[4.0, 1.0, 1.0], [1.0, 0.0, 0.0], [1.0, 0.0, 0.0]])
|
| 161 |
+
a = nx.attribute_mixing_matrix(
|
| 162 |
+
self.N, "margin", mapping=mapping, normalized=False
|
| 163 |
+
)
|
| 164 |
+
np.testing.assert_equal(a, a_result)
|
| 165 |
+
a = nx.attribute_mixing_matrix(self.N, "margin", mapping=mapping)
|
| 166 |
+
np.testing.assert_equal(a, a_result / float(a_result.sum()))
|
| 167 |
+
|
| 168 |
+
def test_attribute_mixing_matrix_float(self):
|
| 169 |
+
mapping = {0.5: 1, 1.5: 0}
|
| 170 |
+
a_result = np.array([[6.0, 1.0], [1.0, 0.0]])
|
| 171 |
+
a = nx.attribute_mixing_matrix(
|
| 172 |
+
self.F, "margin", mapping=mapping, normalized=False
|
| 173 |
+
)
|
| 174 |
+
np.testing.assert_equal(a, a_result)
|
| 175 |
+
a = nx.attribute_mixing_matrix(self.F, "margin", mapping=mapping)
|
| 176 |
+
np.testing.assert_equal(a, a_result / a_result.sum())
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/test_neighbor_degree.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class TestAverageNeighbor:
|
| 7 |
+
def test_degree_p4(self):
|
| 8 |
+
G = nx.path_graph(4)
|
| 9 |
+
answer = {0: 2, 1: 1.5, 2: 1.5, 3: 2}
|
| 10 |
+
nd = nx.average_neighbor_degree(G)
|
| 11 |
+
assert nd == answer
|
| 12 |
+
|
| 13 |
+
D = G.to_directed()
|
| 14 |
+
nd = nx.average_neighbor_degree(D)
|
| 15 |
+
assert nd == answer
|
| 16 |
+
|
| 17 |
+
D = nx.DiGraph(G.edges(data=True))
|
| 18 |
+
nd = nx.average_neighbor_degree(D)
|
| 19 |
+
assert nd == {0: 1, 1: 1, 2: 0, 3: 0}
|
| 20 |
+
nd = nx.average_neighbor_degree(D, "in", "out")
|
| 21 |
+
assert nd == {0: 0, 1: 1, 2: 1, 3: 1}
|
| 22 |
+
nd = nx.average_neighbor_degree(D, "out", "in")
|
| 23 |
+
assert nd == {0: 1, 1: 1, 2: 1, 3: 0}
|
| 24 |
+
nd = nx.average_neighbor_degree(D, "in", "in")
|
| 25 |
+
assert nd == {0: 0, 1: 0, 2: 1, 3: 1}
|
| 26 |
+
|
| 27 |
+
def test_degree_p4_weighted(self):
|
| 28 |
+
G = nx.path_graph(4)
|
| 29 |
+
G[1][2]["weight"] = 4
|
| 30 |
+
answer = {0: 2, 1: 1.8, 2: 1.8, 3: 2}
|
| 31 |
+
nd = nx.average_neighbor_degree(G, weight="weight")
|
| 32 |
+
assert nd == answer
|
| 33 |
+
|
| 34 |
+
D = G.to_directed()
|
| 35 |
+
nd = nx.average_neighbor_degree(D, weight="weight")
|
| 36 |
+
assert nd == answer
|
| 37 |
+
|
| 38 |
+
D = nx.DiGraph(G.edges(data=True))
|
| 39 |
+
print(D.edges(data=True))
|
| 40 |
+
nd = nx.average_neighbor_degree(D, weight="weight")
|
| 41 |
+
assert nd == {0: 1, 1: 1, 2: 0, 3: 0}
|
| 42 |
+
nd = nx.average_neighbor_degree(D, "out", "out", weight="weight")
|
| 43 |
+
assert nd == {0: 1, 1: 1, 2: 0, 3: 0}
|
| 44 |
+
nd = nx.average_neighbor_degree(D, "in", "in", weight="weight")
|
| 45 |
+
assert nd == {0: 0, 1: 0, 2: 1, 3: 1}
|
| 46 |
+
nd = nx.average_neighbor_degree(D, "in", "out", weight="weight")
|
| 47 |
+
assert nd == {0: 0, 1: 1, 2: 1, 3: 1}
|
| 48 |
+
nd = nx.average_neighbor_degree(D, "out", "in", weight="weight")
|
| 49 |
+
assert nd == {0: 1, 1: 1, 2: 1, 3: 0}
|
| 50 |
+
nd = nx.average_neighbor_degree(D, source="in+out", weight="weight")
|
| 51 |
+
assert nd == {0: 1.0, 1: 1.0, 2: 0.8, 3: 1.0}
|
| 52 |
+
nd = nx.average_neighbor_degree(D, target="in+out", weight="weight")
|
| 53 |
+
assert nd == {0: 2.0, 1: 2.0, 2: 1.0, 3: 0.0}
|
| 54 |
+
|
| 55 |
+
D = G.to_directed()
|
| 56 |
+
nd = nx.average_neighbor_degree(D, weight="weight")
|
| 57 |
+
assert nd == answer
|
| 58 |
+
nd = nx.average_neighbor_degree(D, source="out", target="out", weight="weight")
|
| 59 |
+
assert nd == answer
|
| 60 |
+
|
| 61 |
+
D = G.to_directed()
|
| 62 |
+
nd = nx.average_neighbor_degree(D, source="in", target="in", weight="weight")
|
| 63 |
+
assert nd == answer
|
| 64 |
+
|
| 65 |
+
def test_degree_k4(self):
|
| 66 |
+
G = nx.complete_graph(4)
|
| 67 |
+
answer = {0: 3, 1: 3, 2: 3, 3: 3}
|
| 68 |
+
nd = nx.average_neighbor_degree(G)
|
| 69 |
+
assert nd == answer
|
| 70 |
+
|
| 71 |
+
D = G.to_directed()
|
| 72 |
+
nd = nx.average_neighbor_degree(D)
|
| 73 |
+
assert nd == answer
|
| 74 |
+
|
| 75 |
+
D = G.to_directed()
|
| 76 |
+
nd = nx.average_neighbor_degree(D)
|
| 77 |
+
assert nd == answer
|
| 78 |
+
|
| 79 |
+
D = G.to_directed()
|
| 80 |
+
nd = nx.average_neighbor_degree(D, source="in", target="in")
|
| 81 |
+
assert nd == answer
|
| 82 |
+
|
| 83 |
+
def test_degree_k4_nodes(self):
|
| 84 |
+
G = nx.complete_graph(4)
|
| 85 |
+
answer = {1: 3.0, 2: 3.0}
|
| 86 |
+
nd = nx.average_neighbor_degree(G, nodes=[1, 2])
|
| 87 |
+
assert nd == answer
|
| 88 |
+
|
| 89 |
+
def test_degree_barrat(self):
|
| 90 |
+
G = nx.star_graph(5)
|
| 91 |
+
G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)])
|
| 92 |
+
G[0][5]["weight"] = 5
|
| 93 |
+
nd = nx.average_neighbor_degree(G)[5]
|
| 94 |
+
assert nd == 1.8
|
| 95 |
+
nd = nx.average_neighbor_degree(G, weight="weight")[5]
|
| 96 |
+
assert nd == pytest.approx(3.222222, abs=1e-5)
|
| 97 |
+
|
| 98 |
+
def test_error_invalid_source_target(self):
|
| 99 |
+
G = nx.path_graph(4)
|
| 100 |
+
with pytest.raises(nx.NetworkXError):
|
| 101 |
+
nx.average_neighbor_degree(G, "error")
|
| 102 |
+
with pytest.raises(nx.NetworkXError):
|
| 103 |
+
nx.average_neighbor_degree(G, "in", "error")
|
| 104 |
+
G = G.to_directed()
|
| 105 |
+
with pytest.raises(nx.NetworkXError):
|
| 106 |
+
nx.average_neighbor_degree(G, "error")
|
| 107 |
+
with pytest.raises(nx.NetworkXError):
|
| 108 |
+
nx.average_neighbor_degree(G, "in", "error")
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (912 Bytes). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/closeness.cpython-311.pyc
ADDED
|
Binary file (11.1 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/katz.cpython-311.pyc
ADDED
|
Binary file (13.5 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/laplacian.cpython-311.pyc
ADDED
|
Binary file (7.04 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/voterank_alg.cpython-311.pyc
ADDED
|
Binary file (4.51 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_betweenness_centrality.cpython-311.pyc
ADDED
|
Binary file (38.6 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_closeness_centrality.cpython-311.pyc
ADDED
|
Binary file (16.7 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_betweenness_centrality.cpython-311.pyc
ADDED
|
Binary file (15.3 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_closeness.cpython-311.pyc
ADDED
|
Binary file (3.57 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_degree_centrality.cpython-311.pyc
ADDED
|
Binary file (8.18 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_dispersion.cpython-311.pyc
ADDED
|
Binary file (3.5 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_eigenvector_centrality.cpython-311.pyc
ADDED
|
Binary file (11.2 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_katz_centrality.cpython-311.pyc
ADDED
|
Binary file (20 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_percolation_centrality.cpython-311.pyc
ADDED
|
Binary file (4.88 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_reaching.cpython-311.pyc
ADDED
|
Binary file (9.94 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py
ADDED
|
@@ -0,0 +1,197 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx import approximate_current_flow_betweenness_centrality as approximate_cfbc
|
| 5 |
+
from networkx import edge_current_flow_betweenness_centrality as edge_current_flow
|
| 6 |
+
|
| 7 |
+
np = pytest.importorskip("numpy")
|
| 8 |
+
pytest.importorskip("scipy")
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class TestFlowBetweennessCentrality:
|
| 12 |
+
def test_K4_normalized(self):
|
| 13 |
+
"""Betweenness centrality: K4"""
|
| 14 |
+
G = nx.complete_graph(4)
|
| 15 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True)
|
| 16 |
+
b_answer = {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25}
|
| 17 |
+
for n in sorted(G):
|
| 18 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 19 |
+
G.add_edge(0, 1, weight=0.5, other=0.3)
|
| 20 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True, weight=None)
|
| 21 |
+
for n in sorted(G):
|
| 22 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 23 |
+
wb_answer = {0: 0.2222222, 1: 0.2222222, 2: 0.30555555, 3: 0.30555555}
|
| 24 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True, weight="weight")
|
| 25 |
+
for n in sorted(G):
|
| 26 |
+
assert b[n] == pytest.approx(wb_answer[n], abs=1e-7)
|
| 27 |
+
wb_answer = {0: 0.2051282, 1: 0.2051282, 2: 0.33974358, 3: 0.33974358}
|
| 28 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True, weight="other")
|
| 29 |
+
for n in sorted(G):
|
| 30 |
+
assert b[n] == pytest.approx(wb_answer[n], abs=1e-7)
|
| 31 |
+
|
| 32 |
+
def test_K4(self):
|
| 33 |
+
"""Betweenness centrality: K4"""
|
| 34 |
+
G = nx.complete_graph(4)
|
| 35 |
+
for solver in ["full", "lu", "cg"]:
|
| 36 |
+
b = nx.current_flow_betweenness_centrality(
|
| 37 |
+
G, normalized=False, solver=solver
|
| 38 |
+
)
|
| 39 |
+
b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
|
| 40 |
+
for n in sorted(G):
|
| 41 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 42 |
+
|
| 43 |
+
def test_P4_normalized(self):
|
| 44 |
+
"""Betweenness centrality: P4 normalized"""
|
| 45 |
+
G = nx.path_graph(4)
|
| 46 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True)
|
| 47 |
+
b_answer = {0: 0, 1: 2.0 / 3, 2: 2.0 / 3, 3: 0}
|
| 48 |
+
for n in sorted(G):
|
| 49 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 50 |
+
|
| 51 |
+
def test_P4(self):
|
| 52 |
+
"""Betweenness centrality: P4"""
|
| 53 |
+
G = nx.path_graph(4)
|
| 54 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=False)
|
| 55 |
+
b_answer = {0: 0, 1: 2, 2: 2, 3: 0}
|
| 56 |
+
for n in sorted(G):
|
| 57 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 58 |
+
|
| 59 |
+
def test_star(self):
|
| 60 |
+
"""Betweenness centrality: star"""
|
| 61 |
+
G = nx.Graph()
|
| 62 |
+
nx.add_star(G, ["a", "b", "c", "d"])
|
| 63 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True)
|
| 64 |
+
b_answer = {"a": 1.0, "b": 0.0, "c": 0.0, "d": 0.0}
|
| 65 |
+
for n in sorted(G):
|
| 66 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 67 |
+
|
| 68 |
+
def test_solvers2(self):
|
| 69 |
+
"""Betweenness centrality: alternate solvers"""
|
| 70 |
+
G = nx.complete_graph(4)
|
| 71 |
+
for solver in ["full", "lu", "cg"]:
|
| 72 |
+
b = nx.current_flow_betweenness_centrality(
|
| 73 |
+
G, normalized=False, solver=solver
|
| 74 |
+
)
|
| 75 |
+
b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
|
| 76 |
+
for n in sorted(G):
|
| 77 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
class TestApproximateFlowBetweennessCentrality:
|
| 81 |
+
def test_K4_normalized(self):
|
| 82 |
+
"Approximate current-flow betweenness centrality: K4 normalized"
|
| 83 |
+
G = nx.complete_graph(4)
|
| 84 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True)
|
| 85 |
+
epsilon = 0.1
|
| 86 |
+
ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon)
|
| 87 |
+
for n in sorted(G):
|
| 88 |
+
np.testing.assert_allclose(b[n], ba[n], atol=epsilon)
|
| 89 |
+
|
| 90 |
+
def test_K4(self):
|
| 91 |
+
"Approximate current-flow betweenness centrality: K4"
|
| 92 |
+
G = nx.complete_graph(4)
|
| 93 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=False)
|
| 94 |
+
epsilon = 0.1
|
| 95 |
+
ba = approximate_cfbc(G, normalized=False, epsilon=0.5 * epsilon)
|
| 96 |
+
for n in sorted(G):
|
| 97 |
+
np.testing.assert_allclose(b[n], ba[n], atol=epsilon * len(G) ** 2)
|
| 98 |
+
|
| 99 |
+
def test_star(self):
|
| 100 |
+
"Approximate current-flow betweenness centrality: star"
|
| 101 |
+
G = nx.Graph()
|
| 102 |
+
nx.add_star(G, ["a", "b", "c", "d"])
|
| 103 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True)
|
| 104 |
+
epsilon = 0.1
|
| 105 |
+
ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon)
|
| 106 |
+
for n in sorted(G):
|
| 107 |
+
np.testing.assert_allclose(b[n], ba[n], atol=epsilon)
|
| 108 |
+
|
| 109 |
+
def test_grid(self):
|
| 110 |
+
"Approximate current-flow betweenness centrality: 2d grid"
|
| 111 |
+
G = nx.grid_2d_graph(4, 4)
|
| 112 |
+
b = nx.current_flow_betweenness_centrality(G, normalized=True)
|
| 113 |
+
epsilon = 0.1
|
| 114 |
+
ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon)
|
| 115 |
+
for n in sorted(G):
|
| 116 |
+
np.testing.assert_allclose(b[n], ba[n], atol=epsilon)
|
| 117 |
+
|
| 118 |
+
def test_seed(self):
|
| 119 |
+
G = nx.complete_graph(4)
|
| 120 |
+
b = approximate_cfbc(G, normalized=False, epsilon=0.05, seed=1)
|
| 121 |
+
b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
|
| 122 |
+
for n in sorted(G):
|
| 123 |
+
np.testing.assert_allclose(b[n], b_answer[n], atol=0.1)
|
| 124 |
+
|
| 125 |
+
def test_solvers(self):
|
| 126 |
+
"Approximate current-flow betweenness centrality: solvers"
|
| 127 |
+
G = nx.complete_graph(4)
|
| 128 |
+
epsilon = 0.1
|
| 129 |
+
for solver in ["full", "lu", "cg"]:
|
| 130 |
+
b = approximate_cfbc(
|
| 131 |
+
G, normalized=False, solver=solver, epsilon=0.5 * epsilon
|
| 132 |
+
)
|
| 133 |
+
b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
|
| 134 |
+
for n in sorted(G):
|
| 135 |
+
np.testing.assert_allclose(b[n], b_answer[n], atol=epsilon)
|
| 136 |
+
|
| 137 |
+
def test_lower_kmax(self):
|
| 138 |
+
G = nx.complete_graph(4)
|
| 139 |
+
with pytest.raises(nx.NetworkXError, match="Increase kmax or epsilon"):
|
| 140 |
+
nx.approximate_current_flow_betweenness_centrality(G, kmax=4)
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
class TestWeightedFlowBetweennessCentrality:
|
| 144 |
+
pass
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
class TestEdgeFlowBetweennessCentrality:
|
| 148 |
+
def test_K4(self):
|
| 149 |
+
"""Edge flow betweenness centrality: K4"""
|
| 150 |
+
G = nx.complete_graph(4)
|
| 151 |
+
b = edge_current_flow(G, normalized=True)
|
| 152 |
+
b_answer = dict.fromkeys(G.edges(), 0.25)
|
| 153 |
+
for (s, t), v1 in b_answer.items():
|
| 154 |
+
v2 = b.get((s, t), b.get((t, s)))
|
| 155 |
+
assert v1 == pytest.approx(v2, abs=1e-7)
|
| 156 |
+
|
| 157 |
+
def test_K4_normalized(self):
|
| 158 |
+
"""Edge flow betweenness centrality: K4"""
|
| 159 |
+
G = nx.complete_graph(4)
|
| 160 |
+
b = edge_current_flow(G, normalized=False)
|
| 161 |
+
b_answer = dict.fromkeys(G.edges(), 0.75)
|
| 162 |
+
for (s, t), v1 in b_answer.items():
|
| 163 |
+
v2 = b.get((s, t), b.get((t, s)))
|
| 164 |
+
assert v1 == pytest.approx(v2, abs=1e-7)
|
| 165 |
+
|
| 166 |
+
def test_C4(self):
|
| 167 |
+
"""Edge flow betweenness centrality: C4"""
|
| 168 |
+
G = nx.cycle_graph(4)
|
| 169 |
+
b = edge_current_flow(G, normalized=False)
|
| 170 |
+
b_answer = {(0, 1): 1.25, (0, 3): 1.25, (1, 2): 1.25, (2, 3): 1.25}
|
| 171 |
+
for (s, t), v1 in b_answer.items():
|
| 172 |
+
v2 = b.get((s, t), b.get((t, s)))
|
| 173 |
+
assert v1 == pytest.approx(v2, abs=1e-7)
|
| 174 |
+
|
| 175 |
+
def test_P4(self):
|
| 176 |
+
"""Edge betweenness centrality: P4"""
|
| 177 |
+
G = nx.path_graph(4)
|
| 178 |
+
b = edge_current_flow(G, normalized=False)
|
| 179 |
+
b_answer = {(0, 1): 1.5, (1, 2): 2.0, (2, 3): 1.5}
|
| 180 |
+
for (s, t), v1 in b_answer.items():
|
| 181 |
+
v2 = b.get((s, t), b.get((t, s)))
|
| 182 |
+
assert v1 == pytest.approx(v2, abs=1e-7)
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
@pytest.mark.parametrize(
|
| 186 |
+
"centrality_func",
|
| 187 |
+
(
|
| 188 |
+
nx.current_flow_betweenness_centrality,
|
| 189 |
+
nx.edge_current_flow_betweenness_centrality,
|
| 190 |
+
nx.approximate_current_flow_betweenness_centrality,
|
| 191 |
+
),
|
| 192 |
+
)
|
| 193 |
+
def test_unconnected_graphs_betweenness_centrality(centrality_func):
|
| 194 |
+
G = nx.Graph([(1, 2), (3, 4)])
|
| 195 |
+
G.add_node(5)
|
| 196 |
+
with pytest.raises(nx.NetworkXError, match="Graph not connected"):
|
| 197 |
+
centrality_func(G)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
pytest.importorskip("numpy")
|
| 4 |
+
pytest.importorskip("scipy")
|
| 5 |
+
|
| 6 |
+
import networkx as nx
|
| 7 |
+
from networkx import edge_current_flow_betweenness_centrality as edge_current_flow
|
| 8 |
+
from networkx import (
|
| 9 |
+
edge_current_flow_betweenness_centrality_subset as edge_current_flow_subset,
|
| 10 |
+
)
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class TestFlowBetweennessCentrality:
|
| 14 |
+
def test_K4_normalized(self):
|
| 15 |
+
"""Betweenness centrality: K4"""
|
| 16 |
+
G = nx.complete_graph(4)
|
| 17 |
+
b = nx.current_flow_betweenness_centrality_subset(
|
| 18 |
+
G, list(G), list(G), normalized=True
|
| 19 |
+
)
|
| 20 |
+
b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
|
| 21 |
+
for n in sorted(G):
|
| 22 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 23 |
+
|
| 24 |
+
def test_K4(self):
|
| 25 |
+
"""Betweenness centrality: K4"""
|
| 26 |
+
G = nx.complete_graph(4)
|
| 27 |
+
b = nx.current_flow_betweenness_centrality_subset(
|
| 28 |
+
G, list(G), list(G), normalized=True
|
| 29 |
+
)
|
| 30 |
+
b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
|
| 31 |
+
for n in sorted(G):
|
| 32 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 33 |
+
# test weighted network
|
| 34 |
+
G.add_edge(0, 1, weight=0.5, other=0.3)
|
| 35 |
+
b = nx.current_flow_betweenness_centrality_subset(
|
| 36 |
+
G, list(G), list(G), normalized=True, weight=None
|
| 37 |
+
)
|
| 38 |
+
for n in sorted(G):
|
| 39 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 40 |
+
b = nx.current_flow_betweenness_centrality_subset(
|
| 41 |
+
G, list(G), list(G), normalized=True
|
| 42 |
+
)
|
| 43 |
+
b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
|
| 44 |
+
for n in sorted(G):
|
| 45 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 46 |
+
b = nx.current_flow_betweenness_centrality_subset(
|
| 47 |
+
G, list(G), list(G), normalized=True, weight="other"
|
| 48 |
+
)
|
| 49 |
+
b_answer = nx.current_flow_betweenness_centrality(
|
| 50 |
+
G, normalized=True, weight="other"
|
| 51 |
+
)
|
| 52 |
+
for n in sorted(G):
|
| 53 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 54 |
+
|
| 55 |
+
def test_P4_normalized(self):
|
| 56 |
+
"""Betweenness centrality: P4 normalized"""
|
| 57 |
+
G = nx.path_graph(4)
|
| 58 |
+
b = nx.current_flow_betweenness_centrality_subset(
|
| 59 |
+
G, list(G), list(G), normalized=True
|
| 60 |
+
)
|
| 61 |
+
b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
|
| 62 |
+
for n in sorted(G):
|
| 63 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 64 |
+
|
| 65 |
+
def test_P4(self):
|
| 66 |
+
"""Betweenness centrality: P4"""
|
| 67 |
+
G = nx.path_graph(4)
|
| 68 |
+
b = nx.current_flow_betweenness_centrality_subset(
|
| 69 |
+
G, list(G), list(G), normalized=True
|
| 70 |
+
)
|
| 71 |
+
b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
|
| 72 |
+
for n in sorted(G):
|
| 73 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 74 |
+
|
| 75 |
+
def test_star(self):
|
| 76 |
+
"""Betweenness centrality: star"""
|
| 77 |
+
G = nx.Graph()
|
| 78 |
+
nx.add_star(G, ["a", "b", "c", "d"])
|
| 79 |
+
b = nx.current_flow_betweenness_centrality_subset(
|
| 80 |
+
G, list(G), list(G), normalized=True
|
| 81 |
+
)
|
| 82 |
+
b_answer = nx.current_flow_betweenness_centrality(G, normalized=True)
|
| 83 |
+
for n in sorted(G):
|
| 84 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
# class TestWeightedFlowBetweennessCentrality():
|
| 88 |
+
# pass
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
class TestEdgeFlowBetweennessCentrality:
|
| 92 |
+
def test_K4_normalized(self):
|
| 93 |
+
"""Betweenness centrality: K4"""
|
| 94 |
+
G = nx.complete_graph(4)
|
| 95 |
+
b = edge_current_flow_subset(G, list(G), list(G), normalized=True)
|
| 96 |
+
b_answer = edge_current_flow(G, normalized=True)
|
| 97 |
+
for (s, t), v1 in b_answer.items():
|
| 98 |
+
v2 = b.get((s, t), b.get((t, s)))
|
| 99 |
+
assert v1 == pytest.approx(v2, abs=1e-7)
|
| 100 |
+
|
| 101 |
+
def test_K4(self):
|
| 102 |
+
"""Betweenness centrality: K4"""
|
| 103 |
+
G = nx.complete_graph(4)
|
| 104 |
+
b = edge_current_flow_subset(G, list(G), list(G), normalized=False)
|
| 105 |
+
b_answer = edge_current_flow(G, normalized=False)
|
| 106 |
+
for (s, t), v1 in b_answer.items():
|
| 107 |
+
v2 = b.get((s, t), b.get((t, s)))
|
| 108 |
+
assert v1 == pytest.approx(v2, abs=1e-7)
|
| 109 |
+
# test weighted network
|
| 110 |
+
G.add_edge(0, 1, weight=0.5, other=0.3)
|
| 111 |
+
b = edge_current_flow_subset(G, list(G), list(G), normalized=False, weight=None)
|
| 112 |
+
# weight is None => same as unweighted network
|
| 113 |
+
for (s, t), v1 in b_answer.items():
|
| 114 |
+
v2 = b.get((s, t), b.get((t, s)))
|
| 115 |
+
assert v1 == pytest.approx(v2, abs=1e-7)
|
| 116 |
+
|
| 117 |
+
b = edge_current_flow_subset(G, list(G), list(G), normalized=False)
|
| 118 |
+
b_answer = edge_current_flow(G, normalized=False)
|
| 119 |
+
for (s, t), v1 in b_answer.items():
|
| 120 |
+
v2 = b.get((s, t), b.get((t, s)))
|
| 121 |
+
assert v1 == pytest.approx(v2, abs=1e-7)
|
| 122 |
+
|
| 123 |
+
b = edge_current_flow_subset(
|
| 124 |
+
G, list(G), list(G), normalized=False, weight="other"
|
| 125 |
+
)
|
| 126 |
+
b_answer = edge_current_flow(G, normalized=False, weight="other")
|
| 127 |
+
for (s, t), v1 in b_answer.items():
|
| 128 |
+
v2 = b.get((s, t), b.get((t, s)))
|
| 129 |
+
assert v1 == pytest.approx(v2, abs=1e-7)
|
| 130 |
+
|
| 131 |
+
def test_C4(self):
|
| 132 |
+
"""Edge betweenness centrality: C4"""
|
| 133 |
+
G = nx.cycle_graph(4)
|
| 134 |
+
b = edge_current_flow_subset(G, list(G), list(G), normalized=True)
|
| 135 |
+
b_answer = edge_current_flow(G, normalized=True)
|
| 136 |
+
for (s, t), v1 in b_answer.items():
|
| 137 |
+
v2 = b.get((s, t), b.get((t, s)))
|
| 138 |
+
assert v1 == pytest.approx(v2, abs=1e-7)
|
| 139 |
+
|
| 140 |
+
def test_P4(self):
|
| 141 |
+
"""Edge betweenness centrality: P4"""
|
| 142 |
+
G = nx.path_graph(4)
|
| 143 |
+
b = edge_current_flow_subset(G, list(G), list(G), normalized=True)
|
| 144 |
+
b_answer = edge_current_flow(G, normalized=True)
|
| 145 |
+
for (s, t), v1 in b_answer.items():
|
| 146 |
+
v2 = b.get((s, t), b.get((t, s)))
|
| 147 |
+
assert v1 == pytest.approx(v2, abs=1e-7)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_degree_centrality.py
ADDED
|
@@ -0,0 +1,144 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Unit tests for degree centrality.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class TestDegreeCentrality:
|
| 11 |
+
def setup_method(self):
|
| 12 |
+
self.K = nx.krackhardt_kite_graph()
|
| 13 |
+
self.P3 = nx.path_graph(3)
|
| 14 |
+
self.K5 = nx.complete_graph(5)
|
| 15 |
+
|
| 16 |
+
F = nx.Graph() # Florentine families
|
| 17 |
+
F.add_edge("Acciaiuoli", "Medici")
|
| 18 |
+
F.add_edge("Castellani", "Peruzzi")
|
| 19 |
+
F.add_edge("Castellani", "Strozzi")
|
| 20 |
+
F.add_edge("Castellani", "Barbadori")
|
| 21 |
+
F.add_edge("Medici", "Barbadori")
|
| 22 |
+
F.add_edge("Medici", "Ridolfi")
|
| 23 |
+
F.add_edge("Medici", "Tornabuoni")
|
| 24 |
+
F.add_edge("Medici", "Albizzi")
|
| 25 |
+
F.add_edge("Medici", "Salviati")
|
| 26 |
+
F.add_edge("Salviati", "Pazzi")
|
| 27 |
+
F.add_edge("Peruzzi", "Strozzi")
|
| 28 |
+
F.add_edge("Peruzzi", "Bischeri")
|
| 29 |
+
F.add_edge("Strozzi", "Ridolfi")
|
| 30 |
+
F.add_edge("Strozzi", "Bischeri")
|
| 31 |
+
F.add_edge("Ridolfi", "Tornabuoni")
|
| 32 |
+
F.add_edge("Tornabuoni", "Guadagni")
|
| 33 |
+
F.add_edge("Albizzi", "Ginori")
|
| 34 |
+
F.add_edge("Albizzi", "Guadagni")
|
| 35 |
+
F.add_edge("Bischeri", "Guadagni")
|
| 36 |
+
F.add_edge("Guadagni", "Lamberteschi")
|
| 37 |
+
self.F = F
|
| 38 |
+
|
| 39 |
+
G = nx.DiGraph()
|
| 40 |
+
G.add_edge(0, 5)
|
| 41 |
+
G.add_edge(1, 5)
|
| 42 |
+
G.add_edge(2, 5)
|
| 43 |
+
G.add_edge(3, 5)
|
| 44 |
+
G.add_edge(4, 5)
|
| 45 |
+
G.add_edge(5, 6)
|
| 46 |
+
G.add_edge(5, 7)
|
| 47 |
+
G.add_edge(5, 8)
|
| 48 |
+
self.G = G
|
| 49 |
+
|
| 50 |
+
def test_degree_centrality_1(self):
|
| 51 |
+
d = nx.degree_centrality(self.K5)
|
| 52 |
+
exact = dict(zip(range(5), [1] * 5))
|
| 53 |
+
for n, dc in d.items():
|
| 54 |
+
assert exact[n] == pytest.approx(dc, abs=1e-7)
|
| 55 |
+
|
| 56 |
+
def test_degree_centrality_2(self):
|
| 57 |
+
d = nx.degree_centrality(self.P3)
|
| 58 |
+
exact = {0: 0.5, 1: 1, 2: 0.5}
|
| 59 |
+
for n, dc in d.items():
|
| 60 |
+
assert exact[n] == pytest.approx(dc, abs=1e-7)
|
| 61 |
+
|
| 62 |
+
def test_degree_centrality_3(self):
|
| 63 |
+
d = nx.degree_centrality(self.K)
|
| 64 |
+
exact = {
|
| 65 |
+
0: 0.444,
|
| 66 |
+
1: 0.444,
|
| 67 |
+
2: 0.333,
|
| 68 |
+
3: 0.667,
|
| 69 |
+
4: 0.333,
|
| 70 |
+
5: 0.556,
|
| 71 |
+
6: 0.556,
|
| 72 |
+
7: 0.333,
|
| 73 |
+
8: 0.222,
|
| 74 |
+
9: 0.111,
|
| 75 |
+
}
|
| 76 |
+
for n, dc in d.items():
|
| 77 |
+
assert exact[n] == pytest.approx(float(f"{dc:.3f}"), abs=1e-7)
|
| 78 |
+
|
| 79 |
+
def test_degree_centrality_4(self):
|
| 80 |
+
d = nx.degree_centrality(self.F)
|
| 81 |
+
names = sorted(self.F.nodes())
|
| 82 |
+
dcs = [
|
| 83 |
+
0.071,
|
| 84 |
+
0.214,
|
| 85 |
+
0.143,
|
| 86 |
+
0.214,
|
| 87 |
+
0.214,
|
| 88 |
+
0.071,
|
| 89 |
+
0.286,
|
| 90 |
+
0.071,
|
| 91 |
+
0.429,
|
| 92 |
+
0.071,
|
| 93 |
+
0.214,
|
| 94 |
+
0.214,
|
| 95 |
+
0.143,
|
| 96 |
+
0.286,
|
| 97 |
+
0.214,
|
| 98 |
+
]
|
| 99 |
+
exact = dict(zip(names, dcs))
|
| 100 |
+
for n, dc in d.items():
|
| 101 |
+
assert exact[n] == pytest.approx(float(f"{dc:.3f}"), abs=1e-7)
|
| 102 |
+
|
| 103 |
+
def test_indegree_centrality(self):
|
| 104 |
+
d = nx.in_degree_centrality(self.G)
|
| 105 |
+
exact = {
|
| 106 |
+
0: 0.0,
|
| 107 |
+
1: 0.0,
|
| 108 |
+
2: 0.0,
|
| 109 |
+
3: 0.0,
|
| 110 |
+
4: 0.0,
|
| 111 |
+
5: 0.625,
|
| 112 |
+
6: 0.125,
|
| 113 |
+
7: 0.125,
|
| 114 |
+
8: 0.125,
|
| 115 |
+
}
|
| 116 |
+
for n, dc in d.items():
|
| 117 |
+
assert exact[n] == pytest.approx(dc, abs=1e-7)
|
| 118 |
+
|
| 119 |
+
def test_outdegree_centrality(self):
|
| 120 |
+
d = nx.out_degree_centrality(self.G)
|
| 121 |
+
exact = {
|
| 122 |
+
0: 0.125,
|
| 123 |
+
1: 0.125,
|
| 124 |
+
2: 0.125,
|
| 125 |
+
3: 0.125,
|
| 126 |
+
4: 0.125,
|
| 127 |
+
5: 0.375,
|
| 128 |
+
6: 0.0,
|
| 129 |
+
7: 0.0,
|
| 130 |
+
8: 0.0,
|
| 131 |
+
}
|
| 132 |
+
for n, dc in d.items():
|
| 133 |
+
assert exact[n] == pytest.approx(dc, abs=1e-7)
|
| 134 |
+
|
| 135 |
+
def test_small_graph_centrality(self):
|
| 136 |
+
G = nx.empty_graph(create_using=nx.DiGraph)
|
| 137 |
+
assert {} == nx.degree_centrality(G)
|
| 138 |
+
assert {} == nx.out_degree_centrality(G)
|
| 139 |
+
assert {} == nx.in_degree_centrality(G)
|
| 140 |
+
|
| 141 |
+
G = nx.empty_graph(1, create_using=nx.DiGraph)
|
| 142 |
+
assert {0: 1} == nx.degree_centrality(G)
|
| 143 |
+
assert {0: 1} == nx.out_degree_centrality(G)
|
| 144 |
+
assert {0: 1} == nx.in_degree_centrality(G)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_group.py
ADDED
|
@@ -0,0 +1,278 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for Group Centrality Measures
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
import pytest
|
| 7 |
+
|
| 8 |
+
import networkx as nx
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class TestGroupBetweennessCentrality:
|
| 12 |
+
def test_group_betweenness_single_node(self):
|
| 13 |
+
"""
|
| 14 |
+
Group betweenness centrality for single node group
|
| 15 |
+
"""
|
| 16 |
+
G = nx.path_graph(5)
|
| 17 |
+
C = [1]
|
| 18 |
+
b = nx.group_betweenness_centrality(
|
| 19 |
+
G, C, weight=None, normalized=False, endpoints=False
|
| 20 |
+
)
|
| 21 |
+
b_answer = 3.0
|
| 22 |
+
assert b == b_answer
|
| 23 |
+
|
| 24 |
+
def test_group_betweenness_with_endpoints(self):
|
| 25 |
+
"""
|
| 26 |
+
Group betweenness centrality for single node group
|
| 27 |
+
"""
|
| 28 |
+
G = nx.path_graph(5)
|
| 29 |
+
C = [1]
|
| 30 |
+
b = nx.group_betweenness_centrality(
|
| 31 |
+
G, C, weight=None, normalized=False, endpoints=True
|
| 32 |
+
)
|
| 33 |
+
b_answer = 7.0
|
| 34 |
+
assert b == b_answer
|
| 35 |
+
|
| 36 |
+
def test_group_betweenness_normalized(self):
|
| 37 |
+
"""
|
| 38 |
+
Group betweenness centrality for group with more than
|
| 39 |
+
1 node and normalized
|
| 40 |
+
"""
|
| 41 |
+
G = nx.path_graph(5)
|
| 42 |
+
C = [1, 3]
|
| 43 |
+
b = nx.group_betweenness_centrality(
|
| 44 |
+
G, C, weight=None, normalized=True, endpoints=False
|
| 45 |
+
)
|
| 46 |
+
b_answer = 1.0
|
| 47 |
+
assert b == b_answer
|
| 48 |
+
|
| 49 |
+
def test_two_group_betweenness_value_zero(self):
|
| 50 |
+
"""
|
| 51 |
+
Group betweenness centrality value of 0
|
| 52 |
+
"""
|
| 53 |
+
G = nx.cycle_graph(7)
|
| 54 |
+
C = [[0, 1, 6], [0, 1, 5]]
|
| 55 |
+
b = nx.group_betweenness_centrality(G, C, weight=None, normalized=False)
|
| 56 |
+
b_answer = [0.0, 3.0]
|
| 57 |
+
assert b == b_answer
|
| 58 |
+
|
| 59 |
+
def test_group_betweenness_value_zero(self):
|
| 60 |
+
"""
|
| 61 |
+
Group betweenness centrality value of 0
|
| 62 |
+
"""
|
| 63 |
+
G = nx.cycle_graph(6)
|
| 64 |
+
C = [0, 1, 5]
|
| 65 |
+
b = nx.group_betweenness_centrality(G, C, weight=None, normalized=False)
|
| 66 |
+
b_answer = 0.0
|
| 67 |
+
assert b == b_answer
|
| 68 |
+
|
| 69 |
+
def test_group_betweenness_disconnected_graph(self):
|
| 70 |
+
"""
|
| 71 |
+
Group betweenness centrality in a disconnected graph
|
| 72 |
+
"""
|
| 73 |
+
G = nx.path_graph(5)
|
| 74 |
+
G.remove_edge(0, 1)
|
| 75 |
+
C = [1]
|
| 76 |
+
b = nx.group_betweenness_centrality(G, C, weight=None, normalized=False)
|
| 77 |
+
b_answer = 0.0
|
| 78 |
+
assert b == b_answer
|
| 79 |
+
|
| 80 |
+
def test_group_betweenness_node_not_in_graph(self):
|
| 81 |
+
"""
|
| 82 |
+
Node(s) in C not in graph, raises NodeNotFound exception
|
| 83 |
+
"""
|
| 84 |
+
with pytest.raises(nx.NodeNotFound):
|
| 85 |
+
nx.group_betweenness_centrality(nx.path_graph(5), [4, 7, 8])
|
| 86 |
+
|
| 87 |
+
def test_group_betweenness_directed_weighted(self):
|
| 88 |
+
"""
|
| 89 |
+
Group betweenness centrality in a directed and weighted graph
|
| 90 |
+
"""
|
| 91 |
+
G = nx.DiGraph()
|
| 92 |
+
G.add_edge(1, 0, weight=1)
|
| 93 |
+
G.add_edge(0, 2, weight=2)
|
| 94 |
+
G.add_edge(1, 2, weight=3)
|
| 95 |
+
G.add_edge(3, 1, weight=4)
|
| 96 |
+
G.add_edge(2, 3, weight=1)
|
| 97 |
+
G.add_edge(4, 3, weight=6)
|
| 98 |
+
G.add_edge(2, 4, weight=7)
|
| 99 |
+
C = [1, 2]
|
| 100 |
+
b = nx.group_betweenness_centrality(G, C, weight="weight", normalized=False)
|
| 101 |
+
b_answer = 5.0
|
| 102 |
+
assert b == b_answer
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
class TestProminentGroup:
|
| 106 |
+
np = pytest.importorskip("numpy")
|
| 107 |
+
pd = pytest.importorskip("pandas")
|
| 108 |
+
|
| 109 |
+
def test_prominent_group_single_node(self):
|
| 110 |
+
"""
|
| 111 |
+
Prominent group for single node
|
| 112 |
+
"""
|
| 113 |
+
G = nx.path_graph(5)
|
| 114 |
+
k = 1
|
| 115 |
+
b, g = nx.prominent_group(G, k, normalized=False, endpoints=False)
|
| 116 |
+
b_answer, g_answer = 4.0, [2]
|
| 117 |
+
assert b == b_answer and g == g_answer
|
| 118 |
+
|
| 119 |
+
def test_prominent_group_with_c(self):
|
| 120 |
+
"""
|
| 121 |
+
Prominent group without some nodes
|
| 122 |
+
"""
|
| 123 |
+
G = nx.path_graph(5)
|
| 124 |
+
k = 1
|
| 125 |
+
b, g = nx.prominent_group(G, k, normalized=False, C=[2])
|
| 126 |
+
b_answer, g_answer = 3.0, [1]
|
| 127 |
+
assert b == b_answer and g == g_answer
|
| 128 |
+
|
| 129 |
+
def test_prominent_group_normalized_endpoints(self):
|
| 130 |
+
"""
|
| 131 |
+
Prominent group with normalized result, with endpoints
|
| 132 |
+
"""
|
| 133 |
+
G = nx.cycle_graph(7)
|
| 134 |
+
k = 2
|
| 135 |
+
b, g = nx.prominent_group(G, k, normalized=True, endpoints=True)
|
| 136 |
+
b_answer, g_answer = 1.7, [2, 5]
|
| 137 |
+
assert b == b_answer and g == g_answer
|
| 138 |
+
|
| 139 |
+
def test_prominent_group_disconnected_graph(self):
|
| 140 |
+
"""
|
| 141 |
+
Prominent group of disconnected graph
|
| 142 |
+
"""
|
| 143 |
+
G = nx.path_graph(6)
|
| 144 |
+
G.remove_edge(0, 1)
|
| 145 |
+
k = 1
|
| 146 |
+
b, g = nx.prominent_group(G, k, weight=None, normalized=False)
|
| 147 |
+
b_answer, g_answer = 4.0, [3]
|
| 148 |
+
assert b == b_answer and g == g_answer
|
| 149 |
+
|
| 150 |
+
def test_prominent_group_node_not_in_graph(self):
|
| 151 |
+
"""
|
| 152 |
+
Node(s) in C not in graph, raises NodeNotFound exception
|
| 153 |
+
"""
|
| 154 |
+
with pytest.raises(nx.NodeNotFound):
|
| 155 |
+
nx.prominent_group(nx.path_graph(5), 1, C=[10])
|
| 156 |
+
|
| 157 |
+
def test_group_betweenness_directed_weighted(self):
|
| 158 |
+
"""
|
| 159 |
+
Group betweenness centrality in a directed and weighted graph
|
| 160 |
+
"""
|
| 161 |
+
G = nx.DiGraph()
|
| 162 |
+
G.add_edge(1, 0, weight=1)
|
| 163 |
+
G.add_edge(0, 2, weight=2)
|
| 164 |
+
G.add_edge(1, 2, weight=3)
|
| 165 |
+
G.add_edge(3, 1, weight=4)
|
| 166 |
+
G.add_edge(2, 3, weight=1)
|
| 167 |
+
G.add_edge(4, 3, weight=6)
|
| 168 |
+
G.add_edge(2, 4, weight=7)
|
| 169 |
+
k = 2
|
| 170 |
+
b, g = nx.prominent_group(G, k, weight="weight", normalized=False)
|
| 171 |
+
b_answer, g_answer = 5.0, [1, 2]
|
| 172 |
+
assert b == b_answer and g == g_answer
|
| 173 |
+
|
| 174 |
+
def test_prominent_group_greedy_algorithm(self):
|
| 175 |
+
"""
|
| 176 |
+
Group betweenness centrality in a greedy algorithm
|
| 177 |
+
"""
|
| 178 |
+
G = nx.cycle_graph(7)
|
| 179 |
+
k = 2
|
| 180 |
+
b, g = nx.prominent_group(G, k, normalized=True, endpoints=True, greedy=True)
|
| 181 |
+
b_answer, g_answer = 1.7, [6, 3]
|
| 182 |
+
assert b == b_answer and g == g_answer
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
class TestGroupClosenessCentrality:
|
| 186 |
+
def test_group_closeness_single_node(self):
|
| 187 |
+
"""
|
| 188 |
+
Group closeness centrality for a single node group
|
| 189 |
+
"""
|
| 190 |
+
G = nx.path_graph(5)
|
| 191 |
+
c = nx.group_closeness_centrality(G, [1])
|
| 192 |
+
c_answer = nx.closeness_centrality(G, 1)
|
| 193 |
+
assert c == c_answer
|
| 194 |
+
|
| 195 |
+
def test_group_closeness_disconnected(self):
|
| 196 |
+
"""
|
| 197 |
+
Group closeness centrality for a disconnected graph
|
| 198 |
+
"""
|
| 199 |
+
G = nx.Graph()
|
| 200 |
+
G.add_nodes_from([1, 2, 3, 4])
|
| 201 |
+
c = nx.group_closeness_centrality(G, [1, 2])
|
| 202 |
+
c_answer = 0
|
| 203 |
+
assert c == c_answer
|
| 204 |
+
|
| 205 |
+
def test_group_closeness_multiple_node(self):
|
| 206 |
+
"""
|
| 207 |
+
Group closeness centrality for a group with more than
|
| 208 |
+
1 node
|
| 209 |
+
"""
|
| 210 |
+
G = nx.path_graph(4)
|
| 211 |
+
c = nx.group_closeness_centrality(G, [1, 2])
|
| 212 |
+
c_answer = 1
|
| 213 |
+
assert c == c_answer
|
| 214 |
+
|
| 215 |
+
def test_group_closeness_node_not_in_graph(self):
|
| 216 |
+
"""
|
| 217 |
+
Node(s) in S not in graph, raises NodeNotFound exception
|
| 218 |
+
"""
|
| 219 |
+
with pytest.raises(nx.NodeNotFound):
|
| 220 |
+
nx.group_closeness_centrality(nx.path_graph(5), [6, 7, 8])
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
class TestGroupDegreeCentrality:
|
| 224 |
+
def test_group_degree_centrality_single_node(self):
|
| 225 |
+
"""
|
| 226 |
+
Group degree centrality for a single node group
|
| 227 |
+
"""
|
| 228 |
+
G = nx.path_graph(4)
|
| 229 |
+
d = nx.group_degree_centrality(G, [1])
|
| 230 |
+
d_answer = nx.degree_centrality(G)[1]
|
| 231 |
+
assert d == d_answer
|
| 232 |
+
|
| 233 |
+
def test_group_degree_centrality_multiple_node(self):
|
| 234 |
+
"""
|
| 235 |
+
Group degree centrality for group with more than
|
| 236 |
+
1 node
|
| 237 |
+
"""
|
| 238 |
+
G = nx.Graph()
|
| 239 |
+
G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8])
|
| 240 |
+
G.add_edges_from(
|
| 241 |
+
[(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)]
|
| 242 |
+
)
|
| 243 |
+
d = nx.group_degree_centrality(G, [1, 2])
|
| 244 |
+
d_answer = 1
|
| 245 |
+
assert d == d_answer
|
| 246 |
+
|
| 247 |
+
def test_group_in_degree_centrality(self):
|
| 248 |
+
"""
|
| 249 |
+
Group in-degree centrality in a DiGraph
|
| 250 |
+
"""
|
| 251 |
+
G = nx.DiGraph()
|
| 252 |
+
G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8])
|
| 253 |
+
G.add_edges_from(
|
| 254 |
+
[(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)]
|
| 255 |
+
)
|
| 256 |
+
d = nx.group_in_degree_centrality(G, [1, 2])
|
| 257 |
+
d_answer = 0
|
| 258 |
+
assert d == d_answer
|
| 259 |
+
|
| 260 |
+
def test_group_out_degree_centrality(self):
|
| 261 |
+
"""
|
| 262 |
+
Group out-degree centrality in a DiGraph
|
| 263 |
+
"""
|
| 264 |
+
G = nx.DiGraph()
|
| 265 |
+
G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8])
|
| 266 |
+
G.add_edges_from(
|
| 267 |
+
[(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)]
|
| 268 |
+
)
|
| 269 |
+
d = nx.group_out_degree_centrality(G, [1, 2])
|
| 270 |
+
d_answer = 1
|
| 271 |
+
assert d == d_answer
|
| 272 |
+
|
| 273 |
+
def test_group_degree_centrality_node_not_in_graph(self):
|
| 274 |
+
"""
|
| 275 |
+
Node(s) in S not in graph, raises NetworkXError
|
| 276 |
+
"""
|
| 277 |
+
with pytest.raises(nx.NetworkXError):
|
| 278 |
+
nx.group_degree_centrality(nx.path_graph(5), [6, 7, 8])
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_katz_centrality.py
ADDED
|
@@ -0,0 +1,345 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import math
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class TestKatzCentrality:
|
| 9 |
+
def test_K5(self):
|
| 10 |
+
"""Katz centrality: K5"""
|
| 11 |
+
G = nx.complete_graph(5)
|
| 12 |
+
alpha = 0.1
|
| 13 |
+
b = nx.katz_centrality(G, alpha)
|
| 14 |
+
v = math.sqrt(1 / 5.0)
|
| 15 |
+
b_answer = dict.fromkeys(G, v)
|
| 16 |
+
for n in sorted(G):
|
| 17 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 18 |
+
nstart = {n: 1 for n in G}
|
| 19 |
+
b = nx.katz_centrality(G, alpha, nstart=nstart)
|
| 20 |
+
for n in sorted(G):
|
| 21 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 22 |
+
|
| 23 |
+
def test_P3(self):
|
| 24 |
+
"""Katz centrality: P3"""
|
| 25 |
+
alpha = 0.1
|
| 26 |
+
G = nx.path_graph(3)
|
| 27 |
+
b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}
|
| 28 |
+
b = nx.katz_centrality(G, alpha)
|
| 29 |
+
for n in sorted(G):
|
| 30 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
|
| 31 |
+
|
| 32 |
+
def test_maxiter(self):
|
| 33 |
+
with pytest.raises(nx.PowerIterationFailedConvergence):
|
| 34 |
+
nx.katz_centrality(nx.path_graph(3), 0.1, max_iter=0)
|
| 35 |
+
|
| 36 |
+
def test_beta_as_scalar(self):
|
| 37 |
+
alpha = 0.1
|
| 38 |
+
beta = 0.1
|
| 39 |
+
b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}
|
| 40 |
+
G = nx.path_graph(3)
|
| 41 |
+
b = nx.katz_centrality(G, alpha, beta)
|
| 42 |
+
for n in sorted(G):
|
| 43 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
|
| 44 |
+
|
| 45 |
+
def test_beta_as_dict(self):
|
| 46 |
+
alpha = 0.1
|
| 47 |
+
beta = {0: 1.0, 1: 1.0, 2: 1.0}
|
| 48 |
+
b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}
|
| 49 |
+
G = nx.path_graph(3)
|
| 50 |
+
b = nx.katz_centrality(G, alpha, beta)
|
| 51 |
+
for n in sorted(G):
|
| 52 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
|
| 53 |
+
|
| 54 |
+
def test_multiple_alpha(self):
|
| 55 |
+
alpha_list = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]
|
| 56 |
+
for alpha in alpha_list:
|
| 57 |
+
b_answer = {
|
| 58 |
+
0.1: {
|
| 59 |
+
0: 0.5598852584152165,
|
| 60 |
+
1: 0.6107839182711449,
|
| 61 |
+
2: 0.5598852584152162,
|
| 62 |
+
},
|
| 63 |
+
0.2: {
|
| 64 |
+
0: 0.5454545454545454,
|
| 65 |
+
1: 0.6363636363636365,
|
| 66 |
+
2: 0.5454545454545454,
|
| 67 |
+
},
|
| 68 |
+
0.3: {
|
| 69 |
+
0: 0.5333964609104419,
|
| 70 |
+
1: 0.6564879518897746,
|
| 71 |
+
2: 0.5333964609104419,
|
| 72 |
+
},
|
| 73 |
+
0.4: {
|
| 74 |
+
0: 0.5232045649263551,
|
| 75 |
+
1: 0.6726915834767423,
|
| 76 |
+
2: 0.5232045649263551,
|
| 77 |
+
},
|
| 78 |
+
0.5: {
|
| 79 |
+
0: 0.5144957746691622,
|
| 80 |
+
1: 0.6859943117075809,
|
| 81 |
+
2: 0.5144957746691622,
|
| 82 |
+
},
|
| 83 |
+
0.6: {
|
| 84 |
+
0: 0.5069794004195823,
|
| 85 |
+
1: 0.6970966755769258,
|
| 86 |
+
2: 0.5069794004195823,
|
| 87 |
+
},
|
| 88 |
+
}
|
| 89 |
+
G = nx.path_graph(3)
|
| 90 |
+
b = nx.katz_centrality(G, alpha)
|
| 91 |
+
for n in sorted(G):
|
| 92 |
+
assert b[n] == pytest.approx(b_answer[alpha][n], abs=1e-4)
|
| 93 |
+
|
| 94 |
+
def test_multigraph(self):
|
| 95 |
+
with pytest.raises(nx.NetworkXException):
|
| 96 |
+
nx.katz_centrality(nx.MultiGraph(), 0.1)
|
| 97 |
+
|
| 98 |
+
def test_empty(self):
|
| 99 |
+
e = nx.katz_centrality(nx.Graph(), 0.1)
|
| 100 |
+
assert e == {}
|
| 101 |
+
|
| 102 |
+
def test_bad_beta(self):
|
| 103 |
+
with pytest.raises(nx.NetworkXException):
|
| 104 |
+
G = nx.Graph([(0, 1)])
|
| 105 |
+
beta = {0: 77}
|
| 106 |
+
nx.katz_centrality(G, 0.1, beta=beta)
|
| 107 |
+
|
| 108 |
+
def test_bad_beta_number(self):
|
| 109 |
+
with pytest.raises(nx.NetworkXException):
|
| 110 |
+
G = nx.Graph([(0, 1)])
|
| 111 |
+
nx.katz_centrality(G, 0.1, beta="foo")
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
class TestKatzCentralityNumpy:
|
| 115 |
+
@classmethod
|
| 116 |
+
def setup_class(cls):
|
| 117 |
+
global np
|
| 118 |
+
np = pytest.importorskip("numpy")
|
| 119 |
+
pytest.importorskip("scipy")
|
| 120 |
+
|
| 121 |
+
def test_K5(self):
|
| 122 |
+
"""Katz centrality: K5"""
|
| 123 |
+
G = nx.complete_graph(5)
|
| 124 |
+
alpha = 0.1
|
| 125 |
+
b = nx.katz_centrality(G, alpha)
|
| 126 |
+
v = math.sqrt(1 / 5.0)
|
| 127 |
+
b_answer = dict.fromkeys(G, v)
|
| 128 |
+
for n in sorted(G):
|
| 129 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 130 |
+
b = nx.eigenvector_centrality_numpy(G)
|
| 131 |
+
for n in sorted(G):
|
| 132 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-3)
|
| 133 |
+
|
| 134 |
+
def test_P3(self):
|
| 135 |
+
"""Katz centrality: P3"""
|
| 136 |
+
alpha = 0.1
|
| 137 |
+
G = nx.path_graph(3)
|
| 138 |
+
b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}
|
| 139 |
+
b = nx.katz_centrality_numpy(G, alpha)
|
| 140 |
+
for n in sorted(G):
|
| 141 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
|
| 142 |
+
|
| 143 |
+
def test_beta_as_scalar(self):
|
| 144 |
+
alpha = 0.1
|
| 145 |
+
beta = 0.1
|
| 146 |
+
b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}
|
| 147 |
+
G = nx.path_graph(3)
|
| 148 |
+
b = nx.katz_centrality_numpy(G, alpha, beta)
|
| 149 |
+
for n in sorted(G):
|
| 150 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
|
| 151 |
+
|
| 152 |
+
def test_beta_as_dict(self):
|
| 153 |
+
alpha = 0.1
|
| 154 |
+
beta = {0: 1.0, 1: 1.0, 2: 1.0}
|
| 155 |
+
b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}
|
| 156 |
+
G = nx.path_graph(3)
|
| 157 |
+
b = nx.katz_centrality_numpy(G, alpha, beta)
|
| 158 |
+
for n in sorted(G):
|
| 159 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
|
| 160 |
+
|
| 161 |
+
def test_multiple_alpha(self):
|
| 162 |
+
alpha_list = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]
|
| 163 |
+
for alpha in alpha_list:
|
| 164 |
+
b_answer = {
|
| 165 |
+
0.1: {
|
| 166 |
+
0: 0.5598852584152165,
|
| 167 |
+
1: 0.6107839182711449,
|
| 168 |
+
2: 0.5598852584152162,
|
| 169 |
+
},
|
| 170 |
+
0.2: {
|
| 171 |
+
0: 0.5454545454545454,
|
| 172 |
+
1: 0.6363636363636365,
|
| 173 |
+
2: 0.5454545454545454,
|
| 174 |
+
},
|
| 175 |
+
0.3: {
|
| 176 |
+
0: 0.5333964609104419,
|
| 177 |
+
1: 0.6564879518897746,
|
| 178 |
+
2: 0.5333964609104419,
|
| 179 |
+
},
|
| 180 |
+
0.4: {
|
| 181 |
+
0: 0.5232045649263551,
|
| 182 |
+
1: 0.6726915834767423,
|
| 183 |
+
2: 0.5232045649263551,
|
| 184 |
+
},
|
| 185 |
+
0.5: {
|
| 186 |
+
0: 0.5144957746691622,
|
| 187 |
+
1: 0.6859943117075809,
|
| 188 |
+
2: 0.5144957746691622,
|
| 189 |
+
},
|
| 190 |
+
0.6: {
|
| 191 |
+
0: 0.5069794004195823,
|
| 192 |
+
1: 0.6970966755769258,
|
| 193 |
+
2: 0.5069794004195823,
|
| 194 |
+
},
|
| 195 |
+
}
|
| 196 |
+
G = nx.path_graph(3)
|
| 197 |
+
b = nx.katz_centrality_numpy(G, alpha)
|
| 198 |
+
for n in sorted(G):
|
| 199 |
+
assert b[n] == pytest.approx(b_answer[alpha][n], abs=1e-4)
|
| 200 |
+
|
| 201 |
+
def test_multigraph(self):
|
| 202 |
+
with pytest.raises(nx.NetworkXException):
|
| 203 |
+
nx.katz_centrality(nx.MultiGraph(), 0.1)
|
| 204 |
+
|
| 205 |
+
def test_empty(self):
|
| 206 |
+
e = nx.katz_centrality(nx.Graph(), 0.1)
|
| 207 |
+
assert e == {}
|
| 208 |
+
|
| 209 |
+
def test_bad_beta(self):
|
| 210 |
+
with pytest.raises(nx.NetworkXException):
|
| 211 |
+
G = nx.Graph([(0, 1)])
|
| 212 |
+
beta = {0: 77}
|
| 213 |
+
nx.katz_centrality_numpy(G, 0.1, beta=beta)
|
| 214 |
+
|
| 215 |
+
def test_bad_beta_numbe(self):
|
| 216 |
+
with pytest.raises(nx.NetworkXException):
|
| 217 |
+
G = nx.Graph([(0, 1)])
|
| 218 |
+
nx.katz_centrality_numpy(G, 0.1, beta="foo")
|
| 219 |
+
|
| 220 |
+
def test_K5_unweighted(self):
|
| 221 |
+
"""Katz centrality: K5"""
|
| 222 |
+
G = nx.complete_graph(5)
|
| 223 |
+
alpha = 0.1
|
| 224 |
+
b = nx.katz_centrality(G, alpha, weight=None)
|
| 225 |
+
v = math.sqrt(1 / 5.0)
|
| 226 |
+
b_answer = dict.fromkeys(G, v)
|
| 227 |
+
for n in sorted(G):
|
| 228 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 229 |
+
b = nx.eigenvector_centrality_numpy(G, weight=None)
|
| 230 |
+
for n in sorted(G):
|
| 231 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-3)
|
| 232 |
+
|
| 233 |
+
def test_P3_unweighted(self):
|
| 234 |
+
"""Katz centrality: P3"""
|
| 235 |
+
alpha = 0.1
|
| 236 |
+
G = nx.path_graph(3)
|
| 237 |
+
b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}
|
| 238 |
+
b = nx.katz_centrality_numpy(G, alpha, weight=None)
|
| 239 |
+
for n in sorted(G):
|
| 240 |
+
assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
|
| 241 |
+
|
| 242 |
+
|
| 243 |
+
class TestKatzCentralityDirected:
|
| 244 |
+
@classmethod
|
| 245 |
+
def setup_class(cls):
|
| 246 |
+
G = nx.DiGraph()
|
| 247 |
+
edges = [
|
| 248 |
+
(1, 2),
|
| 249 |
+
(1, 3),
|
| 250 |
+
(2, 4),
|
| 251 |
+
(3, 2),
|
| 252 |
+
(3, 5),
|
| 253 |
+
(4, 2),
|
| 254 |
+
(4, 5),
|
| 255 |
+
(4, 6),
|
| 256 |
+
(5, 6),
|
| 257 |
+
(5, 7),
|
| 258 |
+
(5, 8),
|
| 259 |
+
(6, 8),
|
| 260 |
+
(7, 1),
|
| 261 |
+
(7, 5),
|
| 262 |
+
(7, 8),
|
| 263 |
+
(8, 6),
|
| 264 |
+
(8, 7),
|
| 265 |
+
]
|
| 266 |
+
G.add_edges_from(edges, weight=2.0)
|
| 267 |
+
cls.G = G.reverse()
|
| 268 |
+
cls.G.alpha = 0.1
|
| 269 |
+
cls.G.evc = [
|
| 270 |
+
0.3289589783189635,
|
| 271 |
+
0.2832077296243516,
|
| 272 |
+
0.3425906003685471,
|
| 273 |
+
0.3970420865198392,
|
| 274 |
+
0.41074871061646284,
|
| 275 |
+
0.272257430756461,
|
| 276 |
+
0.4201989685435462,
|
| 277 |
+
0.34229059218038554,
|
| 278 |
+
]
|
| 279 |
+
|
| 280 |
+
H = nx.DiGraph(edges)
|
| 281 |
+
cls.H = G.reverse()
|
| 282 |
+
cls.H.alpha = 0.1
|
| 283 |
+
cls.H.evc = [
|
| 284 |
+
0.3289589783189635,
|
| 285 |
+
0.2832077296243516,
|
| 286 |
+
0.3425906003685471,
|
| 287 |
+
0.3970420865198392,
|
| 288 |
+
0.41074871061646284,
|
| 289 |
+
0.272257430756461,
|
| 290 |
+
0.4201989685435462,
|
| 291 |
+
0.34229059218038554,
|
| 292 |
+
]
|
| 293 |
+
|
| 294 |
+
def test_katz_centrality_weighted(self):
|
| 295 |
+
G = self.G
|
| 296 |
+
alpha = self.G.alpha
|
| 297 |
+
p = nx.katz_centrality(G, alpha, weight="weight")
|
| 298 |
+
for a, b in zip(list(p.values()), self.G.evc):
|
| 299 |
+
assert a == pytest.approx(b, abs=1e-7)
|
| 300 |
+
|
| 301 |
+
def test_katz_centrality_unweighted(self):
|
| 302 |
+
H = self.H
|
| 303 |
+
alpha = self.H.alpha
|
| 304 |
+
p = nx.katz_centrality(H, alpha, weight="weight")
|
| 305 |
+
for a, b in zip(list(p.values()), self.H.evc):
|
| 306 |
+
assert a == pytest.approx(b, abs=1e-7)
|
| 307 |
+
|
| 308 |
+
|
| 309 |
+
class TestKatzCentralityDirectedNumpy(TestKatzCentralityDirected):
|
| 310 |
+
@classmethod
|
| 311 |
+
def setup_class(cls):
|
| 312 |
+
global np
|
| 313 |
+
np = pytest.importorskip("numpy")
|
| 314 |
+
pytest.importorskip("scipy")
|
| 315 |
+
super().setup_class()
|
| 316 |
+
|
| 317 |
+
def test_katz_centrality_weighted(self):
|
| 318 |
+
G = self.G
|
| 319 |
+
alpha = self.G.alpha
|
| 320 |
+
p = nx.katz_centrality_numpy(G, alpha, weight="weight")
|
| 321 |
+
for a, b in zip(list(p.values()), self.G.evc):
|
| 322 |
+
assert a == pytest.approx(b, abs=1e-7)
|
| 323 |
+
|
| 324 |
+
def test_katz_centrality_unweighted(self):
|
| 325 |
+
H = self.H
|
| 326 |
+
alpha = self.H.alpha
|
| 327 |
+
p = nx.katz_centrality_numpy(H, alpha, weight="weight")
|
| 328 |
+
for a, b in zip(list(p.values()), self.H.evc):
|
| 329 |
+
assert a == pytest.approx(b, abs=1e-7)
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
class TestKatzEigenvectorVKatz:
|
| 333 |
+
@classmethod
|
| 334 |
+
def setup_class(cls):
|
| 335 |
+
global np
|
| 336 |
+
np = pytest.importorskip("numpy")
|
| 337 |
+
pytest.importorskip("scipy")
|
| 338 |
+
|
| 339 |
+
def test_eigenvector_v_katz_random(self):
|
| 340 |
+
G = nx.gnp_random_graph(10, 0.5, seed=1234)
|
| 341 |
+
l = max(np.linalg.eigvals(nx.adjacency_matrix(G).todense()))
|
| 342 |
+
e = nx.eigenvector_centrality_numpy(G)
|
| 343 |
+
k = nx.katz_centrality_numpy(G, 1.0 / l)
|
| 344 |
+
for n in G:
|
| 345 |
+
assert e[n] == pytest.approx(k[n], abs=1e-7)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_subgraph.py
ADDED
|
@@ -0,0 +1,110 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
pytest.importorskip("numpy")
|
| 4 |
+
pytest.importorskip("scipy")
|
| 5 |
+
|
| 6 |
+
import networkx as nx
|
| 7 |
+
from networkx.algorithms.centrality.subgraph_alg import (
|
| 8 |
+
communicability_betweenness_centrality,
|
| 9 |
+
estrada_index,
|
| 10 |
+
subgraph_centrality,
|
| 11 |
+
subgraph_centrality_exp,
|
| 12 |
+
)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class TestSubgraph:
|
| 16 |
+
def test_subgraph_centrality(self):
|
| 17 |
+
answer = {0: 1.5430806348152433, 1: 1.5430806348152433}
|
| 18 |
+
result = subgraph_centrality(nx.path_graph(2))
|
| 19 |
+
for k, v in result.items():
|
| 20 |
+
assert answer[k] == pytest.approx(v, abs=1e-7)
|
| 21 |
+
|
| 22 |
+
answer1 = {
|
| 23 |
+
"1": 1.6445956054135658,
|
| 24 |
+
"Albert": 2.4368257358712189,
|
| 25 |
+
"Aric": 2.4368257358712193,
|
| 26 |
+
"Dan": 3.1306328496328168,
|
| 27 |
+
"Franck": 2.3876142275231915,
|
| 28 |
+
}
|
| 29 |
+
G1 = nx.Graph(
|
| 30 |
+
[
|
| 31 |
+
("Franck", "Aric"),
|
| 32 |
+
("Aric", "Dan"),
|
| 33 |
+
("Dan", "Albert"),
|
| 34 |
+
("Albert", "Franck"),
|
| 35 |
+
("Dan", "1"),
|
| 36 |
+
("Franck", "Albert"),
|
| 37 |
+
]
|
| 38 |
+
)
|
| 39 |
+
result1 = subgraph_centrality(G1)
|
| 40 |
+
for k, v in result1.items():
|
| 41 |
+
assert answer1[k] == pytest.approx(v, abs=1e-7)
|
| 42 |
+
result1 = subgraph_centrality_exp(G1)
|
| 43 |
+
for k, v in result1.items():
|
| 44 |
+
assert answer1[k] == pytest.approx(v, abs=1e-7)
|
| 45 |
+
|
| 46 |
+
def test_subgraph_centrality_big_graph(self):
|
| 47 |
+
g199 = nx.complete_graph(199)
|
| 48 |
+
g200 = nx.complete_graph(200)
|
| 49 |
+
|
| 50 |
+
comm199 = nx.subgraph_centrality(g199)
|
| 51 |
+
comm199_exp = nx.subgraph_centrality_exp(g199)
|
| 52 |
+
|
| 53 |
+
comm200 = nx.subgraph_centrality(g200)
|
| 54 |
+
comm200_exp = nx.subgraph_centrality_exp(g200)
|
| 55 |
+
|
| 56 |
+
def test_communicability_betweenness_centrality_small(self):
|
| 57 |
+
result = communicability_betweenness_centrality(nx.path_graph(2))
|
| 58 |
+
assert result == {0: 0, 1: 0}
|
| 59 |
+
|
| 60 |
+
result = communicability_betweenness_centrality(nx.path_graph(1))
|
| 61 |
+
assert result == {0: 0}
|
| 62 |
+
|
| 63 |
+
result = communicability_betweenness_centrality(nx.path_graph(0))
|
| 64 |
+
assert result == {}
|
| 65 |
+
|
| 66 |
+
answer = {0: 0.1411224421177313, 1: 1.0, 2: 0.1411224421177313}
|
| 67 |
+
result = communicability_betweenness_centrality(nx.path_graph(3))
|
| 68 |
+
for k, v in result.items():
|
| 69 |
+
assert answer[k] == pytest.approx(v, abs=1e-7)
|
| 70 |
+
|
| 71 |
+
result = communicability_betweenness_centrality(nx.complete_graph(3))
|
| 72 |
+
for k, v in result.items():
|
| 73 |
+
assert 0.49786143366223296 == pytest.approx(v, abs=1e-7)
|
| 74 |
+
|
| 75 |
+
def test_communicability_betweenness_centrality(self):
|
| 76 |
+
answer = {
|
| 77 |
+
0: 0.07017447951484615,
|
| 78 |
+
1: 0.71565598701107991,
|
| 79 |
+
2: 0.71565598701107991,
|
| 80 |
+
3: 0.07017447951484615,
|
| 81 |
+
}
|
| 82 |
+
result = communicability_betweenness_centrality(nx.path_graph(4))
|
| 83 |
+
for k, v in result.items():
|
| 84 |
+
assert answer[k] == pytest.approx(v, abs=1e-7)
|
| 85 |
+
|
| 86 |
+
answer1 = {
|
| 87 |
+
"1": 0.060039074193949521,
|
| 88 |
+
"Albert": 0.315470761661372,
|
| 89 |
+
"Aric": 0.31547076166137211,
|
| 90 |
+
"Dan": 0.68297778678316201,
|
| 91 |
+
"Franck": 0.21977926617449497,
|
| 92 |
+
}
|
| 93 |
+
G1 = nx.Graph(
|
| 94 |
+
[
|
| 95 |
+
("Franck", "Aric"),
|
| 96 |
+
("Aric", "Dan"),
|
| 97 |
+
("Dan", "Albert"),
|
| 98 |
+
("Albert", "Franck"),
|
| 99 |
+
("Dan", "1"),
|
| 100 |
+
("Franck", "Albert"),
|
| 101 |
+
]
|
| 102 |
+
)
|
| 103 |
+
result1 = communicability_betweenness_centrality(G1)
|
| 104 |
+
for k, v in result1.items():
|
| 105 |
+
assert answer1[k] == pytest.approx(v, abs=1e-7)
|
| 106 |
+
|
| 107 |
+
def test_estrada_index(self):
|
| 108 |
+
answer = 1041.2470334195475
|
| 109 |
+
result = estrada_index(nx.karate_club_graph())
|
| 110 |
+
assert answer == pytest.approx(result, abs=1e-7)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_trophic.py
ADDED
|
@@ -0,0 +1,302 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Test trophic levels, trophic differences and trophic coherence
|
| 2 |
+
"""
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
np = pytest.importorskip("numpy")
|
| 6 |
+
pytest.importorskip("scipy")
|
| 7 |
+
|
| 8 |
+
import networkx as nx
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def test_trophic_levels():
|
| 12 |
+
"""Trivial example"""
|
| 13 |
+
G = nx.DiGraph()
|
| 14 |
+
G.add_edge("a", "b")
|
| 15 |
+
G.add_edge("b", "c")
|
| 16 |
+
|
| 17 |
+
d = nx.trophic_levels(G)
|
| 18 |
+
assert d == {"a": 1, "b": 2, "c": 3}
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def test_trophic_levels_levine():
|
| 22 |
+
"""Example from Figure 5 in Stephen Levine (1980) J. theor. Biol. 83,
|
| 23 |
+
195-207
|
| 24 |
+
"""
|
| 25 |
+
S = nx.DiGraph()
|
| 26 |
+
S.add_edge(1, 2, weight=1.0)
|
| 27 |
+
S.add_edge(1, 3, weight=0.2)
|
| 28 |
+
S.add_edge(1, 4, weight=0.8)
|
| 29 |
+
S.add_edge(2, 3, weight=0.2)
|
| 30 |
+
S.add_edge(2, 5, weight=0.3)
|
| 31 |
+
S.add_edge(4, 3, weight=0.6)
|
| 32 |
+
S.add_edge(4, 5, weight=0.7)
|
| 33 |
+
S.add_edge(5, 4, weight=0.2)
|
| 34 |
+
|
| 35 |
+
# save copy for later, test intermediate implementation details first
|
| 36 |
+
S2 = S.copy()
|
| 37 |
+
|
| 38 |
+
# drop nodes of in-degree zero
|
| 39 |
+
z = [nid for nid, d in S.in_degree if d == 0]
|
| 40 |
+
for nid in z:
|
| 41 |
+
S.remove_node(nid)
|
| 42 |
+
|
| 43 |
+
# find adjacency matrix
|
| 44 |
+
q = nx.linalg.graphmatrix.adjacency_matrix(S).T
|
| 45 |
+
|
| 46 |
+
# fmt: off
|
| 47 |
+
expected_q = np.array([
|
| 48 |
+
[0, 0, 0., 0],
|
| 49 |
+
[0.2, 0, 0.6, 0],
|
| 50 |
+
[0, 0, 0, 0.2],
|
| 51 |
+
[0.3, 0, 0.7, 0]
|
| 52 |
+
])
|
| 53 |
+
# fmt: on
|
| 54 |
+
assert np.array_equal(q.todense(), expected_q)
|
| 55 |
+
|
| 56 |
+
# must be square, size of number of nodes
|
| 57 |
+
assert len(q.shape) == 2
|
| 58 |
+
assert q.shape[0] == q.shape[1]
|
| 59 |
+
assert q.shape[0] == len(S)
|
| 60 |
+
|
| 61 |
+
nn = q.shape[0]
|
| 62 |
+
|
| 63 |
+
i = np.eye(nn)
|
| 64 |
+
n = np.linalg.inv(i - q)
|
| 65 |
+
y = np.asarray(n) @ np.ones(nn)
|
| 66 |
+
|
| 67 |
+
expected_y = np.array([1, 2.07906977, 1.46511628, 2.3255814])
|
| 68 |
+
assert np.allclose(y, expected_y)
|
| 69 |
+
|
| 70 |
+
expected_d = {1: 1, 2: 2, 3: 3.07906977, 4: 2.46511628, 5: 3.3255814}
|
| 71 |
+
|
| 72 |
+
d = nx.trophic_levels(S2)
|
| 73 |
+
|
| 74 |
+
for nid, level in d.items():
|
| 75 |
+
expected_level = expected_d[nid]
|
| 76 |
+
assert expected_level == pytest.approx(level, abs=1e-7)
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
def test_trophic_levels_simple():
|
| 80 |
+
matrix_a = np.array([[0, 0], [1, 0]])
|
| 81 |
+
G = nx.from_numpy_array(matrix_a, create_using=nx.DiGraph)
|
| 82 |
+
d = nx.trophic_levels(G)
|
| 83 |
+
assert d[0] == pytest.approx(2, abs=1e-7)
|
| 84 |
+
assert d[1] == pytest.approx(1, abs=1e-7)
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def test_trophic_levels_more_complex():
|
| 88 |
+
# fmt: off
|
| 89 |
+
matrix = np.array([
|
| 90 |
+
[0, 1, 0, 0],
|
| 91 |
+
[0, 0, 1, 0],
|
| 92 |
+
[0, 0, 0, 1],
|
| 93 |
+
[0, 0, 0, 0]
|
| 94 |
+
])
|
| 95 |
+
# fmt: on
|
| 96 |
+
G = nx.from_numpy_array(matrix, create_using=nx.DiGraph)
|
| 97 |
+
d = nx.trophic_levels(G)
|
| 98 |
+
expected_result = [1, 2, 3, 4]
|
| 99 |
+
for ind in range(4):
|
| 100 |
+
assert d[ind] == pytest.approx(expected_result[ind], abs=1e-7)
|
| 101 |
+
|
| 102 |
+
# fmt: off
|
| 103 |
+
matrix = np.array([
|
| 104 |
+
[0, 1, 1, 0],
|
| 105 |
+
[0, 0, 1, 1],
|
| 106 |
+
[0, 0, 0, 1],
|
| 107 |
+
[0, 0, 0, 0]
|
| 108 |
+
])
|
| 109 |
+
# fmt: on
|
| 110 |
+
G = nx.from_numpy_array(matrix, create_using=nx.DiGraph)
|
| 111 |
+
d = nx.trophic_levels(G)
|
| 112 |
+
|
| 113 |
+
expected_result = [1, 2, 2.5, 3.25]
|
| 114 |
+
print("Calculated result: ", d)
|
| 115 |
+
print("Expected Result: ", expected_result)
|
| 116 |
+
|
| 117 |
+
for ind in range(4):
|
| 118 |
+
assert d[ind] == pytest.approx(expected_result[ind], abs=1e-7)
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def test_trophic_levels_even_more_complex():
    """Compare trophic_levels against a linear system derived by hand."""
    # fmt: off
    # Another, bigger matrix
    matrix = np.array([
        [0, 0, 0, 0, 0],
        [0, 1, 0, 1, 0],
        [1, 0, 0, 0, 0],
        [0, 1, 0, 0, 0],
        [0, 0, 0, 1, 0]
    ])
    # Generated this linear system using pen and paper:
    K = np.array([
        [1, 0, -1, 0, 0],
        [0, 0.5, 0, -0.5, 0],
        [0, 0, 1, 0, 0],
        [0, -0.5, 0, 1, -0.5],
        [0, 0, 0, 0, 1],
    ])
    # fmt: on
    # Solving K x = 1 gives the expected trophic levels directly.
    expected = np.ravel(np.linalg.inv(K) @ np.ones(5))
    G = nx.from_numpy_array(matrix, create_using=nx.DiGraph)
    computed = nx.trophic_levels(G)
    for node in range(5):
        assert expected[node] == pytest.approx(computed[node], abs=1e-7)
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def test_trophic_levels_singular_matrix():
    """Should raise an error with graphs with only non-basal nodes"""
    # Identity adjacency: every node has a self-loop, so no basal nodes exist.
    G = nx.from_numpy_array(np.identity(4), create_using=nx.DiGraph)
    expected_msg = (
        "Trophic levels are only defined for graphs where every node "
        "has a path from a basal node (basal nodes are nodes with no "
        "incoming edges)."
    )
    with pytest.raises(nx.NetworkXError) as excinfo:
        nx.trophic_levels(G)
    assert expected_msg in str(excinfo.value)
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
def test_trophic_levels_singular_with_basal():
    """Should fail to compute if there are any parts of the graph which are not
    reachable from any basal node (with in-degree zero).
    """
    # The same error message is expected in every failing case below.
    expected_msg = (
        "Trophic levels are only defined for graphs where every node "
        "has a path from a basal node (basal nodes are nodes with no "
        "incoming edges)."
    )

    G = nx.DiGraph()
    G.add_edge("a", "b")  # a has in-degree zero
    G.add_edge("c", "b")  # b is one level above a, c and d
    G.add_edge("d", "b")
    G.add_edge("c", "d")  # c and d form a loop, neither reachable from a
    G.add_edge("d", "c")
    with pytest.raises(nx.NetworkXError) as excinfo:
        nx.trophic_levels(G)
    assert expected_msg in str(excinfo.value)

    # With self-loops allowed, a smaller example exhibits the same failure.
    H = nx.DiGraph()
    H.add_edge("a", "b")  # a has in-degree zero
    H.add_edge("c", "b")  # b is one level above a and c
    H.add_edge("c", "c")  # c has a self-loop
    with pytest.raises(nx.NetworkXError) as excinfo:
        nx.trophic_levels(H)
    assert expected_msg in str(excinfo.value)
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
def test_trophic_differences():
    """Trophic difference of an edge is the level gap between its endpoints."""
    G = nx.from_numpy_array(np.array([[0, 1], [0, 0]]), create_using=nx.DiGraph)
    assert nx.trophic_differences(G)[(0, 1)] == pytest.approx(1, abs=1e-7)

    # fmt: off
    matrix_b = np.array([
        [0, 1, 1, 0],
        [0, 0, 1, 1],
        [0, 0, 0, 1],
        [0, 0, 0, 0]
    ])
    # fmt: on
    G = nx.from_numpy_array(matrix_b, create_using=nx.DiGraph)
    diffs = nx.trophic_differences(G)
    expected = {(0, 1): 1, (0, 2): 1.5, (1, 2): 0.5, (1, 3): 1.25, (2, 3): 0.75}
    for edge, value in expected.items():
        assert diffs[edge] == pytest.approx(value, abs=1e-7)
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
def test_trophic_incoherence_parameter_no_cannibalism():
    """With cannibalism=False, self-loops are excluded from the computation."""
    G = nx.from_numpy_array(np.array([[0, 1], [0, 0]]), create_using=nx.DiGraph)
    q = nx.trophic_incoherence_parameter(G, cannibalism=False)
    assert q == pytest.approx(0, abs=1e-7)

    # Std-dev of the trophic differences of the 4-node DAG used throughout.
    expected_q = np.std([1, 1.5, 0.5, 0.75, 1.25])

    # fmt: off
    matrix_b = np.array([
        [0, 1, 1, 0],
        [0, 0, 1, 1],
        [0, 0, 0, 1],
        [0, 0, 0, 0]
    ])
    # fmt: on
    G = nx.from_numpy_array(matrix_b, create_using=nx.DiGraph)
    assert nx.trophic_incoherence_parameter(G, cannibalism=False) == pytest.approx(
        expected_q, abs=1e-7
    )

    # Same DAG plus self-loops at nodes 1 and 3; the self-links must be
    # ignored, so the result is unchanged.
    # fmt: off
    matrix_c = np.array([
        [0, 1, 1, 0],
        [0, 1, 1, 1],
        [0, 0, 0, 1],
        [0, 0, 0, 1]
    ])
    # fmt: on
    G = nx.from_numpy_array(matrix_c, create_using=nx.DiGraph)
    assert nx.trophic_incoherence_parameter(G, cannibalism=False) == pytest.approx(
        expected_q, abs=1e-7
    )

    # No self-loops at all: behaves the same as the plain DAG case.
    # fmt: off
    matrix_d = np.array([
        [0, 1, 1, 0],
        [0, 0, 1, 1],
        [0, 0, 0, 1],
        [0, 0, 0, 0]
    ])
    # fmt: on
    G = nx.from_numpy_array(matrix_d, create_using=nx.DiGraph)
    assert nx.trophic_incoherence_parameter(G, cannibalism=False) == pytest.approx(
        expected_q, abs=1e-7
    )
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
def test_trophic_incoherence_parameter_cannibalism():
    """With cannibalism=True, self-loops participate in the computation."""
    G = nx.from_numpy_array(np.array([[0, 1], [0, 0]]), create_using=nx.DiGraph)
    q = nx.trophic_incoherence_parameter(G, cannibalism=True)
    assert q == pytest.approx(0, abs=1e-7)

    # A 5-node network containing a self-loop at node 1.
    # fmt: off
    matrix_b = np.array([
        [0, 0, 0, 0, 0],
        [0, 1, 0, 1, 0],
        [1, 0, 0, 0, 0],
        [0, 1, 0, 0, 0],
        [0, 0, 0, 1, 0]
    ])
    # fmt: on
    G = nx.from_numpy_array(matrix_b, create_using=nx.DiGraph)
    assert nx.trophic_incoherence_parameter(G, cannibalism=True) == pytest.approx(
        2, abs=1e-7
    )

    # A DAG without self-loops: cannibalism=True makes no difference here.
    # fmt: off
    matrix_c = np.array([
        [0, 1, 1, 0],
        [0, 0, 1, 1],
        [0, 0, 0, 1],
        [0, 0, 0, 0]
    ])
    # fmt: on
    G = nx.from_numpy_array(matrix_c, create_using=nx.DiGraph)
    assert nx.trophic_incoherence_parameter(G, cannibalism=True) == pytest.approx(
        np.std([1, 1.5, 0.5, 0.75, 1.25]), abs=1e-7
    )
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_disjoint_paths.cpython-311.pyc
ADDED
|
Binary file (16.8 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_edge_augmentation.cpython-311.pyc
ADDED
|
Binary file (25.5 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_edge_kcomponents.cpython-311.pyc
ADDED
|
Binary file (26.1 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_kcomponents.cpython-311.pyc
ADDED
|
Binary file (13.3 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/tests/test_stoer_wagner.py
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from itertools import chain
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def _check_partition(G, cut_value, partition, weight):
    """Assert that `partition` is a valid 2-partition of G whose cut weighs `cut_value`.

    Checks the container types, that both sides are non-empty lists, that the
    two sides together cover every node of G exactly once, and that the total
    weight of the edges crossing the partition equals `cut_value`.
    """
    assert isinstance(partition, tuple)
    assert len(partition) == 2
    assert isinstance(partition[0], list)
    assert isinstance(partition[1], list)
    assert len(partition[0]) > 0
    assert len(partition[1]) > 0
    # Together the two sides must cover every node exactly once.
    assert sum(map(len, partition)) == len(G)
    assert set(chain.from_iterable(partition)) == set(G)
    # Convert to sets for O(1) membership tests below.
    partition = tuple(map(set, partition))
    w = 0
    for u, v, e in G.edges(data=True):
        # (u in P0) == (v in P1) holds exactly when u and v are on opposite
        # sides of the partition, i.e. when this edge crosses the cut.
        if (u in partition[0]) == (v in partition[1]):
            w += e.get(weight, 1)
    assert w == cut_value
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def _test_stoer_wagner(G, answer, weight="weight"):
    """Run stoer_wagner with both heap implementations and validate the cut."""
    for heap_cls in (nx.utils.PairingHeap, nx.utils.BinaryHeap):
        cut_value, partition = nx.stoer_wagner(G, weight, heap=heap_cls)
        assert cut_value == answer
        _check_partition(G, cut_value, partition, weight)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def test_graph1():
    """Weighted 7-node graph whose minimum cut has weight 4."""
    weighted_edges = [
        ("x", "a", 3), ("x", "b", 1), ("a", "c", 3), ("b", "c", 5),
        ("b", "d", 4), ("d", "e", 2), ("c", "y", 2), ("e", "y", 3),
    ]
    G = nx.Graph()
    G.add_weighted_edges_from(weighted_edges)
    _test_stoer_wagner(G, 4)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def test_graph2():
    """Same topology as test_graph1 but unweighted: every edge counts as 1."""
    G = nx.Graph()
    G.add_edges_from([
        ("x", "a"), ("x", "b"), ("a", "c"), ("b", "c"),
        ("b", "d"), ("d", "e"), ("c", "y"), ("e", "y"),
    ])
    _test_stoer_wagner(G, 2)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def test_graph3():
    """The worked example from the original Stoer--Wagner paper.

    Source: Stoer, M. and Wagner, F. (1997). "A simple min-cut algorithm".
    Journal of the ACM 44 (4), 585-591.
    """
    weighted_edges = [
        (1, 2, 2), (1, 5, 3), (2, 3, 3), (2, 5, 2), (2, 6, 2), (3, 4, 4),
        (3, 7, 2), (4, 7, 2), (4, 8, 2), (5, 6, 3), (6, 7, 1), (7, 8, 3),
    ]
    G = nx.Graph()
    G.add_weighted_edges_from(weighted_edges)
    _test_stoer_wagner(G, 4)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def test_weight_name():
    """The cut is computed over the named attribute; "weight" must be ignored."""
    G = nx.Graph()
    G.add_edges_from([
        (1, 2, {"weight": 1, "cost": 8}),
        (1, 3, {"cost": 2}),
        (2, 3, {"cost": 4}),
    ])
    _test_stoer_wagner(G, 6, weight="cost")
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
def test_exceptions():
    """stoer_wagner rejects degenerate graphs and unsupported graph types."""
    G = nx.Graph()
    pytest.raises(nx.NetworkXError, nx.stoer_wagner, G)  # empty graph
    G.add_node(1)
    pytest.raises(nx.NetworkXError, nx.stoer_wagner, G)  # single node
    G.add_node(2)
    pytest.raises(nx.NetworkXError, nx.stoer_wagner, G)  # no edges
    G.add_edge(1, 2, weight=-2)
    pytest.raises(nx.NetworkXError, nx.stoer_wagner, G)  # negative weight
    # Directed and multi-edge graph classes are not supported at all.
    for graph_cls in (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph):
        pytest.raises(nx.NetworkXNotImplemented, nx.stoer_wagner, graph_cls())
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/core.py
ADDED
|
@@ -0,0 +1,545 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Find the k-cores of a graph.
|
| 3 |
+
|
| 4 |
+
The k-core is found by recursively pruning nodes with degrees less than k.
|
| 5 |
+
|
| 6 |
+
See the following references for details:
|
| 7 |
+
|
| 8 |
+
An O(m) Algorithm for Cores Decomposition of Networks
|
| 9 |
+
Vladimir Batagelj and Matjaz Zaversnik, 2003.
|
| 10 |
+
https://arxiv.org/abs/cs.DS/0310049
|
| 11 |
+
|
| 12 |
+
Generalized Cores
|
| 13 |
+
Vladimir Batagelj and Matjaz Zaversnik, 2002.
|
| 14 |
+
https://arxiv.org/pdf/cs/0202039
|
| 15 |
+
|
| 16 |
+
For directed graphs a more general notion is that of D-cores which
|
| 17 |
+
looks at (k, l) restrictions on (in, out) degree. The (k, k) D-core
|
| 18 |
+
is the k-core.
|
| 19 |
+
|
| 20 |
+
D-cores: Measuring Collaboration of Directed Graphs Based on Degeneracy
|
| 21 |
+
Christos Giatsidis, Dimitrios M. Thilikos, Michalis Vazirgiannis, ICDM 2011.
|
| 22 |
+
http://www.graphdegeneracy.org/dcores_ICDM_2011.pdf
|
| 23 |
+
|
| 24 |
+
Multi-scale structure and topological anomaly detection via a new network \
|
| 25 |
+
statistic: The onion decomposition
|
| 26 |
+
L. Hébert-Dufresne, J. A. Grochow, and A. Allard
|
| 27 |
+
Scientific Reports 6, 31708 (2016)
|
| 28 |
+
http://doi.org/10.1038/srep31708
|
| 29 |
+
|
| 30 |
+
"""
|
| 31 |
+
import networkx as nx
|
| 32 |
+
from networkx.exception import NetworkXError
|
| 33 |
+
from networkx.utils import not_implemented_for
|
| 34 |
+
|
| 35 |
+
__all__ = [
|
| 36 |
+
"core_number",
|
| 37 |
+
"k_core",
|
| 38 |
+
"k_shell",
|
| 39 |
+
"k_crust",
|
| 40 |
+
"k_corona",
|
| 41 |
+
"k_truss",
|
| 42 |
+
"onion_layers",
|
| 43 |
+
]
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
@not_implemented_for("multigraph")
@nx._dispatch
def core_number(G):
    """Returns the core number for each vertex.

    A k-core is a maximal subgraph that contains nodes of degree k or more.

    The core number of a node is the largest value k of a k-core containing
    that node.

    Parameters
    ----------
    G : NetworkX graph
       A graph or directed graph

    Returns
    -------
    core_number : dictionary
       A dictionary keyed by node to the core number.

    Raises
    ------
    NetworkXError
        The k-core is not implemented for graphs with self loops
        or parallel edges.

    Notes
    -----
    Not implemented for graphs with parallel edges or self loops.

    For directed graphs the node degree is defined to be the
    in-degree + out-degree.

    References
    ----------
    .. [1] An O(m) Algorithm for Cores Decomposition of Networks
       Vladimir Batagelj and Matjaz Zaversnik, 2003.
       https://arxiv.org/abs/cs.DS/0310049
    """
    if nx.number_of_selfloops(G) > 0:
        msg = (
            "Input graph has self loops which is not permitted; "
            "Consider using G.remove_edges_from(nx.selfloop_edges(G))."
        )
        raise NetworkXError(msg)
    degrees = dict(G.degree())
    # Sort nodes by degree.
    nodes = sorted(degrees, key=degrees.get)
    # bin_boundaries[d] is the index in `nodes` where degree-d nodes start;
    # together with node_pos this implements the O(m) bucket sort of [1].
    bin_boundaries = [0]
    curr_degree = 0
    for i, v in enumerate(nodes):
        if degrees[v] > curr_degree:
            bin_boundaries.extend([i] * (degrees[v] - curr_degree))
            curr_degree = degrees[v]
    node_pos = {v: pos for pos, v in enumerate(nodes)}
    # The initial guess for the core number of a node is its degree.
    core = degrees
    nbrs = {v: list(nx.all_neighbors(G, v)) for v in G}
    # Process nodes in increasing degree order; when v is removed, each
    # neighbour u with a larger current core estimate loses one and is
    # swapped to the front of its degree bin to keep `nodes` bucket-sorted.
    for v in nodes:
        for u in nbrs[v]:
            if core[u] > core[v]:
                nbrs[u].remove(v)
                pos = node_pos[u]
                bin_start = bin_boundaries[core[u]]
                node_pos[u] = bin_start
                node_pos[nodes[bin_start]] = pos
                nodes[bin_start], nodes[pos] = nodes[pos], nodes[bin_start]
                # Shrink u's old bin by one and decrement its core estimate.
                bin_boundaries[core[u]] += 1
                core[u] -= 1
    return core
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
def _core_subgraph(G, k_filter, k=None, core=None):
    """Return a copy of the subgraph of `G` induced by nodes passing `k_filter`.

    Parameters
    ----------
    G : NetworkX graph
        The graph or directed graph to process.
    k_filter : callable
        Predicate taking (node, cutoff, core_dict) and returning True for
        nodes that belong in the subgraph.
    k : int, optional
        Cutoff passed to the filter; defaults to the maximum core number.
    core : dict, optional
        Precomputed core numbers keyed by node for the graph `G`.
        Computed from `G` when not supplied.
    """
    core_numbers = core if core is not None else core_number(G)
    cutoff = k if k is not None else max(core_numbers.values())
    kept = [node for node in core_numbers if k_filter(node, cutoff, core_numbers)]
    return G.subgraph(kept).copy()
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
@nx._dispatch(preserve_all_attrs=True)
def k_core(G, k=None, core_number=None):
    """Returns the k-core of G.

    A k-core is a maximal subgraph that contains nodes of degree k or more.

    Parameters
    ----------
    G : NetworkX graph
      A graph or directed graph
    k : int, optional
      The order of the core. If not specified return the main core.
    core_number : dictionary, optional
      Precomputed core numbers for the graph G.

    Returns
    -------
    G : NetworkX graph
      The k-core subgraph

    Raises
    ------
    NetworkXError
      The k-core is not defined for graphs with self loops or parallel edges.

    Notes
    -----
    The main core is the core with the largest degree.

    Not implemented for graphs with parallel edges or self loops.

    For directed graphs the node degree is defined to be the
    in-degree + out-degree.

    Graph, node, and edge attributes are copied to the subgraph.

    See Also
    --------
    core_number

    References
    ----------
    .. [1] An O(m) Algorithm for Cores Decomposition of Networks
       Vladimir Batagelj and Matjaz Zaversnik, 2003.
       https://arxiv.org/abs/cs.DS/0310049
    """
    # Keep every node whose core number is at least the cutoff.
    return _core_subgraph(G, lambda v, cutoff, core: core[v] >= cutoff, k, core_number)
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
@nx._dispatch(preserve_all_attrs=True)
def k_shell(G, k=None, core_number=None):
    """Returns the k-shell of G.

    The k-shell is the subgraph induced by nodes with core number k.
    That is, nodes in the k-core that are not in the (k+1)-core.

    Parameters
    ----------
    G : NetworkX graph
      A graph or directed graph.
    k : int, optional
      The order of the shell. If not specified return the outer shell.
    core_number : dictionary, optional
      Precomputed core numbers for the graph G.

    Returns
    -------
    G : NetworkX graph
       The k-shell subgraph

    Raises
    ------
    NetworkXError
        The k-shell is not implemented for graphs with self loops
        or parallel edges.

    Notes
    -----
    This is similar to k_corona but in that case only neighbors in the
    k-core are considered.

    Not implemented for graphs with parallel edges or self loops.

    For directed graphs the node degree is defined to be the
    in-degree + out-degree.

    Graph, node, and edge attributes are copied to the subgraph.

    See Also
    --------
    core_number
    k_corona

    References
    ----------
    .. [1] A model of Internet topology using k-shell decomposition
       Shai Carmi, Shlomo Havlin, Scott Kirkpatrick, Yuval Shavitt,
       and Eran Shir, PNAS July 3, 2007 vol. 104 no. 27 11150-11154
       http://www.pnas.org/content/104/27/11150.full
    """
    # Keep only the nodes whose core number equals the cutoff exactly.
    return _core_subgraph(G, lambda v, cutoff, core: core[v] == cutoff, k, core_number)
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
@nx._dispatch(preserve_all_attrs=True)
def k_crust(G, k=None, core_number=None):
    """Returns the k-crust of G.

    The k-crust is the graph G with the edges of the k-core removed
    and isolated nodes found after the removal of edges are also removed.

    Parameters
    ----------
    G : NetworkX graph
       A graph or directed graph.
    k : int, optional
      The order of the shell. If not specified return the main crust.
    core_number : dictionary, optional
      Precomputed core numbers for the graph G.

    Returns
    -------
    G : NetworkX graph
       The k-crust subgraph

    Raises
    ------
    NetworkXError
        The k-crust is not implemented for graphs with self loops
        or parallel edges.

    Notes
    -----
    This definition of k-crust is different than the definition in [1]_.
    The k-crust in [1]_ is equivalent to the k+1 crust of this algorithm.

    Not implemented for graphs with parallel edges or self loops.

    For directed graphs the node degree is defined to be the
    in-degree + out-degree.

    Graph, node, and edge attributes are copied to the subgraph.

    See Also
    --------
    core_number

    References
    ----------
    .. [1] A model of Internet topology using k-shell decomposition
       Shai Carmi, Shlomo Havlin, Scott Kirkpatrick, Yuval Shavitt,
       and Eran Shir, PNAS July 3, 2007 vol. 104 no. 27 11150-11154
       http://www.pnas.org/content/104/27/11150.full
    """
    # The crust keeps nodes with core number <= k, but the default cutoff is
    # one LESS than the maximum core number, unlike _core_subgraph's default,
    # so the logic is inlined here rather than delegated.
    core = core_number if core_number is not None else nx.core_number(G)
    cutoff = k if k is not None else max(core.values()) - 1
    return G.subgraph(v for v in core if core[v] <= cutoff).copy()
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
@nx._dispatch(preserve_all_attrs=True)
def k_corona(G, k, core_number=None):
    """Returns the k-corona of G.

    The k-corona is the subgraph of nodes in the k-core which have
    exactly k neighbours in the k-core.

    Parameters
    ----------
    G : NetworkX graph
       A graph or directed graph
    k : int
       The order of the corona.
    core_number : dictionary, optional
       Precomputed core numbers for the graph G.

    Returns
    -------
    G : NetworkX graph
       The k-corona subgraph

    Raises
    ------
    NetworkXError
        The k-corona is not defined for graphs with self loops or
        parallel edges.

    Notes
    -----
    Not implemented for graphs with parallel edges or self loops.

    For directed graphs the node degree is defined to be the
    in-degree + out-degree.

    Graph, node, and edge attributes are copied to the subgraph.

    See Also
    --------
    core_number

    References
    ----------
    .. [1] k -core (bootstrap) percolation on complex networks:
       Critical phenomena and nonlocal effects,
       A. V. Goltsev, S. N. Dorogovtsev, and J. F. F. Mendes,
       Phys. Rev. E 73, 056101 (2006)
       http://link.aps.org/doi/10.1103/PhysRevE.73.056101
    """

    def in_corona(v, cutoff, core):
        # Node must have core number exactly `cutoff` and exactly `cutoff`
        # neighbours that are themselves in the k-core.
        same_core = core[v] == cutoff
        return same_core and cutoff == sum(1 for w in G[v] if core[w] >= cutoff)

    return _core_subgraph(G, in_corona, k, core_number)
|
| 371 |
+
|
| 372 |
+
|
| 373 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatch(preserve_all_attrs=True)
def k_truss(G, k):
    """Returns the k-truss of `G`.

    The k-truss is the maximal induced subgraph of `G` which contains at least
    three vertices where every edge is incident to at least `k-2` triangles.

    Parameters
    ----------
    G : NetworkX graph
      An undirected graph
    k : int
      The order of the truss

    Returns
    -------
    H : NetworkX graph
      The k-truss subgraph

    Raises
    ------
    NetworkXError

      The k-truss is not defined for graphs with self loops, directed graphs
      and multigraphs.

    Notes
    -----
    A k-clique is a (k-2)-truss and a k-truss is a (k+1)-core.

    Not implemented for digraphs or graphs with parallel edges or self loops.

    Graph, node, and edge attributes are copied to the subgraph.

    K-trusses were originally defined in [2] which states that the k-truss
    is the maximal induced subgraph where each edge belongs to at least
    `k-2` triangles. A more recent paper, [1], uses a slightly different
    definition requiring that each edge belong to at least `k` triangles.
    This implementation uses the original definition of `k-2` triangles.

    References
    ----------
    .. [1] Bounds and Algorithms for k-truss. Paul Burkhardt, Vance Faber,
       David G. Harris, 2018. https://arxiv.org/abs/1806.05523v2
    .. [2] Trusses: Cohesive Subgraphs for Social Network Analysis. Jonathan
       Cohen, 2005.
    """
    if nx.number_of_selfloops(G) > 0:
        msg = (
            "Input graph has self loops which is not permitted; "
            "Consider using G.remove_edges_from(nx.selfloop_edges(G))."
        )
        raise NetworkXError(msg)

    # Work on a copy so the input graph is left untouched.
    H = G.copy()

    # Iteratively prune weak edges until a pass removes nothing (fixed point).
    n_dropped = 1
    while n_dropped > 0:
        n_dropped = 0
        to_drop = []
        seen = set()
        for u in H:
            nbrs_u = set(H[u])
            seen.add(u)
            # `seen` ensures each undirected edge is examined only once.
            new_nbrs = [v for v in nbrs_u if v not in seen]
            for v in new_nbrs:
                # Common neighbours of u and v == triangles through edge (u, v).
                if len(nbrs_u & set(H[v])) < (k - 2):
                    to_drop.append((u, v))
        H.remove_edges_from(to_drop)
        n_dropped = len(to_drop)
        # Nodes left without any edges cannot be part of the truss.
        H.remove_nodes_from(list(nx.isolates(H)))

    return H
|
| 448 |
+
|
| 449 |
+
|
| 450 |
+
@not_implemented_for("multigraph")
|
| 451 |
+
@not_implemented_for("directed")
|
| 452 |
+
@nx._dispatch
|
| 453 |
+
def onion_layers(G):
|
| 454 |
+
"""Returns the layer of each vertex in an onion decomposition of the graph.
|
| 455 |
+
|
| 456 |
+
The onion decomposition refines the k-core decomposition by providing
|
| 457 |
+
information on the internal organization of each k-shell. It is usually
|
| 458 |
+
used alongside the `core numbers`.
|
| 459 |
+
|
| 460 |
+
Parameters
|
| 461 |
+
----------
|
| 462 |
+
G : NetworkX graph
|
| 463 |
+
A simple graph without self loops or parallel edges
|
| 464 |
+
|
| 465 |
+
Returns
|
| 466 |
+
-------
|
| 467 |
+
od_layers : dictionary
|
| 468 |
+
A dictionary keyed by vertex to the onion layer. The layers are
|
| 469 |
+
contiguous integers starting at 1.
|
| 470 |
+
|
| 471 |
+
Raises
|
| 472 |
+
------
|
| 473 |
+
NetworkXError
|
| 474 |
+
The onion decomposition is not implemented for graphs with self loops
|
| 475 |
+
or parallel edges or for directed graphs.
|
| 476 |
+
|
| 477 |
+
Notes
|
| 478 |
+
-----
|
| 479 |
+
Not implemented for graphs with parallel edges or self loops.
|
| 480 |
+
|
| 481 |
+
Not implemented for directed graphs.
|
| 482 |
+
|
| 483 |
+
See Also
|
| 484 |
+
--------
|
| 485 |
+
core_number
|
| 486 |
+
|
| 487 |
+
References
|
| 488 |
+
----------
|
| 489 |
+
.. [1] Multi-scale structure and topological anomaly detection via a new
|
| 490 |
+
network statistic: The onion decomposition
|
| 491 |
+
L. Hébert-Dufresne, J. A. Grochow, and A. Allard
|
| 492 |
+
Scientific Reports 6, 31708 (2016)
|
| 493 |
+
http://doi.org/10.1038/srep31708
|
| 494 |
+
.. [2] Percolation and the effective structure of complex networks
|
| 495 |
+
A. Allard and L. Hébert-Dufresne
|
| 496 |
+
Physical Review X 9, 011023 (2019)
|
| 497 |
+
http://doi.org/10.1103/PhysRevX.9.011023
|
| 498 |
+
"""
|
| 499 |
+
if nx.number_of_selfloops(G) > 0:
|
| 500 |
+
msg = (
|
| 501 |
+
"Input graph contains self loops which is not permitted; "
|
| 502 |
+
"Consider using G.remove_edges_from(nx.selfloop_edges(G))."
|
| 503 |
+
)
|
| 504 |
+
raise NetworkXError(msg)
|
| 505 |
+
# Dictionaries to register the k-core/onion decompositions.
|
| 506 |
+
od_layers = {}
|
| 507 |
+
# Adjacency list
|
| 508 |
+
neighbors = {v: list(nx.all_neighbors(G, v)) for v in G}
|
| 509 |
+
# Effective degree of nodes.
|
| 510 |
+
degrees = dict(G.degree())
|
| 511 |
+
# Performs the onion decomposition.
|
| 512 |
+
current_core = 1
|
| 513 |
+
current_layer = 1
|
| 514 |
+
# Sets vertices of degree 0 to layer 1, if any.
|
| 515 |
+
isolated_nodes = list(nx.isolates(G))
|
| 516 |
+
if len(isolated_nodes) > 0:
|
| 517 |
+
for v in isolated_nodes:
|
| 518 |
+
od_layers[v] = current_layer
|
| 519 |
+
degrees.pop(v)
|
| 520 |
+
current_layer = 2
|
| 521 |
+
# Finds the layer for the remaining nodes.
|
| 522 |
+
while len(degrees) > 0:
|
| 523 |
+
# Sets the order for looking at nodes.
|
| 524 |
+
nodes = sorted(degrees, key=degrees.get)
|
| 525 |
+
# Sets properly the current core.
|
| 526 |
+
min_degree = degrees[nodes[0]]
|
| 527 |
+
if min_degree > current_core:
|
| 528 |
+
current_core = min_degree
|
| 529 |
+
# Identifies vertices in the current layer.
|
| 530 |
+
this_layer = []
|
| 531 |
+
for n in nodes:
|
| 532 |
+
if degrees[n] > current_core:
|
| 533 |
+
break
|
| 534 |
+
this_layer.append(n)
|
| 535 |
+
# Identifies the core/layer of the vertices in the current layer.
|
| 536 |
+
for v in this_layer:
|
| 537 |
+
od_layers[v] = current_layer
|
| 538 |
+
for n in neighbors[v]:
|
| 539 |
+
neighbors[n].remove(v)
|
| 540 |
+
degrees[n] = degrees[n] - 1
|
| 541 |
+
degrees.pop(v)
|
| 542 |
+
# Updates the layer count.
|
| 543 |
+
current_layer = current_layer + 1
|
| 544 |
+
# Returns the dictionaries containing the onion layer of each vertices.
|
| 545 |
+
return od_layers
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/d_separation.py
ADDED
|
@@ -0,0 +1,457 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Algorithm for testing d-separation in DAGs.
|
| 3 |
+
|
| 4 |
+
*d-separation* is a test for conditional independence in probability
|
| 5 |
+
distributions that can be factorized using DAGs. It is a purely
|
| 6 |
+
graphical test that uses the underlying graph and makes no reference
|
| 7 |
+
to the actual distribution parameters. See [1]_ for a formal
|
| 8 |
+
definition.
|
| 9 |
+
|
| 10 |
+
The implementation is based on the conceptually simple linear time
|
| 11 |
+
algorithm presented in [2]_. Refer to [3]_, [4]_ for a couple of
|
| 12 |
+
alternative algorithms.
|
| 13 |
+
|
| 14 |
+
Here, we provide a brief overview of d-separation and related concepts that
|
| 15 |
+
are relevant for understanding it:
|
| 16 |
+
|
| 17 |
+
Blocking paths
|
| 18 |
+
--------------
|
| 19 |
+
|
| 20 |
+
Before we overview, we introduce the following terminology to describe paths:
|
| 21 |
+
|
| 22 |
+
- "open" path: A path between two nodes that can be traversed
|
| 23 |
+
- "blocked" path: A path between two nodes that cannot be traversed
|
| 24 |
+
|
| 25 |
+
A **collider** is a triplet of nodes along a path that is like the following:
|
| 26 |
+
``... u -> c <- v ...``), where 'c' is a common successor of ``u`` and ``v``. A path
|
| 27 |
+
through a collider is considered "blocked". When
|
| 28 |
+
a node that is a collider, or a descendant of a collider is included in
|
| 29 |
+
the d-separating set, then the path through that collider node is "open". If the
|
| 30 |
+
path through the collider node is open, then we will call this node an open collider.
|
| 31 |
+
|
| 32 |
+
The d-separation set blocks the paths between ``u`` and ``v``. If you include colliders,
|
| 33 |
+
or their descendant nodes in the d-separation set, then those colliders will open up,
|
| 34 |
+
enabling a path to be traversed if it is not blocked some other way.
|
| 35 |
+
|
| 36 |
+
Illustration of D-separation with examples
|
| 37 |
+
------------------------------------------
|
| 38 |
+
|
| 39 |
+
For a pair of two nodes, ``u`` and ``v``, all paths are considered open if
|
| 40 |
+
there is a path between ``u`` and ``v`` that is not blocked. That means, there is an open
|
| 41 |
+
path between ``u`` and ``v`` that does not encounter a collider, or a variable in the
|
| 42 |
+
d-separating set.
|
| 43 |
+
|
| 44 |
+
For example, if the d-separating set is the empty set, then the following paths are
|
| 45 |
+
unblocked between ``u`` and ``v``:
|
| 46 |
+
|
| 47 |
+
- u <- z -> v
|
| 48 |
+
- u -> w -> ... -> z -> v
|
| 49 |
+
|
| 50 |
+
If for example, 'z' is in the d-separating set, then 'z' blocks those paths
|
| 51 |
+
between ``u`` and ``v``.
|
| 52 |
+
|
| 53 |
+
Colliders block a path by default if they and their descendants are not included
|
| 54 |
+
in the d-separating set. An example of a path that is blocked when the d-separating
|
| 55 |
+
set is empty is:
|
| 56 |
+
|
| 57 |
+
- u -> w -> ... -> z <- v
|
| 58 |
+
|
| 59 |
+
because 'z' is a collider in this path and 'z' is not in the d-separating set. However,
|
| 60 |
+
if 'z' or a descendant of 'z' is included in the d-separating set, then the path through
|
| 61 |
+
the collider at 'z' (... -> z <- ...) is now "open".
|
| 62 |
+
|
| 63 |
+
D-separation is concerned with blocking all paths between u and v. Therefore, a
|
| 64 |
+
d-separating set between ``u`` and ``v`` is one where all paths are blocked.
|
| 65 |
+
|
| 66 |
+
D-separation and its applications in probability
|
| 67 |
+
------------------------------------------------
|
| 68 |
+
|
| 69 |
+
D-separation is commonly used in probabilistic graphical models. D-separation
|
| 70 |
+
connects the idea of probabilistic "dependence" with separation in a graph. If
|
| 71 |
+
one assumes the causal Markov condition [5]_, then d-separation implies conditional
|
| 72 |
+
independence in probability distributions.
|
| 73 |
+
|
| 74 |
+
Examples
|
| 75 |
+
--------
|
| 76 |
+
|
| 77 |
+
>>>
|
| 78 |
+
>>> # HMM graph with five states and observation nodes
|
| 79 |
+
... g = nx.DiGraph()
|
| 80 |
+
>>> g.add_edges_from(
|
| 81 |
+
... [
|
| 82 |
+
... ("S1", "S2"),
|
| 83 |
+
... ("S2", "S3"),
|
| 84 |
+
... ("S3", "S4"),
|
| 85 |
+
... ("S4", "S5"),
|
| 86 |
+
... ("S1", "O1"),
|
| 87 |
+
... ("S2", "O2"),
|
| 88 |
+
... ("S3", "O3"),
|
| 89 |
+
... ("S4", "O4"),
|
| 90 |
+
... ("S5", "O5"),
|
| 91 |
+
... ]
|
| 92 |
+
... )
|
| 93 |
+
>>>
|
| 94 |
+
>>> # states/obs before 'S3' are d-separated from states/obs after 'S3'
|
| 95 |
+
... nx.d_separated(g, {"S1", "S2", "O1", "O2"}, {"S4", "S5", "O4", "O5"}, {"S3"})
|
| 96 |
+
True
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
References
|
| 100 |
+
----------
|
| 101 |
+
|
| 102 |
+
.. [1] Pearl, J. (2009). Causality. Cambridge: Cambridge University Press.
|
| 103 |
+
|
| 104 |
+
.. [2] Darwiche, A. (2009). Modeling and reasoning with Bayesian networks.
|
| 105 |
+
Cambridge: Cambridge University Press.
|
| 106 |
+
|
| 107 |
+
.. [3] Shachter, R. D. (1998).
|
| 108 |
+
Bayes-ball: rational pastime (for determining irrelevance and requisite
|
| 109 |
+
information in belief networks and influence diagrams).
|
| 110 |
+
In , Proceedings of the Fourteenth Conference on Uncertainty in Artificial
|
| 111 |
+
Intelligence (pp. 480–487).
|
| 112 |
+
San Francisco, CA, USA: Morgan Kaufmann Publishers Inc.
|
| 113 |
+
|
| 114 |
+
.. [4] Koller, D., & Friedman, N. (2009).
|
| 115 |
+
Probabilistic graphical models: principles and techniques. The MIT Press.
|
| 116 |
+
|
| 117 |
+
.. [5] https://en.wikipedia.org/wiki/Causal_Markov_condition
|
| 118 |
+
|
| 119 |
+
"""
|
| 120 |
+
|
| 121 |
+
from collections import deque
|
| 122 |
+
|
| 123 |
+
import networkx as nx
|
| 124 |
+
from networkx.utils import UnionFind, not_implemented_for
|
| 125 |
+
|
| 126 |
+
__all__ = ["d_separated", "minimal_d_separator", "is_minimal_d_separator"]
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
@not_implemented_for("undirected")
|
| 130 |
+
@nx._dispatch
|
| 131 |
+
def d_separated(G, x, y, z):
|
| 132 |
+
"""
|
| 133 |
+
Return whether node sets ``x`` and ``y`` are d-separated by ``z``.
|
| 134 |
+
|
| 135 |
+
Parameters
|
| 136 |
+
----------
|
| 137 |
+
G : graph
|
| 138 |
+
A NetworkX DAG.
|
| 139 |
+
|
| 140 |
+
x : set
|
| 141 |
+
First set of nodes in ``G``.
|
| 142 |
+
|
| 143 |
+
y : set
|
| 144 |
+
Second set of nodes in ``G``.
|
| 145 |
+
|
| 146 |
+
z : set
|
| 147 |
+
Set of conditioning nodes in ``G``. Can be empty set.
|
| 148 |
+
|
| 149 |
+
Returns
|
| 150 |
+
-------
|
| 151 |
+
b : bool
|
| 152 |
+
A boolean that is true if ``x`` is d-separated from ``y`` given ``z`` in ``G``.
|
| 153 |
+
|
| 154 |
+
Raises
|
| 155 |
+
------
|
| 156 |
+
NetworkXError
|
| 157 |
+
The *d-separation* test is commonly used with directed
|
| 158 |
+
graphical models which are acyclic. Accordingly, the algorithm
|
| 159 |
+
raises a :exc:`NetworkXError` if the input graph is not a DAG.
|
| 160 |
+
|
| 161 |
+
NodeNotFound
|
| 162 |
+
If any of the input nodes are not found in the graph,
|
| 163 |
+
a :exc:`NodeNotFound` exception is raised.
|
| 164 |
+
|
| 165 |
+
Notes
|
| 166 |
+
-----
|
| 167 |
+
A d-separating set in a DAG is a set of nodes that
|
| 168 |
+
blocks all paths between the two sets. Nodes in `z`
|
| 169 |
+
block a path if they are part of the path and are not a collider,
|
| 170 |
+
or a descendant of a collider. A collider structure along a path
|
| 171 |
+
is ``... -> c <- ...`` where ``c`` is the collider node.
|
| 172 |
+
|
| 173 |
+
https://en.wikipedia.org/wiki/Bayesian_network#d-separation
|
| 174 |
+
"""
|
| 175 |
+
|
| 176 |
+
if not nx.is_directed_acyclic_graph(G):
|
| 177 |
+
raise nx.NetworkXError("graph should be directed acyclic")
|
| 178 |
+
|
| 179 |
+
union_xyz = x.union(y).union(z)
|
| 180 |
+
|
| 181 |
+
if any(n not in G.nodes for n in union_xyz):
|
| 182 |
+
raise nx.NodeNotFound("one or more specified nodes not found in the graph")
|
| 183 |
+
|
| 184 |
+
G_copy = G.copy()
|
| 185 |
+
|
| 186 |
+
# transform the graph by removing leaves that are not in x | y | z
|
| 187 |
+
# until no more leaves can be removed.
|
| 188 |
+
leaves = deque([n for n in G_copy.nodes if G_copy.out_degree[n] == 0])
|
| 189 |
+
while len(leaves) > 0:
|
| 190 |
+
leaf = leaves.popleft()
|
| 191 |
+
if leaf not in union_xyz:
|
| 192 |
+
for p in G_copy.predecessors(leaf):
|
| 193 |
+
if G_copy.out_degree[p] == 1:
|
| 194 |
+
leaves.append(p)
|
| 195 |
+
G_copy.remove_node(leaf)
|
| 196 |
+
|
| 197 |
+
# transform the graph by removing outgoing edges from the
|
| 198 |
+
# conditioning set.
|
| 199 |
+
edges_to_remove = list(G_copy.out_edges(z))
|
| 200 |
+
G_copy.remove_edges_from(edges_to_remove)
|
| 201 |
+
|
| 202 |
+
# use disjoint-set data structure to check if any node in `x`
|
| 203 |
+
# occurs in the same weakly connected component as a node in `y`.
|
| 204 |
+
disjoint_set = UnionFind(G_copy.nodes())
|
| 205 |
+
for component in nx.weakly_connected_components(G_copy):
|
| 206 |
+
disjoint_set.union(*component)
|
| 207 |
+
disjoint_set.union(*x)
|
| 208 |
+
disjoint_set.union(*y)
|
| 209 |
+
|
| 210 |
+
if x and y and disjoint_set[next(iter(x))] == disjoint_set[next(iter(y))]:
|
| 211 |
+
return False
|
| 212 |
+
else:
|
| 213 |
+
return True
|
| 214 |
+
|
| 215 |
+
|
| 216 |
+
@not_implemented_for("undirected")
|
| 217 |
+
@nx._dispatch
|
| 218 |
+
def minimal_d_separator(G, u, v):
|
| 219 |
+
"""Compute a minimal d-separating set between 'u' and 'v'.
|
| 220 |
+
|
| 221 |
+
A d-separating set in a DAG is a set of nodes that blocks all paths
|
| 222 |
+
between the two nodes, 'u' and 'v'. This function
|
| 223 |
+
constructs a d-separating set that is "minimal", meaning it is the smallest
|
| 224 |
+
d-separating set for 'u' and 'v'. This is not necessarily
|
| 225 |
+
unique. For more details, see Notes.
|
| 226 |
+
|
| 227 |
+
Parameters
|
| 228 |
+
----------
|
| 229 |
+
G : graph
|
| 230 |
+
A networkx DAG.
|
| 231 |
+
u : node
|
| 232 |
+
A node in the graph, G.
|
| 233 |
+
v : node
|
| 234 |
+
A node in the graph, G.
|
| 235 |
+
|
| 236 |
+
Raises
|
| 237 |
+
------
|
| 238 |
+
NetworkXError
|
| 239 |
+
Raises a :exc:`NetworkXError` if the input graph is not a DAG.
|
| 240 |
+
|
| 241 |
+
NodeNotFound
|
| 242 |
+
If any of the input nodes are not found in the graph,
|
| 243 |
+
a :exc:`NodeNotFound` exception is raised.
|
| 244 |
+
|
| 245 |
+
References
|
| 246 |
+
----------
|
| 247 |
+
.. [1] Tian, J., & Paz, A. (1998). Finding Minimal D-separators.
|
| 248 |
+
|
| 249 |
+
Notes
|
| 250 |
+
-----
|
| 251 |
+
This function only finds ``a`` minimal d-separator. It does not guarantee
|
| 252 |
+
uniqueness, since in a DAG there may be more than one minimal d-separator
|
| 253 |
+
between two nodes. Moreover, this only checks for minimal separators
|
| 254 |
+
between two nodes, not two sets. Finding minimal d-separators between
|
| 255 |
+
two sets of nodes is not supported.
|
| 256 |
+
|
| 257 |
+
Uses the algorithm presented in [1]_. The complexity of the algorithm
|
| 258 |
+
is :math:`O(|E_{An}^m|)`, where :math:`|E_{An}^m|` stands for the
|
| 259 |
+
number of edges in the moralized graph of the sub-graph consisting
|
| 260 |
+
of only the ancestors of 'u' and 'v'. For full details, see [1]_.
|
| 261 |
+
|
| 262 |
+
The algorithm works by constructing the moral graph consisting of just
|
| 263 |
+
the ancestors of `u` and `v`. Then it constructs a candidate for
|
| 264 |
+
a separating set ``Z'`` from the predecessors of `u` and `v`.
|
| 265 |
+
Then BFS is run starting from `u` and marking nodes
|
| 266 |
+
found from ``Z'`` and calling those nodes ``Z''``.
|
| 267 |
+
Then BFS is run again starting from `v` and marking nodes if they are
|
| 268 |
+
present in ``Z''``. Those marked nodes are the returned minimal
|
| 269 |
+
d-separating set.
|
| 270 |
+
|
| 271 |
+
https://en.wikipedia.org/wiki/Bayesian_network#d-separation
|
| 272 |
+
"""
|
| 273 |
+
if not nx.is_directed_acyclic_graph(G):
|
| 274 |
+
raise nx.NetworkXError("graph should be directed acyclic")
|
| 275 |
+
|
| 276 |
+
union_uv = {u, v}
|
| 277 |
+
|
| 278 |
+
if any(n not in G.nodes for n in union_uv):
|
| 279 |
+
raise nx.NodeNotFound("one or more specified nodes not found in the graph")
|
| 280 |
+
|
| 281 |
+
# first construct the set of ancestors of X and Y
|
| 282 |
+
x_anc = nx.ancestors(G, u)
|
| 283 |
+
y_anc = nx.ancestors(G, v)
|
| 284 |
+
D_anc_xy = x_anc.union(y_anc)
|
| 285 |
+
D_anc_xy.update((u, v))
|
| 286 |
+
|
| 287 |
+
# second, construct the moralization of the subgraph of Anc(X,Y)
|
| 288 |
+
moral_G = nx.moral_graph(G.subgraph(D_anc_xy))
|
| 289 |
+
|
| 290 |
+
# find a separating set Z' in moral_G
|
| 291 |
+
Z_prime = set(G.predecessors(u)).union(set(G.predecessors(v)))
|
| 292 |
+
|
| 293 |
+
# perform BFS on the graph from 'x' to mark
|
| 294 |
+
Z_dprime = _bfs_with_marks(moral_G, u, Z_prime)
|
| 295 |
+
Z = _bfs_with_marks(moral_G, v, Z_dprime)
|
| 296 |
+
return Z
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
@not_implemented_for("undirected")
|
| 300 |
+
@nx._dispatch
|
| 301 |
+
def is_minimal_d_separator(G, u, v, z):
|
| 302 |
+
"""Determine if a d-separating set is minimal.
|
| 303 |
+
|
| 304 |
+
A d-separating set, `z`, in a DAG is a set of nodes that blocks
|
| 305 |
+
all paths between the two nodes, `u` and `v`. This function
|
| 306 |
+
verifies that a set is "minimal", meaning there is no smaller
|
| 307 |
+
d-separating set between the two nodes.
|
| 308 |
+
|
| 309 |
+
Note: This function checks whether `z` is a d-separator AND is minimal.
|
| 310 |
+
One can use the function `d_separated` to only check if `z` is a d-separator.
|
| 311 |
+
See examples below.
|
| 312 |
+
|
| 313 |
+
Parameters
|
| 314 |
+
----------
|
| 315 |
+
G : nx.DiGraph
|
| 316 |
+
The graph.
|
| 317 |
+
u : node
|
| 318 |
+
A node in the graph.
|
| 319 |
+
v : node
|
| 320 |
+
A node in the graph.
|
| 321 |
+
z : Set of nodes
|
| 322 |
+
The set of nodes to check if it is a minimal d-separating set.
|
| 323 |
+
The function :func:`d_separated` is called inside this function
|
| 324 |
+
to verify that `z` is in fact a d-separator.
|
| 325 |
+
|
| 326 |
+
Returns
|
| 327 |
+
-------
|
| 328 |
+
bool
|
| 329 |
+
Whether or not the set `z` is a d-separator and is also minimal.
|
| 330 |
+
|
| 331 |
+
Examples
|
| 332 |
+
--------
|
| 333 |
+
>>> G = nx.path_graph([0, 1, 2, 3], create_using=nx.DiGraph)
|
| 334 |
+
>>> G.add_node(4)
|
| 335 |
+
>>> nx.is_minimal_d_separator(G, 0, 2, {1})
|
| 336 |
+
True
|
| 337 |
+
>>> # since {1} is the minimal d-separator, {1, 3, 4} is not minimal
|
| 338 |
+
>>> nx.is_minimal_d_separator(G, 0, 2, {1, 3, 4})
|
| 339 |
+
False
|
| 340 |
+
>>> # alternatively, if we only want to check that {1, 3, 4} is a d-separator
|
| 341 |
+
>>> nx.d_separated(G, {0}, {4}, {1, 3, 4})
|
| 342 |
+
True
|
| 343 |
+
|
| 344 |
+
Raises
|
| 345 |
+
------
|
| 346 |
+
NetworkXError
|
| 347 |
+
Raises a :exc:`NetworkXError` if the input graph is not a DAG.
|
| 348 |
+
|
| 349 |
+
NodeNotFound
|
| 350 |
+
If any of the input nodes are not found in the graph,
|
| 351 |
+
a :exc:`NodeNotFound` exception is raised.
|
| 352 |
+
|
| 353 |
+
References
|
| 354 |
+
----------
|
| 355 |
+
.. [1] Tian, J., & Paz, A. (1998). Finding Minimal D-separators.
|
| 356 |
+
|
| 357 |
+
Notes
|
| 358 |
+
-----
|
| 359 |
+
This function only works on verifying a d-separating set is minimal
|
| 360 |
+
between two nodes. To verify that a d-separating set is minimal between
|
| 361 |
+
two sets of nodes is not supported.
|
| 362 |
+
|
| 363 |
+
Uses algorithm 2 presented in [1]_. The complexity of the algorithm
|
| 364 |
+
is :math:`O(|E_{An}^m|)`, where :math:`|E_{An}^m|` stands for the
|
| 365 |
+
number of edges in the moralized graph of the sub-graph consisting
|
| 366 |
+
of only the ancestors of ``u`` and ``v``.
|
| 367 |
+
|
| 368 |
+
The algorithm works by constructing the moral graph consisting of just
|
| 369 |
+
the ancestors of `u` and `v`. First, it performs BFS on the moral graph
|
| 370 |
+
starting from `u` and marking any nodes it encounters that are part of
|
| 371 |
+
the separating set, `z`. If a node is marked, then it does not continue
|
| 372 |
+
along that path. In the second stage, BFS with markings is repeated on the
|
| 373 |
+
moral graph starting from `v`. If at any stage, any node in `z` is
|
| 374 |
+
not marked, then `z` is considered not minimal. If the end of the algorithm
|
| 375 |
+
is reached, then `z` is minimal.
|
| 376 |
+
|
| 377 |
+
For full details, see [1]_.
|
| 378 |
+
|
| 379 |
+
https://en.wikipedia.org/wiki/Bayesian_network#d-separation
|
| 380 |
+
"""
|
| 381 |
+
if not nx.d_separated(G, {u}, {v}, z):
|
| 382 |
+
return False
|
| 383 |
+
|
| 384 |
+
x_anc = nx.ancestors(G, u)
|
| 385 |
+
y_anc = nx.ancestors(G, v)
|
| 386 |
+
xy_anc = x_anc.union(y_anc)
|
| 387 |
+
|
| 388 |
+
# if Z contains any node which is not in ancestors of X or Y
|
| 389 |
+
# then it is definitely not minimal
|
| 390 |
+
if any(node not in xy_anc for node in z):
|
| 391 |
+
return False
|
| 392 |
+
|
| 393 |
+
D_anc_xy = x_anc.union(y_anc)
|
| 394 |
+
D_anc_xy.update((u, v))
|
| 395 |
+
|
| 396 |
+
# second, construct the moralization of the subgraph
|
| 397 |
+
moral_G = nx.moral_graph(G.subgraph(D_anc_xy))
|
| 398 |
+
|
| 399 |
+
# start BFS from X
|
| 400 |
+
marks = _bfs_with_marks(moral_G, u, z)
|
| 401 |
+
|
| 402 |
+
# if not all the Z is marked, then the set is not minimal
|
| 403 |
+
if any(node not in marks for node in z):
|
| 404 |
+
return False
|
| 405 |
+
|
| 406 |
+
# similarly, start BFS from Y and check the marks
|
| 407 |
+
marks = _bfs_with_marks(moral_G, v, z)
|
| 408 |
+
# if not all the Z is marked, then the set is not minimal
|
| 409 |
+
if any(node not in marks for node in z):
|
| 410 |
+
return False
|
| 411 |
+
|
| 412 |
+
return True
|
| 413 |
+
|
| 414 |
+
|
| 415 |
+
@not_implemented_for("directed")
|
| 416 |
+
def _bfs_with_marks(G, start_node, check_set):
|
| 417 |
+
"""Breadth-first-search with markings.
|
| 418 |
+
|
| 419 |
+
Performs BFS starting from ``start_node`` and whenever a node
|
| 420 |
+
inside ``check_set`` is met, it is "marked". Once a node is marked,
|
| 421 |
+
BFS does not continue along that path. The resulting marked nodes
|
| 422 |
+
are returned.
|
| 423 |
+
|
| 424 |
+
Parameters
|
| 425 |
+
----------
|
| 426 |
+
G : nx.Graph
|
| 427 |
+
An undirected graph.
|
| 428 |
+
start_node : node
|
| 429 |
+
The start of the BFS.
|
| 430 |
+
check_set : set
|
| 431 |
+
The set of nodes to check against.
|
| 432 |
+
|
| 433 |
+
Returns
|
| 434 |
+
-------
|
| 435 |
+
marked : set
|
| 436 |
+
A set of nodes that were marked.
|
| 437 |
+
"""
|
| 438 |
+
visited = {}
|
| 439 |
+
marked = set()
|
| 440 |
+
queue = []
|
| 441 |
+
|
| 442 |
+
visited[start_node] = None
|
| 443 |
+
queue.append(start_node)
|
| 444 |
+
while queue:
|
| 445 |
+
m = queue.pop(0)
|
| 446 |
+
|
| 447 |
+
for nbr in G.neighbors(m):
|
| 448 |
+
if nbr not in visited:
|
| 449 |
+
# memoize where we visited so far
|
| 450 |
+
visited[nbr] = None
|
| 451 |
+
|
| 452 |
+
# mark the node in Z' and do not continue along that path
|
| 453 |
+
if nbr in check_set:
|
| 454 |
+
marked.add(nbr)
|
| 455 |
+
else:
|
| 456 |
+
queue.append(nbr)
|
| 457 |
+
return marked
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/dominating.py
ADDED
|
@@ -0,0 +1,94 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for computing dominating sets in a graph."""
|
| 2 |
+
from itertools import chain
|
| 3 |
+
|
| 4 |
+
import networkx as nx
|
| 5 |
+
from networkx.utils import arbitrary_element
|
| 6 |
+
|
| 7 |
+
__all__ = ["dominating_set", "is_dominating_set"]
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@nx._dispatch
|
| 11 |
+
def dominating_set(G, start_with=None):
|
| 12 |
+
r"""Finds a dominating set for the graph G.
|
| 13 |
+
|
| 14 |
+
A *dominating set* for a graph with node set *V* is a subset *D* of
|
| 15 |
+
*V* such that every node not in *D* is adjacent to at least one
|
| 16 |
+
member of *D* [1]_.
|
| 17 |
+
|
| 18 |
+
Parameters
|
| 19 |
+
----------
|
| 20 |
+
G : NetworkX graph
|
| 21 |
+
|
| 22 |
+
start_with : node (default=None)
|
| 23 |
+
Node to use as a starting point for the algorithm.
|
| 24 |
+
|
| 25 |
+
Returns
|
| 26 |
+
-------
|
| 27 |
+
D : set
|
| 28 |
+
A dominating set for G.
|
| 29 |
+
|
| 30 |
+
Notes
|
| 31 |
+
-----
|
| 32 |
+
This function is an implementation of algorithm 7 in [2]_ which
|
| 33 |
+
finds some dominating set, not necessarily the smallest one.
|
| 34 |
+
|
| 35 |
+
See also
|
| 36 |
+
--------
|
| 37 |
+
is_dominating_set
|
| 38 |
+
|
| 39 |
+
References
|
| 40 |
+
----------
|
| 41 |
+
.. [1] https://en.wikipedia.org/wiki/Dominating_set
|
| 42 |
+
|
| 43 |
+
.. [2] Abdol-Hossein Esfahanian. Connectivity Algorithms.
|
| 44 |
+
http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
|
| 45 |
+
|
| 46 |
+
"""
|
| 47 |
+
all_nodes = set(G)
|
| 48 |
+
if start_with is None:
|
| 49 |
+
start_with = arbitrary_element(all_nodes)
|
| 50 |
+
if start_with not in G:
|
| 51 |
+
raise nx.NetworkXError(f"node {start_with} is not in G")
|
| 52 |
+
dominating_set = {start_with}
|
| 53 |
+
dominated_nodes = set(G[start_with])
|
| 54 |
+
remaining_nodes = all_nodes - dominated_nodes - dominating_set
|
| 55 |
+
while remaining_nodes:
|
| 56 |
+
# Choose an arbitrary node and determine its undominated neighbors.
|
| 57 |
+
v = remaining_nodes.pop()
|
| 58 |
+
undominated_neighbors = set(G[v]) - dominating_set
|
| 59 |
+
# Add the node to the dominating set and the neighbors to the
|
| 60 |
+
# dominated set. Finally, remove all of those nodes from the set
|
| 61 |
+
# of remaining nodes.
|
| 62 |
+
dominating_set.add(v)
|
| 63 |
+
dominated_nodes |= undominated_neighbors
|
| 64 |
+
remaining_nodes -= undominated_neighbors
|
| 65 |
+
return dominating_set
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
@nx._dispatch
|
| 69 |
+
def is_dominating_set(G, nbunch):
|
| 70 |
+
"""Checks if `nbunch` is a dominating set for `G`.
|
| 71 |
+
|
| 72 |
+
A *dominating set* for a graph with node set *V* is a subset *D* of
|
| 73 |
+
*V* such that every node not in *D* is adjacent to at least one
|
| 74 |
+
member of *D* [1]_.
|
| 75 |
+
|
| 76 |
+
Parameters
|
| 77 |
+
----------
|
| 78 |
+
G : NetworkX graph
|
| 79 |
+
|
| 80 |
+
nbunch : iterable
|
| 81 |
+
An iterable of nodes in the graph `G`.
|
| 82 |
+
|
| 83 |
+
See also
|
| 84 |
+
--------
|
| 85 |
+
dominating_set
|
| 86 |
+
|
| 87 |
+
References
|
| 88 |
+
----------
|
| 89 |
+
.. [1] https://en.wikipedia.org/wiki/Dominating_set
|
| 90 |
+
|
| 91 |
+
"""
|
| 92 |
+
testset = {n for n in nbunch if n in G}
|
| 93 |
+
nbrs = set(chain.from_iterable(G[n] for n in testset))
|
| 94 |
+
return len(set(G) - testset - nbrs) == 0
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/efficiency_measures.py
ADDED
|
@@ -0,0 +1,168 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Provides functions for computing the efficiency of nodes and graphs."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.exception import NetworkXNoPath
|
| 5 |
+
|
| 6 |
+
from ..utils import not_implemented_for
|
| 7 |
+
|
| 8 |
+
__all__ = ["efficiency", "local_efficiency", "global_efficiency"]
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@not_implemented_for("directed")
@nx._dispatch
def efficiency(G, u, v):
    """Return the efficiency of the node pair ``(u, v)`` in `G`.

    The *efficiency* of a pair of nodes is the reciprocal of the shortest
    path distance between them [1]_.  If the nodes are not connected by
    any path the efficiency is defined to be 0.  Edge weights are ignored.

    Parameters
    ----------
    G : :class:`networkx.Graph`
        An undirected graph.
    u, v : node
        Nodes in the graph ``G``.

    Returns
    -------
    float
        Reciprocal of the shortest-path distance between the nodes, or 0
        if no path exists.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.efficiency(G, 2, 3)  # this gives efficiency for node 2 and 3
    0.5

    See also
    --------
    local_efficiency
    global_efficiency

    References
    ----------
    .. [1] Latora, Vito, and Massimo Marchiori.
           "Efficient behavior of small-world networks."
           *Physical Review Letters* 87.19 (2001): 198701.
           <https://doi.org/10.1103/PhysRevLett.87.198701>
    """
    try:
        distance = nx.shortest_path_length(G, u, v)
    except NetworkXNoPath:
        # Unreachable pair: efficiency is zero by definition.
        return 0
    return 1 / distance
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
@not_implemented_for("directed")
@nx._dispatch
def global_efficiency(G):
    """Returns the average global efficiency of the graph.

    The *efficiency* of a pair of nodes in a graph is the multiplicative
    inverse of the shortest path distance between the nodes. The *average
    global efficiency* of a graph is the average efficiency of all pairs of
    nodes [1]_.

    Parameters
    ----------
    G : :class:`networkx.Graph`
        An undirected graph for which to compute the average global efficiency.

    Returns
    -------
    float
        The average global efficiency of the graph.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> round(nx.global_efficiency(G), 12)
    0.916666666667

    Notes
    -----
    Edge weights are ignored when computing the shortest path distances.

    See also
    --------
    local_efficiency

    References
    ----------
    .. [1] Latora, Vito, and Massimo Marchiori.
           "Efficient behavior of small-world networks."
           *Physical Review Letters* 87.19 (2001): 198701.
           <https://doi.org/10.1103/PhysRevLett.87.198701>

    """
    n = len(G)
    # Number of ordered node pairs; zero for graphs with fewer than 2 nodes.
    denom = n * (n - 1)
    if denom == 0:
        return 0
    # TODO This can be made more efficient by computing all pairs shortest
    # path lengths in parallel.
    lengths = nx.all_pairs_shortest_path_length(G)
    # Sum reciprocal distances over all reachable ordered pairs (d > 0
    # excludes each node's zero-distance path to itself).
    g_eff = sum(
        1 / distance
        for _, targets in lengths
        for distance in targets.values()
        if distance > 0
    )
    return g_eff / denom
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
@not_implemented_for("directed")
@nx._dispatch
def local_efficiency(G):
    """Return the average local efficiency of the graph.

    The *local efficiency* of a node is the average global efficiency of
    the subgraph induced by its neighbors; the *average local efficiency*
    of a graph is the mean of this quantity over all nodes [1]_.  Edge
    weights are ignored when computing shortest path distances.

    Parameters
    ----------
    G : :class:`networkx.Graph`
        An undirected graph for which to compute the average local efficiency.

    Returns
    -------
    float
        The average local efficiency of the graph.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.local_efficiency(G)
    0.9166666666666667

    See also
    --------
    global_efficiency

    References
    ----------
    .. [1] Latora, Vito, and Massimo Marchiori.
           "Efficient behavior of small-world networks."
           *Physical Review Letters* 87.19 (2001): 198701.
           <https://doi.org/10.1103/PhysRevLett.87.198701>
    """
    # TODO This summation can be trivially parallelized.
    # G[node] is the neighbor view of ``node``; its induced subgraph
    # excludes ``node`` itself.
    total = sum(global_efficiency(G.subgraph(G[node])) for node in G)
    return total / len(G)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/euler.py
ADDED
|
@@ -0,0 +1,469 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Eulerian circuits and graphs.
|
| 3 |
+
"""
|
| 4 |
+
from itertools import combinations
|
| 5 |
+
|
| 6 |
+
import networkx as nx
|
| 7 |
+
|
| 8 |
+
from ..utils import arbitrary_element, not_implemented_for
|
| 9 |
+
|
| 10 |
+
__all__ = [
|
| 11 |
+
"is_eulerian",
|
| 12 |
+
"eulerian_circuit",
|
| 13 |
+
"eulerize",
|
| 14 |
+
"is_semieulerian",
|
| 15 |
+
"has_eulerian_path",
|
| 16 |
+
"eulerian_path",
|
| 17 |
+
]
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
@nx._dispatch
def is_eulerian(G):
    """Return True if and only if `G` is Eulerian.

    A graph is *Eulerian* if it has an Eulerian circuit — a closed walk
    that traverses each edge exactly once.

    Graphs with isolated vertices (i.e. vertices with zero degree) are not
    considered to have Eulerian circuits.  Therefore, if the graph is not
    connected (or not strongly connected, for directed graphs), this
    function returns False.

    Parameters
    ----------
    G : NetworkX graph
        A graph, either directed or undirected.

    Examples
    --------
    >>> nx.is_eulerian(nx.DiGraph({0: [3], 1: [2], 2: [3], 3: [0, 1]}))
    True
    >>> nx.is_eulerian(nx.complete_graph(5))
    True
    >>> nx.is_eulerian(nx.petersen_graph())
    False

    If you prefer to allow graphs with isolated vertices to have Eulerian
    circuits, first remove such vertices and then call `is_eulerian`:

    >>> G = nx.Graph([(0, 1), (1, 2), (0, 2)])
    >>> G.add_node(3)
    >>> nx.is_eulerian(G)
    False

    >>> G.remove_nodes_from(list(nx.isolates(G)))
    >>> nx.is_eulerian(G)
    True

    """
    if G.is_directed():
        # Directed case: every node balanced (in-degree == out-degree)
        # and the graph strongly connected.
        balanced = all(G.in_degree(node) == G.out_degree(node) for node in G)
        return balanced and nx.is_strongly_connected(G)
    # Undirected case: every degree even and the graph connected.
    every_degree_even = all(deg % 2 == 0 for _, deg in G.degree())
    return every_degree_even and nx.is_connected(G)
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
@nx._dispatch
def is_semieulerian(G):
    """Return True iff `G` is semi-Eulerian.

    A graph is semi-Eulerian when it admits an Eulerian path but not an
    Eulerian circuit.

    See Also
    --------
    has_eulerian_path
    is_eulerian
    """
    # A path must exist, but a full circuit must not.
    if not has_eulerian_path(G):
        return False
    return not is_eulerian(G)
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def _find_path_start(G):
    """Return a suitable starting vertex for an Eulerian path.

    If no path exists, return None.
    """
    if not has_eulerian_path(G):
        return None

    if is_eulerian(G):
        # With an Eulerian circuit, any node is a valid start.
        return arbitrary_element(G)

    if G.is_directed():
        # Exactly two nodes are unbalanced; the start is the one with
        # surplus out-degree.
        v1, v2 = (v for v in G if G.in_degree(v) != G.out_degree(v))
        return v1 if G.out_degree(v1) > G.in_degree(v1) else v2

    # Undirected: pick the first odd-degree vertex encountered.
    return next(v for v in G if G.degree(v) % 2 != 0)
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def _simplegraph_eulerian_circuit(G, source):
    """Yield the edges of an Eulerian circuit in a (non-multi) graph.

    Hierholzer-style stack algorithm.  ``G`` is consumed: each traversed
    edge is removed, so callers must pass a disposable copy.  Edges are
    yielded in the order vertices are popped off the stack; the callers
    in this module pass the *reversed* graph for the directed case so
    that this order corresponds to a forward walk in the original graph.
    """
    if G.is_directed():
        degree = G.out_degree
        edges = G.out_edges
    else:
        degree = G.degree
        edges = G.edges
    vertex_stack = [source]
    last_vertex = None
    while vertex_stack:
        current_vertex = vertex_stack[-1]
        if degree(current_vertex) == 0:
            # No remaining incident edges: emit the edge back to the
            # previously popped vertex and retire this one.
            if last_vertex is not None:
                yield (last_vertex, current_vertex)
            last_vertex = current_vertex
            vertex_stack.pop()
        else:
            # Follow (and consume) an arbitrary remaining incident edge.
            _, next_vertex = arbitrary_element(edges(current_vertex))
            vertex_stack.append(next_vertex)
            G.remove_edge(current_vertex, next_vertex)
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
def _multigraph_eulerian_circuit(G, source):
    """Yield edges ``(u, v, key)`` of an Eulerian circuit in a multigraph.

    Multigraph variant of :func:`_simplegraph_eulerian_circuit`: edge
    keys travel with the vertices on the stack so that parallel edges
    are distinguished.  ``G`` is consumed in place, so callers must pass
    a disposable copy.
    """
    if G.is_directed():
        degree = G.out_degree
        edges = G.out_edges
    else:
        degree = G.degree
        edges = G.edges
    vertex_stack = [(source, None)]
    last_vertex = None
    last_key = None
    while vertex_stack:
        current_vertex, current_key = vertex_stack[-1]
        if degree(current_vertex) == 0:
            # No remaining incident edges: emit the edge (with its key)
            # back to the previously popped vertex and retire this one.
            if last_vertex is not None:
                yield (last_vertex, current_vertex, last_key)
            last_vertex, last_key = current_vertex, current_key
            vertex_stack.pop()
        else:
            # Follow (and consume) an arbitrary remaining incident edge,
            # keeping its key to disambiguate parallel edges.
            triple = arbitrary_element(edges(current_vertex, keys=True))
            _, next_vertex, next_key = triple
            vertex_stack.append((next_vertex, next_key))
            G.remove_edge(current_vertex, next_vertex, next_key)
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
@nx._dispatch
def eulerian_circuit(G, source=None, keys=False):
    """Return an iterator over the edges of an Eulerian circuit in `G`.

    An *Eulerian circuit* is a closed walk that includes each edge of a
    graph exactly once.

    Parameters
    ----------
    G : NetworkX graph
        A graph, either directed or undirected.
    source : node, optional
        Starting node for circuit.
    keys : bool
        If False, edges generated by this function will be of the form
        ``(u, v)``. Otherwise, edges will be of the form ``(u, v, k)``.
        This option is ignored unless `G` is a multigraph.

    Returns
    -------
    edges : iterator
        An iterator over edges in the Eulerian circuit.

    Raises
    ------
    NetworkXError
        If the graph is not Eulerian.

    See Also
    --------
    is_eulerian

    Notes
    -----
    This is a linear time implementation of an algorithm adapted from [1]_.

    For general information about Euler tours, see [2]_.

    References
    ----------
    .. [1] J. Edmonds, E. L. Johnson.
       Matching, Euler tours and the Chinese postman.
       Mathematical programming, Volume 5, Issue 1 (1973), 111-114.
    .. [2] https://en.wikipedia.org/wiki/Eulerian_path

    Examples
    --------
    >>> G = nx.complete_graph(3)
    >>> list(nx.eulerian_circuit(G))
    [(0, 2), (2, 1), (1, 0)]
    >>> list(nx.eulerian_circuit(G, source=1))
    [(1, 2), (2, 0), (0, 1)]
    >>> [u for u, v in nx.eulerian_circuit(G)]
    [0, 2, 1]

    """
    if not is_eulerian(G):
        raise nx.NetworkXError("G is not Eulerian.")
    # Work on a disposable graph.  Reversing a digraph lets the helper
    # emit edges in forward order for the original graph.
    G = G.reverse() if G.is_directed() else G.copy()
    if source is None:
        source = arbitrary_element(G)
    if not G.is_multigraph():
        yield from _simplegraph_eulerian_circuit(G, source)
        return
    for u, v, k in _multigraph_eulerian_circuit(G, source):
        yield (u, v, k) if keys else (u, v)
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
@nx._dispatch
def has_eulerian_path(G, source=None):
    """Return True iff `G` has an Eulerian path.

    An Eulerian path is a path in a graph which uses each edge of a graph
    exactly once. If `source` is specified, then this function checks
    whether an Eulerian path that starts at node `source` exists.

    A directed graph has an Eulerian path iff at most one vertex has
    ``out_degree - in_degree == 1``, at most one vertex has
    ``in_degree - out_degree == 1``, every other vertex has equal
    in-degree and out-degree, and all vertices belong to a single
    connected component of the underlying undirected graph.  With
    `source` given, either an Eulerian circuit must exist or `source`
    must be the unique vertex with surplus out-degree.

    An undirected graph has an Eulerian path iff exactly zero or two
    vertices have odd degree and the graph is connected.  With `source`
    given, either an Eulerian circuit must exist or `source` must have
    odd degree.

    Graphs with isolated vertices (i.e. vertices with zero degree) are
    not considered to have an Eulerian path, so disconnected (or, for
    directed graphs, not strongly connected) graphs yield False.

    Parameters
    ----------
    G : NetworkX Graph
        The graph to find an euler path in.
    source : node, optional
        Starting node for path.

    Returns
    -------
    Bool : True if G has an Eulerian path.

    Examples
    --------
    If you prefer to allow graphs with isolated vertices to have an
    Eulerian path, first remove such vertices:

    >>> G = nx.Graph([(0, 1), (1, 2), (0, 2)])
    >>> G.add_node(3)
    >>> nx.has_eulerian_path(G)
    False

    >>> G.remove_nodes_from(list(nx.isolates(G)))
    >>> nx.has_eulerian_path(G)
    True

    See Also
    --------
    is_eulerian
    eulerian_path
    """
    if nx.is_eulerian(G):
        return True

    if not G.is_directed():
        # Not Eulerian, so a path (if any) must start at an odd vertex.
        if source is not None and G.degree[source] % 2 != 1:
            return False
        # Exactly two odd-degree vertices (zero would mean Eulerian,
        # which was handled above).
        odd_count = sum(deg % 2 == 1 for _, deg in G.degree())
        return odd_count == 2 and nx.is_connected(G)

    ins = G.in_degree
    outs = G.out_degree
    # Not Eulerian, so the start must have one surplus outgoing edge.
    if source is not None and outs[source] - ins[source] != 1:
        return False

    surplus_in = 0
    surplus_out = 0
    for node in G:
        diff = ins[node] - outs[node]
        if diff == 1:
            surplus_in += 1
        elif diff == -1:
            surplus_out += 1
        elif diff != 0:
            # Imbalance greater than one rules out any Eulerian path.
            return False

    return surplus_in <= 1 and surplus_out <= 1 and nx.is_weakly_connected(G)
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
@nx._dispatch
def eulerian_path(G, source=None, keys=False):
    """Return an iterator over the edges of an Eulerian path in `G`.

    Parameters
    ----------
    G : NetworkX Graph
        The graph in which to look for an eulerian path.
    source : node or None (default: None)
        The node at which to start the search. None means search over all
        starting nodes.
    keys : Bool (default: False)
        Indicates whether to yield edge 3-tuples (u, v, edge_key).
        The default yields edge 2-tuples

    Yields
    ------
    Edge tuples along the eulerian path.

    Warning: If `source` provided is not the start node of an Euler path
    will raise error even if an Euler Path exists.
    """
    if not has_eulerian_path(G, source):
        raise nx.NetworkXError("Graph has no Eulerian paths.")
    if G.is_directed():
        # Reverse the digraph so the circuit helpers emit edges in
        # forward order for the original graph.
        G = G.reverse()
        # NOTE(review): _find_path_start is evaluated on the *reversed*
        # graph here — confirm this selects the intended start node when
        # `source` is None or the graph is not Eulerian.
        if source is None or nx.is_eulerian(G) is False:
            source = _find_path_start(G)
        if G.is_multigraph():
            for u, v, k in _multigraph_eulerian_circuit(G, source):
                if keys:
                    yield u, v, k
                else:
                    yield u, v
        else:
            yield from _simplegraph_eulerian_circuit(G, source)
    else:
        # Undirected: consume a copy, then reverse both the edge order
        # and each edge's endpoints to obtain a forward path.
        G = G.copy()
        if source is None:
            source = _find_path_start(G)
        if G.is_multigraph():
            if keys:
                yield from reversed(
                    [(v, u, k) for u, v, k in _multigraph_eulerian_circuit(G, source)]
                )
            else:
                yield from reversed(
                    [(v, u) for u, v, k in _multigraph_eulerian_circuit(G, source)]
                )
        else:
            yield from reversed(
                [(v, u) for u, v in _simplegraph_eulerian_circuit(G, source)]
            )
|
| 386 |
+
|
| 387 |
+
|
| 388 |
+
@not_implemented_for("directed")
@nx._dispatch
def eulerize(G):
    """Transforms a graph into an Eulerian graph.

    If `G` is Eulerian the result is `G` as a MultiGraph, otherwise the result is a smallest
    (in terms of the number of edges) multigraph whose underlying simple graph is `G`.

    Parameters
    ----------
    G : NetworkX graph
       An undirected graph

    Returns
    -------
    G : NetworkX multigraph

    Raises
    ------
    NetworkXError
       If the graph is not connected.

    See Also
    --------
    is_eulerian
    eulerian_circuit

    References
    ----------
    .. [1] J. Edmonds, E. L. Johnson.
       Matching, Euler tours and the Chinese postman.
       Mathematical programming, Volume 5, Issue 1 (1973), 111-114.
    .. [2] https://en.wikipedia.org/wiki/Eulerian_path
    .. [3] http://web.math.princeton.edu/math_alive/5/Notes1.pdf

    Examples
    --------
        >>> G = nx.complete_graph(10)
        >>> H = nx.eulerize(G)
        >>> nx.is_eulerian(H)
        True

    """
    if G.order() == 0:
        raise nx.NetworkXPointlessConcept("Cannot Eulerize null graph")
    if not nx.is_connected(G):
        raise nx.NetworkXError("G is not connected")
    # Odd-degree nodes are the reason G is not Eulerian; they must be
    # paired up and connected by duplicated paths.
    odd_degree_nodes = [n for n, d in G.degree() if d % 2 == 1]
    G = nx.MultiGraph(G)
    if len(odd_degree_nodes) == 0:
        # Already Eulerian; just return the multigraph view.
        return G

    # get all shortest paths between vertices of odd degree
    odd_deg_pairs_paths = [
        (m, {n: nx.shortest_path(G, source=m, target=n)})
        for m, n in combinations(odd_degree_nodes, 2)
    ]

    # use the number of vertices in a graph + 1 as an upper bound on
    # the maximum length of a path in G
    upper_bound_on_max_path_length = len(G) + 1

    # use "len(G) + 1 - len(P)",
    # where P is a shortest path between vertices n and m,
    # as edge-weights in a new graph
    # store the paths in the graph for easy indexing later
    # (shorter paths get larger weights, so a *maximum*-weight matching
    # below corresponds to a minimum total path length)
    Gp = nx.Graph()
    for n, Ps in odd_deg_pairs_paths:
        for m, P in Ps.items():
            if n != m:
                Gp.add_edge(
                    m, n, weight=upper_bound_on_max_path_length - len(P), path=P
                )

    # find the minimum weight matching of edges in the weighted graph
    best_matching = nx.Graph(list(nx.max_weight_matching(Gp)))

    # duplicate each edge along each path in the set of paths in Gp
    for m, n in best_matching.edges():
        path = Gp[m][n]["path"]
        G.add_edges_from(nx.utils.pairwise(path))
    return G
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/flow/__pycache__/gomory_hu.cpython-311.pyc
ADDED
|
Binary file (7.09 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/flow/preflowpush.py
ADDED
|
@@ -0,0 +1,429 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Highest-label preflow-push algorithm for maximum flow problems.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from collections import deque
|
| 6 |
+
from itertools import islice
|
| 7 |
+
|
| 8 |
+
import networkx as nx
|
| 9 |
+
|
| 10 |
+
from ...utils import arbitrary_element
|
| 11 |
+
from .utils import (
|
| 12 |
+
CurrentEdge,
|
| 13 |
+
GlobalRelabelThreshold,
|
| 14 |
+
Level,
|
| 15 |
+
build_residual_network,
|
| 16 |
+
detect_unboundedness,
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
__all__ = ["preflow_push"]
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, value_only):
|
| 23 |
+
"""Implementation of the highest-label preflow-push algorithm."""
|
| 24 |
+
if s not in G:
|
| 25 |
+
raise nx.NetworkXError(f"node {str(s)} not in graph")
|
| 26 |
+
if t not in G:
|
| 27 |
+
raise nx.NetworkXError(f"node {str(t)} not in graph")
|
| 28 |
+
if s == t:
|
| 29 |
+
raise nx.NetworkXError("source and sink are the same node")
|
| 30 |
+
|
| 31 |
+
if global_relabel_freq is None:
|
| 32 |
+
global_relabel_freq = 0
|
| 33 |
+
if global_relabel_freq < 0:
|
| 34 |
+
raise nx.NetworkXError("global_relabel_freq must be nonnegative.")
|
| 35 |
+
|
| 36 |
+
if residual is None:
|
| 37 |
+
R = build_residual_network(G, capacity)
|
| 38 |
+
else:
|
| 39 |
+
R = residual
|
| 40 |
+
|
| 41 |
+
detect_unboundedness(R, s, t)
|
| 42 |
+
|
| 43 |
+
R_nodes = R.nodes
|
| 44 |
+
R_pred = R.pred
|
| 45 |
+
R_succ = R.succ
|
| 46 |
+
|
| 47 |
+
# Initialize/reset the residual network.
|
| 48 |
+
for u in R:
|
| 49 |
+
R_nodes[u]["excess"] = 0
|
| 50 |
+
for e in R_succ[u].values():
|
| 51 |
+
e["flow"] = 0
|
| 52 |
+
|
| 53 |
+
def reverse_bfs(src):
|
| 54 |
+
"""Perform a reverse breadth-first search from src in the residual
|
| 55 |
+
network.
|
| 56 |
+
"""
|
| 57 |
+
heights = {src: 0}
|
| 58 |
+
q = deque([(src, 0)])
|
| 59 |
+
while q:
|
| 60 |
+
u, height = q.popleft()
|
| 61 |
+
height += 1
|
| 62 |
+
for v, attr in R_pred[u].items():
|
| 63 |
+
if v not in heights and attr["flow"] < attr["capacity"]:
|
| 64 |
+
heights[v] = height
|
| 65 |
+
q.append((v, height))
|
| 66 |
+
return heights
|
| 67 |
+
|
| 68 |
+
# Initialize heights of the nodes.
|
| 69 |
+
heights = reverse_bfs(t)
|
| 70 |
+
|
| 71 |
+
if s not in heights:
|
| 72 |
+
# t is not reachable from s in the residual network. The maximum flow
|
| 73 |
+
# must be zero.
|
| 74 |
+
R.graph["flow_value"] = 0
|
| 75 |
+
return R
|
| 76 |
+
|
| 77 |
+
n = len(R)
|
| 78 |
+
# max_height represents the height of the highest level below level n with
|
| 79 |
+
# at least one active node.
|
| 80 |
+
max_height = max(heights[u] for u in heights if u != s)
|
| 81 |
+
heights[s] = n
|
| 82 |
+
|
| 83 |
+
grt = GlobalRelabelThreshold(n, R.size(), global_relabel_freq)
|
| 84 |
+
|
| 85 |
+
# Initialize heights and 'current edge' data structures of the nodes.
|
| 86 |
+
for u in R:
|
| 87 |
+
R_nodes[u]["height"] = heights[u] if u in heights else n + 1
|
| 88 |
+
R_nodes[u]["curr_edge"] = CurrentEdge(R_succ[u])
|
| 89 |
+
|
| 90 |
+
def push(u, v, flow):
|
| 91 |
+
"""Push flow units of flow from u to v."""
|
| 92 |
+
R_succ[u][v]["flow"] += flow
|
| 93 |
+
R_succ[v][u]["flow"] -= flow
|
| 94 |
+
R_nodes[u]["excess"] -= flow
|
| 95 |
+
R_nodes[v]["excess"] += flow
|
| 96 |
+
|
| 97 |
+
# The maximum flow must be nonzero now. Initialize the preflow by
|
| 98 |
+
# saturating all edges emanating from s.
|
| 99 |
+
for u, attr in R_succ[s].items():
|
| 100 |
+
flow = attr["capacity"]
|
| 101 |
+
if flow > 0:
|
| 102 |
+
push(s, u, flow)
|
| 103 |
+
|
| 104 |
+
# Partition nodes into levels.
|
| 105 |
+
levels = [Level() for i in range(2 * n)]
|
| 106 |
+
for u in R:
|
| 107 |
+
if u != s and u != t:
|
| 108 |
+
level = levels[R_nodes[u]["height"]]
|
| 109 |
+
if R_nodes[u]["excess"] > 0:
|
| 110 |
+
level.active.add(u)
|
| 111 |
+
else:
|
| 112 |
+
level.inactive.add(u)
|
| 113 |
+
|
| 114 |
+
def activate(v):
|
| 115 |
+
"""Move a node from the inactive set to the active set of its level."""
|
| 116 |
+
if v != s and v != t:
|
| 117 |
+
level = levels[R_nodes[v]["height"]]
|
| 118 |
+
if v in level.inactive:
|
| 119 |
+
level.inactive.remove(v)
|
| 120 |
+
level.active.add(v)
|
| 121 |
+
|
| 122 |
+
def relabel(u):
|
| 123 |
+
"""Relabel a node to create an admissible edge."""
|
| 124 |
+
grt.add_work(len(R_succ[u]))
|
| 125 |
+
return (
|
| 126 |
+
min(
|
| 127 |
+
R_nodes[v]["height"]
|
| 128 |
+
for v, attr in R_succ[u].items()
|
| 129 |
+
if attr["flow"] < attr["capacity"]
|
| 130 |
+
)
|
| 131 |
+
+ 1
|
| 132 |
+
)
|
| 133 |
+
|
| 134 |
+
def discharge(u, is_phase1):
|
| 135 |
+
"""Discharge a node until it becomes inactive or, during phase 1 (see
|
| 136 |
+
below), its height reaches at least n. The node is known to have the
|
| 137 |
+
largest height among active nodes.
|
| 138 |
+
"""
|
| 139 |
+
height = R_nodes[u]["height"]
|
| 140 |
+
curr_edge = R_nodes[u]["curr_edge"]
|
| 141 |
+
# next_height represents the next height to examine after discharging
|
| 142 |
+
# the current node. During phase 1, it is capped to below n.
|
| 143 |
+
next_height = height
|
| 144 |
+
levels[height].active.remove(u)
|
| 145 |
+
while True:
|
| 146 |
+
v, attr = curr_edge.get()
|
| 147 |
+
if height == R_nodes[v]["height"] + 1 and attr["flow"] < attr["capacity"]:
|
| 148 |
+
flow = min(R_nodes[u]["excess"], attr["capacity"] - attr["flow"])
|
| 149 |
+
push(u, v, flow)
|
| 150 |
+
activate(v)
|
| 151 |
+
if R_nodes[u]["excess"] == 0:
|
| 152 |
+
# The node has become inactive.
|
| 153 |
+
levels[height].inactive.add(u)
|
| 154 |
+
break
|
| 155 |
+
try:
|
| 156 |
+
curr_edge.move_to_next()
|
| 157 |
+
except StopIteration:
|
| 158 |
+
# We have run off the end of the adjacency list, and there can
|
| 159 |
+
# be no more admissible edges. Relabel the node to create one.
|
| 160 |
+
height = relabel(u)
|
| 161 |
+
if is_phase1 and height >= n - 1:
|
| 162 |
+
# Although the node is still active, with a height at least
|
| 163 |
+
# n - 1, it is now known to be on the s side of the minimum
|
| 164 |
+
# s-t cut. Stop processing it until phase 2.
|
| 165 |
+
levels[height].active.add(u)
|
| 166 |
+
break
|
| 167 |
+
# The first relabel operation after global relabeling may not
|
| 168 |
+
# increase the height of the node since the 'current edge' data
|
| 169 |
+
# structure is not rewound. Use height instead of (height - 1)
|
| 170 |
+
# in case other active nodes at the same level are missed.
|
| 171 |
+
next_height = height
|
| 172 |
+
R_nodes[u]["height"] = height
|
| 173 |
+
return next_height
|
| 174 |
+
|
| 175 |
+
def gap_heuristic(height):
|
| 176 |
+
"""Apply the gap heuristic."""
|
| 177 |
+
# Move all nodes at levels (height + 1) to max_height to level n + 1.
|
| 178 |
+
for level in islice(levels, height + 1, max_height + 1):
|
| 179 |
+
for u in level.active:
|
| 180 |
+
R_nodes[u]["height"] = n + 1
|
| 181 |
+
for u in level.inactive:
|
| 182 |
+
R_nodes[u]["height"] = n + 1
|
| 183 |
+
levels[n + 1].active.update(level.active)
|
| 184 |
+
level.active.clear()
|
| 185 |
+
levels[n + 1].inactive.update(level.inactive)
|
| 186 |
+
level.inactive.clear()
|
| 187 |
+
|
| 188 |
+
def global_relabel(from_sink):
|
| 189 |
+
"""Apply the global relabeling heuristic."""
|
| 190 |
+
src = t if from_sink else s
|
| 191 |
+
heights = reverse_bfs(src)
|
| 192 |
+
if not from_sink:
|
| 193 |
+
# s must be reachable from t. Remove t explicitly.
|
| 194 |
+
del heights[t]
|
| 195 |
+
max_height = max(heights.values())
|
| 196 |
+
if from_sink:
|
| 197 |
+
# Also mark nodes from which t is unreachable for relabeling. This
|
| 198 |
+
# serves the same purpose as the gap heuristic.
|
| 199 |
+
for u in R:
|
| 200 |
+
if u not in heights and R_nodes[u]["height"] < n:
|
| 201 |
+
heights[u] = n + 1
|
| 202 |
+
else:
|
| 203 |
+
# Shift the computed heights because the height of s is n.
|
| 204 |
+
for u in heights:
|
| 205 |
+
heights[u] += n
|
| 206 |
+
max_height += n
|
| 207 |
+
del heights[src]
|
| 208 |
+
for u, new_height in heights.items():
|
| 209 |
+
old_height = R_nodes[u]["height"]
|
| 210 |
+
if new_height != old_height:
|
| 211 |
+
if u in levels[old_height].active:
|
| 212 |
+
levels[old_height].active.remove(u)
|
| 213 |
+
levels[new_height].active.add(u)
|
| 214 |
+
else:
|
| 215 |
+
levels[old_height].inactive.remove(u)
|
| 216 |
+
levels[new_height].inactive.add(u)
|
| 217 |
+
R_nodes[u]["height"] = new_height
|
| 218 |
+
return max_height
|
| 219 |
+
|
| 220 |
+
# Phase 1: Find the maximum preflow by pushing as much flow as possible to
|
| 221 |
+
# t.
|
| 222 |
+
|
| 223 |
+
height = max_height
|
| 224 |
+
while height > 0:
|
| 225 |
+
# Discharge active nodes in the current level.
|
| 226 |
+
while True:
|
| 227 |
+
level = levels[height]
|
| 228 |
+
if not level.active:
|
| 229 |
+
# All active nodes in the current level have been discharged.
|
| 230 |
+
# Move to the next lower level.
|
| 231 |
+
height -= 1
|
| 232 |
+
break
|
| 233 |
+
# Record the old height and level for the gap heuristic.
|
| 234 |
+
old_height = height
|
| 235 |
+
old_level = level
|
| 236 |
+
u = arbitrary_element(level.active)
|
| 237 |
+
height = discharge(u, True)
|
| 238 |
+
if grt.is_reached():
|
| 239 |
+
# Global relabeling heuristic: Recompute the exact heights of
|
| 240 |
+
# all nodes.
|
| 241 |
+
height = global_relabel(True)
|
| 242 |
+
max_height = height
|
| 243 |
+
grt.clear_work()
|
| 244 |
+
elif not old_level.active and not old_level.inactive:
|
| 245 |
+
# Gap heuristic: If the level at old_height is empty (a 'gap'),
|
| 246 |
+
# a minimum cut has been identified. All nodes with heights
|
| 247 |
+
# above old_height can have their heights set to n + 1 and not
|
| 248 |
+
# be further processed before a maximum preflow is found.
|
| 249 |
+
gap_heuristic(old_height)
|
| 250 |
+
height = old_height - 1
|
| 251 |
+
max_height = height
|
| 252 |
+
else:
|
| 253 |
+
# Update the height of the highest level with at least one
|
| 254 |
+
# active node.
|
| 255 |
+
max_height = max(max_height, height)
|
| 256 |
+
|
| 257 |
+
# A maximum preflow has been found. The excess at t is the maximum flow
|
| 258 |
+
# value.
|
| 259 |
+
if value_only:
|
| 260 |
+
R.graph["flow_value"] = R_nodes[t]["excess"]
|
| 261 |
+
return R
|
| 262 |
+
|
| 263 |
+
# Phase 2: Convert the maximum preflow into a maximum flow by returning the
|
| 264 |
+
# excess to s.
|
| 265 |
+
|
| 266 |
+
# Relabel all nodes so that they have accurate heights.
|
| 267 |
+
height = global_relabel(False)
|
| 268 |
+
grt.clear_work()
|
| 269 |
+
|
| 270 |
+
# Continue to discharge the active nodes.
|
| 271 |
+
while height > n:
|
| 272 |
+
# Discharge active nodes in the current level.
|
| 273 |
+
while True:
|
| 274 |
+
level = levels[height]
|
| 275 |
+
if not level.active:
|
| 276 |
+
# All active nodes in the current level have been discharged.
|
| 277 |
+
# Move to the next lower level.
|
| 278 |
+
height -= 1
|
| 279 |
+
break
|
| 280 |
+
u = arbitrary_element(level.active)
|
| 281 |
+
height = discharge(u, False)
|
| 282 |
+
if grt.is_reached():
|
| 283 |
+
# Global relabeling heuristic.
|
| 284 |
+
height = global_relabel(False)
|
| 285 |
+
grt.clear_work()
|
| 286 |
+
|
| 287 |
+
R.graph["flow_value"] = R_nodes[t]["excess"]
|
| 288 |
+
return R
|
| 289 |
+
|
| 290 |
+
|
| 291 |
+
@nx._dispatch(
    graphs={"G": 0, "residual?": 4},
    edge_attrs={"capacity": float("inf")},
    preserve_edge_attrs={"residual": {"capacity": float("inf")}},
    preserve_graph_attrs={"residual"},
)
def preflow_push(
    G, s, t, capacity="capacity", residual=None, global_relabel_freq=1, value_only=False
):
    r"""Find a maximum single-commodity flow using the highest-label
    preflow-push algorithm.

    This function returns the residual network resulting after computing
    the maximum flow. See below for details about the conventions
    NetworkX uses for defining residual networks.

    This algorithm has a running time of $O(n^2 \sqrt{m})$ for $n$ nodes and
    $m$ edges.


    Parameters
    ----------
    G : NetworkX graph
        Edges of the graph are expected to have an attribute called
        'capacity'. If this attribute is not present, the edge is
        considered to have infinite capacity.

    s : node
        Source node for the flow.

    t : node
        Sink node for the flow.

    capacity : string
        Edges of the graph G are expected to have an attribute capacity
        that indicates how much flow the edge can support. If this
        attribute is not present, the edge is considered to have
        infinite capacity. Default value: 'capacity'.

    residual : NetworkX graph
        Residual network on which the algorithm is to be executed. If None, a
        new residual network is created. Default value: None.

    global_relabel_freq : integer, float
        Relative frequency of applying the global relabeling heuristic to speed
        up the algorithm. If it is None, the heuristic is disabled. Default
        value: 1.

    value_only : bool
        If False, compute a maximum flow; otherwise, compute a maximum preflow
        which is enough for computing the maximum flow value. Default value:
        False.

    Returns
    -------
    R : NetworkX DiGraph
        Residual network after computing the maximum flow.

    Raises
    ------
    NetworkXError
        The algorithm does not support MultiGraph and MultiDiGraph. If
        the input graph is an instance of one of these two classes, a
        NetworkXError is raised.

    NetworkXUnbounded
        If the graph has a path of infinite capacity, the value of a
        feasible flow on the graph is unbounded above and the function
        raises a NetworkXUnbounded.

    See also
    --------
    :meth:`maximum_flow`
    :meth:`minimum_cut`
    :meth:`edmonds_karp`
    :meth:`shortest_augmenting_path`

    Notes
    -----
    The residual network :samp:`R` from an input graph :samp:`G` has the
    same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair
    of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a
    self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists
    in :samp:`G`. For each node :samp:`u` in :samp:`R`,
    :samp:`R.nodes[u]['excess']` represents the difference between flow into
    :samp:`u` and flow out of :samp:`u`.

    For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']`
    is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists
    in :samp:`G` or zero otherwise. If the capacity is infinite,
    :samp:`R[u][v]['capacity']` will have a high arbitrary finite value
    that does not affect the solution of the problem. This value is stored in
    :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`,
    :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and
    satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`.

    The flow value, defined as the total flow into :samp:`t`, the sink, is
    stored in :samp:`R.graph['flow_value']`. Reachability to :samp:`t` using
    only edges :samp:`(u, v)` such that
    :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum
    :samp:`s`-:samp:`t` cut.

    Examples
    --------
    >>> from networkx.algorithms.flow import preflow_push

    The functions that implement flow algorithms and output a residual
    network, such as this one, are not imported to the base NetworkX
    namespace, so you have to explicitly import them from the flow package.

    >>> G = nx.DiGraph()
    >>> G.add_edge("x", "a", capacity=3.0)
    >>> G.add_edge("x", "b", capacity=1.0)
    >>> G.add_edge("a", "c", capacity=3.0)
    >>> G.add_edge("b", "c", capacity=5.0)
    >>> G.add_edge("b", "d", capacity=4.0)
    >>> G.add_edge("d", "e", capacity=2.0)
    >>> G.add_edge("c", "y", capacity=2.0)
    >>> G.add_edge("e", "y", capacity=3.0)
    >>> R = preflow_push(G, "x", "y")
    >>> flow_value = nx.maximum_flow_value(G, "x", "y")
    >>> flow_value == R.graph["flow_value"]
    True
    >>> # preflow_push also stores the maximum flow value
    >>> # in the excess attribute of the sink node t
    >>> flow_value == R.nodes["y"]["excess"]
    True
    >>> # For some problems, you might only want to compute a
    >>> # maximum preflow.
    >>> R = preflow_push(G, "x", "y", value_only=True)
    >>> flow_value == R.graph["flow_value"]
    True
    >>> flow_value == R.nodes["y"]["excess"]
    True

    """
    # Delegate all of the work to the module-level implementation, then
    # record which algorithm produced the residual network so callers can
    # distinguish results from different flow functions.
    R = preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, value_only)
    R.graph["algorithm"] = "preflow_push"
    return R
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/matching.py
ADDED
|
@@ -0,0 +1,1151 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for computing and verifying matchings in a graph."""
|
| 2 |
+
from collections import Counter
|
| 3 |
+
from itertools import combinations, repeat
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.utils import not_implemented_for
|
| 7 |
+
|
| 8 |
+
__all__ = [
|
| 9 |
+
"is_matching",
|
| 10 |
+
"is_maximal_matching",
|
| 11 |
+
"is_perfect_matching",
|
| 12 |
+
"max_weight_matching",
|
| 13 |
+
"min_weight_matching",
|
| 14 |
+
"maximal_matching",
|
| 15 |
+
]
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
@not_implemented_for("multigraph")
@not_implemented_for("directed")
@nx._dispatch
def maximal_matching(G):
    r"""Find a maximal matching in the graph.

    A matching is a subset of edges in which no node occurs more than once.
    A maximal matching cannot add more edges and still be a matching.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    Returns
    -------
    matching : set
        A maximal matching of the graph.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (2, 4), (3, 5), (4, 5)])
    >>> sorted(nx.maximal_matching(G))
    [(1, 2), (3, 5)]

    Notes
    -----
    The algorithm greedily selects a maximal matching M of the graph G
    (i.e. no superset of M exists). It runs in $O(|E|)$ time.
    """
    chosen = set()
    covered = set()
    for edge in G.edges():
        u, v = edge
        # Greedily keep any non-self-loop edge whose endpoints are both
        # still unmatched, then mark those endpoints as matched so no
        # later edge can reuse them.
        if u != v and u not in covered and v not in covered:
            chosen.add(edge)
            covered.add(u)
            covered.add(v)
    return chosen
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def matching_dict_to_set(matching):
    """Converts matching dict format to matching set format

    Converts a dictionary representing a matching (as returned by
    :func:`max_weight_matching`) to a set representing a matching (as
    returned by :func:`maximal_matching`).

    In the definition of maximal matching adopted by NetworkX,
    self-loops are not allowed, so the provided dictionary is expected
    to never have any mapping from a key to itself. However, the
    dictionary is expected to have mirrored key/value pairs, for
    example, key ``u`` with value ``v`` and key ``v`` with value ``u``.

    """
    result = set()
    for u, v in matching.items():
        pair = (u, v)
        # Each matched edge occurs twice in the dict (u -> v and v -> u);
        # keep only the first orientation encountered.
        if (v, u) in result or pair in result:
            continue
        if u == v:
            raise nx.NetworkXError(f"Selfloops cannot appear in matchings {pair}")
        result.add(pair)
    return result
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
@nx._dispatch
def is_matching(G, matching):
    """Return True if ``matching`` is a valid matching of ``G``

    A *matching* in a graph is a set of edges in which no two distinct
    edges share a common endpoint. Each node is incident to at most one
    edge in the matching. The edges are said to be independent.

    Parameters
    ----------
    G : NetworkX graph

    matching : dict or set
        A dictionary or set representing a matching. If a dictionary, it
        must have ``matching[u] == v`` and ``matching[v] == u`` for each
        edge ``(u, v)`` in the matching. If a set, it must have elements
        of the form ``(u, v)``, where ``(u, v)`` is an edge in the
        matching.

    Returns
    -------
    bool
        Whether the given set or dictionary represents a valid matching
        in the graph.

    Raises
    ------
    NetworkXError
        If the proposed matching has an edge to a node not in G.
        Or if the matching is not a collection of 2-tuple edges.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (2, 4), (3, 5), (4, 5)])
    >>> nx.is_matching(G, {1: 3, 2: 4})  # using dict to represent matching
    True

    >>> nx.is_matching(G, {(1, 3), (2, 4)})  # using set to represent matching
    True

    """
    # Normalize the dict representation to a set of (u, v) edges.
    if isinstance(matching, dict):
        matching = matching_dict_to_set(matching)

    nodes = set()
    for edge in matching:
        if len(edge) != 2:
            raise nx.NetworkXError(f"matching has non-2-tuple edge {edge}")
        u, v = edge
        if u not in G or v not in G:
            raise nx.NetworkXError(f"matching contains edge {edge} with node not in G")
        if u == v:
            # A self-loop can never belong to a matching.
            return False
        if not G.has_edge(u, v):
            return False
        if u in nodes or v in nodes:
            # An endpoint is shared by two edges of the proposed matching.
            return False
        nodes.update(edge)
    return True
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
@nx._dispatch
def is_maximal_matching(G, matching):
    """Return True if ``matching`` is a maximal matching of ``G``

    A *maximal matching* in a graph is a matching in which adding any
    edge would cause the set to no longer be a valid matching.

    Parameters
    ----------
    G : NetworkX graph

    matching : dict or set
        A dictionary or set representing a matching. If a dictionary, it
        must have ``matching[u] == v`` and ``matching[v] == u`` for each
        edge ``(u, v)`` in the matching. If a set, it must have elements
        of the form ``(u, v)``, where ``(u, v)`` is an edge in the
        matching.

    Returns
    -------
    bool
        Whether the given set or dictionary represents a valid maximal
        matching in the graph.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (3, 5)])
    >>> nx.is_maximal_matching(G, {(1, 2), (3, 4)})
    True

    """
    # Normalize the dict representation to a set of (u, v) edges.
    if isinstance(matching, dict):
        matching = matching_dict_to_set(matching)

    # First pass: verify that the proposed set is a matching at all,
    # while recording both orientations of each edge and all matched nodes.
    oriented = set()
    matched_nodes = set()
    for edge in matching:
        if len(edge) != 2:
            raise nx.NetworkXError(f"matching has non-2-tuple edge {edge}")
        u, v = edge
        if u not in G or v not in G:
            raise nx.NetworkXError(f"matching contains edge {edge} with node not in G")
        if u == v:
            return False
        if not G.has_edge(u, v):
            return False
        if u in matched_nodes or v in matched_nodes:
            return False
        matched_nodes.add(u)
        matched_nodes.add(v)
        oriented.add(edge)
        oriented.add((v, u))

    # Second pass: the matching is maximal iff no remaining edge of G
    # (ignoring self-loops) could be added with both endpoints unmatched.
    for u, v in G.edges:
        if (u, v) in oriented:
            continue
        if u != v and u not in matched_nodes and v not in matched_nodes:
            return False
    return True
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
@nx._dispatch
def is_perfect_matching(G, matching):
    """Return True if ``matching`` is a perfect matching for ``G``

    A *perfect matching* in a graph is a matching in which exactly one edge
    is incident upon each vertex.

    Parameters
    ----------
    G : NetworkX graph

    matching : dict or set
        A dictionary or set representing a matching. If a dictionary, it
        must have ``matching[u] == v`` and ``matching[v] == u`` for each
        edge ``(u, v)`` in the matching. If a set, it must have elements
        of the form ``(u, v)``, where ``(u, v)`` is an edge in the
        matching.

    Returns
    -------
    bool
        Whether the given set or dictionary represents a valid perfect
        matching in the graph.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (2, 4), (3, 5), (4, 5), (4, 6)])
    >>> my_match = {1: 2, 3: 5, 4: 6}
    >>> nx.is_perfect_matching(G, my_match)
    True

    """
    # Normalize the dict representation to a set of (u, v) edges.
    if isinstance(matching, dict):
        matching = matching_dict_to_set(matching)

    covered = set()
    for edge in matching:
        if len(edge) != 2:
            raise nx.NetworkXError(f"matching has non-2-tuple edge {edge}")
        u, v = edge
        if u not in G or v not in G:
            raise nx.NetworkXError(f"matching contains edge {edge} with node not in G")
        # Reject self-loops, non-edges, and doubly-matched endpoints
        # (the same checks as is_matching, in the same order).
        if u == v or not G.has_edge(u, v) or u in covered or v in covered:
            return False
        covered.add(u)
        covered.add(v)
    # Perfect means every node of G is matched exactly once.
    return len(covered) == len(G)
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
@not_implemented_for("multigraph")
|
| 261 |
+
@not_implemented_for("directed")
|
| 262 |
+
@nx._dispatch(edge_attrs="weight")
|
| 263 |
+
def min_weight_matching(G, weight="weight"):
|
| 264 |
+
"""Computing a minimum-weight maximal matching of G.
|
| 265 |
+
|
| 266 |
+
Use the maximum-weight algorithm with edge weights subtracted
|
| 267 |
+
from the maximum weight of all edges.
|
| 268 |
+
|
| 269 |
+
A matching is a subset of edges in which no node occurs more than once.
|
| 270 |
+
The weight of a matching is the sum of the weights of its edges.
|
| 271 |
+
A maximal matching cannot add more edges and still be a matching.
|
| 272 |
+
The cardinality of a matching is the number of matched edges.
|
| 273 |
+
|
| 274 |
+
This method replaces the edge weights with 1 plus the maximum edge weight
|
| 275 |
+
minus the original edge weight.
|
| 276 |
+
|
| 277 |
+
new_weight = (max_weight + 1) - edge_weight
|
| 278 |
+
|
| 279 |
+
then runs :func:`max_weight_matching` with the new weights.
|
| 280 |
+
The max weight matching with these new weights corresponds
|
| 281 |
+
to the min weight matching using the original weights.
|
| 282 |
+
Adding 1 to the max edge weight keeps all edge weights positive
|
| 283 |
+
and as integers if they started as integers.
|
| 284 |
+
|
| 285 |
+
You might worry that adding 1 to each weight would make the algorithm
|
| 286 |
+
favor matchings with more edges. But we use the parameter
|
| 287 |
+
`maxcardinality=True` in `max_weight_matching` to ensure that the
|
| 288 |
+
number of edges in the competing matchings are the same and thus
|
| 289 |
+
the optimum does not change due to changes in the number of edges.
|
| 290 |
+
|
| 291 |
+
Read the documentation of `max_weight_matching` for more information.
|
| 292 |
+
|
| 293 |
+
Parameters
|
| 294 |
+
----------
|
| 295 |
+
G : NetworkX graph
|
| 296 |
+
Undirected graph
|
| 297 |
+
|
| 298 |
+
weight: string, optional (default='weight')
|
| 299 |
+
Edge data key corresponding to the edge weight.
|
| 300 |
+
If key not found, uses 1 as weight.
|
| 301 |
+
|
| 302 |
+
Returns
|
| 303 |
+
-------
|
| 304 |
+
matching : set
|
| 305 |
+
A minimal weight matching of the graph.
|
| 306 |
+
|
| 307 |
+
See Also
|
| 308 |
+
--------
|
| 309 |
+
max_weight_matching
|
| 310 |
+
"""
|
| 311 |
+
if len(G.edges) == 0:
|
| 312 |
+
return max_weight_matching(G, maxcardinality=True, weight=weight)
|
| 313 |
+
G_edges = G.edges(data=weight, default=1)
|
| 314 |
+
max_weight = 1 + max(w for _, _, w in G_edges)
|
| 315 |
+
InvG = nx.Graph()
|
| 316 |
+
edges = ((u, v, max_weight - w) for u, v, w in G_edges)
|
| 317 |
+
InvG.add_weighted_edges_from(edges, weight=weight)
|
| 318 |
+
return max_weight_matching(InvG, maxcardinality=True, weight=weight)
|
| 319 |
+
|
| 320 |
+
|
| 321 |
+
@not_implemented_for("multigraph")
|
| 322 |
+
@not_implemented_for("directed")
|
| 323 |
+
@nx._dispatch(edge_attrs="weight")
|
| 324 |
+
def max_weight_matching(G, maxcardinality=False, weight="weight"):
|
| 325 |
+
"""Compute a maximum-weighted matching of G.
|
| 326 |
+
|
| 327 |
+
A matching is a subset of edges in which no node occurs more than once.
|
| 328 |
+
The weight of a matching is the sum of the weights of its edges.
|
| 329 |
+
A maximal matching cannot add more edges and still be a matching.
|
| 330 |
+
The cardinality of a matching is the number of matched edges.
|
| 331 |
+
|
| 332 |
+
Parameters
|
| 333 |
+
----------
|
| 334 |
+
G : NetworkX graph
|
| 335 |
+
Undirected graph
|
| 336 |
+
|
| 337 |
+
maxcardinality: bool, optional (default=False)
|
| 338 |
+
If maxcardinality is True, compute the maximum-cardinality matching
|
| 339 |
+
with maximum weight among all maximum-cardinality matchings.
|
| 340 |
+
|
| 341 |
+
weight: string, optional (default='weight')
|
| 342 |
+
Edge data key corresponding to the edge weight.
|
| 343 |
+
If key not found, uses 1 as weight.
|
| 344 |
+
|
| 345 |
+
|
| 346 |
+
Returns
|
| 347 |
+
-------
|
| 348 |
+
matching : set
|
| 349 |
+
A maximal matching of the graph.
|
| 350 |
+
|
| 351 |
+
Examples
|
| 352 |
+
--------
|
| 353 |
+
>>> G = nx.Graph()
|
| 354 |
+
>>> edges = [(1, 2, 6), (1, 3, 2), (2, 3, 1), (2, 4, 7), (3, 5, 9), (4, 5, 3)]
|
| 355 |
+
>>> G.add_weighted_edges_from(edges)
|
| 356 |
+
>>> sorted(nx.max_weight_matching(G))
|
| 357 |
+
[(2, 4), (5, 3)]
|
| 358 |
+
|
| 359 |
+
Notes
|
| 360 |
+
-----
|
| 361 |
+
If G has edges with weight attributes the edge data are used as
|
| 362 |
+
weight values else the weights are assumed to be 1.
|
| 363 |
+
|
| 364 |
+
This function takes time O(number_of_nodes ** 3).
|
| 365 |
+
|
| 366 |
+
If all edge weights are integers, the algorithm uses only integer
|
| 367 |
+
computations. If floating point weights are used, the algorithm
|
| 368 |
+
could return a slightly suboptimal matching due to numeric
|
| 369 |
+
precision errors.
|
| 370 |
+
|
| 371 |
+
This method is based on the "blossom" method for finding augmenting
|
| 372 |
+
paths and the "primal-dual" method for finding a matching of maximum
|
| 373 |
+
weight, both methods invented by Jack Edmonds [1]_.
|
| 374 |
+
|
| 375 |
+
Bipartite graphs can also be matched using the functions present in
|
| 376 |
+
:mod:`networkx.algorithms.bipartite.matching`.
|
| 377 |
+
|
| 378 |
+
References
|
| 379 |
+
----------
|
| 380 |
+
.. [1] "Efficient Algorithms for Finding Maximum Matching in Graphs",
|
| 381 |
+
Zvi Galil, ACM Computing Surveys, 1986.
|
| 382 |
+
"""
|
| 383 |
+
#
|
| 384 |
+
# The algorithm is taken from "Efficient Algorithms for Finding Maximum
|
| 385 |
+
# Matching in Graphs" by Zvi Galil, ACM Computing Surveys, 1986.
|
| 386 |
+
# It is based on the "blossom" method for finding augmenting paths and
|
| 387 |
+
# the "primal-dual" method for finding a matching of maximum weight, both
|
| 388 |
+
# methods invented by Jack Edmonds.
|
| 389 |
+
#
|
| 390 |
+
# A C program for maximum weight matching by Ed Rothberg was used
|
| 391 |
+
# extensively to validate this new code.
|
| 392 |
+
#
|
| 393 |
+
# Many terms used in the code comments are explained in the paper
|
| 394 |
+
# by Galil. You will probably need the paper to make sense of this code.
|
| 395 |
+
#
|
| 396 |
+
|
| 397 |
+
class NoNode:
|
| 398 |
+
"""Dummy value which is different from any node."""
|
| 399 |
+
|
| 400 |
+
class Blossom:
|
| 401 |
+
"""Representation of a non-trivial blossom or sub-blossom."""
|
| 402 |
+
|
| 403 |
+
__slots__ = ["childs", "edges", "mybestedges"]
|
| 404 |
+
|
| 405 |
+
# b.childs is an ordered list of b's sub-blossoms, starting with
|
| 406 |
+
# the base and going round the blossom.
|
| 407 |
+
|
| 408 |
+
# b.edges is the list of b's connecting edges, such that
|
| 409 |
+
# b.edges[i] = (v, w) where v is a vertex in b.childs[i]
|
| 410 |
+
# and w is a vertex in b.childs[wrap(i+1)].
|
| 411 |
+
|
| 412 |
+
# If b is a top-level S-blossom,
|
| 413 |
+
# b.mybestedges is a list of least-slack edges to neighbouring
|
| 414 |
+
# S-blossoms, or None if no such list has been computed yet.
|
| 415 |
+
# This is used for efficient computation of delta3.
|
| 416 |
+
|
| 417 |
+
# Generate the blossom's leaf vertices.
|
| 418 |
+
def leaves(self):
|
| 419 |
+
stack = [*self.childs]
|
| 420 |
+
while stack:
|
| 421 |
+
t = stack.pop()
|
| 422 |
+
if isinstance(t, Blossom):
|
| 423 |
+
stack.extend(t.childs)
|
| 424 |
+
else:
|
| 425 |
+
yield t
|
| 426 |
+
|
| 427 |
+
# Get a list of vertices.
|
| 428 |
+
gnodes = list(G)
|
| 429 |
+
if not gnodes:
|
| 430 |
+
return set() # don't bother with empty graphs
|
| 431 |
+
|
| 432 |
+
# Find the maximum edge weight.
|
| 433 |
+
maxweight = 0
|
| 434 |
+
allinteger = True
|
| 435 |
+
for i, j, d in G.edges(data=True):
|
| 436 |
+
wt = d.get(weight, 1)
|
| 437 |
+
if i != j and wt > maxweight:
|
| 438 |
+
maxweight = wt
|
| 439 |
+
allinteger = allinteger and (str(type(wt)).split("'")[1] in ("int", "long"))
|
| 440 |
+
|
| 441 |
+
# If v is a matched vertex, mate[v] is its partner vertex.
|
| 442 |
+
# If v is a single vertex, v does not occur as a key in mate.
|
| 443 |
+
# Initially all vertices are single; updated during augmentation.
|
| 444 |
+
mate = {}
|
| 445 |
+
|
| 446 |
+
# If b is a top-level blossom,
|
| 447 |
+
# label.get(b) is None if b is unlabeled (free),
|
| 448 |
+
# 1 if b is an S-blossom,
|
| 449 |
+
# 2 if b is a T-blossom.
|
| 450 |
+
# The label of a vertex is found by looking at the label of its top-level
|
| 451 |
+
# containing blossom.
|
| 452 |
+
# If v is a vertex inside a T-blossom, label[v] is 2 iff v is reachable
|
| 453 |
+
# from an S-vertex outside the blossom.
|
| 454 |
+
# Labels are assigned during a stage and reset after each augmentation.
|
| 455 |
+
label = {}
|
| 456 |
+
|
| 457 |
+
# If b is a labeled top-level blossom,
|
| 458 |
+
# labeledge[b] = (v, w) is the edge through which b obtained its label
|
| 459 |
+
# such that w is a vertex in b, or None if b's base vertex is single.
|
| 460 |
+
# If w is a vertex inside a T-blossom and label[w] == 2,
|
| 461 |
+
# labeledge[w] = (v, w) is an edge through which w is reachable from
|
| 462 |
+
# outside the blossom.
|
| 463 |
+
labeledge = {}
|
| 464 |
+
|
| 465 |
+
# If v is a vertex, inblossom[v] is the top-level blossom to which v
|
| 466 |
+
# belongs.
|
| 467 |
+
# If v is a top-level vertex, inblossom[v] == v since v is itself
|
| 468 |
+
# a (trivial) top-level blossom.
|
| 469 |
+
# Initially all vertices are top-level trivial blossoms.
|
| 470 |
+
inblossom = dict(zip(gnodes, gnodes))
|
| 471 |
+
|
| 472 |
+
# If b is a sub-blossom,
|
| 473 |
+
# blossomparent[b] is its immediate parent (sub-)blossom.
|
| 474 |
+
# If b is a top-level blossom, blossomparent[b] is None.
|
| 475 |
+
blossomparent = dict(zip(gnodes, repeat(None)))
|
| 476 |
+
|
| 477 |
+
# If b is a (sub-)blossom,
|
| 478 |
+
# blossombase[b] is its base VERTEX (i.e. recursive sub-blossom).
|
| 479 |
+
blossombase = dict(zip(gnodes, gnodes))
|
| 480 |
+
|
| 481 |
+
# If w is a free vertex (or an unreached vertex inside a T-blossom),
|
| 482 |
+
# bestedge[w] = (v, w) is the least-slack edge from an S-vertex,
|
| 483 |
+
# or None if there is no such edge.
|
| 484 |
+
# If b is a (possibly trivial) top-level S-blossom,
|
| 485 |
+
# bestedge[b] = (v, w) is the least-slack edge to a different S-blossom
|
| 486 |
+
# (v inside b), or None if there is no such edge.
|
| 487 |
+
# This is used for efficient computation of delta2 and delta3.
|
| 488 |
+
bestedge = {}
|
| 489 |
+
|
| 490 |
+
# If v is a vertex,
|
| 491 |
+
# dualvar[v] = 2 * u(v) where u(v) is the v's variable in the dual
|
| 492 |
+
# optimization problem (if all edge weights are integers, multiplication
|
| 493 |
+
# by two ensures that all values remain integers throughout the algorithm).
|
| 494 |
+
# Initially, u(v) = maxweight / 2.
|
| 495 |
+
dualvar = dict(zip(gnodes, repeat(maxweight)))
|
| 496 |
+
|
| 497 |
+
# If b is a non-trivial blossom,
|
| 498 |
+
# blossomdual[b] = z(b) where z(b) is b's variable in the dual
|
| 499 |
+
# optimization problem.
|
| 500 |
+
blossomdual = {}
|
| 501 |
+
|
| 502 |
+
# If (v, w) in allowedge or (w, v) in allowedg, then the edge
|
| 503 |
+
# (v, w) is known to have zero slack in the optimization problem;
|
| 504 |
+
# otherwise the edge may or may not have zero slack.
|
| 505 |
+
allowedge = {}
|
| 506 |
+
|
| 507 |
+
# Queue of newly discovered S-vertices.
|
| 508 |
+
queue = []
|
| 509 |
+
|
| 510 |
+
# Return 2 * slack of edge (v, w) (does not work inside blossoms).
|
| 511 |
+
def slack(v, w):
|
| 512 |
+
return dualvar[v] + dualvar[w] - 2 * G[v][w].get(weight, 1)
|
| 513 |
+
|
| 514 |
+
# Assign label t to the top-level blossom containing vertex w,
|
| 515 |
+
# coming through an edge from vertex v.
|
| 516 |
+
def assignLabel(w, t, v):
|
| 517 |
+
b = inblossom[w]
|
| 518 |
+
assert label.get(w) is None and label.get(b) is None
|
| 519 |
+
label[w] = label[b] = t
|
| 520 |
+
if v is not None:
|
| 521 |
+
labeledge[w] = labeledge[b] = (v, w)
|
| 522 |
+
else:
|
| 523 |
+
labeledge[w] = labeledge[b] = None
|
| 524 |
+
bestedge[w] = bestedge[b] = None
|
| 525 |
+
if t == 1:
|
| 526 |
+
# b became an S-vertex/blossom; add it(s vertices) to the queue.
|
| 527 |
+
if isinstance(b, Blossom):
|
| 528 |
+
queue.extend(b.leaves())
|
| 529 |
+
else:
|
| 530 |
+
queue.append(b)
|
| 531 |
+
elif t == 2:
|
| 532 |
+
# b became a T-vertex/blossom; assign label S to its mate.
|
| 533 |
+
# (If b is a non-trivial blossom, its base is the only vertex
|
| 534 |
+
# with an external mate.)
|
| 535 |
+
base = blossombase[b]
|
| 536 |
+
assignLabel(mate[base], 1, base)
|
| 537 |
+
|
| 538 |
+
# Trace back from vertices v and w to discover either a new blossom
|
| 539 |
+
# or an augmenting path. Return the base vertex of the new blossom,
|
| 540 |
+
# or NoNode if an augmenting path was found.
|
| 541 |
+
def scanBlossom(v, w):
|
| 542 |
+
# Trace back from v and w, placing breadcrumbs as we go.
|
| 543 |
+
path = []
|
| 544 |
+
base = NoNode
|
| 545 |
+
while v is not NoNode:
|
| 546 |
+
# Look for a breadcrumb in v's blossom or put a new breadcrumb.
|
| 547 |
+
b = inblossom[v]
|
| 548 |
+
if label[b] & 4:
|
| 549 |
+
base = blossombase[b]
|
| 550 |
+
break
|
| 551 |
+
assert label[b] == 1
|
| 552 |
+
path.append(b)
|
| 553 |
+
label[b] = 5
|
| 554 |
+
# Trace one step back.
|
| 555 |
+
if labeledge[b] is None:
|
| 556 |
+
# The base of blossom b is single; stop tracing this path.
|
| 557 |
+
assert blossombase[b] not in mate
|
| 558 |
+
v = NoNode
|
| 559 |
+
else:
|
| 560 |
+
assert labeledge[b][0] == mate[blossombase[b]]
|
| 561 |
+
v = labeledge[b][0]
|
| 562 |
+
b = inblossom[v]
|
| 563 |
+
assert label[b] == 2
|
| 564 |
+
# b is a T-blossom; trace one more step back.
|
| 565 |
+
v = labeledge[b][0]
|
| 566 |
+
# Swap v and w so that we alternate between both paths.
|
| 567 |
+
if w is not NoNode:
|
| 568 |
+
v, w = w, v
|
| 569 |
+
# Remove breadcrumbs.
|
| 570 |
+
for b in path:
|
| 571 |
+
label[b] = 1
|
| 572 |
+
# Return base vertex, if we found one.
|
| 573 |
+
return base
|
| 574 |
+
|
| 575 |
+
# Construct a new blossom with given base, through S-vertices v and w.
|
| 576 |
+
# Label the new blossom as S; set its dual variable to zero;
|
| 577 |
+
# relabel its T-vertices to S and add them to the queue.
|
| 578 |
+
def addBlossom(base, v, w):
|
| 579 |
+
bb = inblossom[base]
|
| 580 |
+
bv = inblossom[v]
|
| 581 |
+
bw = inblossom[w]
|
| 582 |
+
# Create blossom.
|
| 583 |
+
b = Blossom()
|
| 584 |
+
blossombase[b] = base
|
| 585 |
+
blossomparent[b] = None
|
| 586 |
+
blossomparent[bb] = b
|
| 587 |
+
# Make list of sub-blossoms and their interconnecting edge endpoints.
|
| 588 |
+
b.childs = path = []
|
| 589 |
+
b.edges = edgs = [(v, w)]
|
| 590 |
+
# Trace back from v to base.
|
| 591 |
+
while bv != bb:
|
| 592 |
+
# Add bv to the new blossom.
|
| 593 |
+
blossomparent[bv] = b
|
| 594 |
+
path.append(bv)
|
| 595 |
+
edgs.append(labeledge[bv])
|
| 596 |
+
assert label[bv] == 2 or (
|
| 597 |
+
label[bv] == 1 and labeledge[bv][0] == mate[blossombase[bv]]
|
| 598 |
+
)
|
| 599 |
+
# Trace one step back.
|
| 600 |
+
v = labeledge[bv][0]
|
| 601 |
+
bv = inblossom[v]
|
| 602 |
+
# Add base sub-blossom; reverse lists.
|
| 603 |
+
path.append(bb)
|
| 604 |
+
path.reverse()
|
| 605 |
+
edgs.reverse()
|
| 606 |
+
# Trace back from w to base.
|
| 607 |
+
while bw != bb:
|
| 608 |
+
# Add bw to the new blossom.
|
| 609 |
+
blossomparent[bw] = b
|
| 610 |
+
path.append(bw)
|
| 611 |
+
edgs.append((labeledge[bw][1], labeledge[bw][0]))
|
| 612 |
+
assert label[bw] == 2 or (
|
| 613 |
+
label[bw] == 1 and labeledge[bw][0] == mate[blossombase[bw]]
|
| 614 |
+
)
|
| 615 |
+
# Trace one step back.
|
| 616 |
+
w = labeledge[bw][0]
|
| 617 |
+
bw = inblossom[w]
|
| 618 |
+
# Set label to S.
|
| 619 |
+
assert label[bb] == 1
|
| 620 |
+
label[b] = 1
|
| 621 |
+
labeledge[b] = labeledge[bb]
|
| 622 |
+
# Set dual variable to zero.
|
| 623 |
+
blossomdual[b] = 0
|
| 624 |
+
# Relabel vertices.
|
| 625 |
+
for v in b.leaves():
|
| 626 |
+
if label[inblossom[v]] == 2:
|
| 627 |
+
# This T-vertex now turns into an S-vertex because it becomes
|
| 628 |
+
# part of an S-blossom; add it to the queue.
|
| 629 |
+
queue.append(v)
|
| 630 |
+
inblossom[v] = b
|
| 631 |
+
# Compute b.mybestedges.
|
| 632 |
+
bestedgeto = {}
|
| 633 |
+
for bv in path:
|
| 634 |
+
if isinstance(bv, Blossom):
|
| 635 |
+
if bv.mybestedges is not None:
|
| 636 |
+
# Walk this subblossom's least-slack edges.
|
| 637 |
+
nblist = bv.mybestedges
|
| 638 |
+
# The sub-blossom won't need this data again.
|
| 639 |
+
bv.mybestedges = None
|
| 640 |
+
else:
|
| 641 |
+
# This subblossom does not have a list of least-slack
|
| 642 |
+
# edges; get the information from the vertices.
|
| 643 |
+
nblist = [
|
| 644 |
+
(v, w) for v in bv.leaves() for w in G.neighbors(v) if v != w
|
| 645 |
+
]
|
| 646 |
+
else:
|
| 647 |
+
nblist = [(bv, w) for w in G.neighbors(bv) if bv != w]
|
| 648 |
+
for k in nblist:
|
| 649 |
+
(i, j) = k
|
| 650 |
+
if inblossom[j] == b:
|
| 651 |
+
i, j = j, i
|
| 652 |
+
bj = inblossom[j]
|
| 653 |
+
if (
|
| 654 |
+
bj != b
|
| 655 |
+
and label.get(bj) == 1
|
| 656 |
+
and ((bj not in bestedgeto) or slack(i, j) < slack(*bestedgeto[bj]))
|
| 657 |
+
):
|
| 658 |
+
bestedgeto[bj] = k
|
| 659 |
+
# Forget about least-slack edge of the subblossom.
|
| 660 |
+
bestedge[bv] = None
|
| 661 |
+
b.mybestedges = list(bestedgeto.values())
|
| 662 |
+
# Select bestedge[b].
|
| 663 |
+
mybestedge = None
|
| 664 |
+
bestedge[b] = None
|
| 665 |
+
for k in b.mybestedges:
|
| 666 |
+
kslack = slack(*k)
|
| 667 |
+
if mybestedge is None or kslack < mybestslack:
|
| 668 |
+
mybestedge = k
|
| 669 |
+
mybestslack = kslack
|
| 670 |
+
bestedge[b] = mybestedge
|
| 671 |
+
|
| 672 |
+
# Expand the given top-level blossom.
|
| 673 |
+
def expandBlossom(b, endstage):
|
| 674 |
+
# This is an obnoxiously complicated recursive function for the sake of
|
| 675 |
+
# a stack-transformation. So, we hack around the complexity by using
|
| 676 |
+
# a trampoline pattern. By yielding the arguments to each recursive
|
| 677 |
+
# call, we keep the actual callstack flat.
|
| 678 |
+
|
| 679 |
+
def _recurse(b, endstage):
|
| 680 |
+
# Convert sub-blossoms into top-level blossoms.
|
| 681 |
+
for s in b.childs:
|
| 682 |
+
blossomparent[s] = None
|
| 683 |
+
if isinstance(s, Blossom):
|
| 684 |
+
if endstage and blossomdual[s] == 0:
|
| 685 |
+
# Recursively expand this sub-blossom.
|
| 686 |
+
yield s
|
| 687 |
+
else:
|
| 688 |
+
for v in s.leaves():
|
| 689 |
+
inblossom[v] = s
|
| 690 |
+
else:
|
| 691 |
+
inblossom[s] = s
|
| 692 |
+
# If we expand a T-blossom during a stage, its sub-blossoms must be
|
| 693 |
+
# relabeled.
|
| 694 |
+
if (not endstage) and label.get(b) == 2:
|
| 695 |
+
# Start at the sub-blossom through which the expanding
|
| 696 |
+
# blossom obtained its label, and relabel sub-blossoms untili
|
| 697 |
+
# we reach the base.
|
| 698 |
+
# Figure out through which sub-blossom the expanding blossom
|
| 699 |
+
# obtained its label initially.
|
| 700 |
+
entrychild = inblossom[labeledge[b][1]]
|
| 701 |
+
# Decide in which direction we will go round the blossom.
|
| 702 |
+
j = b.childs.index(entrychild)
|
| 703 |
+
if j & 1:
|
| 704 |
+
# Start index is odd; go forward and wrap.
|
| 705 |
+
j -= len(b.childs)
|
| 706 |
+
jstep = 1
|
| 707 |
+
else:
|
| 708 |
+
# Start index is even; go backward.
|
| 709 |
+
jstep = -1
|
| 710 |
+
# Move along the blossom until we get to the base.
|
| 711 |
+
v, w = labeledge[b]
|
| 712 |
+
while j != 0:
|
| 713 |
+
# Relabel the T-sub-blossom.
|
| 714 |
+
if jstep == 1:
|
| 715 |
+
p, q = b.edges[j]
|
| 716 |
+
else:
|
| 717 |
+
q, p = b.edges[j - 1]
|
| 718 |
+
label[w] = None
|
| 719 |
+
label[q] = None
|
| 720 |
+
assignLabel(w, 2, v)
|
| 721 |
+
# Step to the next S-sub-blossom and note its forward edge.
|
| 722 |
+
allowedge[(p, q)] = allowedge[(q, p)] = True
|
| 723 |
+
j += jstep
|
| 724 |
+
if jstep == 1:
|
| 725 |
+
v, w = b.edges[j]
|
| 726 |
+
else:
|
| 727 |
+
w, v = b.edges[j - 1]
|
| 728 |
+
# Step to the next T-sub-blossom.
|
| 729 |
+
allowedge[(v, w)] = allowedge[(w, v)] = True
|
| 730 |
+
j += jstep
|
| 731 |
+
# Relabel the base T-sub-blossom WITHOUT stepping through to
|
| 732 |
+
# its mate (so don't call assignLabel).
|
| 733 |
+
bw = b.childs[j]
|
| 734 |
+
label[w] = label[bw] = 2
|
| 735 |
+
labeledge[w] = labeledge[bw] = (v, w)
|
| 736 |
+
bestedge[bw] = None
|
| 737 |
+
# Continue along the blossom until we get back to entrychild.
|
| 738 |
+
j += jstep
|
| 739 |
+
while b.childs[j] != entrychild:
|
| 740 |
+
# Examine the vertices of the sub-blossom to see whether
|
| 741 |
+
# it is reachable from a neighbouring S-vertex outside the
|
| 742 |
+
# expanding blossom.
|
| 743 |
+
bv = b.childs[j]
|
| 744 |
+
if label.get(bv) == 1:
|
| 745 |
+
# This sub-blossom just got label S through one of its
|
| 746 |
+
# neighbours; leave it be.
|
| 747 |
+
j += jstep
|
| 748 |
+
continue
|
| 749 |
+
if isinstance(bv, Blossom):
|
| 750 |
+
for v in bv.leaves():
|
| 751 |
+
if label.get(v):
|
| 752 |
+
break
|
| 753 |
+
else:
|
| 754 |
+
v = bv
|
| 755 |
+
# If the sub-blossom contains a reachable vertex, assign
|
| 756 |
+
# label T to the sub-blossom.
|
| 757 |
+
if label.get(v):
|
| 758 |
+
assert label[v] == 2
|
| 759 |
+
assert inblossom[v] == bv
|
| 760 |
+
label[v] = None
|
| 761 |
+
label[mate[blossombase[bv]]] = None
|
| 762 |
+
assignLabel(v, 2, labeledge[v][0])
|
| 763 |
+
j += jstep
|
| 764 |
+
# Remove the expanded blossom entirely.
|
| 765 |
+
label.pop(b, None)
|
| 766 |
+
labeledge.pop(b, None)
|
| 767 |
+
bestedge.pop(b, None)
|
| 768 |
+
del blossomparent[b]
|
| 769 |
+
del blossombase[b]
|
| 770 |
+
del blossomdual[b]
|
| 771 |
+
|
| 772 |
+
# Now, we apply the trampoline pattern. We simulate a recursive
|
| 773 |
+
# callstack by maintaining a stack of generators, each yielding a
|
| 774 |
+
# sequence of function arguments. We grow the stack by appending a call
|
| 775 |
+
# to _recurse on each argument tuple, and shrink the stack whenever a
|
| 776 |
+
# generator is exhausted.
|
| 777 |
+
stack = [_recurse(b, endstage)]
|
| 778 |
+
while stack:
|
| 779 |
+
top = stack[-1]
|
| 780 |
+
for s in top:
|
| 781 |
+
stack.append(_recurse(s, endstage))
|
| 782 |
+
break
|
| 783 |
+
else:
|
| 784 |
+
stack.pop()
|
| 785 |
+
|
| 786 |
+
# Swap matched/unmatched edges over an alternating path through blossom b
|
| 787 |
+
# between vertex v and the base vertex. Keep blossom bookkeeping
|
| 788 |
+
# consistent.
|
| 789 |
+
def augmentBlossom(b, v):
|
| 790 |
+
# This is an obnoxiously complicated recursive function for the sake of
|
| 791 |
+
# a stack-transformation. So, we hack around the complexity by using
|
| 792 |
+
# a trampoline pattern. By yielding the arguments to each recursive
|
| 793 |
+
# call, we keep the actual callstack flat.
|
| 794 |
+
|
| 795 |
+
def _recurse(b, v):
|
| 796 |
+
# Bubble up through the blossom tree from vertex v to an immediate
|
| 797 |
+
# sub-blossom of b.
|
| 798 |
+
t = v
|
| 799 |
+
while blossomparent[t] != b:
|
| 800 |
+
t = blossomparent[t]
|
| 801 |
+
# Recursively deal with the first sub-blossom.
|
| 802 |
+
if isinstance(t, Blossom):
|
| 803 |
+
yield (t, v)
|
| 804 |
+
# Decide in which direction we will go round the blossom.
|
| 805 |
+
i = j = b.childs.index(t)
|
| 806 |
+
if i & 1:
|
| 807 |
+
# Start index is odd; go forward and wrap.
|
| 808 |
+
j -= len(b.childs)
|
| 809 |
+
jstep = 1
|
| 810 |
+
else:
|
| 811 |
+
# Start index is even; go backward.
|
| 812 |
+
jstep = -1
|
| 813 |
+
# Move along the blossom until we get to the base.
|
| 814 |
+
while j != 0:
|
| 815 |
+
# Step to the next sub-blossom and augment it recursively.
|
| 816 |
+
j += jstep
|
| 817 |
+
t = b.childs[j]
|
| 818 |
+
if jstep == 1:
|
| 819 |
+
w, x = b.edges[j]
|
| 820 |
+
else:
|
| 821 |
+
x, w = b.edges[j - 1]
|
| 822 |
+
if isinstance(t, Blossom):
|
| 823 |
+
yield (t, w)
|
| 824 |
+
# Step to the next sub-blossom and augment it recursively.
|
| 825 |
+
j += jstep
|
| 826 |
+
t = b.childs[j]
|
| 827 |
+
if isinstance(t, Blossom):
|
| 828 |
+
yield (t, x)
|
| 829 |
+
# Match the edge connecting those sub-blossoms.
|
| 830 |
+
mate[w] = x
|
| 831 |
+
mate[x] = w
|
| 832 |
+
# Rotate the list of sub-blossoms to put the new base at the front.
|
| 833 |
+
b.childs = b.childs[i:] + b.childs[:i]
|
| 834 |
+
b.edges = b.edges[i:] + b.edges[:i]
|
| 835 |
+
blossombase[b] = blossombase[b.childs[0]]
|
| 836 |
+
assert blossombase[b] == v
|
| 837 |
+
|
| 838 |
+
# Now, we apply the trampoline pattern. We simulate a recursive
|
| 839 |
+
# callstack by maintaining a stack of generators, each yielding a
|
| 840 |
+
# sequence of function arguments. We grow the stack by appending a call
|
| 841 |
+
# to _recurse on each argument tuple, and shrink the stack whenever a
|
| 842 |
+
# generator is exhausted.
|
| 843 |
+
stack = [_recurse(b, v)]
|
| 844 |
+
while stack:
|
| 845 |
+
top = stack[-1]
|
| 846 |
+
for args in top:
|
| 847 |
+
stack.append(_recurse(*args))
|
| 848 |
+
break
|
| 849 |
+
else:
|
| 850 |
+
stack.pop()
|
| 851 |
+
|
| 852 |
+
# Swap matched/unmatched edges over an alternating path between two
|
| 853 |
+
# single vertices. The augmenting path runs through S-vertices v and w.
|
| 854 |
+
def augmentMatching(v, w):
|
| 855 |
+
for s, j in ((v, w), (w, v)):
|
| 856 |
+
# Match vertex s to vertex j. Then trace back from s
|
| 857 |
+
# until we find a single vertex, swapping matched and unmatched
|
| 858 |
+
# edges as we go.
|
| 859 |
+
while 1:
|
| 860 |
+
bs = inblossom[s]
|
| 861 |
+
assert label[bs] == 1
|
| 862 |
+
assert (labeledge[bs] is None and blossombase[bs] not in mate) or (
|
| 863 |
+
labeledge[bs][0] == mate[blossombase[bs]]
|
| 864 |
+
)
|
| 865 |
+
# Augment through the S-blossom from s to base.
|
| 866 |
+
if isinstance(bs, Blossom):
|
| 867 |
+
augmentBlossom(bs, s)
|
| 868 |
+
# Update mate[s]
|
| 869 |
+
mate[s] = j
|
| 870 |
+
# Trace one step back.
|
| 871 |
+
if labeledge[bs] is None:
|
| 872 |
+
# Reached single vertex; stop.
|
| 873 |
+
break
|
| 874 |
+
t = labeledge[bs][0]
|
| 875 |
+
bt = inblossom[t]
|
| 876 |
+
assert label[bt] == 2
|
| 877 |
+
# Trace one more step back.
|
| 878 |
+
s, j = labeledge[bt]
|
| 879 |
+
# Augment through the T-blossom from j to base.
|
| 880 |
+
assert blossombase[bt] == t
|
| 881 |
+
if isinstance(bt, Blossom):
|
| 882 |
+
augmentBlossom(bt, j)
|
| 883 |
+
# Update mate[j]
|
| 884 |
+
mate[j] = s
|
| 885 |
+
|
| 886 |
+
# Verify that the optimum solution has been reached.
|
| 887 |
+
def verifyOptimum():
|
| 888 |
+
if maxcardinality:
|
| 889 |
+
# Vertices may have negative dual;
|
| 890 |
+
# find a constant non-negative number to add to all vertex duals.
|
| 891 |
+
vdualoffset = max(0, -min(dualvar.values()))
|
| 892 |
+
else:
|
| 893 |
+
vdualoffset = 0
|
| 894 |
+
# 0. all dual variables are non-negative
|
| 895 |
+
assert min(dualvar.values()) + vdualoffset >= 0
|
| 896 |
+
assert len(blossomdual) == 0 or min(blossomdual.values()) >= 0
|
| 897 |
+
# 0. all edges have non-negative slack and
|
| 898 |
+
# 1. all matched edges have zero slack;
|
| 899 |
+
for i, j, d in G.edges(data=True):
|
| 900 |
+
wt = d.get(weight, 1)
|
| 901 |
+
if i == j:
|
| 902 |
+
continue # ignore self-loops
|
| 903 |
+
s = dualvar[i] + dualvar[j] - 2 * wt
|
| 904 |
+
iblossoms = [i]
|
| 905 |
+
jblossoms = [j]
|
| 906 |
+
while blossomparent[iblossoms[-1]] is not None:
|
| 907 |
+
iblossoms.append(blossomparent[iblossoms[-1]])
|
| 908 |
+
while blossomparent[jblossoms[-1]] is not None:
|
| 909 |
+
jblossoms.append(blossomparent[jblossoms[-1]])
|
| 910 |
+
iblossoms.reverse()
|
| 911 |
+
jblossoms.reverse()
|
| 912 |
+
for bi, bj in zip(iblossoms, jblossoms):
|
| 913 |
+
if bi != bj:
|
| 914 |
+
break
|
| 915 |
+
s += 2 * blossomdual[bi]
|
| 916 |
+
assert s >= 0
|
| 917 |
+
if mate.get(i) == j or mate.get(j) == i:
|
| 918 |
+
assert mate[i] == j and mate[j] == i
|
| 919 |
+
assert s == 0
|
| 920 |
+
# 2. all single vertices have zero dual value;
|
| 921 |
+
for v in gnodes:
|
| 922 |
+
assert (v in mate) or dualvar[v] + vdualoffset == 0
|
| 923 |
+
# 3. all blossoms with positive dual value are full.
|
| 924 |
+
for b in blossomdual:
|
| 925 |
+
if blossomdual[b] > 0:
|
| 926 |
+
assert len(b.edges) % 2 == 1
|
| 927 |
+
for i, j in b.edges[1::2]:
|
| 928 |
+
assert mate[i] == j and mate[j] == i
|
| 929 |
+
# Ok.
|
| 930 |
+
|
| 931 |
+
# Main loop: continue until no further improvement is possible.
|
| 932 |
+
while 1:
|
| 933 |
+
# Each iteration of this loop is a "stage".
|
| 934 |
+
# A stage finds an augmenting path and uses that to improve
|
| 935 |
+
# the matching.
|
| 936 |
+
|
| 937 |
+
# Remove labels from top-level blossoms/vertices.
|
| 938 |
+
label.clear()
|
| 939 |
+
labeledge.clear()
|
| 940 |
+
|
| 941 |
+
# Forget all about least-slack edges.
|
| 942 |
+
bestedge.clear()
|
| 943 |
+
for b in blossomdual:
|
| 944 |
+
b.mybestedges = None
|
| 945 |
+
|
| 946 |
+
# Loss of labeling means that we can not be sure that currently
|
| 947 |
+
# allowable edges remain allowable throughout this stage.
|
| 948 |
+
allowedge.clear()
|
| 949 |
+
|
| 950 |
+
# Make queue empty.
|
| 951 |
+
queue[:] = []
|
| 952 |
+
|
| 953 |
+
# Label single blossoms/vertices with S and put them in the queue.
|
| 954 |
+
for v in gnodes:
|
| 955 |
+
if (v not in mate) and label.get(inblossom[v]) is None:
|
| 956 |
+
assignLabel(v, 1, None)
|
| 957 |
+
|
| 958 |
+
# Loop until we succeed in augmenting the matching.
|
| 959 |
+
augmented = 0
|
| 960 |
+
while 1:
|
| 961 |
+
# Each iteration of this loop is a "substage".
|
| 962 |
+
# A substage tries to find an augmenting path;
|
| 963 |
+
# if found, the path is used to improve the matching and
|
| 964 |
+
# the stage ends. If there is no augmenting path, the
|
| 965 |
+
# primal-dual method is used to pump some slack out of
|
| 966 |
+
# the dual variables.
|
| 967 |
+
|
| 968 |
+
# Continue labeling until all vertices which are reachable
|
| 969 |
+
# through an alternating path have got a label.
|
| 970 |
+
while queue and not augmented:
|
| 971 |
+
# Take an S vertex from the queue.
|
| 972 |
+
v = queue.pop()
|
| 973 |
+
assert label[inblossom[v]] == 1
|
| 974 |
+
|
| 975 |
+
# Scan its neighbours:
|
| 976 |
+
for w in G.neighbors(v):
|
| 977 |
+
if w == v:
|
| 978 |
+
continue # ignore self-loops
|
| 979 |
+
# w is a neighbour to v
|
| 980 |
+
bv = inblossom[v]
|
| 981 |
+
bw = inblossom[w]
|
| 982 |
+
if bv == bw:
|
| 983 |
+
# this edge is internal to a blossom; ignore it
|
| 984 |
+
continue
|
| 985 |
+
if (v, w) not in allowedge:
|
| 986 |
+
kslack = slack(v, w)
|
| 987 |
+
if kslack <= 0:
|
| 988 |
+
# edge k has zero slack => it is allowable
|
| 989 |
+
allowedge[(v, w)] = allowedge[(w, v)] = True
|
| 990 |
+
if (v, w) in allowedge:
|
| 991 |
+
if label.get(bw) is None:
|
| 992 |
+
# (C1) w is a free vertex;
|
| 993 |
+
# label w with T and label its mate with S (R12).
|
| 994 |
+
assignLabel(w, 2, v)
|
| 995 |
+
elif label.get(bw) == 1:
|
| 996 |
+
# (C2) w is an S-vertex (not in the same blossom);
|
| 997 |
+
# follow back-links to discover either an
|
| 998 |
+
# augmenting path or a new blossom.
|
| 999 |
+
base = scanBlossom(v, w)
|
| 1000 |
+
if base is not NoNode:
|
| 1001 |
+
# Found a new blossom; add it to the blossom
|
| 1002 |
+
# bookkeeping and turn it into an S-blossom.
|
| 1003 |
+
addBlossom(base, v, w)
|
| 1004 |
+
else:
|
| 1005 |
+
# Found an augmenting path; augment the
|
| 1006 |
+
# matching and end this stage.
|
| 1007 |
+
augmentMatching(v, w)
|
| 1008 |
+
augmented = 1
|
| 1009 |
+
break
|
| 1010 |
+
elif label.get(w) is None:
|
| 1011 |
+
# w is inside a T-blossom, but w itself has not
|
| 1012 |
+
# yet been reached from outside the blossom;
|
| 1013 |
+
# mark it as reached (we need this to relabel
|
| 1014 |
+
# during T-blossom expansion).
|
| 1015 |
+
assert label[bw] == 2
|
| 1016 |
+
label[w] = 2
|
| 1017 |
+
labeledge[w] = (v, w)
|
| 1018 |
+
elif label.get(bw) == 1:
|
| 1019 |
+
# keep track of the least-slack non-allowable edge to
|
| 1020 |
+
# a different S-blossom.
|
| 1021 |
+
if bestedge.get(bv) is None or kslack < slack(*bestedge[bv]):
|
| 1022 |
+
bestedge[bv] = (v, w)
|
| 1023 |
+
elif label.get(w) is None:
|
| 1024 |
+
# w is a free vertex (or an unreached vertex inside
|
| 1025 |
+
# a T-blossom) but we can not reach it yet;
|
| 1026 |
+
# keep track of the least-slack edge that reaches w.
|
| 1027 |
+
if bestedge.get(w) is None or kslack < slack(*bestedge[w]):
|
| 1028 |
+
bestedge[w] = (v, w)
|
| 1029 |
+
|
| 1030 |
+
if augmented:
|
| 1031 |
+
break
|
| 1032 |
+
|
| 1033 |
+
# There is no augmenting path under these constraints;
|
| 1034 |
+
# compute delta and reduce slack in the optimization problem.
|
| 1035 |
+
# (Note that our vertex dual variables, edge slacks and delta's
|
| 1036 |
+
# are pre-multiplied by two.)
|
| 1037 |
+
deltatype = -1
|
| 1038 |
+
delta = deltaedge = deltablossom = None
|
| 1039 |
+
|
| 1040 |
+
# Compute delta1: the minimum value of any vertex dual.
|
| 1041 |
+
if not maxcardinality:
|
| 1042 |
+
deltatype = 1
|
| 1043 |
+
delta = min(dualvar.values())
|
| 1044 |
+
|
| 1045 |
+
# Compute delta2: the minimum slack on any edge between
|
| 1046 |
+
# an S-vertex and a free vertex.
|
| 1047 |
+
for v in G.nodes():
|
| 1048 |
+
if label.get(inblossom[v]) is None and bestedge.get(v) is not None:
|
| 1049 |
+
d = slack(*bestedge[v])
|
| 1050 |
+
if deltatype == -1 or d < delta:
|
| 1051 |
+
delta = d
|
| 1052 |
+
deltatype = 2
|
| 1053 |
+
deltaedge = bestedge[v]
|
| 1054 |
+
|
| 1055 |
+
# Compute delta3: half the minimum slack on any edge between
|
| 1056 |
+
# a pair of S-blossoms.
|
| 1057 |
+
for b in blossomparent:
|
| 1058 |
+
if (
|
| 1059 |
+
blossomparent[b] is None
|
| 1060 |
+
and label.get(b) == 1
|
| 1061 |
+
and bestedge.get(b) is not None
|
| 1062 |
+
):
|
| 1063 |
+
kslack = slack(*bestedge[b])
|
| 1064 |
+
if allinteger:
|
| 1065 |
+
assert (kslack % 2) == 0
|
| 1066 |
+
d = kslack // 2
|
| 1067 |
+
else:
|
| 1068 |
+
d = kslack / 2.0
|
| 1069 |
+
if deltatype == -1 or d < delta:
|
| 1070 |
+
delta = d
|
| 1071 |
+
deltatype = 3
|
| 1072 |
+
deltaedge = bestedge[b]
|
| 1073 |
+
|
| 1074 |
+
# Compute delta4: minimum z variable of any T-blossom.
|
| 1075 |
+
for b in blossomdual:
|
| 1076 |
+
if (
|
| 1077 |
+
blossomparent[b] is None
|
| 1078 |
+
and label.get(b) == 2
|
| 1079 |
+
and (deltatype == -1 or blossomdual[b] < delta)
|
| 1080 |
+
):
|
| 1081 |
+
delta = blossomdual[b]
|
| 1082 |
+
deltatype = 4
|
| 1083 |
+
deltablossom = b
|
| 1084 |
+
|
| 1085 |
+
if deltatype == -1:
|
| 1086 |
+
# No further improvement possible; max-cardinality optimum
|
| 1087 |
+
# reached. Do a final delta update to make the optimum
|
| 1088 |
+
# verifiable.
|
| 1089 |
+
assert maxcardinality
|
| 1090 |
+
deltatype = 1
|
| 1091 |
+
delta = max(0, min(dualvar.values()))
|
| 1092 |
+
|
| 1093 |
+
# Update dual variables according to delta.
|
| 1094 |
+
for v in gnodes:
|
| 1095 |
+
if label.get(inblossom[v]) == 1:
|
| 1096 |
+
# S-vertex: 2*u = 2*u - 2*delta
|
| 1097 |
+
dualvar[v] -= delta
|
| 1098 |
+
elif label.get(inblossom[v]) == 2:
|
| 1099 |
+
# T-vertex: 2*u = 2*u + 2*delta
|
| 1100 |
+
dualvar[v] += delta
|
| 1101 |
+
for b in blossomdual:
|
| 1102 |
+
if blossomparent[b] is None:
|
| 1103 |
+
if label.get(b) == 1:
|
| 1104 |
+
# top-level S-blossom: z = z + 2*delta
|
| 1105 |
+
blossomdual[b] += delta
|
| 1106 |
+
elif label.get(b) == 2:
|
| 1107 |
+
# top-level T-blossom: z = z - 2*delta
|
| 1108 |
+
blossomdual[b] -= delta
|
| 1109 |
+
|
| 1110 |
+
# Take action at the point where minimum delta occurred.
|
| 1111 |
+
if deltatype == 1:
|
| 1112 |
+
# No further improvement possible; optimum reached.
|
| 1113 |
+
break
|
| 1114 |
+
elif deltatype == 2:
|
| 1115 |
+
# Use the least-slack edge to continue the search.
|
| 1116 |
+
(v, w) = deltaedge
|
| 1117 |
+
assert label[inblossom[v]] == 1
|
| 1118 |
+
allowedge[(v, w)] = allowedge[(w, v)] = True
|
| 1119 |
+
queue.append(v)
|
| 1120 |
+
elif deltatype == 3:
|
| 1121 |
+
# Use the least-slack edge to continue the search.
|
| 1122 |
+
(v, w) = deltaedge
|
| 1123 |
+
allowedge[(v, w)] = allowedge[(w, v)] = True
|
| 1124 |
+
assert label[inblossom[v]] == 1
|
| 1125 |
+
queue.append(v)
|
| 1126 |
+
elif deltatype == 4:
|
| 1127 |
+
# Expand the least-z blossom.
|
| 1128 |
+
expandBlossom(deltablossom, False)
|
| 1129 |
+
|
| 1130 |
+
# End of a this substage.
|
| 1131 |
+
|
| 1132 |
+
# Paranoia check that the matching is symmetric.
|
| 1133 |
+
for v in mate:
|
| 1134 |
+
assert mate[mate[v]] == v
|
| 1135 |
+
|
| 1136 |
+
# Stop when no more augmenting path can be found.
|
| 1137 |
+
if not augmented:
|
| 1138 |
+
break
|
| 1139 |
+
|
| 1140 |
+
# End of a stage; expand all S-blossoms which have zero dual.
|
| 1141 |
+
for b in list(blossomdual.keys()):
|
| 1142 |
+
if b not in blossomdual:
|
| 1143 |
+
continue # already expanded
|
| 1144 |
+
if blossomparent[b] is None and label.get(b) == 1 and blossomdual[b] == 0:
|
| 1145 |
+
expandBlossom(b, True)
|
| 1146 |
+
|
| 1147 |
+
# Verify that we reached the optimum solution (only for integer weights).
|
| 1148 |
+
if allinteger:
|
| 1149 |
+
verifyOptimum()
|
| 1150 |
+
|
| 1151 |
+
return matching_dict_to_set(mate)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/planarity.py
ADDED
|
@@ -0,0 +1,1179 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from collections import defaultdict
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
__all__ = ["check_planarity", "is_planar", "PlanarEmbedding"]
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
@nx._dispatch
|
| 9 |
+
def is_planar(G):
|
| 10 |
+
"""Returns True if and only if `G` is planar.
|
| 11 |
+
|
| 12 |
+
A graph is *planar* iff it can be drawn in a plane without
|
| 13 |
+
any edge intersections.
|
| 14 |
+
|
| 15 |
+
Parameters
|
| 16 |
+
----------
|
| 17 |
+
G : NetworkX graph
|
| 18 |
+
|
| 19 |
+
Returns
|
| 20 |
+
-------
|
| 21 |
+
bool
|
| 22 |
+
Whether the graph is planar.
|
| 23 |
+
|
| 24 |
+
Examples
|
| 25 |
+
--------
|
| 26 |
+
>>> G = nx.Graph([(0, 1), (0, 2)])
|
| 27 |
+
>>> nx.is_planar(G)
|
| 28 |
+
True
|
| 29 |
+
>>> nx.is_planar(nx.complete_graph(5))
|
| 30 |
+
False
|
| 31 |
+
|
| 32 |
+
See Also
|
| 33 |
+
--------
|
| 34 |
+
check_planarity :
|
| 35 |
+
Check if graph is planar *and* return a `PlanarEmbedding` instance if True.
|
| 36 |
+
"""
|
| 37 |
+
|
| 38 |
+
return check_planarity(G, counterexample=False)[0]
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
@nx._dispatch
|
| 42 |
+
def check_planarity(G, counterexample=False):
|
| 43 |
+
"""Check if a graph is planar and return a counterexample or an embedding.
|
| 44 |
+
|
| 45 |
+
A graph is planar iff it can be drawn in a plane without
|
| 46 |
+
any edge intersections.
|
| 47 |
+
|
| 48 |
+
Parameters
|
| 49 |
+
----------
|
| 50 |
+
G : NetworkX graph
|
| 51 |
+
counterexample : bool
|
| 52 |
+
A Kuratowski subgraph (to proof non planarity) is only returned if set
|
| 53 |
+
to true.
|
| 54 |
+
|
| 55 |
+
Returns
|
| 56 |
+
-------
|
| 57 |
+
(is_planar, certificate) : (bool, NetworkX graph) tuple
|
| 58 |
+
is_planar is true if the graph is planar.
|
| 59 |
+
If the graph is planar `certificate` is a PlanarEmbedding
|
| 60 |
+
otherwise it is a Kuratowski subgraph.
|
| 61 |
+
|
| 62 |
+
Examples
|
| 63 |
+
--------
|
| 64 |
+
>>> G = nx.Graph([(0, 1), (0, 2)])
|
| 65 |
+
>>> is_planar, P = nx.check_planarity(G)
|
| 66 |
+
>>> print(is_planar)
|
| 67 |
+
True
|
| 68 |
+
|
| 69 |
+
When `G` is planar, a `PlanarEmbedding` instance is returned:
|
| 70 |
+
|
| 71 |
+
>>> P.get_data()
|
| 72 |
+
{0: [1, 2], 1: [0], 2: [0]}
|
| 73 |
+
|
| 74 |
+
Notes
|
| 75 |
+
-----
|
| 76 |
+
A (combinatorial) embedding consists of cyclic orderings of the incident
|
| 77 |
+
edges at each vertex. Given such an embedding there are multiple approaches
|
| 78 |
+
discussed in literature to drawing the graph (subject to various
|
| 79 |
+
constraints, e.g. integer coordinates), see e.g. [2].
|
| 80 |
+
|
| 81 |
+
The planarity check algorithm and extraction of the combinatorial embedding
|
| 82 |
+
is based on the Left-Right Planarity Test [1].
|
| 83 |
+
|
| 84 |
+
A counterexample is only generated if the corresponding parameter is set,
|
| 85 |
+
because the complexity of the counterexample generation is higher.
|
| 86 |
+
|
| 87 |
+
See also
|
| 88 |
+
--------
|
| 89 |
+
is_planar :
|
| 90 |
+
Check for planarity without creating a `PlanarEmbedding` or counterexample.
|
| 91 |
+
|
| 92 |
+
References
|
| 93 |
+
----------
|
| 94 |
+
.. [1] Ulrik Brandes:
|
| 95 |
+
The Left-Right Planarity Test
|
| 96 |
+
2009
|
| 97 |
+
http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.217.9208
|
| 98 |
+
.. [2] Takao Nishizeki, Md Saidur Rahman:
|
| 99 |
+
Planar graph drawing
|
| 100 |
+
Lecture Notes Series on Computing: Volume 12
|
| 101 |
+
2004
|
| 102 |
+
"""
|
| 103 |
+
|
| 104 |
+
planarity_state = LRPlanarity(G)
|
| 105 |
+
embedding = planarity_state.lr_planarity()
|
| 106 |
+
if embedding is None:
|
| 107 |
+
# graph is not planar
|
| 108 |
+
if counterexample:
|
| 109 |
+
return False, get_counterexample(G)
|
| 110 |
+
else:
|
| 111 |
+
return False, None
|
| 112 |
+
else:
|
| 113 |
+
# graph is planar
|
| 114 |
+
return True, embedding
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
@nx._dispatch
|
| 118 |
+
def check_planarity_recursive(G, counterexample=False):
|
| 119 |
+
"""Recursive version of :meth:`check_planarity`."""
|
| 120 |
+
planarity_state = LRPlanarity(G)
|
| 121 |
+
embedding = planarity_state.lr_planarity_recursive()
|
| 122 |
+
if embedding is None:
|
| 123 |
+
# graph is not planar
|
| 124 |
+
if counterexample:
|
| 125 |
+
return False, get_counterexample_recursive(G)
|
| 126 |
+
else:
|
| 127 |
+
return False, None
|
| 128 |
+
else:
|
| 129 |
+
# graph is planar
|
| 130 |
+
return True, embedding
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
@nx._dispatch
|
| 134 |
+
def get_counterexample(G):
|
| 135 |
+
"""Obtains a Kuratowski subgraph.
|
| 136 |
+
|
| 137 |
+
Raises nx.NetworkXException if G is planar.
|
| 138 |
+
|
| 139 |
+
The function removes edges such that the graph is still not planar.
|
| 140 |
+
At some point the removal of any edge would make the graph planar.
|
| 141 |
+
This subgraph must be a Kuratowski subgraph.
|
| 142 |
+
|
| 143 |
+
Parameters
|
| 144 |
+
----------
|
| 145 |
+
G : NetworkX graph
|
| 146 |
+
|
| 147 |
+
Returns
|
| 148 |
+
-------
|
| 149 |
+
subgraph : NetworkX graph
|
| 150 |
+
A Kuratowski subgraph that proves that G is not planar.
|
| 151 |
+
|
| 152 |
+
"""
|
| 153 |
+
# copy graph
|
| 154 |
+
G = nx.Graph(G)
|
| 155 |
+
|
| 156 |
+
if check_planarity(G)[0]:
|
| 157 |
+
raise nx.NetworkXException("G is planar - no counter example.")
|
| 158 |
+
|
| 159 |
+
# find Kuratowski subgraph
|
| 160 |
+
subgraph = nx.Graph()
|
| 161 |
+
for u in G:
|
| 162 |
+
nbrs = list(G[u])
|
| 163 |
+
for v in nbrs:
|
| 164 |
+
G.remove_edge(u, v)
|
| 165 |
+
if check_planarity(G)[0]:
|
| 166 |
+
G.add_edge(u, v)
|
| 167 |
+
subgraph.add_edge(u, v)
|
| 168 |
+
|
| 169 |
+
return subgraph
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
@nx._dispatch
|
| 173 |
+
def get_counterexample_recursive(G):
|
| 174 |
+
"""Recursive version of :meth:`get_counterexample`."""
|
| 175 |
+
|
| 176 |
+
# copy graph
|
| 177 |
+
G = nx.Graph(G)
|
| 178 |
+
|
| 179 |
+
if check_planarity_recursive(G)[0]:
|
| 180 |
+
raise nx.NetworkXException("G is planar - no counter example.")
|
| 181 |
+
|
| 182 |
+
# find Kuratowski subgraph
|
| 183 |
+
subgraph = nx.Graph()
|
| 184 |
+
for u in G:
|
| 185 |
+
nbrs = list(G[u])
|
| 186 |
+
for v in nbrs:
|
| 187 |
+
G.remove_edge(u, v)
|
| 188 |
+
if check_planarity_recursive(G)[0]:
|
| 189 |
+
G.add_edge(u, v)
|
| 190 |
+
subgraph.add_edge(u, v)
|
| 191 |
+
|
| 192 |
+
return subgraph
|
| 193 |
+
|
| 194 |
+
|
| 195 |
+
class Interval:
|
| 196 |
+
"""Represents a set of return edges.
|
| 197 |
+
|
| 198 |
+
All return edges in an interval induce a same constraint on the contained
|
| 199 |
+
edges, which means that all edges must either have a left orientation or
|
| 200 |
+
all edges must have a right orientation.
|
| 201 |
+
"""
|
| 202 |
+
|
| 203 |
+
def __init__(self, low=None, high=None):
|
| 204 |
+
self.low = low
|
| 205 |
+
self.high = high
|
| 206 |
+
|
| 207 |
+
def empty(self):
|
| 208 |
+
"""Check if the interval is empty"""
|
| 209 |
+
return self.low is None and self.high is None
|
| 210 |
+
|
| 211 |
+
def copy(self):
|
| 212 |
+
"""Returns a copy of this interval"""
|
| 213 |
+
return Interval(self.low, self.high)
|
| 214 |
+
|
| 215 |
+
def conflicting(self, b, planarity_state):
|
| 216 |
+
"""Returns True if interval I conflicts with edge b"""
|
| 217 |
+
return (
|
| 218 |
+
not self.empty()
|
| 219 |
+
and planarity_state.lowpt[self.high] > planarity_state.lowpt[b]
|
| 220 |
+
)
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
class ConflictPair:
|
| 224 |
+
"""Represents a different constraint between two intervals.
|
| 225 |
+
|
| 226 |
+
The edges in the left interval must have a different orientation than
|
| 227 |
+
the one in the right interval.
|
| 228 |
+
"""
|
| 229 |
+
|
| 230 |
+
def __init__(self, left=Interval(), right=Interval()):
|
| 231 |
+
self.left = left
|
| 232 |
+
self.right = right
|
| 233 |
+
|
| 234 |
+
def swap(self):
|
| 235 |
+
"""Swap left and right intervals"""
|
| 236 |
+
temp = self.left
|
| 237 |
+
self.left = self.right
|
| 238 |
+
self.right = temp
|
| 239 |
+
|
| 240 |
+
def lowest(self, planarity_state):
|
| 241 |
+
"""Returns the lowest lowpoint of a conflict pair"""
|
| 242 |
+
if self.left.empty():
|
| 243 |
+
return planarity_state.lowpt[self.right.low]
|
| 244 |
+
if self.right.empty():
|
| 245 |
+
return planarity_state.lowpt[self.left.low]
|
| 246 |
+
return min(
|
| 247 |
+
planarity_state.lowpt[self.left.low], planarity_state.lowpt[self.right.low]
|
| 248 |
+
)
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
def top_of_stack(l):
|
| 252 |
+
"""Returns the element on top of the stack."""
|
| 253 |
+
if not l:
|
| 254 |
+
return None
|
| 255 |
+
return l[-1]
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
class LRPlanarity:
|
| 259 |
+
"""A class to maintain the state during planarity check."""
|
| 260 |
+
|
| 261 |
+
__slots__ = [
|
| 262 |
+
"G",
|
| 263 |
+
"roots",
|
| 264 |
+
"height",
|
| 265 |
+
"lowpt",
|
| 266 |
+
"lowpt2",
|
| 267 |
+
"nesting_depth",
|
| 268 |
+
"parent_edge",
|
| 269 |
+
"DG",
|
| 270 |
+
"adjs",
|
| 271 |
+
"ordered_adjs",
|
| 272 |
+
"ref",
|
| 273 |
+
"side",
|
| 274 |
+
"S",
|
| 275 |
+
"stack_bottom",
|
| 276 |
+
"lowpt_edge",
|
| 277 |
+
"left_ref",
|
| 278 |
+
"right_ref",
|
| 279 |
+
"embedding",
|
| 280 |
+
]
|
| 281 |
+
|
| 282 |
+
def __init__(self, G):
|
| 283 |
+
# copy G without adding self-loops
|
| 284 |
+
self.G = nx.Graph()
|
| 285 |
+
self.G.add_nodes_from(G.nodes)
|
| 286 |
+
for e in G.edges:
|
| 287 |
+
if e[0] != e[1]:
|
| 288 |
+
self.G.add_edge(e[0], e[1])
|
| 289 |
+
|
| 290 |
+
self.roots = []
|
| 291 |
+
|
| 292 |
+
# distance from tree root
|
| 293 |
+
self.height = defaultdict(lambda: None)
|
| 294 |
+
|
| 295 |
+
self.lowpt = {} # height of lowest return point of an edge
|
| 296 |
+
self.lowpt2 = {} # height of second lowest return point
|
| 297 |
+
self.nesting_depth = {} # for nesting order
|
| 298 |
+
|
| 299 |
+
# None -> missing edge
|
| 300 |
+
self.parent_edge = defaultdict(lambda: None)
|
| 301 |
+
|
| 302 |
+
# oriented DFS graph
|
| 303 |
+
self.DG = nx.DiGraph()
|
| 304 |
+
self.DG.add_nodes_from(G.nodes)
|
| 305 |
+
|
| 306 |
+
self.adjs = {}
|
| 307 |
+
self.ordered_adjs = {}
|
| 308 |
+
|
| 309 |
+
self.ref = defaultdict(lambda: None)
|
| 310 |
+
self.side = defaultdict(lambda: 1)
|
| 311 |
+
|
| 312 |
+
# stack of conflict pairs
|
| 313 |
+
self.S = []
|
| 314 |
+
self.stack_bottom = {}
|
| 315 |
+
self.lowpt_edge = {}
|
| 316 |
+
|
| 317 |
+
self.left_ref = {}
|
| 318 |
+
self.right_ref = {}
|
| 319 |
+
|
| 320 |
+
self.embedding = PlanarEmbedding()
|
| 321 |
+
|
| 322 |
+
def lr_planarity(self):
|
| 323 |
+
"""Execute the LR planarity test.
|
| 324 |
+
|
| 325 |
+
Returns
|
| 326 |
+
-------
|
| 327 |
+
embedding : dict
|
| 328 |
+
If the graph is planar an embedding is returned. Otherwise None.
|
| 329 |
+
"""
|
| 330 |
+
if self.G.order() > 2 and self.G.size() > 3 * self.G.order() - 6:
|
| 331 |
+
# graph is not planar
|
| 332 |
+
return None
|
| 333 |
+
|
| 334 |
+
# make adjacency lists for dfs
|
| 335 |
+
for v in self.G:
|
| 336 |
+
self.adjs[v] = list(self.G[v])
|
| 337 |
+
|
| 338 |
+
# orientation of the graph by depth first search traversal
|
| 339 |
+
for v in self.G:
|
| 340 |
+
if self.height[v] is None:
|
| 341 |
+
self.height[v] = 0
|
| 342 |
+
self.roots.append(v)
|
| 343 |
+
self.dfs_orientation(v)
|
| 344 |
+
|
| 345 |
+
# Free no longer used variables
|
| 346 |
+
self.G = None
|
| 347 |
+
self.lowpt2 = None
|
| 348 |
+
self.adjs = None
|
| 349 |
+
|
| 350 |
+
# testing
|
| 351 |
+
for v in self.DG: # sort the adjacency lists by nesting depth
|
| 352 |
+
# note: this sorting leads to non linear time
|
| 353 |
+
self.ordered_adjs[v] = sorted(
|
| 354 |
+
self.DG[v], key=lambda x: self.nesting_depth[(v, x)]
|
| 355 |
+
)
|
| 356 |
+
for v in self.roots:
|
| 357 |
+
if not self.dfs_testing(v):
|
| 358 |
+
return None
|
| 359 |
+
|
| 360 |
+
# Free no longer used variables
|
| 361 |
+
self.height = None
|
| 362 |
+
self.lowpt = None
|
| 363 |
+
self.S = None
|
| 364 |
+
self.stack_bottom = None
|
| 365 |
+
self.lowpt_edge = None
|
| 366 |
+
|
| 367 |
+
for e in self.DG.edges:
|
| 368 |
+
self.nesting_depth[e] = self.sign(e) * self.nesting_depth[e]
|
| 369 |
+
|
| 370 |
+
self.embedding.add_nodes_from(self.DG.nodes)
|
| 371 |
+
for v in self.DG:
|
| 372 |
+
# sort the adjacency lists again
|
| 373 |
+
self.ordered_adjs[v] = sorted(
|
| 374 |
+
self.DG[v], key=lambda x: self.nesting_depth[(v, x)]
|
| 375 |
+
)
|
| 376 |
+
# initialize the embedding
|
| 377 |
+
previous_node = None
|
| 378 |
+
for w in self.ordered_adjs[v]:
|
| 379 |
+
self.embedding.add_half_edge_cw(v, w, previous_node)
|
| 380 |
+
previous_node = w
|
| 381 |
+
|
| 382 |
+
# Free no longer used variables
|
| 383 |
+
self.DG = None
|
| 384 |
+
self.nesting_depth = None
|
| 385 |
+
self.ref = None
|
| 386 |
+
|
| 387 |
+
# compute the complete embedding
|
| 388 |
+
for v in self.roots:
|
| 389 |
+
self.dfs_embedding(v)
|
| 390 |
+
|
| 391 |
+
# Free no longer used variables
|
| 392 |
+
self.roots = None
|
| 393 |
+
self.parent_edge = None
|
| 394 |
+
self.ordered_adjs = None
|
| 395 |
+
self.left_ref = None
|
| 396 |
+
self.right_ref = None
|
| 397 |
+
self.side = None
|
| 398 |
+
|
| 399 |
+
return self.embedding
|
| 400 |
+
|
| 401 |
+
def lr_planarity_recursive(self):
|
| 402 |
+
"""Recursive version of :meth:`lr_planarity`."""
|
| 403 |
+
if self.G.order() > 2 and self.G.size() > 3 * self.G.order() - 6:
|
| 404 |
+
# graph is not planar
|
| 405 |
+
return None
|
| 406 |
+
|
| 407 |
+
# orientation of the graph by depth first search traversal
|
| 408 |
+
for v in self.G:
|
| 409 |
+
if self.height[v] is None:
|
| 410 |
+
self.height[v] = 0
|
| 411 |
+
self.roots.append(v)
|
| 412 |
+
self.dfs_orientation_recursive(v)
|
| 413 |
+
|
| 414 |
+
# Free no longer used variable
|
| 415 |
+
self.G = None
|
| 416 |
+
|
| 417 |
+
# testing
|
| 418 |
+
for v in self.DG: # sort the adjacency lists by nesting depth
|
| 419 |
+
# note: this sorting leads to non linear time
|
| 420 |
+
self.ordered_adjs[v] = sorted(
|
| 421 |
+
self.DG[v], key=lambda x: self.nesting_depth[(v, x)]
|
| 422 |
+
)
|
| 423 |
+
for v in self.roots:
|
| 424 |
+
if not self.dfs_testing_recursive(v):
|
| 425 |
+
return None
|
| 426 |
+
|
| 427 |
+
for e in self.DG.edges:
|
| 428 |
+
self.nesting_depth[e] = self.sign_recursive(e) * self.nesting_depth[e]
|
| 429 |
+
|
| 430 |
+
self.embedding.add_nodes_from(self.DG.nodes)
|
| 431 |
+
for v in self.DG:
|
| 432 |
+
# sort the adjacency lists again
|
| 433 |
+
self.ordered_adjs[v] = sorted(
|
| 434 |
+
self.DG[v], key=lambda x: self.nesting_depth[(v, x)]
|
| 435 |
+
)
|
| 436 |
+
# initialize the embedding
|
| 437 |
+
previous_node = None
|
| 438 |
+
for w in self.ordered_adjs[v]:
|
| 439 |
+
self.embedding.add_half_edge_cw(v, w, previous_node)
|
| 440 |
+
previous_node = w
|
| 441 |
+
|
| 442 |
+
# compute the complete embedding
|
| 443 |
+
for v in self.roots:
|
| 444 |
+
self.dfs_embedding_recursive(v)
|
| 445 |
+
|
| 446 |
+
return self.embedding
|
| 447 |
+
|
| 448 |
+
def dfs_orientation(self, v):
|
| 449 |
+
"""Orient the graph by DFS, compute lowpoints and nesting order."""
|
| 450 |
+
# the recursion stack
|
| 451 |
+
dfs_stack = [v]
|
| 452 |
+
# index of next edge to handle in adjacency list of each node
|
| 453 |
+
ind = defaultdict(lambda: 0)
|
| 454 |
+
# boolean to indicate whether to skip the initial work for an edge
|
| 455 |
+
skip_init = defaultdict(lambda: False)
|
| 456 |
+
|
| 457 |
+
while dfs_stack:
|
| 458 |
+
v = dfs_stack.pop()
|
| 459 |
+
e = self.parent_edge[v]
|
| 460 |
+
|
| 461 |
+
for w in self.adjs[v][ind[v] :]:
|
| 462 |
+
vw = (v, w)
|
| 463 |
+
|
| 464 |
+
if not skip_init[vw]:
|
| 465 |
+
if (v, w) in self.DG.edges or (w, v) in self.DG.edges:
|
| 466 |
+
ind[v] += 1
|
| 467 |
+
continue # the edge was already oriented
|
| 468 |
+
|
| 469 |
+
self.DG.add_edge(v, w) # orient the edge
|
| 470 |
+
|
| 471 |
+
self.lowpt[vw] = self.height[v]
|
| 472 |
+
self.lowpt2[vw] = self.height[v]
|
| 473 |
+
if self.height[w] is None: # (v, w) is a tree edge
|
| 474 |
+
self.parent_edge[w] = vw
|
| 475 |
+
self.height[w] = self.height[v] + 1
|
| 476 |
+
|
| 477 |
+
dfs_stack.append(v) # revisit v after finishing w
|
| 478 |
+
dfs_stack.append(w) # visit w next
|
| 479 |
+
skip_init[vw] = True # don't redo this block
|
| 480 |
+
break # handle next node in dfs_stack (i.e. w)
|
| 481 |
+
else: # (v, w) is a back edge
|
| 482 |
+
self.lowpt[vw] = self.height[w]
|
| 483 |
+
|
| 484 |
+
# determine nesting graph
|
| 485 |
+
self.nesting_depth[vw] = 2 * self.lowpt[vw]
|
| 486 |
+
if self.lowpt2[vw] < self.height[v]: # chordal
|
| 487 |
+
self.nesting_depth[vw] += 1
|
| 488 |
+
|
| 489 |
+
# update lowpoints of parent edge e
|
| 490 |
+
if e is not None:
|
| 491 |
+
if self.lowpt[vw] < self.lowpt[e]:
|
| 492 |
+
self.lowpt2[e] = min(self.lowpt[e], self.lowpt2[vw])
|
| 493 |
+
self.lowpt[e] = self.lowpt[vw]
|
| 494 |
+
elif self.lowpt[vw] > self.lowpt[e]:
|
| 495 |
+
self.lowpt2[e] = min(self.lowpt2[e], self.lowpt[vw])
|
| 496 |
+
else:
|
| 497 |
+
self.lowpt2[e] = min(self.lowpt2[e], self.lowpt2[vw])
|
| 498 |
+
|
| 499 |
+
ind[v] += 1
|
| 500 |
+
|
| 501 |
+
def dfs_orientation_recursive(self, v):
|
| 502 |
+
"""Recursive version of :meth:`dfs_orientation`."""
|
| 503 |
+
e = self.parent_edge[v]
|
| 504 |
+
for w in self.G[v]:
|
| 505 |
+
if (v, w) in self.DG.edges or (w, v) in self.DG.edges:
|
| 506 |
+
continue # the edge was already oriented
|
| 507 |
+
vw = (v, w)
|
| 508 |
+
self.DG.add_edge(v, w) # orient the edge
|
| 509 |
+
|
| 510 |
+
self.lowpt[vw] = self.height[v]
|
| 511 |
+
self.lowpt2[vw] = self.height[v]
|
| 512 |
+
if self.height[w] is None: # (v, w) is a tree edge
|
| 513 |
+
self.parent_edge[w] = vw
|
| 514 |
+
self.height[w] = self.height[v] + 1
|
| 515 |
+
self.dfs_orientation_recursive(w)
|
| 516 |
+
else: # (v, w) is a back edge
|
| 517 |
+
self.lowpt[vw] = self.height[w]
|
| 518 |
+
|
| 519 |
+
# determine nesting graph
|
| 520 |
+
self.nesting_depth[vw] = 2 * self.lowpt[vw]
|
| 521 |
+
if self.lowpt2[vw] < self.height[v]: # chordal
|
| 522 |
+
self.nesting_depth[vw] += 1
|
| 523 |
+
|
| 524 |
+
# update lowpoints of parent edge e
|
| 525 |
+
if e is not None:
|
| 526 |
+
if self.lowpt[vw] < self.lowpt[e]:
|
| 527 |
+
self.lowpt2[e] = min(self.lowpt[e], self.lowpt2[vw])
|
| 528 |
+
self.lowpt[e] = self.lowpt[vw]
|
| 529 |
+
elif self.lowpt[vw] > self.lowpt[e]:
|
| 530 |
+
self.lowpt2[e] = min(self.lowpt2[e], self.lowpt[vw])
|
| 531 |
+
else:
|
| 532 |
+
self.lowpt2[e] = min(self.lowpt2[e], self.lowpt2[vw])
|
| 533 |
+
|
| 534 |
+
def dfs_testing(self, v):
|
| 535 |
+
"""Test for LR partition."""
|
| 536 |
+
# the recursion stack
|
| 537 |
+
dfs_stack = [v]
|
| 538 |
+
# index of next edge to handle in adjacency list of each node
|
| 539 |
+
ind = defaultdict(lambda: 0)
|
| 540 |
+
# boolean to indicate whether to skip the initial work for an edge
|
| 541 |
+
skip_init = defaultdict(lambda: False)
|
| 542 |
+
|
| 543 |
+
while dfs_stack:
|
| 544 |
+
v = dfs_stack.pop()
|
| 545 |
+
e = self.parent_edge[v]
|
| 546 |
+
# to indicate whether to skip the final block after the for loop
|
| 547 |
+
skip_final = False
|
| 548 |
+
|
| 549 |
+
for w in self.ordered_adjs[v][ind[v] :]:
|
| 550 |
+
ei = (v, w)
|
| 551 |
+
|
| 552 |
+
if not skip_init[ei]:
|
| 553 |
+
self.stack_bottom[ei] = top_of_stack(self.S)
|
| 554 |
+
|
| 555 |
+
if ei == self.parent_edge[w]: # tree edge
|
| 556 |
+
dfs_stack.append(v) # revisit v after finishing w
|
| 557 |
+
dfs_stack.append(w) # visit w next
|
| 558 |
+
skip_init[ei] = True # don't redo this block
|
| 559 |
+
skip_final = True # skip final work after breaking
|
| 560 |
+
break # handle next node in dfs_stack (i.e. w)
|
| 561 |
+
else: # back edge
|
| 562 |
+
self.lowpt_edge[ei] = ei
|
| 563 |
+
self.S.append(ConflictPair(right=Interval(ei, ei)))
|
| 564 |
+
|
| 565 |
+
# integrate new return edges
|
| 566 |
+
if self.lowpt[ei] < self.height[v]:
|
| 567 |
+
if w == self.ordered_adjs[v][0]: # e_i has return edge
|
| 568 |
+
self.lowpt_edge[e] = self.lowpt_edge[ei]
|
| 569 |
+
else: # add constraints of e_i
|
| 570 |
+
if not self.add_constraints(ei, e):
|
| 571 |
+
# graph is not planar
|
| 572 |
+
return False
|
| 573 |
+
|
| 574 |
+
ind[v] += 1
|
| 575 |
+
|
| 576 |
+
if not skip_final:
|
| 577 |
+
# remove back edges returning to parent
|
| 578 |
+
if e is not None: # v isn't root
|
| 579 |
+
self.remove_back_edges(e)
|
| 580 |
+
|
| 581 |
+
return True
|
| 582 |
+
|
| 583 |
+
    def dfs_testing_recursive(self, v):
        """Recursive version of :meth:`dfs_testing`.

        Returns ``False`` as soon as a left/right constraint cannot be
        satisfied (graph is not planar), ``True`` otherwise.  May hit
        Python's recursion limit on deep graphs; prefer :meth:`dfs_testing`.
        """
        e = self.parent_edge[v]
        for w in self.ordered_adjs[v]:
            ei = (v, w)
            self.stack_bottom[ei] = top_of_stack(self.S)
            if ei == self.parent_edge[w]:  # tree edge
                if not self.dfs_testing_recursive(w):
                    return False
            else:  # back edge
                self.lowpt_edge[ei] = ei
                self.S.append(ConflictPair(right=Interval(ei, ei)))

            # integrate new return edges
            if self.lowpt[ei] < self.height[v]:
                if w == self.ordered_adjs[v][0]:  # e_i has return edge
                    self.lowpt_edge[e] = self.lowpt_edge[ei]
                else:  # add constraints of e_i
                    if not self.add_constraints(ei, e):
                        # graph is not planar
                        return False

        # remove back edges returning to parent
        if e is not None:  # v isn't root
            self.remove_back_edges(e)
        return True
|
| 609 |
+
|
| 610 |
+
    def add_constraints(self, ei, e):
        """Merge the constraints of edge ``ei`` into the conflict-pair stack.

        Builds one combined conflict pair ``P`` from everything pushed onto
        ``self.S`` above ``self.stack_bottom[ei]``, then merges any pairs of
        earlier sibling edges that conflict with ``ei``.

        Returns ``False`` if the constraints are unsatisfiable (the graph is
        not planar), ``True`` otherwise.
        """
        P = ConflictPair()
        # merge return edges of e_i into P.right
        while True:
            Q = self.S.pop()
            if not Q.left.empty():
                Q.swap()
            if not Q.left.empty():  # not planar
                return False
            if self.lowpt[Q.right.low] > self.lowpt[e]:
                # merge intervals
                if P.right.empty():  # topmost interval
                    P.right = Q.right.copy()
                else:
                    self.ref[P.right.low] = Q.right.high
                P.right.low = Q.right.low
            else:  # align
                self.ref[Q.right.low] = self.lowpt_edge[e]
            if top_of_stack(self.S) == self.stack_bottom[ei]:
                break
        # merge conflicting return edges of e_1,...,e_i-1 into P.L
        while top_of_stack(self.S).left.conflicting(ei, self) or top_of_stack(
            self.S
        ).right.conflicting(ei, self):
            Q = self.S.pop()
            if Q.right.conflicting(ei, self):
                Q.swap()
            if Q.right.conflicting(ei, self):  # not planar
                return False
            # merge interval below lowpt(e_i) into P.R
            self.ref[P.right.low] = Q.right.high
            if Q.right.low is not None:
                P.right.low = Q.right.low

            if P.left.empty():  # topmost interval
                P.left = Q.left.copy()
            else:
                self.ref[P.left.low] = Q.left.high
            P.left.low = Q.left.low

        if not (P.left.empty() and P.right.empty()):
            self.S.append(P)
        return True
|
| 653 |
+
|
| 654 |
+
    def remove_back_edges(self, e):
        """Trim conflict pairs for back edges that return to ``e``'s tail.

        Called after the subtree below tree edge ``e`` is finished: every
        return edge ending at the parent ``u = e[0]`` is no longer a
        constraint and is removed from the conflict-pair stack ``self.S``.
        Also records the reference edge for ``e`` used later by
        :meth:`sign`.
        """
        u = e[0]
        # trim back edges ending at parent u
        # drop entire conflict pairs
        while self.S and top_of_stack(self.S).lowest(self) == self.height[u]:
            P = self.S.pop()
            if P.left.low is not None:
                self.side[P.left.low] = -1

        if self.S:  # one more conflict pair to consider
            P = self.S.pop()
            # trim left interval
            while P.left.high is not None and P.left.high[1] == u:
                P.left.high = self.ref[P.left.high]
            if P.left.high is None and P.left.low is not None:
                # just emptied
                self.ref[P.left.low] = P.right.low
                self.side[P.left.low] = -1
                P.left.low = None
            # trim right interval
            while P.right.high is not None and P.right.high[1] == u:
                P.right.high = self.ref[P.right.high]
            if P.right.high is None and P.right.low is not None:
                # just emptied
                self.ref[P.right.low] = P.left.low
                self.side[P.right.low] = -1
                P.right.low = None
            self.S.append(P)

        # side of e is side of a highest return edge
        if self.lowpt[e] < self.height[u]:  # e has return edge
            hl = top_of_stack(self.S).left.high
            hr = top_of_stack(self.S).right.high

            if hl is not None and (hr is None or self.lowpt[hl] > self.lowpt[hr]):
                self.ref[e] = hl
            else:
                self.ref[e] = hr
|
| 692 |
+
|
| 693 |
+
    def dfs_embedding(self, v):
        """Completes the embedding (iterative version).

        Traverses the oriented DFS tree from ``v`` with an explicit stack
        and inserts every half-edge into ``self.embedding`` on the side
        determined by ``self.side``. ``left_ref``/``right_ref`` track the
        current extreme neighbors of each node so back edges can be placed
        directly before/after them.
        """
        # the recursion stack
        dfs_stack = [v]
        # index of next edge to handle in adjacency list of each node
        ind = defaultdict(lambda: 0)

        while dfs_stack:
            v = dfs_stack.pop()

            for w in self.ordered_adjs[v][ind[v] :]:
                ind[v] += 1
                ei = (v, w)

                if ei == self.parent_edge[w]:  # tree edge
                    self.embedding.add_half_edge_first(w, v)
                    self.left_ref[v] = w
                    self.right_ref[v] = w

                    dfs_stack.append(v)  # revisit v after finishing w
                    dfs_stack.append(w)  # visit w next
                    break  # handle next node in dfs_stack (i.e. w)
                else:  # back edge
                    if self.side[ei] == 1:
                        # place v directly after right_ref[w] in embed. list of w
                        self.embedding.add_half_edge_cw(w, v, self.right_ref[w])
                    else:
                        # place v directly before left_ref[w] in embed. list of w
                        self.embedding.add_half_edge_ccw(w, v, self.left_ref[w])
                        self.left_ref[w] = v
|
| 721 |
+
|
| 722 |
+
    def dfs_embedding_recursive(self, v):
        """Recursive version of :meth:`dfs_embedding`.

        May hit Python's recursion limit on deep graphs; prefer
        :meth:`dfs_embedding`.
        """
        for w in self.ordered_adjs[v]:
            ei = (v, w)
            if ei == self.parent_edge[w]:  # tree edge
                self.embedding.add_half_edge_first(w, v)
                self.left_ref[v] = w
                self.right_ref[v] = w
                self.dfs_embedding_recursive(w)
            else:  # back edge
                if self.side[ei] == 1:
                    # place v directly after right_ref[w] in embed. list of w
                    self.embedding.add_half_edge_cw(w, v, self.right_ref[w])
                else:
                    # place v directly before left_ref[w] in embed. list of w
                    self.embedding.add_half_edge_ccw(w, v, self.left_ref[w])
                    self.left_ref[w] = v
|
| 739 |
+
|
| 740 |
+
def sign(self, e):
|
| 741 |
+
"""Resolve the relative side of an edge to the absolute side."""
|
| 742 |
+
# the recursion stack
|
| 743 |
+
dfs_stack = [e]
|
| 744 |
+
# dict to remember reference edges
|
| 745 |
+
old_ref = defaultdict(lambda: None)
|
| 746 |
+
|
| 747 |
+
while dfs_stack:
|
| 748 |
+
e = dfs_stack.pop()
|
| 749 |
+
|
| 750 |
+
if self.ref[e] is not None:
|
| 751 |
+
dfs_stack.append(e) # revisit e after finishing self.ref[e]
|
| 752 |
+
dfs_stack.append(self.ref[e]) # visit self.ref[e] next
|
| 753 |
+
old_ref[e] = self.ref[e] # remember value of self.ref[e]
|
| 754 |
+
self.ref[e] = None
|
| 755 |
+
else:
|
| 756 |
+
self.side[e] *= self.side[old_ref[e]]
|
| 757 |
+
|
| 758 |
+
return self.side[e]
|
| 759 |
+
|
| 760 |
+
def sign_recursive(self, e):
|
| 761 |
+
"""Recursive version of :meth:`sign`."""
|
| 762 |
+
if self.ref[e] is not None:
|
| 763 |
+
self.side[e] = self.side[e] * self.sign_recursive(self.ref[e])
|
| 764 |
+
self.ref[e] = None
|
| 765 |
+
return self.side[e]
|
| 766 |
+
|
| 767 |
+
|
| 768 |
+
class PlanarEmbedding(nx.DiGraph):
    """Represents a planar graph with its planar embedding.

    The planar embedding is given by a `combinatorial embedding
    <https://en.wikipedia.org/wiki/Graph_embedding#Combinatorial_embedding>`_.

    .. note:: `check_planarity` is the preferred way to check if a graph is planar.

    **Neighbor ordering:**

    In comparison to a usual graph structure, the embedding also stores the
    order of all neighbors for every vertex.
    The order of the neighbors can be given in clockwise (cw) direction or
    counterclockwise (ccw) direction. This order is stored as edge attributes
    in the underlying directed graph. For the edge (u, v) the edge attribute
    'cw' is set to the neighbor of u that follows immediately after v in
    clockwise direction.

    In order for a PlanarEmbedding to be valid it must fulfill multiple
    conditions. It is possible to check if these conditions are fulfilled with
    the method :meth:`check_structure`.
    The conditions are:

    * Edges must go in both directions (because the edge attributes differ)
    * Every edge must have a 'cw' and 'ccw' attribute which corresponds to a
      correct planar embedding.
    * A node with non zero degree must have a node attribute 'first_nbr'.

    As long as a PlanarEmbedding is invalid only the following methods should
    be called:

    * :meth:`add_half_edge_ccw`
    * :meth:`add_half_edge_cw`
    * :meth:`connect_components`
    * :meth:`add_half_edge_first`

    Even though the graph is a subclass of nx.DiGraph, it can still be used
    for algorithms that require undirected graphs, because the method
    :meth:`is_directed` is overridden. This is possible, because a valid
    PlanarGraph must have edges in both directions.

    **Half edges:**

    In methods like `add_half_edge_ccw` the term "half-edge" is used, which is
    a term that is used in `doubly connected edge lists
    <https://en.wikipedia.org/wiki/Doubly_connected_edge_list>`_. It is used
    to emphasize that the edge is only in one direction and there exists
    another half-edge in the opposite direction.
    While conventional edges always have two faces (including outer face) next
    to them, it is possible to assign each half-edge *exactly one* face.
    For a half-edge (u, v) that is orientated such that u is below v then the
    face that belongs to (u, v) is to the right of this half-edge.

    See Also
    --------
    is_planar :
        Preferred way to check if an existing graph is planar.

    check_planarity :
        A convenient way to create a `PlanarEmbedding`. If not planar,
        it returns a subgraph that shows this.

    Examples
    --------

    Create an embedding of a star graph (compare `nx.star_graph(3)`):

    >>> G = nx.PlanarEmbedding()
    >>> G.add_half_edge_cw(0, 1, None)
    >>> G.add_half_edge_cw(0, 2, 1)
    >>> G.add_half_edge_cw(0, 3, 2)
    >>> G.add_half_edge_cw(1, 0, None)
    >>> G.add_half_edge_cw(2, 0, None)
    >>> G.add_half_edge_cw(3, 0, None)

    Alternatively the same embedding can also be defined in counterclockwise
    orientation. The following results in exactly the same PlanarEmbedding:

    >>> G = nx.PlanarEmbedding()
    >>> G.add_half_edge_ccw(0, 1, None)
    >>> G.add_half_edge_ccw(0, 3, 1)
    >>> G.add_half_edge_ccw(0, 2, 3)
    >>> G.add_half_edge_ccw(1, 0, None)
    >>> G.add_half_edge_ccw(2, 0, None)
    >>> G.add_half_edge_ccw(3, 0, None)

    After creating a graph, it is possible to validate that the PlanarEmbedding
    object is correct:

    >>> G.check_structure()

    """

    def get_data(self):
        """Converts the adjacency structure into a better readable structure.

        Returns
        -------
        embedding : dict
            A dict mapping all nodes to a list of neighbors sorted in
            clockwise order.

        See Also
        --------
        set_data

        """
        embedding = {}
        for v in self:
            embedding[v] = list(self.neighbors_cw_order(v))
        return embedding

    def set_data(self, data):
        """Inserts edges according to given sorted neighbor list.

        The input format is the same as the output format of get_data().

        Parameters
        ----------
        data : dict
            A dict mapping all nodes to a list of neighbors sorted in
            clockwise order.

        See Also
        --------
        get_data

        """
        for v in data:
            # Inserting in reverse at the *first* position reproduces the
            # original clockwise order.
            for w in reversed(data[v]):
                self.add_half_edge_first(v, w)

    def neighbors_cw_order(self, v):
        """Generator for the neighbors of v in clockwise order.

        Parameters
        ----------
        v : node

        Yields
        ------
        node

        """
        if len(self[v]) == 0:
            # v has no neighbors
            return
        start_node = self.nodes[v]["first_nbr"]
        yield start_node
        # Follow the 'cw' pointers until the cycle closes at start_node.
        current_node = self[v][start_node]["cw"]
        while start_node != current_node:
            yield current_node
            current_node = self[v][current_node]["cw"]

    def check_structure(self):
        """Runs without exceptions if this object is valid.

        Checks that the following properties are fulfilled:

        * Edges go in both directions (because the edge attributes differ).
        * Every edge has a 'cw' and 'ccw' attribute which corresponds to a
          correct planar embedding.
        * A node with a degree larger than 0 has a node attribute 'first_nbr'.

        Running this method verifies that the underlying Graph must be planar.

        Raises
        ------
        NetworkXException
            This exception is raised with a short explanation if the
            PlanarEmbedding is invalid.
        """
        # Check fundamental structure
        for v in self:
            try:
                sorted_nbrs = set(self.neighbors_cw_order(v))
            except KeyError as err:
                msg = f"Bad embedding. Missing orientation for a neighbor of {v}"
                raise nx.NetworkXException(msg) from err

            unsorted_nbrs = set(self[v])
            if sorted_nbrs != unsorted_nbrs:
                msg = "Bad embedding. Edge orientations not set correctly."
                raise nx.NetworkXException(msg)
            for w in self[v]:
                # Check if opposite half-edge exists
                if not self.has_edge(w, v):
                    msg = "Bad embedding. Opposite half-edge is missing."
                    raise nx.NetworkXException(msg)

        # Check planarity
        counted_half_edges = set()
        for component in nx.connected_components(self):
            if len(component) == 1:
                # Don't need to check single node component
                continue
            num_nodes = len(component)
            num_half_edges = 0
            num_faces = 0
            for v in component:
                for w in self.neighbors_cw_order(v):
                    num_half_edges += 1
                    if (v, w) not in counted_half_edges:
                        # We encountered a new face
                        num_faces += 1
                        # Mark all half-edges belonging to this face
                        self.traverse_face(v, w, counted_half_edges)
            num_edges = num_half_edges // 2  # num_half_edges is even
            # Euler's formula V - E + F = 2 holds exactly for a planar
            # embedding of a connected component.
            if num_nodes - num_edges + num_faces != 2:
                # The result does not match Euler's formula
                msg = "Bad embedding. The graph does not match Euler's formula"
                raise nx.NetworkXException(msg)

    def add_half_edge_ccw(self, start_node, end_node, reference_neighbor):
        """Adds a half-edge from start_node to end_node.

        The half-edge is added counter clockwise next to the existing half-edge
        (start_node, reference_neighbor).

        Parameters
        ----------
        start_node : node
            Start node of inserted edge.
        end_node : node
            End node of inserted edge.
        reference_neighbor: node
            End node of reference edge.

        Raises
        ------
        NetworkXException
            If the reference_neighbor does not exist.

        See Also
        --------
        add_half_edge_cw
        connect_components
        add_half_edge_first

        """
        if reference_neighbor is None:
            # The start node has no neighbors
            self.add_edge(start_node, end_node)  # Add edge to graph
            # A single neighbor is its own cw and ccw successor.
            self[start_node][end_node]["cw"] = end_node
            self[start_node][end_node]["ccw"] = end_node
            self.nodes[start_node]["first_nbr"] = end_node
        else:
            # "ccw of the reference" is equivalent to "cw of the reference's
            # ccw neighbor", so delegate to add_half_edge_cw.
            ccw_reference = self[start_node][reference_neighbor]["ccw"]
            self.add_half_edge_cw(start_node, end_node, ccw_reference)

            if reference_neighbor == self.nodes[start_node].get("first_nbr", None):
                # Update first neighbor
                self.nodes[start_node]["first_nbr"] = end_node

    def add_half_edge_cw(self, start_node, end_node, reference_neighbor):
        """Adds a half-edge from start_node to end_node.

        The half-edge is added clockwise next to the existing half-edge
        (start_node, reference_neighbor).

        Parameters
        ----------
        start_node : node
            Start node of inserted edge.
        end_node : node
            End node of inserted edge.
        reference_neighbor: node
            End node of reference edge.

        Raises
        ------
        NetworkXException
            If the reference_neighbor does not exist.

        See Also
        --------
        add_half_edge_ccw
        connect_components
        add_half_edge_first
        """
        self.add_edge(start_node, end_node)  # Add edge to graph

        if reference_neighbor is None:
            # The start node has no neighbors
            self[start_node][end_node]["cw"] = end_node
            self[start_node][end_node]["ccw"] = end_node
            self.nodes[start_node]["first_nbr"] = end_node
            return

        if reference_neighbor not in self[start_node]:
            raise nx.NetworkXException(
                "Cannot add edge. Reference neighbor does not exist"
            )

        # Get half-edge at the other side
        cw_reference = self[start_node][reference_neighbor]["cw"]
        # Alter half-edge data structures
        # Splice end_node between reference_neighbor and cw_reference in the
        # circular neighbor order of start_node.
        self[start_node][reference_neighbor]["cw"] = end_node
        self[start_node][end_node]["cw"] = cw_reference
        self[start_node][cw_reference]["ccw"] = end_node
        self[start_node][end_node]["ccw"] = reference_neighbor

    def connect_components(self, v, w):
        """Adds half-edges for (v, w) and (w, v) at some position.

        This method should only be called if v and w are in different
        components, or it might break the embedding.
        This especially means that if `connect_components(v, w)`
        is called it is not allowed to call `connect_components(w, v)`
        afterwards. The neighbor orientations in both directions are
        all set correctly after the first call.

        Parameters
        ----------
        v : node
        w : node

        See Also
        --------
        add_half_edge_ccw
        add_half_edge_cw
        add_half_edge_first
        """
        self.add_half_edge_first(v, w)
        self.add_half_edge_first(w, v)

    def add_half_edge_first(self, start_node, end_node):
        """The added half-edge is inserted at the first position in the order.

        Parameters
        ----------
        start_node : node
        end_node : node

        See Also
        --------
        add_half_edge_ccw
        add_half_edge_cw
        connect_components
        """
        if start_node in self and "first_nbr" in self.nodes[start_node]:
            reference = self.nodes[start_node]["first_nbr"]
        else:
            reference = None
        self.add_half_edge_ccw(start_node, end_node, reference)

    def next_face_half_edge(self, v, w):
        """Returns the following half-edge left of a face.

        Parameters
        ----------
        v : node
        w : node

        Returns
        -------
        half-edge : tuple
        """
        new_node = self[w][v]["ccw"]
        return w, new_node

    def traverse_face(self, v, w, mark_half_edges=None):
        """Returns nodes on the face that belong to the half-edge (v, w).

        The face that is traversed lies to the right of the half-edge (in an
        orientation where v is below w).

        Optionally it is possible to pass a set to which all encountered half
        edges are added. Before calling this method, this set must not include
        any half-edges that belong to the face.

        Parameters
        ----------
        v : node
            Start node of half-edge.
        w : node
            End node of half-edge.
        mark_half_edges: set, optional
            Set to which all encountered half-edges are added.

        Returns
        -------
        face : list
            A list of nodes that lie on this face.
        """
        if mark_half_edges is None:
            mark_half_edges = set()

        face_nodes = [v]
        mark_half_edges.add((v, w))
        prev_node = v
        cur_node = w
        # Last half-edge is (incoming_node, v)
        incoming_node = self[v][w]["cw"]

        # Walk next_face_half_edge until we are back on the starting
        # half-edge's closing edge (incoming_node, v).
        while cur_node != v or prev_node != incoming_node:
            face_nodes.append(cur_node)
            prev_node, cur_node = self.next_face_half_edge(prev_node, cur_node)
            if (prev_node, cur_node) in mark_half_edges:
                raise nx.NetworkXException("Bad planar embedding. Impossible face.")
            mark_half_edges.add((prev_node, cur_node))

        return face_nodes

    def is_directed(self):
        """A valid PlanarEmbedding is undirected.

        All reverse edges are contained, i.e. for every existing
        half-edge (v, w) the half-edge in the opposite direction (w, v) is also
        contained.
        """
        return False
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/reciprocity.py
ADDED
|
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Algorithms to calculate reciprocity in a directed graph."""
|
| 2 |
+
import networkx as nx
|
| 3 |
+
from networkx import NetworkXError
|
| 4 |
+
|
| 5 |
+
from ..utils import not_implemented_for
|
| 6 |
+
|
| 7 |
+
__all__ = ["reciprocity", "overall_reciprocity"]
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@not_implemented_for("undirected", "multigraph")
@nx._dispatch
def reciprocity(G, nodes=None):
    r"""Compute the reciprocity in a directed graph.

    The reciprocity of a directed graph is defined as the ratio
    of the number of edges pointing in both directions to the total
    number of edges in the graph.
    Formally, $r = |{(u,v) \in G|(v,u) \in G}| / |{(u,v) \in G}|$.

    The reciprocity of a single node u is defined similarly,
    it is the ratio of the number of edges in both directions to
    the total number of edges attached to node u.

    Parameters
    ----------
    G : graph
       A networkx directed graph
    nodes : container of nodes, optional (default=whole graph)
       Compute reciprocity for nodes in this container.

    Returns
    -------
    out : dictionary
       Reciprocity keyed by node label.

    Notes
    -----
    The reciprocity is not defined for isolated nodes.
    In such cases this function will return None.

    """
    if nodes is None:
        # No node selection: compute the reciprocity of the whole graph.
        return overall_reciprocity(G)

    if nodes in G:
        # `nodes` is a single node of G: return its reciprocity as a scalar.
        _, node_reciprocity = next(_reciprocity_iter(G, nodes))
        if node_reciprocity is None:
            raise NetworkXError("Not defined for isolated nodes.")
        return node_reciprocity

    # `nodes` is an iterable of nodes: map each node to its reciprocity.
    return dict(_reciprocity_iter(G, nodes))
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def _reciprocity_iter(G, nodes):
|
| 61 |
+
"""Return an iterator of (node, reciprocity)."""
|
| 62 |
+
n = G.nbunch_iter(nodes)
|
| 63 |
+
for node in n:
|
| 64 |
+
pred = set(G.predecessors(node))
|
| 65 |
+
succ = set(G.successors(node))
|
| 66 |
+
overlap = pred & succ
|
| 67 |
+
n_total = len(pred) + len(succ)
|
| 68 |
+
|
| 69 |
+
# Reciprocity is not defined for isolated nodes.
|
| 70 |
+
# Return None.
|
| 71 |
+
if n_total == 0:
|
| 72 |
+
yield (node, None)
|
| 73 |
+
else:
|
| 74 |
+
reciprocity = 2 * len(overlap) / n_total
|
| 75 |
+
yield (node, reciprocity)
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
@not_implemented_for("undirected", "multigraph")
@nx._dispatch
def overall_reciprocity(G):
    """Compute the reciprocity for the whole graph.

    See the doc of reciprocity for the definition.

    Parameters
    ----------
    G : graph
       A networkx graph

    Raises
    ------
    NetworkXError
        If the graph has no edges (reciprocity is undefined).

    """
    n_all_edge = G.number_of_edges()
    # Check for the empty graph *before* building the undirected copy:
    # G.to_undirected() duplicates the whole graph, which is wasted work
    # when we are about to raise anyway.
    if n_all_edge == 0:
        raise NetworkXError("Not defined for empty graphs")

    # Every reciprocated pair of directed edges collapses to one undirected
    # edge, so the count of reciprocated directed edges is twice the
    # difference between the directed and undirected edge counts.
    n_overlap_edge = (n_all_edge - G.to_undirected().number_of_edges()) * 2

    return n_overlap_edge / n_all_edge
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/regular.py
ADDED
|
@@ -0,0 +1,212 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for computing and verifying regular graphs."""
|
| 2 |
+
import networkx as nx
|
| 3 |
+
from networkx.utils import not_implemented_for
|
| 4 |
+
|
| 5 |
+
__all__ = ["is_regular", "is_k_regular", "k_factor"]
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
@nx._dispatch
def is_regular(G):
    """Determines whether the graph ``G`` is a regular graph.

    A regular graph is a graph where each vertex has the same degree. A
    regular digraph is a graph where the indegree and outdegree of each
    vertex are equal.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    bool
        Whether the given graph or digraph is regular.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 1)])
    >>> nx.is_regular(G)
    True

    """
    # Compare every node's degree against an arbitrary reference node.
    reference = nx.utils.arbitrary_element(G)
    if G.is_directed():
        # Digraphs must be regular in both the in- and out-degree sense.
        ref_in = G.in_degree(reference)
        in_ok = all(d == ref_in for _, d in G.in_degree)
        ref_out = G.out_degree(reference)
        out_ok = all(d == ref_out for _, d in G.out_degree)
        return in_ok and out_ok
    ref_degree = G.degree(reference)
    return all(d == ref_degree for _, d in G.degree)
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
@not_implemented_for("directed")
@nx._dispatch
def is_k_regular(G, k):
    """Determines whether the graph ``G`` is a k-regular graph.

    A k-regular graph is a graph where each vertex has degree k.

    Parameters
    ----------
    G : NetworkX graph
    k : int
        The degree every vertex is tested against.

    Returns
    -------
    bool
        Whether the given graph is k-regular.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (2, 3), (3, 4), (4, 1)])
    >>> nx.is_k_regular(G, k=3)
    False

    """
    # Fail fast on the first vertex whose degree differs from k.
    for _, degree in G.degree:
        if degree != k:
            return False
    return True
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatch(edge_attrs="matching_weight")
def k_factor(G, k, matching_weight="weight"):
    """Compute a k-factor of G

    A k-factor of a graph is a spanning k-regular subgraph.
    A spanning k-regular subgraph of G is a subgraph that contains
    each vertex of G and a subset of the edges of G such that each
    vertex has degree k.

    Parameters
    ----------
    G : NetworkX graph
      Undirected graph

    matching_weight: string, optional (default='weight')
       Edge data key corresponding to the edge weight.
       Used for finding the max-weighted perfect matching.
       If key not found, uses 1 as weight.

    Returns
    -------
    G2 : NetworkX graph
        A k-factor of G

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (2, 3), (3, 4), (4, 1)])
    >>> G2 = nx.k_factor(G, k=1)
    >>> G2.edges()
    EdgeView([(1, 2), (3, 4)])

    References
    ----------
    .. [1] "An algorithm for computing simple k-factors.",
       Meijer, Henk, Yurai Núñez-Rodríguez, and David Rappaport,
       Information processing letters, 2009.
    """

    from networkx.algorithms.matching import is_perfect_matching, max_weight_matching

    class LargeKGadget:
        # Gadget used when k >= degree / 2: each replaced node becomes
        # ``degree`` outer vertices plus ``degree - k`` core vertices.
        def __init__(self, k, degree, node, g):
            self.original = node
            self.g = g
            self.k = k
            self.degree = degree

            self.outer_vertices = [(node, x) for x in range(degree)]
            self.core_vertices = [(node, x + degree) for x in range(degree - k)]

        def replace_node(self):
            # Wire each outer vertex to one former neighbor (carrying over the
            # edge attributes), connect every core to every outer vertex, then
            # drop the original node.
            adj_view = self.g[self.original]
            neighbors = list(adj_view.keys())
            # NOTE: the previous version named this list ``edge_attrs`` and
            # then shadowed it with the loop variable of the same name; the
            # rename avoids the confusing shadowing.
            attr_dicts = list(adj_view.values())
            for outer, neighbor, edge_attrs in zip(
                self.outer_vertices, neighbors, attr_dicts
            ):
                self.g.add_edge(outer, neighbor, **edge_attrs)
            for core in self.core_vertices:
                for outer in self.outer_vertices:
                    self.g.add_edge(core, outer)
            self.g.remove_node(self.original)

        def restore_node(self):
            # Re-create the original node, reattach it to each outer vertex's
            # surviving (matched) external neighbor, then discard the gadget.
            self.g.add_node(self.original)
            for outer in self.outer_vertices:
                adj_view = self.g[outer]
                for neighbor, edge_attrs in list(adj_view.items()):
                    if neighbor not in self.core_vertices:
                        self.g.add_edge(self.original, neighbor, **edge_attrs)
                        break
            # Use self.g (not the enclosing function's ``g``) for consistency
            # with SmallKGadget.restore_node; both names refer to the same
            # graph object.
            self.g.remove_nodes_from(self.outer_vertices)
            self.g.remove_nodes_from(self.core_vertices)

    class SmallKGadget:
        # Gadget used when k < degree / 2: each replaced node becomes
        # ``degree`` outer + ``degree`` inner + ``k`` core vertices.
        def __init__(self, k, degree, node, g):
            self.original = node
            self.k = k
            self.degree = degree
            self.g = g

            self.outer_vertices = [(node, x) for x in range(degree)]
            self.inner_vertices = [(node, x + degree) for x in range(degree)]
            self.core_vertices = [(node, x + 2 * degree) for x in range(k)]

        def replace_node(self):
            adj_view = self.g[self.original]
            for outer, inner, (neighbor, edge_attrs) in zip(
                self.outer_vertices, self.inner_vertices, list(adj_view.items())
            ):
                self.g.add_edge(outer, inner)
                self.g.add_edge(outer, neighbor, **edge_attrs)
            for core in self.core_vertices:
                for inner in self.inner_vertices:
                    self.g.add_edge(core, inner)
            self.g.remove_node(self.original)

        def restore_node(self):
            self.g.add_node(self.original)
            for outer in self.outer_vertices:
                adj_view = self.g[outer]
                for neighbor, edge_attrs in adj_view.items():
                    if neighbor not in self.core_vertices:
                        self.g.add_edge(self.original, neighbor, **edge_attrs)
                        break
            self.g.remove_nodes_from(self.outer_vertices)
            self.g.remove_nodes_from(self.inner_vertices)
            self.g.remove_nodes_from(self.core_vertices)

    # Step 1: a k-factor needs every vertex to have degree >= k.
    if any(d < k for _, d in G.degree):
        raise nx.NetworkXUnfeasible("Graph contains a vertex with degree less than k")
    g = G.copy()

    # Step 2: replace every node by the appropriate gadget.
    gadgets = []
    for node, degree in list(g.degree):
        if k < degree / 2.0:
            gadget = SmallKGadget(k, degree, node, g)
        else:
            gadget = LargeKGadget(k, degree, node, g)
        gadget.replace_node()
        gadgets.append(gadget)

    # Step 3: a perfect matching in the gadget graph encodes the k-factor.
    matching = max_weight_matching(g, maxcardinality=True, weight=matching_weight)

    # Step 4
    if not is_perfect_matching(g, matching):
        raise nx.NetworkXUnfeasible(
            "Cannot find k-factor because no perfect matching exists"
        )

    # Snapshot the edge view before mutating: removing edges while iterating
    # the live view can invalidate the iteration.
    for edge in list(g.edges()):
        if edge not in matching and (edge[1], edge[0]) not in matching:
            g.remove_edge(edge[0], edge[1])

    for gadget in gadgets:
        gadget.restore_node()

    return g
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/traversal/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (405 Bytes). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/traversal/__pycache__/depth_first_search.cpython-311.pyc
ADDED
|
Binary file (15.3 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/traversal/breadth_first_search.py
ADDED
|
@@ -0,0 +1,581 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Basic algorithms for breadth-first searching the nodes of a graph."""
|
| 2 |
+
import math
|
| 3 |
+
from collections import deque
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
__all__ = [
|
| 8 |
+
"bfs_edges",
|
| 9 |
+
"bfs_tree",
|
| 10 |
+
"bfs_predecessors",
|
| 11 |
+
"bfs_successors",
|
| 12 |
+
"descendants_at_distance",
|
| 13 |
+
"bfs_layers",
|
| 14 |
+
"bfs_labeled_edges",
|
| 15 |
+
"generic_bfs_edges",
|
| 16 |
+
]
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
@nx._dispatch
def generic_bfs_edges(G, source, neighbors=None, depth_limit=None, sort_neighbors=None):
    """Iterate over edges in a breadth-first search.

    The traversal starts at `source` and expands newly discovered nodes
    through the `neighbors` function.

    Parameters
    ----------
    G : NetworkX graph

    source : node
        Starting node for the breadth-first search; this function
        iterates over only those edges in the component reachable from
        this node.

    neighbors : function
        A function mapping a newly visited node to an *iterator* (not just
        a list) over that node's neighbors, in whatever order is desired.
        Defaults to ``G.neighbors``; any function returning an iterator
        over some or all neighbors of a node may be supplied.

    depth_limit : int, optional(default=len(G))
        Specify the maximum search depth.

    sort_neighbors : Callable

        .. deprecated:: 3.2

           The sort_neighbors parameter is deprecated and will be removed in
           version 3.4. A custom (e.g. sorted) ordering of neighbors can be
           specified with the `neighbors` parameter.

        A function that takes the list of neighbors of a given node as input,
        and returns an iterator over these neighbors but with a custom
        ordering.

    Yields
    ------
    edge
        Edges in the breadth-first search starting from `source`.

    Examples
    --------
    >>> G = nx.path_graph(3)
    >>> list(nx.bfs_edges(G, 0))
    [(0, 1), (1, 2)]
    >>> list(nx.bfs_edges(G, source=0, depth_limit=1))
    [(0, 1)]

    Notes
    -----
    Based on the public-domain `PADS`_ BFS (accessed July 2004), with
    depth-limit handling following the Wikipedia
    "`Depth-limited-search`_" article.

    .. _PADS: http://www.ics.uci.edu/~eppstein/PADS/BFS.py
    .. _Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search
    """
    if neighbors is None:
        neighbors = G.neighbors
    if sort_neighbors is not None:
        import warnings

        warnings.warn(
            (
                "The sort_neighbors parameter is deprecated and will be removed\n"
                "in NetworkX 3.4, use the neighbors parameter instead."
            ),
            DeprecationWarning,
            stacklevel=2,
        )
        unsorted = neighbors
        neighbors = lambda node: iter(sort_neighbors(unsorted(node)))
    if depth_limit is None:
        depth_limit = len(G)

    total = len(G)
    visited = {source}
    level = 0
    # Each frontier entry pairs a node with a (lazy) iterator over its
    # neighbors; the frontier holds all nodes at distance ``level``.
    frontier = [(source, neighbors(source))]
    while frontier and level < depth_limit:
        current, frontier = frontier, []
        for parent, nbrs in current:
            for nbr in nbrs:
                if nbr in visited:
                    continue
                visited.add(nbr)
                frontier.append((nbr, neighbors(nbr)))
                yield parent, nbr
                if len(visited) == total:
                    # Every node has been reached; no further tree edges exist.
                    return
        level += 1
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
@nx._dispatch
def bfs_edges(G, source, reverse=False, depth_limit=None, sort_neighbors=None):
    """Iterate over edges in a breadth-first-search starting at source.

    Parameters
    ----------
    G : NetworkX graph

    source : node
       Specify starting node for breadth-first search; this function
       iterates over only those edges in the component reachable from
       this node.

    reverse : bool, optional
       If True traverse a directed graph in the reverse direction

    depth_limit : int, optional(default=len(G))
        Specify the maximum search depth

    sort_neighbors : function
        A function that takes the list of neighbors of given node as input, and
        returns an *iterator* over these neighbors but with custom ordering.

    Yields
    ------
    edge: 2-tuple of nodes
       Yields edges resulting from the breadth-first search.

    Examples
    --------
    To get the edges in a breadth-first search::

        >>> G = nx.path_graph(3)
        >>> list(nx.bfs_edges(G, 0))
        [(0, 1), (1, 2)]
        >>> list(nx.bfs_edges(G, source=0, depth_limit=1))
        [(0, 1)]

    To get the nodes in a breadth-first search order::

        >>> G = nx.path_graph(3)
        >>> root = 2
        >>> edges = nx.bfs_edges(G, root)
        >>> nodes = [root] + [v for u, v in edges]
        >>> nodes
        [2, 1, 0]

    Notes
    -----
    Unlike :func:`~networkx.algorithms.traversal.edgebfs.edge_bfs`, which
    reports *every* edge in exploration order, this generator yields only the
    edges of the BFS tree: edges leading back to already-explored nodes are
    suppressed.

    Based on the breadth-first search implementation in PADS [1]_
    by D. Eppstein, July 2004; with modifications to allow depth limits
    as described in [2]_.

    References
    ----------
    .. [1] http://www.ics.uci.edu/~eppstein/PADS/BFS.py.
    .. [2] https://en.wikipedia.org/wiki/Depth-limited_search

    See Also
    --------
    bfs_tree
    :func:`~networkx.algorithms.traversal.depth_first_search.dfs_edges`
    :func:`~networkx.algorithms.traversal.edgebfs.edge_bfs`

    """
    # Traversing a digraph "in reverse" just means expanding predecessors.
    successors = G.predecessors if reverse and G.is_directed() else G.neighbors

    if callable(sort_neighbors):

        def ordered(node):
            return iter(sort_neighbors(successors(node)))

        yield from generic_bfs_edges(G, source, ordered, depth_limit)
    else:
        yield from generic_bfs_edges(G, source, successors, depth_limit)
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
@nx._dispatch
def bfs_tree(G, source, reverse=False, depth_limit=None, sort_neighbors=None):
    """Returns an oriented tree constructed from of a breadth-first-search
    starting at source.

    Parameters
    ----------
    G : NetworkX graph

    source : node
       Specify starting node for breadth-first search

    reverse : bool, optional
       If True traverse a directed graph in the reverse direction

    depth_limit : int, optional(default=len(G))
        Specify the maximum search depth

    sort_neighbors : function
        A function that takes the list of neighbors of given node as input, and
        returns an *iterator* over these neighbors but with custom ordering.

    Returns
    -------
    T: NetworkX DiGraph
       An oriented tree

    Examples
    --------
    >>> G = nx.path_graph(3)
    >>> list(nx.bfs_tree(G, 1).edges())
    [(1, 0), (1, 2)]
    >>> H = nx.Graph()
    >>> nx.add_path(H, [0, 1, 2, 3, 4, 5, 6])
    >>> nx.add_path(H, [2, 7, 8, 9, 10])
    >>> sorted(list(nx.bfs_tree(H, source=3, depth_limit=3).edges()))
    [(1, 0), (2, 1), (2, 7), (3, 2), (3, 4), (4, 5), (5, 6), (7, 8)]


    Notes
    -----
    Based on http://www.ics.uci.edu/~eppstein/PADS/BFS.py
    by D. Eppstein, July 2004. The modifications
    to allow depth limits based on the Wikipedia article
    "`Depth-limited-search`_".

    .. _Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search

    See Also
    --------
    dfs_tree
    bfs_edges
    edge_bfs
    """
    # The source is added explicitly so an isolated source still produces a
    # one-node tree (bfs_edges would yield nothing in that case).
    tree = nx.DiGraph()
    tree.add_node(source)
    tree.add_edges_from(
        bfs_edges(
            G,
            source,
            reverse=reverse,
            depth_limit=depth_limit,
            sort_neighbors=sort_neighbors,
        )
    )
    return tree
|
| 271 |
+
|
| 272 |
+
|
| 273 |
+
@nx._dispatch
def bfs_predecessors(G, source, depth_limit=None, sort_neighbors=None):
    """Returns an iterator of predecessors in breadth-first-search from source.

    Parameters
    ----------
    G : NetworkX graph

    source : node
       Specify starting node for breadth-first search

    depth_limit : int, optional(default=len(G))
        Specify the maximum search depth

    sort_neighbors : function
        A function that takes the list of neighbors of given node as input, and
        returns an *iterator* over these neighbors but with custom ordering.

    Returns
    -------
    pred: iterator
        (node, predecessor) iterator where `predecessor` is the predecessor of
        `node` in a breadth first search starting from `source`.

    Examples
    --------
    >>> G = nx.path_graph(3)
    >>> dict(nx.bfs_predecessors(G, 0))
    {1: 0, 2: 1}
    >>> H = nx.Graph()
    >>> H.add_edges_from([(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)])
    >>> dict(nx.bfs_predecessors(H, 0))
    {1: 0, 2: 0, 3: 1, 4: 1, 5: 2, 6: 2}
    >>> M = nx.Graph()
    >>> nx.add_path(M, [0, 1, 2, 3, 4, 5, 6])
    >>> nx.add_path(M, [2, 7, 8, 9, 10])
    >>> sorted(nx.bfs_predecessors(M, source=1, depth_limit=3))
    [(0, 1), (2, 1), (3, 2), (4, 3), (7, 2), (8, 7)]
    >>> N = nx.DiGraph()
    >>> nx.add_path(N, [0, 1, 2, 3, 4, 7])
    >>> nx.add_path(N, [3, 5, 6, 7])
    >>> sorted(nx.bfs_predecessors(N, source=2))
    [(3, 2), (4, 3), (5, 3), (6, 5), (7, 4)]

    Notes
    -----
    Based on http://www.ics.uci.edu/~eppstein/PADS/BFS.py
    by D. Eppstein, July 2004. The modifications
    to allow depth limits based on the Wikipedia article
    "`Depth-limited-search`_".

    .. _Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search

    See Also
    --------
    bfs_tree
    bfs_edges
    edge_bfs
    """
    # Each BFS tree edge (parent, child) is flipped to (child, parent): the
    # parent is the child's unique predecessor in the tree.
    edges = bfs_edges(G, source, depth_limit=depth_limit, sort_neighbors=sort_neighbors)
    yield from ((child, parent) for parent, child in edges)
|
| 336 |
+
|
| 337 |
+
|
| 338 |
+
@nx._dispatch
def bfs_successors(G, source, depth_limit=None, sort_neighbors=None):
    """Returns an iterator of successors in breadth-first-search from source.

    Parameters
    ----------
    G : NetworkX graph

    source : node
       Specify starting node for breadth-first search

    depth_limit : int, optional(default=len(G))
        Specify the maximum search depth

    sort_neighbors : function
        A function that takes the list of neighbors of given node as input, and
        returns an *iterator* over these neighbors but with custom ordering.

    Returns
    -------
    succ: iterator
       (node, successors) iterator where `successors` is the non-empty list of
       successors of `node` in a breadth first search from `source`.
       To appear in the iterator, `node` must have successors.

    Examples
    --------
    >>> G = nx.path_graph(3)
    >>> dict(nx.bfs_successors(G, 0))
    {0: [1], 1: [2]}
    >>> H = nx.Graph()
    >>> H.add_edges_from([(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)])
    >>> dict(nx.bfs_successors(H, 0))
    {0: [1, 2], 1: [3, 4], 2: [5, 6]}
    >>> G = nx.Graph()
    >>> nx.add_path(G, [0, 1, 2, 3, 4, 5, 6])
    >>> nx.add_path(G, [2, 7, 8, 9, 10])
    >>> dict(nx.bfs_successors(G, source=1, depth_limit=3))
    {1: [0, 2], 2: [3, 7], 3: [4], 7: [8]}
    >>> G = nx.DiGraph()
    >>> nx.add_path(G, [0, 1, 2, 3, 4, 5])
    >>> dict(nx.bfs_successors(G, source=3))
    {3: [4], 4: [5]}

    Notes
    -----
    Based on http://www.ics.uci.edu/~eppstein/PADS/BFS.py
    by D. Eppstein, July 2004.The modifications
    to allow depth limits based on the Wikipedia article
    "`Depth-limited-search`_".

    .. _Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search

    See Also
    --------
    bfs_tree
    bfs_edges
    edge_bfs
    """
    # BFS yields all edges of one parent consecutively, so grouping amounts
    # to flushing the bucket whenever the parent changes.
    current_parent = source
    bucket = []
    edges = bfs_edges(G, source, depth_limit=depth_limit, sort_neighbors=sort_neighbors)
    for parent, child in edges:
        if parent != current_parent:
            yield (current_parent, bucket)
            current_parent, bucket = parent, []
        bucket.append(child)
    yield (current_parent, bucket)
|
| 409 |
+
|
| 410 |
+
|
| 411 |
+
@nx._dispatch
def bfs_layers(G, sources):
    """Returns an iterator of all the layers in breadth-first search traversal.

    Parameters
    ----------
    G : NetworkX graph
        A graph over which to find the layers using breadth-first search.

    sources : node in `G` or list of nodes in `G`
        Specify starting nodes for single source or multiple sources breadth-first search

    Yields
    ------
    layer: list of nodes
        Yields list of nodes at the same distance from sources

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> dict(enumerate(nx.bfs_layers(G, [0, 4])))
    {0: [0, 4], 1: [1, 3], 2: [2]}
    >>> H = nx.Graph()
    >>> H.add_edges_from([(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)])
    >>> dict(enumerate(nx.bfs_layers(H, [1])))
    {0: [1], 1: [0, 3, 4], 2: [2], 3: [5, 6]}
    >>> dict(enumerate(nx.bfs_layers(H, [1, 6])))
    {0: [1, 6], 1: [0, 3, 4, 2], 2: [5]}
    """
    if sources in G:
        sources = [sources]

    # Materialize ``sources`` exactly once.  The previous version iterated it
    # a second time (``set(sources)``), so a one-shot iterator input silently
    # produced an empty visited set and the source nodes could be re-yielded.
    current_layer = list(sources)
    visited = set(current_layer)

    for source in current_layer:
        if source not in G:
            raise nx.NetworkXError(f"The node {source} is not in the graph.")

    # this is basically BFS, except that the current layer only stores the nodes at
    # same distance from sources at each iteration
    while current_layer:
        yield current_layer
        next_layer = []
        for node in current_layer:
            for child in G[node]:
                if child not in visited:
                    visited.add(child)
                    next_layer.append(child)
        current_layer = next_layer
|
| 461 |
+
|
| 462 |
+
|
| 463 |
+
# Labels attached to the edges yielded by `bfs_labeled_edges`, classifying
# each explored edge relative to the BFS layering of the graph.
REVERSE_EDGE = "reverse"  # tail lies on a deeper layer than the head (directed only)
TREE_EDGE = "tree"  # edge whose traversal first discovers its head node
FORWARD_EDGE = "forward"  # head already discovered, one layer below the tail
LEVEL_EDGE = "level"  # both endpoints lie on the same layer
|
| 467 |
+
|
| 468 |
+
|
| 469 |
+
@nx._dispatch
def bfs_labeled_edges(G, sources):
    """Iterate over edges in a breadth-first search (BFS) labeled by type.

    We generate triple of the form (*u*, *v*, *d*), where (*u*, *v*) is the
    edge being explored in the breadth-first search and *d* is one of the
    strings 'tree', 'forward', 'level', or 'reverse'. A 'tree' edge is one in
    which *v* is first discovered and placed into the layer below *u*. A
    'forward' edge is one in which *u* is on the layer above *v* and *v* has
    already been discovered. A 'level' edge is one in which both *u* and *v*
    occur on the same layer. A 'reverse' edge is one in which *u* is on a layer
    below *v*.

    We emit each edge exactly once. In an undirected graph, 'reverse' edges do
    not occur, because each is discovered either as a 'tree' or 'forward' edge.

    Parameters
    ----------
    G : NetworkX graph
        A graph over which to find the layers using breadth-first search.

    sources : node in `G` or list of nodes in `G`
        Starting nodes for single source or multiple sources breadth-first search

    Yields
    ------
    edges: generator
       A generator of triples (*u*, *v*, *d*) where (*u*, *v*) is the edge being
       explored and *d* is described above.

    Examples
    --------
    >>> G = nx.cycle_graph(4, create_using = nx.DiGraph)
    >>> list(nx.bfs_labeled_edges(G, 0))
    [(0, 1, 'tree'), (1, 2, 'tree'), (2, 3, 'tree'), (3, 0, 'reverse')]
    >>> G = nx.complete_graph(3)
    >>> list(nx.bfs_labeled_edges(G, 0))
    [(0, 1, 'tree'), (0, 2, 'tree'), (1, 2, 'level')]
    >>> list(nx.bfs_labeled_edges(G, [0, 1]))
    [(0, 1, 'level'), (0, 2, 'tree'), (1, 2, 'forward')]
    """
    # A single node is promoted to a one-element source list.
    if sources in G:
        sources = [sources]

    neighbors = G._adj
    directed = G.is_directed()
    visited = set()
    visit = visited.discard if directed else visited.add
    # We use visited in a negative sense, so the visited set stays empty for the
    # directed case and level edges are reported on their first occurrence in
    # the undirected case. Note our use of visited.discard -- this is built-in
    # thus somewhat faster than a python-defined def nop(x): pass
    depth = {s: 0 for s in sources}  # also doubles as the discovered-node set
    queue = deque(depth.items())
    push = queue.append
    pop = queue.popleft
    while queue:
        u, du = pop()
        for v in neighbors[u]:
            if v not in depth:
                # v discovered for the first time: tree edge, one layer deeper.
                depth[v] = dv = du + 1
                push((v, dv))
                yield u, v, TREE_EDGE
            else:
                # v already discovered; classify by relative layer depth.
                dv = depth[v]
                if du == dv:
                    # Same layer; in the undirected case only report the first
                    # of the two endpoints to finish (see `visit` note above).
                    if v not in visited:
                        yield u, v, LEVEL_EDGE
                elif du < dv:
                    yield u, v, FORWARD_EDGE
                elif directed:
                    # u is deeper than v; only meaningful for digraphs.
                    yield u, v, REVERSE_EDGE
        visit(u)
|
| 542 |
+
|
| 543 |
+
|
| 544 |
+
@nx._dispatch
def descendants_at_distance(G, source, distance):
    """Returns all nodes at a fixed `distance` from `source` in `G`.

    Parameters
    ----------
    G : NetworkX graph
        A graph
    source : node in `G`
    distance : the distance of the wanted nodes from `source`

    Returns
    -------
    set()
        The descendants of `source` in `G` at the given `distance` from `source`

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> nx.descendants_at_distance(G, 2, 2)
    {0, 4}
    >>> H = nx.DiGraph()
    >>> H.add_edges_from([(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)])
    >>> nx.descendants_at_distance(H, 0, 2)
    {3, 4, 5, 6}
    >>> nx.descendants_at_distance(H, 5, 0)
    {5}
    >>> nx.descendants_at_distance(H, 5, 1)
    set()
    """
    if source not in G:
        raise nx.NetworkXError(f"The node {source} is not in the graph.")

    # Walk the BFS layers outward and stop at the requested depth; if the
    # traversal is exhausted first, no node lies at that distance.
    for hops, layer in enumerate(nx.bfs_layers(G, source)):
        if hops == distance:
            return set(layer)
    return set()
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/traversal/depth_first_search.py
ADDED
|
@@ -0,0 +1,469 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Basic algorithms for depth-first searching the nodes of a graph."""
|
| 2 |
+
from collections import defaultdict
|
| 3 |
+
|
| 4 |
+
import networkx as nx
|
| 5 |
+
|
| 6 |
+
__all__ = [
|
| 7 |
+
"dfs_edges",
|
| 8 |
+
"dfs_tree",
|
| 9 |
+
"dfs_predecessors",
|
| 10 |
+
"dfs_successors",
|
| 11 |
+
"dfs_preorder_nodes",
|
| 12 |
+
"dfs_postorder_nodes",
|
| 13 |
+
"dfs_labeled_edges",
|
| 14 |
+
]
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
@nx._dispatch
def dfs_edges(G, source=None, depth_limit=None):
    """Yield the edges of a depth-first search (DFS) over `G`.

    Edges are produced in the order the search traverses them; edges that
    the traversal never follows are not produced (see
    `~networkx.algorithms.traversal.edgedfs.edge_dfs` for every edge).

    Parameters
    ----------
    G : NetworkX graph

    source : node, optional
        Start the search at this node and yield only edges in the
        component reachable from it.

    depth_limit : int, optional (default=len(G))
        Specify the maximum search depth.

    Yields
    ------
    edge: 2-tuple of nodes
        Yields edges resulting from the depth-first-search.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> list(nx.dfs_edges(G, source=0))
    [(0, 1), (1, 2), (2, 3), (3, 4)]
    >>> list(nx.dfs_edges(G, source=0, depth_limit=2))
    [(0, 1), (1, 2)]

    Notes
    -----
    If a source is not specified then a source is chosen arbitrarily and
    repeatedly until all components in the graph are searched.

    Adapted from David Eppstein's depth-first search function in PADS
    [1]_, with a depth limit added following the Wikipedia article
    "Depth-limited search" [2]_.

    See Also
    --------
    dfs_preorder_nodes
    dfs_postorder_nodes
    dfs_labeled_edges
    :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs`
    :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_edges`

    References
    ----------
    .. [1] http://www.ics.uci.edu/~eppstein/PADS
    .. [2] https://en.wikipedia.org/wiki/Depth-limited_search
    """
    # Either search the whole graph (arbitrary roots) or one component.
    roots = G if source is None else [source]
    if depth_limit is None:
        depth_limit = len(G)

    seen = set()
    exhausted = object()  # sentinel: the top-of-stack neighbor iterator is spent
    for root in roots:
        if root in seen:
            continue
        seen.add(root)
        stack = [(root, iter(G[root]))]
        depth = 1
        while stack:
            node, nbrs = stack[-1]
            # Advance to the next unvisited neighbor, if there is one.
            nxt = next((w for w in nbrs if w not in seen), exhausted)
            if nxt is exhausted:
                # All neighbors handled: retreat one level.
                stack.pop()
                depth -= 1
            else:
                yield node, nxt
                seen.add(nxt)
                # Only descend while the depth limit allows it; at the
                # limit we keep scanning siblings without recursing.
                if depth < depth_limit:
                    stack.append((nxt, iter(G[nxt])))
                    depth += 1
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
@nx._dispatch
def dfs_tree(G, source=None, depth_limit=None):
    """Returns oriented tree constructed from a depth-first-search from source.

    Parameters
    ----------
    G : NetworkX graph

    source : node, optional
        Specify starting node for depth-first search.

    depth_limit : int, optional (default=len(G))
        Specify the maximum search depth.

    Returns
    -------
    T : NetworkX DiGraph
        An oriented tree

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> T = nx.dfs_tree(G, source=0, depth_limit=2)
    >>> list(T.edges())
    [(0, 1), (1, 2)]
    >>> T = nx.dfs_tree(G, source=0)
    >>> list(T.edges())
    [(0, 1), (1, 2), (2, 3), (3, 4)]

    See Also
    --------
    dfs_preorder_nodes
    dfs_postorder_nodes
    dfs_labeled_edges
    edge_dfs
    bfs_tree
    """
    tree = nx.DiGraph()
    if source is not None:
        # Guarantee the root is present even if it has no edges.
        tree.add_node(source)
    else:
        # Whole-graph search: isolated nodes must appear in the tree too.
        tree.add_nodes_from(G)
    tree.add_edges_from(dfs_edges(G, source, depth_limit))
    return tree
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
@nx._dispatch
def dfs_predecessors(G, source=None, depth_limit=None):
    """Returns dictionary of predecessors in depth-first-search from source.

    Parameters
    ----------
    G : NetworkX graph

    source : node, optional
        Specify starting node for depth-first search.
        Note that you will get predecessors for all nodes in the
        component containing `source`. This input only specifies
        where the DFS starts.

    depth_limit : int, optional (default=len(G))
        Specify the maximum search depth.

    Returns
    -------
    pred: dict
        A dictionary with nodes as keys and predecessor nodes as values.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> nx.dfs_predecessors(G, source=0)
    {1: 0, 2: 1, 3: 2}
    >>> nx.dfs_predecessors(G, source=0, depth_limit=2)
    {1: 0, 2: 1}

    Notes
    -----
    If a source is not specified then a source is chosen arbitrarily and
    repeatedly until all components in the graph are searched.

    Adapted from David Eppstein's depth-first search function in `PADS`_,
    with a depth limit added following "`Depth-limited search`_".

    .. _PADS: http://www.ics.uci.edu/~eppstein/PADS
    .. _Depth-limited search: https://en.wikipedia.org/wiki/Depth-limited_search

    See Also
    --------
    dfs_preorder_nodes
    dfs_postorder_nodes
    dfs_labeled_edges
    edge_dfs
    bfs_tree
    """
    # Each DFS tree edge (parent, child) makes `parent` the unique
    # predecessor of `child`.
    tree_edges = dfs_edges(G, source=source, depth_limit=depth_limit)
    return dict((child, parent) for parent, child in tree_edges)
|
| 201 |
+
|
| 202 |
+
|
| 203 |
+
@nx._dispatch
def dfs_successors(G, source=None, depth_limit=None):
    """Returns dictionary of successors in depth-first-search from source.

    Parameters
    ----------
    G : NetworkX graph

    source : node, optional
        Specify starting node for depth-first search.
        Note that you will get successors for all nodes in the
        component containing `source`. This input only specifies
        where the DFS starts.

    depth_limit : int, optional (default=len(G))
        Specify the maximum search depth.

    Returns
    -------
    succ: dict
        A dictionary with nodes as keys and list of successor nodes as values.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> nx.dfs_successors(G, source=0)
    {0: [1], 1: [2], 2: [3], 3: [4]}
    >>> nx.dfs_successors(G, source=0, depth_limit=2)
    {0: [1], 1: [2]}

    Notes
    -----
    If a source is not specified then a source is chosen arbitrarily and
    repeatedly until all components in the graph are searched.

    Adapted from David Eppstein's depth-first search function in `PADS`_,
    with a depth limit added following "`Depth-limited search`_".

    .. _PADS: http://www.ics.uci.edu/~eppstein/PADS
    .. _Depth-limited search: https://en.wikipedia.org/wiki/Depth-limited_search

    See Also
    --------
    dfs_preorder_nodes
    dfs_postorder_nodes
    dfs_labeled_edges
    edge_dfs
    bfs_tree
    """
    # Group the DFS tree edges by their parent endpoint; lists preserve
    # the order in which children were discovered.
    successors = {}
    for parent, child in dfs_edges(G, source=source, depth_limit=depth_limit):
        successors.setdefault(parent, []).append(child)
    return successors
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
@nx._dispatch
def dfs_postorder_nodes(G, source=None, depth_limit=None):
    """Generate nodes in a depth-first-search post-ordering starting at source.

    Parameters
    ----------
    G : NetworkX graph

    source : node, optional
        Specify starting node for depth-first search.

    depth_limit : int, optional (default=len(G))
        Specify the maximum search depth.

    Returns
    -------
    nodes: generator
        A generator of nodes in a depth-first-search post-ordering.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> list(nx.dfs_postorder_nodes(G, source=0))
    [4, 3, 2, 1, 0]
    >>> list(nx.dfs_postorder_nodes(G, source=0, depth_limit=2))
    [1, 0]

    Notes
    -----
    If a source is not specified then a source is chosen arbitrarily and
    repeatedly until all components in the graph are searched.

    Adapted from David Eppstein's depth-first search function in `PADS`_,
    with a depth limit added following "`Depth-limited search`_".

    .. _PADS: http://www.ics.uci.edu/~eppstein/PADS
    .. _Depth-limited search: https://en.wikipedia.org/wiki/Depth-limited_search

    See Also
    --------
    dfs_edges
    dfs_preorder_nodes
    dfs_labeled_edges
    edge_dfs
    bfs_tree
    """
    # A node is finished exactly when its tree edge is labeled 'reverse',
    # so filtering those labels yields the post-order.
    labeled = nx.dfs_labeled_edges(G, source=source, depth_limit=depth_limit)
    return (node for _, node, direction in labeled if direction == "reverse")
|
| 310 |
+
|
| 311 |
+
|
| 312 |
+
@nx._dispatch
def dfs_preorder_nodes(G, source=None, depth_limit=None):
    """Generate nodes in a depth-first-search pre-ordering starting at source.

    Parameters
    ----------
    G : NetworkX graph

    source : node, optional
        Specify starting node for depth-first search and return nodes in
        the component reachable from source.

    depth_limit : int, optional (default=len(G))
        Specify the maximum search depth.

    Returns
    -------
    nodes: generator
        A generator of nodes in a depth-first-search pre-ordering.

    Examples
    --------
    >>> G = nx.path_graph(5)
    >>> list(nx.dfs_preorder_nodes(G, source=0))
    [0, 1, 2, 3, 4]
    >>> list(nx.dfs_preorder_nodes(G, source=0, depth_limit=2))
    [0, 1, 2]

    Notes
    -----
    If a source is not specified then a source is chosen arbitrarily and
    repeatedly until all components in the graph are searched.

    Adapted from David Eppstein's depth-first search function in `PADS`_,
    with a depth limit added following "`Depth-limited search`_".

    .. _PADS: http://www.ics.uci.edu/~eppstein/PADS
    .. _Depth-limited search: https://en.wikipedia.org/wiki/Depth-limited_search

    See Also
    --------
    dfs_edges
    dfs_postorder_nodes
    dfs_labeled_edges
    bfs_edges
    """
    # A node is discovered exactly when its tree edge is labeled
    # 'forward', so filtering those labels yields the pre-order.
    labeled = nx.dfs_labeled_edges(G, source=source, depth_limit=depth_limit)
    return (node for _, node, direction in labeled if direction == "forward")
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
@nx._dispatch
def dfs_labeled_edges(G, source=None, depth_limit=None):
    """Iterate over edges in a depth-first-search (DFS) labeled by type.

    Parameters
    ----------
    G : NetworkX graph

    source : node, optional
        Specify starting node for depth-first search and return edges in
        the component reachable from source.

    depth_limit : int, optional (default=len(G))
        Specify the maximum search depth.

    Returns
    -------
    edges: generator
        A generator of triples of the form (*u*, *v*, *d*), where (*u*,
        *v*) is the edge being explored in the depth-first search and *d*
        is one of the strings 'forward', 'nontree', 'reverse', or 'reverse-depth_limit'.
        A 'forward' edge is one in which *u* has been visited but *v* has
        not. A 'nontree' edge is one in which both *u* and *v* have been
        visited but the edge is not in the DFS tree. A 'reverse' edge is
        one in which both *u* and *v* have been visited and the edge is in
        the DFS tree. When the `depth_limit` is reached via a 'forward' edge,
        a 'reverse' edge is immediately generated rather than the subtree
        being explored. To indicate this flavor of 'reverse' edge, the string
        yielded is 'reverse-depth_limit'.

    Examples
    --------

    The labels reveal the complete transcript of the depth-first search
    algorithm in more detail than, for example, :func:`dfs_edges`::

        >>> from pprint import pprint
        >>>
        >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 1)])
        >>> pprint(list(nx.dfs_labeled_edges(G, source=0)))
        [(0, 0, 'forward'),
         (0, 1, 'forward'),
         (1, 2, 'forward'),
         (2, 1, 'nontree'),
         (1, 2, 'reverse'),
         (0, 1, 'reverse'),
         (0, 0, 'reverse')]

    Notes
    -----
    If a source is not specified then a source is chosen arbitrarily and
    repeatedly until all components in the graph are searched.

    The implementation of this function is adapted from David Eppstein's
    depth-first search function in `PADS`_, with modifications
    to allow depth limits based on the Wikipedia article
    "`Depth-limited search`_".

    .. _PADS: http://www.ics.uci.edu/~eppstein/PADS
    .. _Depth-limited search: https://en.wikipedia.org/wiki/Depth-limited_search

    See Also
    --------
    dfs_edges
    dfs_preorder_nodes
    dfs_postorder_nodes
    """
    # Based on http://www.ics.uci.edu/~eppstein/PADS/DFS.py
    # by D. Eppstein, July 2004.
    if source is None:
        # edges for all components
        nodes = G
    else:
        # edges for components with source
        nodes = [source]
    if depth_limit is None:
        depth_limit = len(G)

    visited = set()
    for start in nodes:
        if start in visited:
            continue
        # Each search root is announced with a self-loop 'forward' edge
        # (and closed with a matching 'reverse' at the very end).
        yield start, start, "forward"
        visited.add(start)
        # Stack of (node, iterator over its neighbors); depth_now tracks
        # how deep the current top of the stack sits in the DFS tree.
        stack = [(start, iter(G[start]))]
        depth_now = 1
        while stack:
            parent, children = stack[-1]
            for child in children:
                if child in visited:
                    yield parent, child, "nontree"
                else:
                    yield parent, child, "forward"
                    visited.add(child)
                    if depth_now < depth_limit:
                        # Descend into `child`; break restarts the while
                        # loop with `child` as the new top of the stack.
                        stack.append((child, iter(G[child])))
                        depth_now += 1
                        break
                    else:
                        # Depth limit hit: close `child` immediately
                        # instead of exploring its subtree.
                        yield parent, child, "reverse-depth_limit"
            else:
                # for/else: `children` was exhausted without a break, so
                # `parent` is fully explored — pop it and emit 'reverse'
                # for the tree edge that discovered it (if any).
                stack.pop()
                depth_now -= 1
                if stack:
                    yield stack[-1][0], parent, "reverse"
        yield start, start, "reverse"
|