Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/covering.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/d_separation.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/hybrid.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/richclub.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/smallworld.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/sparsifiers.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/time_dependent.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/tournament.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/connectivity.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/maxcut.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/connectivity.py +412 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/kcomponents.py +369 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/matching.py +43 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_distance_measures.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_ramsey.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_traveling_salesman.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_steinertree.py +191 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_traveling_salesman.py +963 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/traveling_salesman.py +1442 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/vertex_cover.py +82 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__init__.py +24 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/asyn_fluid.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/community_utils.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/label_propagation.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/modularity_max.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/asyn_fluid.py +150 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_kclique.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_utils.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/test_kclique.py +91 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/__init__.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/weakly_connected.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/attracting.py +114 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/strongly_connected.py +431 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/__pycache__/test_attracting.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/__pycache__/test_biconnected.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/test_attracting.py +70 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/test_semiconnected.py +55 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/test_strongly_connected.py +207 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/flow/__pycache__/boykovkolmogorov.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/flow/__pycache__/mincost.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/flow/__pycache__/shortestaugmentingpath.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/flow/tests/__pycache__/test_maxflow.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/flow/tests/test_gomory_hu.py +128 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/flow/tests/test_maxflow.py +560 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorph.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py +1060 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/__init__.py +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphism.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphvf2.cpython-311.pyc +0 -0
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/covering.cpython-311.pyc
ADDED
|
Binary file (6.2 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/d_separation.cpython-311.pyc
ADDED
|
Binary file (18.5 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/hybrid.cpython-311.pyc
ADDED
|
Binary file (6.33 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/richclub.cpython-311.pyc
ADDED
|
Binary file (5.69 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/smallworld.cpython-311.pyc
ADDED
|
Binary file (15.9 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/sparsifiers.cpython-311.pyc
ADDED
|
Binary file (10.4 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/time_dependent.cpython-311.pyc
ADDED
|
Binary file (8.21 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/tournament.cpython-311.pyc
ADDED
|
Binary file (16.4 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (1.53 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/connectivity.cpython-311.pyc
ADDED
|
Binary file (14.6 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/maxcut.cpython-311.pyc
ADDED
|
Binary file (5.14 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/connectivity.py
ADDED
|
@@ -0,0 +1,412 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
""" Fast approximation for node connectivity
|
| 2 |
+
"""
|
| 3 |
+
import itertools
|
| 4 |
+
from operator import itemgetter
|
| 5 |
+
|
| 6 |
+
import networkx as nx
|
| 7 |
+
|
| 8 |
+
__all__ = [
|
| 9 |
+
"local_node_connectivity",
|
| 10 |
+
"node_connectivity",
|
| 11 |
+
"all_pairs_node_connectivity",
|
| 12 |
+
]
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
@nx._dispatch(name="approximate_local_node_connectivity")
|
| 16 |
+
def local_node_connectivity(G, source, target, cutoff=None):
|
| 17 |
+
"""Compute node connectivity between source and target.
|
| 18 |
+
|
| 19 |
+
Pairwise or local node connectivity between two distinct and nonadjacent
|
| 20 |
+
nodes is the minimum number of nodes that must be removed (minimum
|
| 21 |
+
separating cutset) to disconnect them. By Menger's theorem, this is equal
|
| 22 |
+
to the number of node independent paths (paths that share no nodes other
|
| 23 |
+
than source and target). Which is what we compute in this function.
|
| 24 |
+
|
| 25 |
+
This algorithm is a fast approximation that gives an strict lower
|
| 26 |
+
bound on the actual number of node independent paths between two nodes [1]_.
|
| 27 |
+
It works for both directed and undirected graphs.
|
| 28 |
+
|
| 29 |
+
Parameters
|
| 30 |
+
----------
|
| 31 |
+
|
| 32 |
+
G : NetworkX graph
|
| 33 |
+
|
| 34 |
+
source : node
|
| 35 |
+
Starting node for node connectivity
|
| 36 |
+
|
| 37 |
+
target : node
|
| 38 |
+
Ending node for node connectivity
|
| 39 |
+
|
| 40 |
+
cutoff : integer
|
| 41 |
+
Maximum node connectivity to consider. If None, the minimum degree
|
| 42 |
+
of source or target is used as a cutoff. Default value None.
|
| 43 |
+
|
| 44 |
+
Returns
|
| 45 |
+
-------
|
| 46 |
+
k: integer
|
| 47 |
+
pairwise node connectivity
|
| 48 |
+
|
| 49 |
+
Examples
|
| 50 |
+
--------
|
| 51 |
+
>>> # Platonic octahedral graph has node connectivity 4
|
| 52 |
+
>>> # for each non adjacent node pair
|
| 53 |
+
>>> from networkx.algorithms import approximation as approx
|
| 54 |
+
>>> G = nx.octahedral_graph()
|
| 55 |
+
>>> approx.local_node_connectivity(G, 0, 5)
|
| 56 |
+
4
|
| 57 |
+
|
| 58 |
+
Notes
|
| 59 |
+
-----
|
| 60 |
+
This algorithm [1]_ finds node independents paths between two nodes by
|
| 61 |
+
computing their shortest path using BFS, marking the nodes of the path
|
| 62 |
+
found as 'used' and then searching other shortest paths excluding the
|
| 63 |
+
nodes marked as used until no more paths exist. It is not exact because
|
| 64 |
+
a shortest path could use nodes that, if the path were longer, may belong
|
| 65 |
+
to two different node independent paths. Thus it only guarantees an
|
| 66 |
+
strict lower bound on node connectivity.
|
| 67 |
+
|
| 68 |
+
Note that the authors propose a further refinement, losing accuracy and
|
| 69 |
+
gaining speed, which is not implemented yet.
|
| 70 |
+
|
| 71 |
+
See also
|
| 72 |
+
--------
|
| 73 |
+
all_pairs_node_connectivity
|
| 74 |
+
node_connectivity
|
| 75 |
+
|
| 76 |
+
References
|
| 77 |
+
----------
|
| 78 |
+
.. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
|
| 79 |
+
Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
|
| 80 |
+
http://eclectic.ss.uci.edu/~drwhite/working.pdf
|
| 81 |
+
|
| 82 |
+
"""
|
| 83 |
+
if target == source:
|
| 84 |
+
raise nx.NetworkXError("source and target have to be different nodes.")
|
| 85 |
+
|
| 86 |
+
# Maximum possible node independent paths
|
| 87 |
+
if G.is_directed():
|
| 88 |
+
possible = min(G.out_degree(source), G.in_degree(target))
|
| 89 |
+
else:
|
| 90 |
+
possible = min(G.degree(source), G.degree(target))
|
| 91 |
+
|
| 92 |
+
K = 0
|
| 93 |
+
if not possible:
|
| 94 |
+
return K
|
| 95 |
+
|
| 96 |
+
if cutoff is None:
|
| 97 |
+
cutoff = float("inf")
|
| 98 |
+
|
| 99 |
+
exclude = set()
|
| 100 |
+
for i in range(min(possible, cutoff)):
|
| 101 |
+
try:
|
| 102 |
+
path = _bidirectional_shortest_path(G, source, target, exclude)
|
| 103 |
+
exclude.update(set(path))
|
| 104 |
+
K += 1
|
| 105 |
+
except nx.NetworkXNoPath:
|
| 106 |
+
break
|
| 107 |
+
|
| 108 |
+
return K
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
@nx._dispatch(name="approximate_node_connectivity")
|
| 112 |
+
def node_connectivity(G, s=None, t=None):
|
| 113 |
+
r"""Returns an approximation for node connectivity for a graph or digraph G.
|
| 114 |
+
|
| 115 |
+
Node connectivity is equal to the minimum number of nodes that
|
| 116 |
+
must be removed to disconnect G or render it trivial. By Menger's theorem,
|
| 117 |
+
this is equal to the number of node independent paths (paths that
|
| 118 |
+
share no nodes other than source and target).
|
| 119 |
+
|
| 120 |
+
If source and target nodes are provided, this function returns the
|
| 121 |
+
local node connectivity: the minimum number of nodes that must be
|
| 122 |
+
removed to break all paths from source to target in G.
|
| 123 |
+
|
| 124 |
+
This algorithm is based on a fast approximation that gives an strict lower
|
| 125 |
+
bound on the actual number of node independent paths between two nodes [1]_.
|
| 126 |
+
It works for both directed and undirected graphs.
|
| 127 |
+
|
| 128 |
+
Parameters
|
| 129 |
+
----------
|
| 130 |
+
G : NetworkX graph
|
| 131 |
+
Undirected graph
|
| 132 |
+
|
| 133 |
+
s : node
|
| 134 |
+
Source node. Optional. Default value: None.
|
| 135 |
+
|
| 136 |
+
t : node
|
| 137 |
+
Target node. Optional. Default value: None.
|
| 138 |
+
|
| 139 |
+
Returns
|
| 140 |
+
-------
|
| 141 |
+
K : integer
|
| 142 |
+
Node connectivity of G, or local node connectivity if source
|
| 143 |
+
and target are provided.
|
| 144 |
+
|
| 145 |
+
Examples
|
| 146 |
+
--------
|
| 147 |
+
>>> # Platonic octahedral graph is 4-node-connected
|
| 148 |
+
>>> from networkx.algorithms import approximation as approx
|
| 149 |
+
>>> G = nx.octahedral_graph()
|
| 150 |
+
>>> approx.node_connectivity(G)
|
| 151 |
+
4
|
| 152 |
+
|
| 153 |
+
Notes
|
| 154 |
+
-----
|
| 155 |
+
This algorithm [1]_ finds node independents paths between two nodes by
|
| 156 |
+
computing their shortest path using BFS, marking the nodes of the path
|
| 157 |
+
found as 'used' and then searching other shortest paths excluding the
|
| 158 |
+
nodes marked as used until no more paths exist. It is not exact because
|
| 159 |
+
a shortest path could use nodes that, if the path were longer, may belong
|
| 160 |
+
to two different node independent paths. Thus it only guarantees an
|
| 161 |
+
strict lower bound on node connectivity.
|
| 162 |
+
|
| 163 |
+
See also
|
| 164 |
+
--------
|
| 165 |
+
all_pairs_node_connectivity
|
| 166 |
+
local_node_connectivity
|
| 167 |
+
|
| 168 |
+
References
|
| 169 |
+
----------
|
| 170 |
+
.. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
|
| 171 |
+
Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
|
| 172 |
+
http://eclectic.ss.uci.edu/~drwhite/working.pdf
|
| 173 |
+
|
| 174 |
+
"""
|
| 175 |
+
if (s is not None and t is None) or (s is None and t is not None):
|
| 176 |
+
raise nx.NetworkXError("Both source and target must be specified.")
|
| 177 |
+
|
| 178 |
+
# Local node connectivity
|
| 179 |
+
if s is not None and t is not None:
|
| 180 |
+
if s not in G:
|
| 181 |
+
raise nx.NetworkXError(f"node {s} not in graph")
|
| 182 |
+
if t not in G:
|
| 183 |
+
raise nx.NetworkXError(f"node {t} not in graph")
|
| 184 |
+
return local_node_connectivity(G, s, t)
|
| 185 |
+
|
| 186 |
+
# Global node connectivity
|
| 187 |
+
if G.is_directed():
|
| 188 |
+
connected_func = nx.is_weakly_connected
|
| 189 |
+
iter_func = itertools.permutations
|
| 190 |
+
|
| 191 |
+
def neighbors(v):
|
| 192 |
+
return itertools.chain(G.predecessors(v), G.successors(v))
|
| 193 |
+
|
| 194 |
+
else:
|
| 195 |
+
connected_func = nx.is_connected
|
| 196 |
+
iter_func = itertools.combinations
|
| 197 |
+
neighbors = G.neighbors
|
| 198 |
+
|
| 199 |
+
if not connected_func(G):
|
| 200 |
+
return 0
|
| 201 |
+
|
| 202 |
+
# Choose a node with minimum degree
|
| 203 |
+
v, minimum_degree = min(G.degree(), key=itemgetter(1))
|
| 204 |
+
# Node connectivity is bounded by minimum degree
|
| 205 |
+
K = minimum_degree
|
| 206 |
+
# compute local node connectivity with all non-neighbors nodes
|
| 207 |
+
# and store the minimum
|
| 208 |
+
for w in set(G) - set(neighbors(v)) - {v}:
|
| 209 |
+
K = min(K, local_node_connectivity(G, v, w, cutoff=K))
|
| 210 |
+
# Same for non adjacent pairs of neighbors of v
|
| 211 |
+
for x, y in iter_func(neighbors(v), 2):
|
| 212 |
+
if y not in G[x] and x != y:
|
| 213 |
+
K = min(K, local_node_connectivity(G, x, y, cutoff=K))
|
| 214 |
+
return K
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
@nx._dispatch(name="approximate_all_pairs_node_connectivity")
|
| 218 |
+
def all_pairs_node_connectivity(G, nbunch=None, cutoff=None):
|
| 219 |
+
"""Compute node connectivity between all pairs of nodes.
|
| 220 |
+
|
| 221 |
+
Pairwise or local node connectivity between two distinct and nonadjacent
|
| 222 |
+
nodes is the minimum number of nodes that must be removed (minimum
|
| 223 |
+
separating cutset) to disconnect them. By Menger's theorem, this is equal
|
| 224 |
+
to the number of node independent paths (paths that share no nodes other
|
| 225 |
+
than source and target). Which is what we compute in this function.
|
| 226 |
+
|
| 227 |
+
This algorithm is a fast approximation that gives an strict lower
|
| 228 |
+
bound on the actual number of node independent paths between two nodes [1]_.
|
| 229 |
+
It works for both directed and undirected graphs.
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
Parameters
|
| 233 |
+
----------
|
| 234 |
+
G : NetworkX graph
|
| 235 |
+
|
| 236 |
+
nbunch: container
|
| 237 |
+
Container of nodes. If provided node connectivity will be computed
|
| 238 |
+
only over pairs of nodes in nbunch.
|
| 239 |
+
|
| 240 |
+
cutoff : integer
|
| 241 |
+
Maximum node connectivity to consider. If None, the minimum degree
|
| 242 |
+
of source or target is used as a cutoff in each pair of nodes.
|
| 243 |
+
Default value None.
|
| 244 |
+
|
| 245 |
+
Returns
|
| 246 |
+
-------
|
| 247 |
+
K : dictionary
|
| 248 |
+
Dictionary, keyed by source and target, of pairwise node connectivity
|
| 249 |
+
|
| 250 |
+
Examples
|
| 251 |
+
--------
|
| 252 |
+
A 3 node cycle with one extra node attached has connectivity 2 between all
|
| 253 |
+
nodes in the cycle and connectivity 1 between the extra node and the rest:
|
| 254 |
+
|
| 255 |
+
>>> G = nx.cycle_graph(3)
|
| 256 |
+
>>> G.add_edge(2, 3)
|
| 257 |
+
>>> import pprint # for nice dictionary formatting
|
| 258 |
+
>>> pprint.pprint(nx.all_pairs_node_connectivity(G))
|
| 259 |
+
{0: {1: 2, 2: 2, 3: 1},
|
| 260 |
+
1: {0: 2, 2: 2, 3: 1},
|
| 261 |
+
2: {0: 2, 1: 2, 3: 1},
|
| 262 |
+
3: {0: 1, 1: 1, 2: 1}}
|
| 263 |
+
|
| 264 |
+
See Also
|
| 265 |
+
--------
|
| 266 |
+
local_node_connectivity
|
| 267 |
+
node_connectivity
|
| 268 |
+
|
| 269 |
+
References
|
| 270 |
+
----------
|
| 271 |
+
.. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
|
| 272 |
+
Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
|
| 273 |
+
http://eclectic.ss.uci.edu/~drwhite/working.pdf
|
| 274 |
+
"""
|
| 275 |
+
if nbunch is None:
|
| 276 |
+
nbunch = G
|
| 277 |
+
else:
|
| 278 |
+
nbunch = set(nbunch)
|
| 279 |
+
|
| 280 |
+
directed = G.is_directed()
|
| 281 |
+
if directed:
|
| 282 |
+
iter_func = itertools.permutations
|
| 283 |
+
else:
|
| 284 |
+
iter_func = itertools.combinations
|
| 285 |
+
|
| 286 |
+
all_pairs = {n: {} for n in nbunch}
|
| 287 |
+
|
| 288 |
+
for u, v in iter_func(nbunch, 2):
|
| 289 |
+
k = local_node_connectivity(G, u, v, cutoff=cutoff)
|
| 290 |
+
all_pairs[u][v] = k
|
| 291 |
+
if not directed:
|
| 292 |
+
all_pairs[v][u] = k
|
| 293 |
+
|
| 294 |
+
return all_pairs
|
| 295 |
+
|
| 296 |
+
|
| 297 |
+
def _bidirectional_shortest_path(G, source, target, exclude):
|
| 298 |
+
"""Returns shortest path between source and target ignoring nodes in the
|
| 299 |
+
container 'exclude'.
|
| 300 |
+
|
| 301 |
+
Parameters
|
| 302 |
+
----------
|
| 303 |
+
|
| 304 |
+
G : NetworkX graph
|
| 305 |
+
|
| 306 |
+
source : node
|
| 307 |
+
Starting node for path
|
| 308 |
+
|
| 309 |
+
target : node
|
| 310 |
+
Ending node for path
|
| 311 |
+
|
| 312 |
+
exclude: container
|
| 313 |
+
Container for nodes to exclude from the search for shortest paths
|
| 314 |
+
|
| 315 |
+
Returns
|
| 316 |
+
-------
|
| 317 |
+
path: list
|
| 318 |
+
Shortest path between source and target ignoring nodes in 'exclude'
|
| 319 |
+
|
| 320 |
+
Raises
|
| 321 |
+
------
|
| 322 |
+
NetworkXNoPath
|
| 323 |
+
If there is no path or if nodes are adjacent and have only one path
|
| 324 |
+
between them
|
| 325 |
+
|
| 326 |
+
Notes
|
| 327 |
+
-----
|
| 328 |
+
This function and its helper are originally from
|
| 329 |
+
networkx.algorithms.shortest_paths.unweighted and are modified to
|
| 330 |
+
accept the extra parameter 'exclude', which is a container for nodes
|
| 331 |
+
already used in other paths that should be ignored.
|
| 332 |
+
|
| 333 |
+
References
|
| 334 |
+
----------
|
| 335 |
+
.. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
|
| 336 |
+
Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
|
| 337 |
+
http://eclectic.ss.uci.edu/~drwhite/working.pdf
|
| 338 |
+
|
| 339 |
+
"""
|
| 340 |
+
# call helper to do the real work
|
| 341 |
+
results = _bidirectional_pred_succ(G, source, target, exclude)
|
| 342 |
+
pred, succ, w = results
|
| 343 |
+
|
| 344 |
+
# build path from pred+w+succ
|
| 345 |
+
path = []
|
| 346 |
+
# from source to w
|
| 347 |
+
while w is not None:
|
| 348 |
+
path.append(w)
|
| 349 |
+
w = pred[w]
|
| 350 |
+
path.reverse()
|
| 351 |
+
# from w to target
|
| 352 |
+
w = succ[path[-1]]
|
| 353 |
+
while w is not None:
|
| 354 |
+
path.append(w)
|
| 355 |
+
w = succ[w]
|
| 356 |
+
|
| 357 |
+
return path
|
| 358 |
+
|
| 359 |
+
|
| 360 |
+
def _bidirectional_pred_succ(G, source, target, exclude):
|
| 361 |
+
# does BFS from both source and target and meets in the middle
|
| 362 |
+
# excludes nodes in the container "exclude" from the search
|
| 363 |
+
|
| 364 |
+
# handle either directed or undirected
|
| 365 |
+
if G.is_directed():
|
| 366 |
+
Gpred = G.predecessors
|
| 367 |
+
Gsucc = G.successors
|
| 368 |
+
else:
|
| 369 |
+
Gpred = G.neighbors
|
| 370 |
+
Gsucc = G.neighbors
|
| 371 |
+
|
| 372 |
+
# predecessor and successors in search
|
| 373 |
+
pred = {source: None}
|
| 374 |
+
succ = {target: None}
|
| 375 |
+
|
| 376 |
+
# initialize fringes, start with forward
|
| 377 |
+
forward_fringe = [source]
|
| 378 |
+
reverse_fringe = [target]
|
| 379 |
+
|
| 380 |
+
level = 0
|
| 381 |
+
|
| 382 |
+
while forward_fringe and reverse_fringe:
|
| 383 |
+
# Make sure that we iterate one step forward and one step backwards
|
| 384 |
+
# thus source and target will only trigger "found path" when they are
|
| 385 |
+
# adjacent and then they can be safely included in the container 'exclude'
|
| 386 |
+
level += 1
|
| 387 |
+
if level % 2 != 0:
|
| 388 |
+
this_level = forward_fringe
|
| 389 |
+
forward_fringe = []
|
| 390 |
+
for v in this_level:
|
| 391 |
+
for w in Gsucc(v):
|
| 392 |
+
if w in exclude:
|
| 393 |
+
continue
|
| 394 |
+
if w not in pred:
|
| 395 |
+
forward_fringe.append(w)
|
| 396 |
+
pred[w] = v
|
| 397 |
+
if w in succ:
|
| 398 |
+
return pred, succ, w # found path
|
| 399 |
+
else:
|
| 400 |
+
this_level = reverse_fringe
|
| 401 |
+
reverse_fringe = []
|
| 402 |
+
for v in this_level:
|
| 403 |
+
for w in Gpred(v):
|
| 404 |
+
if w in exclude:
|
| 405 |
+
continue
|
| 406 |
+
if w not in succ:
|
| 407 |
+
succ[w] = v
|
| 408 |
+
reverse_fringe.append(w)
|
| 409 |
+
if w in pred:
|
| 410 |
+
return pred, succ, w # found path
|
| 411 |
+
|
| 412 |
+
raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/kcomponents.py
ADDED
|
@@ -0,0 +1,369 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
""" Fast approximation for k-component structure
|
| 2 |
+
"""
|
| 3 |
+
import itertools
|
| 4 |
+
from collections import defaultdict
|
| 5 |
+
from collections.abc import Mapping
|
| 6 |
+
from functools import cached_property
|
| 7 |
+
|
| 8 |
+
import networkx as nx
|
| 9 |
+
from networkx.algorithms.approximation import local_node_connectivity
|
| 10 |
+
from networkx.exception import NetworkXError
|
| 11 |
+
from networkx.utils import not_implemented_for
|
| 12 |
+
|
| 13 |
+
__all__ = ["k_components"]
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@not_implemented_for("directed")
|
| 17 |
+
@nx._dispatch(name="approximate_k_components")
|
| 18 |
+
def k_components(G, min_density=0.95):
|
| 19 |
+
r"""Returns the approximate k-component structure of a graph G.
|
| 20 |
+
|
| 21 |
+
A `k`-component is a maximal subgraph of a graph G that has, at least,
|
| 22 |
+
node connectivity `k`: we need to remove at least `k` nodes to break it
|
| 23 |
+
into more components. `k`-components have an inherent hierarchical
|
| 24 |
+
structure because they are nested in terms of connectivity: a connected
|
| 25 |
+
graph can contain several 2-components, each of which can contain
|
| 26 |
+
one or more 3-components, and so forth.
|
| 27 |
+
|
| 28 |
+
This implementation is based on the fast heuristics to approximate
|
| 29 |
+
the `k`-component structure of a graph [1]_. Which, in turn, it is based on
|
| 30 |
+
a fast approximation algorithm for finding good lower bounds of the number
|
| 31 |
+
of node independent paths between two nodes [2]_.
|
| 32 |
+
|
| 33 |
+
Parameters
|
| 34 |
+
----------
|
| 35 |
+
G : NetworkX graph
|
| 36 |
+
Undirected graph
|
| 37 |
+
|
| 38 |
+
min_density : Float
|
| 39 |
+
Density relaxation threshold. Default value 0.95
|
| 40 |
+
|
| 41 |
+
Returns
|
| 42 |
+
-------
|
| 43 |
+
k_components : dict
|
| 44 |
+
Dictionary with connectivity level `k` as key and a list of
|
| 45 |
+
sets of nodes that form a k-component of level `k` as values.
|
| 46 |
+
|
| 47 |
+
Raises
|
| 48 |
+
------
|
| 49 |
+
NetworkXNotImplemented
|
| 50 |
+
If G is directed.
|
| 51 |
+
|
| 52 |
+
Examples
|
| 53 |
+
--------
|
| 54 |
+
>>> # Petersen graph has 10 nodes and it is triconnected, thus all
|
| 55 |
+
>>> # nodes are in a single component on all three connectivity levels
|
| 56 |
+
>>> from networkx.algorithms import approximation as apxa
|
| 57 |
+
>>> G = nx.petersen_graph()
|
| 58 |
+
>>> k_components = apxa.k_components(G)
|
| 59 |
+
|
| 60 |
+
Notes
|
| 61 |
+
-----
|
| 62 |
+
The logic of the approximation algorithm for computing the `k`-component
|
| 63 |
+
structure [1]_ is based on repeatedly applying simple and fast algorithms
|
| 64 |
+
for `k`-cores and biconnected components in order to narrow down the
|
| 65 |
+
number of pairs of nodes over which we have to compute White and Newman's
|
| 66 |
+
approximation algorithm for finding node independent paths [2]_. More
|
| 67 |
+
formally, this algorithm is based on Whitney's theorem, which states
|
| 68 |
+
an inclusion relation among node connectivity, edge connectivity, and
|
| 69 |
+
minimum degree for any graph G. This theorem implies that every
|
| 70 |
+
`k`-component is nested inside a `k`-edge-component, which in turn,
|
| 71 |
+
is contained in a `k`-core. Thus, this algorithm computes node independent
|
| 72 |
+
paths among pairs of nodes in each biconnected part of each `k`-core,
|
| 73 |
+
and repeats this procedure for each `k` from 3 to the maximal core number
|
| 74 |
+
of a node in the input graph.
|
| 75 |
+
|
| 76 |
+
Because, in practice, many nodes of the core of level `k` inside a
|
| 77 |
+
bicomponent actually are part of a component of level k, the auxiliary
|
| 78 |
+
graph needed for the algorithm is likely to be very dense. Thus, we use
|
| 79 |
+
a complement graph data structure (see `AntiGraph`) to save memory.
|
| 80 |
+
AntiGraph only stores information of the edges that are *not* present
|
| 81 |
+
in the actual auxiliary graph. When applying algorithms to this
|
| 82 |
+
complement graph data structure, it behaves as if it were the dense
|
| 83 |
+
version.
|
| 84 |
+
|
| 85 |
+
See also
|
| 86 |
+
--------
|
| 87 |
+
k_components
|
| 88 |
+
|
| 89 |
+
References
|
| 90 |
+
----------
|
| 91 |
+
.. [1] Torrents, J. and F. Ferraro (2015) Structural Cohesion:
|
| 92 |
+
Visualization and Heuristics for Fast Computation.
|
| 93 |
+
https://arxiv.org/pdf/1503.04476v1
|
| 94 |
+
|
| 95 |
+
.. [2] White, Douglas R., and Mark Newman (2001) A Fast Algorithm for
|
| 96 |
+
Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
|
| 97 |
+
https://www.santafe.edu/research/results/working-papers/fast-approximation-algorithms-for-finding-node-ind
|
| 98 |
+
|
| 99 |
+
.. [3] Moody, J. and D. White (2003). Social cohesion and embeddedness:
|
| 100 |
+
A hierarchical conception of social groups.
|
| 101 |
+
American Sociological Review 68(1), 103--28.
|
| 102 |
+
https://doi.org/10.2307/3088904
|
| 103 |
+
|
| 104 |
+
"""
|
| 105 |
+
# Dictionary with connectivity level (k) as keys and a list of
|
| 106 |
+
# sets of nodes that form a k-component as values
|
| 107 |
+
k_components = defaultdict(list)
|
| 108 |
+
# make a few functions local for speed
|
| 109 |
+
node_connectivity = local_node_connectivity
|
| 110 |
+
k_core = nx.k_core
|
| 111 |
+
core_number = nx.core_number
|
| 112 |
+
biconnected_components = nx.biconnected_components
|
| 113 |
+
combinations = itertools.combinations
|
| 114 |
+
# Exact solution for k = {1,2}
|
| 115 |
+
# There is a linear time algorithm for triconnectivity, if we had an
|
| 116 |
+
# implementation available we could start from k = 4.
|
| 117 |
+
for component in nx.connected_components(G):
|
| 118 |
+
# isolated nodes have connectivity 0
|
| 119 |
+
comp = set(component)
|
| 120 |
+
if len(comp) > 1:
|
| 121 |
+
k_components[1].append(comp)
|
| 122 |
+
for bicomponent in nx.biconnected_components(G):
|
| 123 |
+
# avoid considering dyads as bicomponents
|
| 124 |
+
bicomp = set(bicomponent)
|
| 125 |
+
if len(bicomp) > 2:
|
| 126 |
+
k_components[2].append(bicomp)
|
| 127 |
+
# There is no k-component of k > maximum core number
|
| 128 |
+
# \kappa(G) <= \lambda(G) <= \delta(G)
|
| 129 |
+
g_cnumber = core_number(G)
|
| 130 |
+
max_core = max(g_cnumber.values())
|
| 131 |
+
for k in range(3, max_core + 1):
|
| 132 |
+
C = k_core(G, k, core_number=g_cnumber)
|
| 133 |
+
for nodes in biconnected_components(C):
|
| 134 |
+
# Build a subgraph SG induced by the nodes that are part of
|
| 135 |
+
# each biconnected component of the k-core subgraph C.
|
| 136 |
+
if len(nodes) < k:
|
| 137 |
+
continue
|
| 138 |
+
SG = G.subgraph(nodes)
|
| 139 |
+
# Build auxiliary graph
|
| 140 |
+
H = _AntiGraph()
|
| 141 |
+
H.add_nodes_from(SG.nodes())
|
| 142 |
+
for u, v in combinations(SG, 2):
|
| 143 |
+
K = node_connectivity(SG, u, v, cutoff=k)
|
| 144 |
+
if k > K:
|
| 145 |
+
H.add_edge(u, v)
|
| 146 |
+
for h_nodes in biconnected_components(H):
|
| 147 |
+
if len(h_nodes) <= k:
|
| 148 |
+
continue
|
| 149 |
+
SH = H.subgraph(h_nodes)
|
| 150 |
+
for Gc in _cliques_heuristic(SG, SH, k, min_density):
|
| 151 |
+
for k_nodes in biconnected_components(Gc):
|
| 152 |
+
Gk = nx.k_core(SG.subgraph(k_nodes), k)
|
| 153 |
+
if len(Gk) <= k:
|
| 154 |
+
continue
|
| 155 |
+
k_components[k].append(set(Gk))
|
| 156 |
+
return k_components
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
def _cliques_heuristic(G, H, k, min_density):
    """Yield dense candidate subgraphs of G that may contain k-components.

    Iterates over core levels of the auxiliary (anti)graph ``H`` from the
    highest core value down, and for each level trims the induced subgraph
    until its core numbers are uniform and its density reaches
    ``min_density``; the surviving k-core of ``G`` is yielded.
    """
    h_cnumber = nx.core_number(H)
    for i, c_value in enumerate(sorted(set(h_cnumber.values()), reverse=True)):
        cands = {n for n, c in h_cnumber.items() if c == c_value}
        # Skip checking for overlap for the highest core value
        if i == 0:
            overlap = False
        else:
            # Nodes outside `cands` adjacent (in H) to *every* candidate
            overlap = set.intersection(
                *[{x for x in H[n] if x not in cands} for n in cands]
            )
        if overlap and len(overlap) < k:
            SH = H.subgraph(cands | overlap)
        else:
            SH = H.subgraph(cands)
        sh_cnumber = nx.core_number(SH)
        SG = nx.k_core(G.subgraph(SH), k)
        while not (_same(sh_cnumber) and nx.density(SH) >= min_density):
            # This subgraph must be writable => .copy()
            SH = H.subgraph(SG).copy()
            if len(SH) <= k:
                # Too small to contain a k-component; the for/else of the
                # while loop is skipped via break, so nothing is yielded.
                break
            sh_cnumber = nx.core_number(SH)
            sh_deg = dict(SH.degree())
            min_deg = min(sh_deg.values())
            # Peel off minimum-degree nodes and re-core
            SH.remove_nodes_from(n for n, d in sh_deg.items() if d == min_deg)
            SG = nx.k_core(G.subgraph(SH), k)
        else:
            # while-loop exited normally (condition met): SG is dense enough
            yield SG
| 189 |
+
|
| 190 |
+
def _same(measure, tol=0):
|
| 191 |
+
vals = set(measure.values())
|
| 192 |
+
if (max(vals) - min(vals)) <= tol:
|
| 193 |
+
return True
|
| 194 |
+
return False
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
class _AntiGraph(nx.Graph):
    """
    Class for complement graphs.

    The main goal is to be able to work with big and dense graphs with
    a low memory footprint.

    In this class you add the edges that *do not exist* in the dense graph,
    the report methods of the class return the neighbors, the edges and
    the degree as if it was the dense graph. Thus it's possible to use
    an instance of this class with some of NetworkX functions. In this
    case we only use k-core, connected_components, and biconnected_components.
    """

    # Shared singleton edge-data dict: every (implicit) edge has weight 1.
    all_edge_dict = {"weight": 1}

    def single_edge_dict(self):
        return self.all_edge_dict

    edge_attr_dict_factory = single_edge_dict  # type: ignore[assignment]

    def __getitem__(self, n):
        """Returns a dict of neighbors of node n in the dense graph.

        Parameters
        ----------
        n : node
           A node in the graph.

        Returns
        -------
        adj_dict : dictionary
           The adjacency dictionary for nodes connected to n.

        """
        all_edge_dict = self.all_edge_dict
        # Complement adjacency: every node except stored non-neighbors and n.
        return {
            node: all_edge_dict for node in set(self._adj) - set(self._adj[n]) - {n}
        }

    def neighbors(self, n):
        """Returns an iterator over all neighbors of node n in the
        dense graph.
        """
        try:
            return iter(set(self._adj) - set(self._adj[n]) - {n})
        except KeyError as err:
            raise NetworkXError(f"The node {n} is not in the graph.") from err

    class AntiAtlasView(Mapping):
        """An adjacency inner dict for AntiGraph"""

        def __init__(self, graph, node):
            self._graph = graph
            self._atlas = graph._adj[node]
            self._node = node

        def __len__(self):
            # Dense degree: all other nodes minus stored non-neighbors.
            return len(self._graph) - len(self._atlas) - 1

        def __iter__(self):
            return (n for n in self._graph if n not in self._atlas and n != self._node)

        def __getitem__(self, nbr):
            nbrs = set(self._graph._adj) - set(self._atlas) - {self._node}
            if nbr in nbrs:
                return self._graph.all_edge_dict
            raise KeyError(nbr)

    class AntiAdjacencyView(AntiAtlasView):
        """An adjacency outer dict for AntiGraph"""

        def __init__(self, graph):
            self._graph = graph
            self._atlas = graph._adj

        def __len__(self):
            return len(self._atlas)

        def __iter__(self):
            return iter(self._graph)

        def __getitem__(self, node):
            if node not in self._graph:
                raise KeyError(node)
            return self._graph.AntiAtlasView(self._graph, node)

    @cached_property
    def adj(self):
        return self.AntiAdjacencyView(self)

    def subgraph(self, nodes):
        """This subgraph method returns a full AntiGraph. Not a View"""
        nodes = set(nodes)
        G = _AntiGraph()
        G.add_nodes_from(nodes)
        for n in G:
            Gnbrs = G.adjlist_inner_dict_factory()
            G._adj[n] = Gnbrs
            # Copy only the *stored* (complement) edges between kept nodes.
            for nbr, d in self._adj[n].items():
                if nbr in G._adj:
                    Gnbrs[nbr] = d
                    G._adj[nbr][n] = d
        G.graph = self.graph
        return G

    class AntiDegreeView(nx.reportviews.DegreeView):
        def __iter__(self):
            all_nodes = set(self._succ)
            for n in self._nodes:
                nbrs = all_nodes - set(self._succ[n]) - {n}
                yield (n, len(nbrs))

        def __getitem__(self, n):
            nbrs = set(self._succ) - set(self._succ[n]) - {n}
            # AntiGraph is a ThinGraph so all edges have weight 1
            return len(nbrs) + (n in nbrs)

    @cached_property
    def degree(self):
        """Returns an iterator for (node, degree) and degree for single node.

        The node degree is the number of edges adjacent to the node.

        Parameters
        ----------
        nbunch : iterable container, optional (default=all nodes)
            A container of nodes.  The container will be iterated
            through once.

        weight : string or None, optional (default=None)
           The edge attribute that holds the numerical value used
           as a weight.  If None, then each edge has weight 1.
           The degree is the sum of the edge weights adjacent to the node.

        Returns
        -------
        deg:
            Degree of the node, if a single node is passed as argument.
        nd_iter : an iterator
            The iterator returns two-tuples of (node, degree).

        See Also
        --------
        degree

        Examples
        --------
        >>> G = nx.path_graph(4)
        >>> G.degree(0)  # node 0 with degree 1
        1
        >>> list(G.degree([0, 1]))
        [(0, 1), (1, 2)]

        """
        return self.AntiDegreeView(self)

    def adjacency(self):
        """Returns an iterator of (node, adjacency set) tuples for all nodes
        in the dense graph.

        This is the fastest way to look at every edge.
        For directed graphs, only outgoing adjacencies are included.

        Returns
        -------
        adj_iter : iterator
           An iterator of (node, adjacency set) for all nodes in
           the graph.

        """
        for n in self._adj:
            yield (n, set(self._adj) - set(self._adj[n]) - {n})
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/matching.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
**************
|
| 3 |
+
Graph Matching
|
| 4 |
+
**************
|
| 5 |
+
|
| 6 |
+
Given a graph G = (V,E), a matching M in G is a set of pairwise non-adjacent
|
| 7 |
+
edges; that is, no two edges share a common vertex.
|
| 8 |
+
|
| 9 |
+
`Wikipedia: Matching <https://en.wikipedia.org/wiki/Matching_(graph_theory)>`_
|
| 10 |
+
"""
|
| 11 |
+
import networkx as nx
|
| 12 |
+
|
| 13 |
+
__all__ = ["min_maximal_matching"]
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@nx._dispatch
def min_maximal_matching(G):
    r"""Returns the minimum maximal matching of G. That is, out of all maximal
    matchings of the graph G, the smallest is returned.

    Parameters
    ----------
    G : NetworkX graph
      Undirected graph

    Returns
    -------
    min_maximal_matching : set
      Returns a set of edges such that no two edges share a common endpoint
      and every edge not in the set shares some common endpoint in the set.
      Cardinality will be 2*OPT in the worst case.

    Notes
    -----
    The algorithm computes an approximate solution for the minimum maximal
    cardinality matching problem. The solution is no more than 2 * OPT in size.
    Runtime is $O(|E|)$.

    References
    ----------
    .. [1] Vazirani, Vijay Approximation Algorithms (2001)
    """
    # Any maximal matching is a 2-approximation of the minimum maximal
    # matching, so the greedy maximal matching suffices here.
    matching = nx.maximal_matching(G)
    return matching
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_distance_measures.cpython-311.pyc
ADDED
|
Binary file (4.94 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_ramsey.cpython-311.pyc
ADDED
|
Binary file (2.39 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_traveling_salesman.cpython-311.pyc
ADDED
|
Binary file (49.3 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_steinertree.py
ADDED
|
@@ -0,0 +1,191 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.algorithms.approximation.steinertree import metric_closure, steiner_tree
|
| 5 |
+
from networkx.utils import edges_equal
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class TestSteinerTree:
    """Tests for ``metric_closure`` and ``steiner_tree`` approximations."""

    @classmethod
    def setup_class(cls):
        # G1: weight-10 path 1-2-3-4-5-6 plus a cheap 2-7-5 shortcut.
        G1 = nx.Graph()
        G1.add_edge(1, 2, weight=10)
        G1.add_edge(2, 3, weight=10)
        G1.add_edge(3, 4, weight=10)
        G1.add_edge(4, 5, weight=10)
        G1.add_edge(5, 6, weight=10)
        G1.add_edge(2, 7, weight=1)
        G1.add_edge(7, 5, weight=1)

        # G2: small weighted graph with a hub at node 5.
        G2 = nx.Graph()
        G2.add_edge(0, 5, weight=6)
        G2.add_edge(1, 2, weight=2)
        G2.add_edge(1, 5, weight=3)
        G2.add_edge(2, 4, weight=4)
        G2.add_edge(3, 5, weight=5)
        G2.add_edge(4, 5, weight=1)

        # G3: larger 15-node weighted graph.
        G3 = nx.Graph()
        G3.add_edge(1, 2, weight=8)
        G3.add_edge(1, 9, weight=3)
        G3.add_edge(1, 8, weight=6)
        G3.add_edge(1, 10, weight=2)
        G3.add_edge(1, 14, weight=3)
        G3.add_edge(2, 3, weight=6)
        G3.add_edge(3, 4, weight=3)
        G3.add_edge(3, 10, weight=2)
        G3.add_edge(3, 11, weight=1)
        G3.add_edge(4, 5, weight=1)
        G3.add_edge(4, 11, weight=1)
        G3.add_edge(5, 6, weight=4)
        G3.add_edge(5, 11, weight=2)
        G3.add_edge(5, 12, weight=1)
        G3.add_edge(5, 13, weight=3)
        G3.add_edge(6, 7, weight=2)
        G3.add_edge(6, 12, weight=3)
        G3.add_edge(6, 13, weight=1)
        G3.add_edge(7, 8, weight=3)
        G3.add_edge(7, 9, weight=3)
        G3.add_edge(7, 11, weight=5)
        G3.add_edge(7, 13, weight=2)
        G3.add_edge(7, 14, weight=4)
        G3.add_edge(8, 9, weight=2)
        G3.add_edge(9, 14, weight=1)
        G3.add_edge(10, 11, weight=2)
        G3.add_edge(10, 14, weight=1)
        G3.add_edge(11, 12, weight=1)
        G3.add_edge(11, 14, weight=7)
        G3.add_edge(12, 14, weight=3)
        G3.add_edge(12, 15, weight=1)
        G3.add_edge(13, 14, weight=4)
        G3.add_edge(13, 15, weight=1)
        G3.add_edge(14, 15, weight=2)

        cls.G1 = G1
        cls.G2 = G2
        cls.G3 = G3
        # Terminal node sets for the Steiner-tree computations below.
        cls.G1_term_nodes = [1, 2, 3, 4, 5]
        cls.G2_term_nodes = [0, 2, 3]
        cls.G3_term_nodes = [1, 3, 5, 6, 8, 10, 11, 12, 13]

        # Both supported approximation algorithms are exercised.
        cls.methods = ["kou", "mehlhorn"]

    def test_connected_metric_closure(self):
        # metric_closure must reject a disconnected graph.
        G = self.G1.copy()
        G.add_node(100)
        pytest.raises(nx.NetworkXError, metric_closure, G)

    def test_metric_closure(self):
        M = metric_closure(self.G1)
        # Expected all-pairs shortest-path distances and paths for G1.
        mc = [
            (1, 2, {"distance": 10, "path": [1, 2]}),
            (1, 3, {"distance": 20, "path": [1, 2, 3]}),
            (1, 4, {"distance": 22, "path": [1, 2, 7, 5, 4]}),
            (1, 5, {"distance": 12, "path": [1, 2, 7, 5]}),
            (1, 6, {"distance": 22, "path": [1, 2, 7, 5, 6]}),
            (1, 7, {"distance": 11, "path": [1, 2, 7]}),
            (2, 3, {"distance": 10, "path": [2, 3]}),
            (2, 4, {"distance": 12, "path": [2, 7, 5, 4]}),
            (2, 5, {"distance": 2, "path": [2, 7, 5]}),
            (2, 6, {"distance": 12, "path": [2, 7, 5, 6]}),
            (2, 7, {"distance": 1, "path": [2, 7]}),
            (3, 4, {"distance": 10, "path": [3, 4]}),
            (3, 5, {"distance": 12, "path": [3, 2, 7, 5]}),
            (3, 6, {"distance": 22, "path": [3, 2, 7, 5, 6]}),
            (3, 7, {"distance": 11, "path": [3, 2, 7]}),
            (4, 5, {"distance": 10, "path": [4, 5]}),
            (4, 6, {"distance": 20, "path": [4, 5, 6]}),
            (4, 7, {"distance": 11, "path": [4, 5, 7]}),
            (5, 6, {"distance": 10, "path": [5, 6]}),
            (5, 7, {"distance": 1, "path": [5, 7]}),
            (6, 7, {"distance": 11, "path": [6, 5, 7]}),
        ]
        assert edges_equal(list(M.edges(data=True)), mc)

    def test_steiner_tree(self):
        # For each graph, several distinct edge sets are valid
        # (equal-cost) Steiner trees; the result must match one of them.
        valid_steiner_trees = [
            [
                [
                    (1, 2, {"weight": 10}),
                    (2, 3, {"weight": 10}),
                    (2, 7, {"weight": 1}),
                    (3, 4, {"weight": 10}),
                    (5, 7, {"weight": 1}),
                ],
                [
                    (1, 2, {"weight": 10}),
                    (2, 7, {"weight": 1}),
                    (3, 4, {"weight": 10}),
                    (4, 5, {"weight": 10}),
                    (5, 7, {"weight": 1}),
                ],
                [
                    (1, 2, {"weight": 10}),
                    (2, 3, {"weight": 10}),
                    (2, 7, {"weight": 1}),
                    (4, 5, {"weight": 10}),
                    (5, 7, {"weight": 1}),
                ],
            ],
            [
                [
                    (0, 5, {"weight": 6}),
                    (1, 2, {"weight": 2}),
                    (1, 5, {"weight": 3}),
                    (3, 5, {"weight": 5}),
                ],
                [
                    (0, 5, {"weight": 6}),
                    (4, 2, {"weight": 4}),
                    (4, 5, {"weight": 1}),
                    (3, 5, {"weight": 5}),
                ],
            ],
            [
                [
                    (1, 10, {"weight": 2}),
                    (3, 10, {"weight": 2}),
                    (3, 11, {"weight": 1}),
                    (5, 12, {"weight": 1}),
                    (6, 13, {"weight": 1}),
                    (8, 9, {"weight": 2}),
                    (9, 14, {"weight": 1}),
                    (10, 14, {"weight": 1}),
                    (11, 12, {"weight": 1}),
                    (12, 15, {"weight": 1}),
                    (13, 15, {"weight": 1}),
                ]
            ],
        ]
        for method in self.methods:
            for G, term_nodes, valid_trees in zip(
                [self.G1, self.G2, self.G3],
                [self.G1_term_nodes, self.G2_term_nodes, self.G3_term_nodes],
                valid_steiner_trees,
            ):
                S = steiner_tree(G, term_nodes, method=method)
                assert any(
                    edges_equal(list(S.edges(data=True)), valid_tree)
                    for valid_tree in valid_trees
                )

    def test_multigraph_steiner_tree(self):
        # Parallel edges: the cheaper key-1 edge between 2 and 3 must win.
        G = nx.MultiGraph()
        G.add_edges_from(
            [
                (1, 2, 0, {"weight": 1}),
                (2, 3, 0, {"weight": 999}),
                (2, 3, 1, {"weight": 1}),
                (3, 4, 0, {"weight": 1}),
                (3, 5, 0, {"weight": 1}),
            ]
        )
        terminal_nodes = [2, 4, 5]
        expected_edges = [
            (2, 3, 1, {"weight": 1}),  # edge with key 1 has lower weight
            (3, 4, 0, {"weight": 1}),
            (3, 5, 0, {"weight": 1}),
        ]
        for method in self.methods:
            S = steiner_tree(G, terminal_nodes, method=method)
            assert edges_equal(S.edges(data=True, keys=True), expected_edges)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_traveling_salesman.py
ADDED
|
@@ -0,0 +1,963 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Unit tests for the traveling_salesman module."""
|
| 2 |
+
import random
|
| 3 |
+
|
| 4 |
+
import pytest
|
| 5 |
+
|
| 6 |
+
import networkx as nx
|
| 7 |
+
import networkx.algorithms.approximation as nx_app
|
| 8 |
+
|
| 9 |
+
pairwise = nx.utils.pairwise
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def test_christofides_hamiltonian():
    """Christofides on a random complete graph yields a Hamiltonian cycle."""
    random.seed(42)
    G = nx.complete_graph(20)
    for u, v in G.edges():
        G[u][v]["weight"] = random.randint(0, 10)

    # The returned node sequence, taken pairwise, must form exactly one
    # cycle covering the graph: removing that cycle leaves no edges.
    H = nx.Graph()
    H.add_edges_from(pairwise(nx_app.christofides(G)))
    H.remove_edges_from(nx.find_cycle(H))
    assert len(H.edges) == 0

    # Same check when a precomputed spanning tree is supplied.
    tree = nx.minimum_spanning_tree(G, weight="weight")
    H = nx.Graph()
    H.add_edges_from(pairwise(nx_app.christofides(G, tree)))
    H.remove_edges_from(nx.find_cycle(H))
    assert len(H.edges) == 0
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def test_christofides_incomplete_graph():
    """christofides must reject a graph that is not complete."""
    G = nx.complete_graph(10)
    G.remove_edge(0, 1)
    with pytest.raises(nx.NetworkXError):
        nx_app.christofides(G)
+
|
| 36 |
+
def test_christofides_ignore_selfloops():
    """A self-loop in the input must not appear in (or break) the tour."""
    G = nx.complete_graph(5)
    G.add_edge(3, 3)
    cycle = nx_app.christofides(G)
    # Every node visited exactly once, plus the return to the start node.
    assert len(set(cycle)) == len(G)
    assert len(cycle) == len(G) + 1
| 43 |
+
# set up graphs for other tests
|
| 44 |
+
class TestBase:
    # Shared fixtures for the TSP solver test classes below.  Each weighted
    # graph is paired with its brute-forced optimal cycle and cost so every
    # solver can be checked against the same known answers.
    @classmethod
    def setup_class(cls):
        # Symmetric-in-structure directed graph (edges in both directions,
        # weights not always equal).  Optimal cycle costs 31.
        cls.DG = nx.DiGraph()
        cls.DG.add_weighted_edges_from(
            {
                ("A", "B", 3),
                ("A", "C", 17),
                ("A", "D", 14),
                ("B", "A", 3),
                ("B", "C", 12),
                ("B", "D", 16),
                ("C", "A", 13),
                ("C", "B", 12),
                ("C", "D", 4),
                ("D", "A", 14),
                ("D", "B", 15),
                ("D", "C", 2),
            }
        )
        cls.DG_cycle = ["D", "C", "B", "A", "D"]
        cls.DG_cost = 31.0

        # Strongly asymmetric directed graph; optimal cycle costs 53.
        cls.DG2 = nx.DiGraph()
        cls.DG2.add_weighted_edges_from(
            {
                ("A", "B", 3),
                ("A", "C", 17),
                ("A", "D", 14),
                ("B", "A", 30),
                ("B", "C", 2),
                ("B", "D", 16),
                ("C", "A", 33),
                ("C", "B", 32),
                ("C", "D", 34),
                ("D", "A", 14),
                ("D", "B", 15),
                ("D", "C", 2),
            }
        )
        cls.DG2_cycle = ["D", "A", "B", "C", "D"]
        cls.DG2_cost = 53.0

        # Complete graphs with no weight attribute: solvers must fall back
        # to a default weight without raising.
        cls.unweightedUG = nx.complete_graph(5, nx.Graph())
        cls.unweightedDG = nx.complete_graph(5, nx.DiGraph())

        # Incomplete graphs used to check that solvers reject them.
        cls.incompleteUG = nx.Graph()
        cls.incompleteUG.add_weighted_edges_from({(0, 1, 1), (1, 2, 3)})
        cls.incompleteDG = nx.DiGraph()
        cls.incompleteDG.add_weighted_edges_from({(0, 1, 1), (1, 2, 3)})

        # Undirected counterpart of DG; optimal cycle costs 33.
        cls.UG = nx.Graph()
        cls.UG.add_weighted_edges_from(
            {
                ("A", "B", 3),
                ("A", "C", 17),
                ("A", "D", 14),
                ("B", "C", 12),
                ("B", "D", 16),
                ("C", "D", 4),
            }
        )
        cls.UG_cycle = ["D", "C", "B", "A", "D"]
        cls.UG_cost = 33.0

        # Second undirected graph; optimal cycle costs 25.
        cls.UG2 = nx.Graph()
        cls.UG2.add_weighted_edges_from(
            {
                ("A", "B", 1),
                ("A", "C", 15),
                ("A", "D", 5),
                ("B", "C", 16),
                ("B", "D", 8),
                ("C", "D", 3),
            }
        )
        cls.UG2_cycle = ["D", "C", "B", "A", "D"]
        cls.UG2_cost = 25.0
| 123 |
+
|
| 124 |
+
def validate_solution(soln, cost, exp_soln, exp_cost):
    """Assert that a computed tour and its cost equal the expected values."""
    assert (soln, cost) == (exp_soln, exp_cost)
| 128 |
+
|
| 129 |
+
def validate_symmetric_solution(soln, cost, exp_soln, exp_cost):
    """Assert a tour matches the expected one in either direction, with equal cost."""
    assert soln in (exp_soln, exp_soln[::-1])
    assert cost == exp_cost
| 133 |
+
|
| 134 |
+
class TestGreedyTSP(TestBase):
    # Tests for the greedy nearest-neighbor TSP heuristic.  Expected cycles
    # and costs are what the greedy heuristic produces, which is optimal for
    # DG and UG but suboptimal for DG2 (78 vs 53) and UG2 (27 vs 25).
    def test_greedy(self):
        cycle = nx_app.greedy_tsp(self.DG, source="D")
        cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        validate_solution(cycle, cost, ["D", "C", "B", "A", "D"], 31.0)

        cycle = nx_app.greedy_tsp(self.DG2, source="D")
        cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        validate_solution(cycle, cost, ["D", "C", "B", "A", "D"], 78.0)

        cycle = nx_app.greedy_tsp(self.UG, source="D")
        cost = sum(self.UG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        validate_solution(cycle, cost, ["D", "C", "B", "A", "D"], 33.0)

        cycle = nx_app.greedy_tsp(self.UG2, source="D")
        cost = sum(self.UG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        validate_solution(cycle, cost, ["D", "C", "A", "B", "D"], 27.0)

    def test_not_complete_graph(self):
        # Incomplete graphs must be rejected.
        pytest.raises(nx.NetworkXError, nx_app.greedy_tsp, self.incompleteUG)
        pytest.raises(nx.NetworkXError, nx_app.greedy_tsp, self.incompleteDG)

    def test_not_weighted_graph(self):
        # Missing weights fall back to a default; must not raise.
        nx_app.greedy_tsp(self.unweightedUG)
        nx_app.greedy_tsp(self.unweightedDG)

    def test_two_nodes(self):
        # Degenerate two-node instance: tour is just there-and-back.
        G = nx.Graph()
        G.add_weighted_edges_from({(1, 2, 1)})
        cycle = nx_app.greedy_tsp(G)
        cost = sum(G[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        validate_solution(cycle, cost, [1, 2, 1], 2)

    def test_ignore_selfloops(self):
        # A self-loop must not appear in the computed tour.
        G = nx.complete_graph(5)
        G.add_edge(3, 3)
        cycle = nx_app.greedy_tsp(G)
        assert len(cycle) - 1 == len(G) == len(set(cycle))
+
|
| 174 |
+
class TestSimulatedAnnealingTSP(TestBase):
    """Tests for ``simulated_annealing_tsp``.

    ``TestThresholdAcceptingTSP`` subclasses this and swaps in the
    threshold-accepting solver via the ``tsp`` attribute, so every test here
    must call ``self.tsp`` rather than the function directly.
    """

    tsp = staticmethod(nx_app.simulated_annealing_tsp)

    def test_simulated_annealing_directed(self):
        cycle = self.tsp(self.DG, "greedy", source="D", seed=42)
        cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        validate_solution(cycle, cost, self.DG_cycle, self.DG_cost)

        # Explicit initial tour instead of the "greedy" starting solution.
        initial_sol = ["D", "B", "A", "C", "D"]
        cycle = self.tsp(self.DG, initial_sol, source="D", seed=42)
        cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        validate_solution(cycle, cost, self.DG_cycle, self.DG_cost)

        # "1-0" (node relocation) move instead of the default swap move.
        initial_sol = ["D", "A", "C", "B", "D"]
        cycle = self.tsp(self.DG, initial_sol, move="1-0", source="D", seed=42)
        cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        validate_solution(cycle, cost, self.DG_cycle, self.DG_cost)

        cycle = self.tsp(self.DG2, "greedy", source="D", seed=42)
        cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        validate_solution(cycle, cost, self.DG2_cycle, self.DG2_cost)

        cycle = self.tsp(self.DG2, "greedy", move="1-0", source="D", seed=42)
        cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        validate_solution(cycle, cost, self.DG2_cycle, self.DG2_cost)

    def test_simulated_annealing_undirected(self):
        cycle = self.tsp(self.UG, "greedy", source="D", seed=42)
        cost = sum(self.UG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        validate_solution(cycle, cost, self.UG_cycle, self.UG_cost)

        # Undirected tours may come back reversed, hence the symmetric check.
        cycle = self.tsp(self.UG2, "greedy", source="D", seed=42)
        cost = sum(self.UG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        validate_symmetric_solution(cycle, cost, self.UG2_cycle, self.UG2_cost)

        cycle = self.tsp(self.UG2, "greedy", move="1-0", source="D", seed=42)
        cost = sum(self.UG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        validate_symmetric_solution(cycle, cost, self.UG2_cycle, self.UG2_cost)

    def test_error_on_input_order_mistake(self):
        # see issue #4846 https://github.com/networkx/networkx/issues/4846
        pytest.raises(TypeError, self.tsp, self.UG, weight="weight")
        pytest.raises(nx.NetworkXError, self.tsp, self.UG, "weight")

    def test_not_complete_graph(self):
        pytest.raises(nx.NetworkXError, self.tsp, self.incompleteUG, "greedy", source=0)
        pytest.raises(nx.NetworkXError, self.tsp, self.incompleteDG, "greedy", source=0)

    def test_ignore_selfloops(self):
        G = nx.complete_graph(5)
        G.add_edge(3, 3)
        cycle = self.tsp(G, "greedy")
        assert len(cycle) - 1 == len(G) == len(set(cycle))

    def test_not_weighted_graph(self):
        # Missing weights fall back to a default; must not raise.
        self.tsp(self.unweightedUG, "greedy")
        self.tsp(self.unweightedDG, "greedy")

    def test_two_nodes(self):
        G = nx.Graph()
        G.add_weighted_edges_from({(1, 2, 1)})

        cycle = self.tsp(G, "greedy", source=1, seed=42)
        cost = sum(G[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        validate_solution(cycle, cost, [1, 2, 1], 2)

        cycle = self.tsp(G, [1, 2, 1], source=1, seed=42)
        cost = sum(G[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        validate_solution(cycle, cost, [1, 2, 1], 2)

    def test_failure_of_costs_too_high_when_iterations_low(self):
        # Simulated Annealing Version:
        # set number of moves low and alpha high so the search cannot
        # converge — the result should be worse than the optimum.
        cycle = self.tsp(
            self.DG2, "greedy", source="D", move="1-0", alpha=1, N_inner=1, seed=42
        )
        cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        assert cost > self.DG2_cost

        # Try with an incorrect initial guess and too few iterations.
        initial_sol = ["D", "A", "B", "C", "D"]
        cycle = self.tsp(
            self.DG,
            initial_sol,
            source="D",
            move="1-0",
            alpha=0.1,
            N_inner=1,
            max_iterations=1,
            seed=42,
        )
        cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        assert cost > self.DG_cost
|
| 271 |
+
class TestThresholdAcceptingTSP(TestSimulatedAnnealingTSP):
    # Reuses every test in TestSimulatedAnnealingTSP, swapping in the
    # threshold-accepting solver; only the "too few iterations" test differs
    # because the tuning knobs are not the same.
    tsp = staticmethod(nx_app.threshold_accepting_tsp)

    def test_failure_of_costs_too_high_when_iterations_low(self):
        # Threshold Version:
        # set number of moves low and number of iterations low so the search
        # cannot converge — the result should be worse than the optimum.
        cycle = self.tsp(
            self.DG2,
            "greedy",
            source="D",
            move="1-0",
            N_inner=1,
            max_iterations=1,
            seed=4,
        )
        cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        assert cost > self.DG2_cost

        # set threshold too low
        initial_sol = ["D", "A", "B", "C", "D"]
        cycle = self.tsp(
            self.DG, initial_sol, source="D", move="1-0", threshold=-3, seed=42
        )
        cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
        assert cost > self.DG_cost
| 297 |
+
|
| 298 |
+
# Tests for function traveling_salesman_problem
|
| 299 |
+
def test_TSP_method():
    """Check that a user-supplied solver is honored by traveling_salesman_problem."""
    G = nx.cycle_graph(9)
    G[4][5]["weight"] = 10

    def my_tsp_method(G, weight):
        # Fixed source and seed keep the expected path below deterministic.
        return nx_app.simulated_annealing_tsp(G, "greedy", weight, source=4, seed=1)

    path = nx_app.traveling_salesman_problem(G, method=my_tsp_method, cycle=False)
    # Removed leftover debug print of the path.
    assert path == [4, 3, 2, 1, 0, 8, 7, 6, 5]
| 310 |
+
|
| 311 |
+
def test_TSP_unweighted():
    """TSP restricted to two required nodes on an unweighted cycle graph."""
    G = nx.cycle_graph(9)

    # As a path: the shortest route between 3 and 6, in either direction.
    path = nx_app.traveling_salesman_problem(G, nodes=[3, 6], cycle=False)
    assert path in ([3, 4, 5, 6], [6, 5, 4, 3])

    # As a cycle: out along the shortest route and back again.
    cycle = nx_app.traveling_salesman_problem(G, nodes=[3, 6])
    assert cycle in ([3, 4, 5, 6, 5, 4, 3], [6, 5, 4, 3, 4, 5, 6])
| 319 |
+
|
| 320 |
+
def test_TSP_weighted():
    """Run traveling_salesman_problem on a weighted cycle graph with the
    default method and with each supported solver explicitly."""
    G = nx.cycle_graph(9)
    # Make the 3-4-5-6 side of the cycle expensive so the optimal route
    # between 3 and 6 goes the long way around through 0.
    G[0][1]["weight"] = 2
    G[1][2]["weight"] = 2
    G[2][3]["weight"] = 2
    G[3][4]["weight"] = 4
    G[4][5]["weight"] = 5
    G[5][6]["weight"] = 4
    G[6][7]["weight"] = 2
    G[7][8]["weight"] = 2
    G[8][0]["weight"] = 2
    tsp = nx_app.traveling_salesman_problem

    # path between 3 and 6
    expected_paths = ([3, 2, 1, 0, 8, 7, 6], [6, 7, 8, 0, 1, 2, 3])
    # cycle between 3 and 6
    expected_cycles = (
        [3, 2, 1, 0, 8, 7, 6, 7, 8, 0, 1, 2, 3],
        [6, 7, 8, 0, 1, 2, 3, 2, 1, 0, 8, 7, 6],
    )
    # path through all nodes
    expected_tourpaths = ([5, 6, 7, 8, 0, 1, 2, 3, 4], [4, 3, 2, 1, 0, 8, 7, 6, 5])

    # Check default method
    cycle = tsp(G, nodes=[3, 6], weight="weight")
    assert cycle in expected_cycles

    path = tsp(G, nodes=[3, 6], weight="weight", cycle=False)
    assert path in expected_paths

    tourpath = tsp(G, weight="weight", cycle=False)
    assert tourpath in expected_tourpaths

    # Check all methods
    methods = [
        nx_app.christofides,
        nx_app.greedy_tsp,
        lambda G, wt: nx_app.simulated_annealing_tsp(G, "greedy", weight=wt),
        lambda G, wt: nx_app.threshold_accepting_tsp(G, "greedy", weight=wt),
    ]
    for method in methods:
        cycle = tsp(G, nodes=[3, 6], weight="weight", method=method)
        assert cycle in expected_cycles

        path = tsp(G, nodes=[3, 6], weight="weight", method=method, cycle=False)
        assert path in expected_paths

        tourpath = tsp(G, weight="weight", method=method, cycle=False)
        assert tourpath in expected_tourpaths
| 370 |
+
|
| 371 |
+
def test_TSP_incomplete_graph_short_path():
    """traveling_salesman_problem on an incomplete graph: the tour may revisit
    nodes because missing edges are replaced by shortest paths."""
    G = nx.cycle_graph(9)
    G.add_edges_from([(4, 9), (9, 10), (10, 11), (11, 0)])
    G[4][5]["weight"] = 5

    cycle = nx_app.traveling_salesman_problem(G)
    # Removed leftover debug print of the cycle.
    # 12 distinct nodes, 17 tour positions: some nodes are revisited.
    assert len(cycle) == 17 and len(set(cycle)) == 12

    # make sure that cutting one edge out of complete graph formulation
    # cuts out many edges out of the path of the TSP
    path = nx_app.traveling_salesman_problem(G, cycle=False)
    assert len(path) == 13 and len(set(path)) == 12
| 386 |
+
|
| 387 |
+
def test_held_karp_ascent():
    """
    Test the Held-Karp relaxation with the ascent method
    """
    import networkx.algorithms.approximation.traveling_salesman as tsp

    np = pytest.importorskip("numpy")
    pytest.importorskip("scipy")

    # Adjacency matrix from page 1153 of the 1970 Held and Karp paper
    # which have been edited to be directional, but also symmetric
    G_array = np.array(
        [
            [0, 97, 60, 73, 17, 52],
            [97, 0, 41, 52, 90, 30],
            [60, 41, 0, 21, 35, 41],
            [73, 52, 21, 0, 95, 46],
            [17, 90, 35, 95, 0, 81],
            [52, 30, 41, 46, 81, 0],
        ]
    )

    # Brute-forced optimal tour for the matrix above.
    solution_edges = [(1, 3), (2, 4), (3, 2), (4, 0), (5, 1), (0, 5)]

    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
    opt_hk, z_star = tsp.held_karp_ascent(G)

    # Check that the optimal weights are the same
    assert round(opt_hk, 2) == 207.00
    # Check that the z_stars are the same
    solution = nx.DiGraph()
    solution.add_edges_from(solution_edges)
    assert nx.utils.edges_equal(z_star.edges, solution.edges)
| 421 |
+
|
| 422 |
+
def test_ascent_fractional_solution():
    """
    Test the ascent method using a modified version of Figure 2 on page 1140
    in 'The Traveling Salesman Problem and Minimum Spanning Trees' by Held and
    Karp
    """
    import networkx.algorithms.approximation.traveling_salesman as tsp

    np = pytest.importorskip("numpy")
    pytest.importorskip("scipy")

    # This version of Figure 2 has all of the edge weights multiplied by 100
    # and is a complete directed graph with infinite edge weights for the
    # edges not listed in the original graph
    G_array = np.array(
        [
            [0, 100, 100, 100000, 100000, 1],
            [100, 0, 100, 100000, 1, 100000],
            [100, 100, 0, 1, 100000, 100000],
            [100000, 100000, 1, 0, 100, 100],
            [100000, 1, 100000, 100, 0, 100],
            [1, 100000, 100000, 100, 100, 0],
        ]
    )

    # Expected fractional Held-Karp solution (edge -> fractional weight).
    solution_z_star = {
        (0, 1): 5 / 12,
        (0, 2): 5 / 12,
        (0, 5): 5 / 6,
        (1, 0): 5 / 12,
        (1, 2): 1 / 3,
        (1, 4): 5 / 6,
        (2, 0): 5 / 12,
        (2, 1): 1 / 3,
        (2, 3): 5 / 6,
        (3, 2): 5 / 6,
        (3, 4): 1 / 3,
        (3, 5): 1 / 2,
        (4, 1): 5 / 6,
        (4, 3): 1 / 3,
        (4, 5): 1 / 2,
        (5, 0): 5 / 6,
        (5, 3): 1 / 2,
        (5, 4): 1 / 2,
    }

    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
    opt_hk, z_star = tsp.held_karp_ascent(G)

    # Check that the optimal weights are the same
    assert round(opt_hk, 2) == 303.00
    # Check that the z_stars are the same
    # (rounded to 4 places to sidestep floating-point noise)
    assert {key: round(z_star[key], 4) for key in z_star} == {
        key: round(solution_z_star[key], 4) for key in solution_z_star
    }
| 478 |
+
|
| 479 |
+
def test_ascent_method_asymmetric():
    """
    Tests the ascent method using a truly asymmetric graph for which the
    solution has been brute forced
    """
    import networkx.algorithms.approximation.traveling_salesman as tsp

    np = pytest.importorskip("numpy")
    pytest.importorskip("scipy")

    G_array = np.array(
        [
            [0, 26, 63, 59, 69, 31, 41],
            [62, 0, 91, 53, 75, 87, 47],
            [47, 82, 0, 90, 15, 9, 18],
            [68, 19, 5, 0, 58, 34, 93],
            [11, 58, 53, 55, 0, 61, 79],
            [88, 75, 13, 76, 98, 0, 40],
            [41, 61, 55, 88, 46, 45, 0],
        ]
    )

    # Brute-forced optimal tour for the matrix above.
    solution_edges = [(0, 1), (1, 3), (3, 2), (2, 5), (5, 6), (4, 0), (6, 4)]

    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
    opt_hk, z_star = tsp.held_karp_ascent(G)

    # Check that the optimal weights are the same
    assert round(opt_hk, 2) == 190.00
    # Check that the z_stars match.
    solution = nx.DiGraph()
    solution.add_edges_from(solution_edges)
    assert nx.utils.edges_equal(z_star.edges, solution.edges)
| 513 |
+
|
| 514 |
+
def test_ascent_method_asymmetric_2():
    """
    Tests the ascent method using a truly asymmetric graph for which the
    solution has been brute forced
    """
    import networkx.algorithms.approximation.traveling_salesman as tsp

    np = pytest.importorskip("numpy")
    pytest.importorskip("scipy")

    G_array = np.array(
        [
            [0, 45, 39, 92, 29, 31],
            [72, 0, 4, 12, 21, 60],
            [81, 6, 0, 98, 70, 53],
            [49, 71, 59, 0, 98, 94],
            [74, 95, 24, 43, 0, 47],
            [56, 43, 3, 65, 22, 0],
        ]
    )

    # Brute-forced optimal tour for the matrix above.
    solution_edges = [(0, 5), (5, 4), (1, 3), (3, 0), (2, 1), (4, 2)]

    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
    opt_hk, z_star = tsp.held_karp_ascent(G)

    # Check that the optimal weights are the same
    assert round(opt_hk, 2) == 144.00
    # Check that the z_stars match.
    solution = nx.DiGraph()
    solution.add_edges_from(solution_edges)
    assert nx.utils.edges_equal(z_star.edges, solution.edges)
| 547 |
+
|
| 548 |
+
def test_held_karp_ascent_asymmetric_3():
    """
    Tests the ascent method using a truly asymmetric graph with a fractional
    solution for which the solution has been brute forced.

    In this graph there are two different optimal, integral solutions (which
    are also the overall atsp solutions) to the Held Karp relaxation. However,
    this particular graph has two different tours of optimal value and the
    possible solutions in the held_karp_ascent function are not stored in an
    ordered data structure, so either may be returned.
    """
    import networkx.algorithms.approximation.traveling_salesman as tsp

    np = pytest.importorskip("numpy")
    pytest.importorskip("scipy")

    G_array = np.array(
        [
            [0, 1, 5, 2, 7, 4],
            [7, 0, 7, 7, 1, 4],
            [4, 7, 0, 9, 2, 1],
            [7, 2, 7, 0, 4, 4],
            [5, 5, 4, 4, 0, 3],
            [3, 9, 1, 3, 4, 0],
        ]
    )

    # Two equally optimal tours; either may be produced.
    solution1_edges = [(0, 3), (1, 4), (2, 5), (3, 1), (4, 2), (5, 0)]

    solution2_edges = [(0, 3), (3, 1), (1, 4), (4, 5), (2, 0), (5, 2)]

    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
    opt_hk, z_star = tsp.held_karp_ascent(G)

    assert round(opt_hk, 2) == 13.00
    # Check that the z_stars are the same
    solution1 = nx.DiGraph()
    solution1.add_edges_from(solution1_edges)
    solution2 = nx.DiGraph()
    solution2.add_edges_from(solution2_edges)
    assert nx.utils.edges_equal(z_star.edges, solution1.edges) or nx.utils.edges_equal(
        z_star.edges, solution2.edges
    )
| 592 |
+
|
| 593 |
+
def test_held_karp_ascent_fractional_asymmetric():
    """
    Tests the ascent method using a truly asymmetric graph with a fractional
    solution for which the solution has been brute forced
    """
    import networkx.algorithms.approximation.traveling_salesman as tsp

    np = pytest.importorskip("numpy")
    pytest.importorskip("scipy")

    G_array = np.array(
        [
            [0, 100, 150, 100000, 100000, 1],
            [150, 0, 100, 100000, 1, 100000],
            [100, 150, 0, 1, 100000, 100000],
            [100000, 100000, 1, 0, 150, 100],
            [100000, 2, 100000, 100, 0, 150],
            [2, 100000, 100000, 150, 100, 0],
        ]
    )

    # Expected fractional Held-Karp solution (edge -> fractional weight).
    solution_z_star = {
        (0, 1): 5 / 12,
        (0, 2): 5 / 12,
        (0, 5): 5 / 6,
        (1, 0): 5 / 12,
        (1, 2): 5 / 12,
        (1, 4): 5 / 6,
        (2, 0): 5 / 12,
        (2, 1): 5 / 12,
        (2, 3): 5 / 6,
        (3, 2): 5 / 6,
        (3, 4): 5 / 12,
        (3, 5): 5 / 12,
        (4, 1): 5 / 6,
        (4, 3): 5 / 12,
        (4, 5): 5 / 12,
        (5, 0): 5 / 6,
        (5, 3): 5 / 12,
        (5, 4): 5 / 12,
    }

    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
    opt_hk, z_star = tsp.held_karp_ascent(G)

    # Check that the optimal weights are the same
    assert round(opt_hk, 2) == 304.00
    # Check that the z_stars are the same
    # (rounded to 4 places to sidestep floating-point noise)
    assert {key: round(z_star[key], 4) for key in z_star} == {
        key: round(solution_z_star[key], 4) for key in solution_z_star
    }
| 645 |
+
|
| 646 |
+
def test_spanning_tree_distribution():
    """
    Test that we can create an exponential distribution of spanning trees such
    that the probability of each tree is proportional to the product of edge
    weights.

    Results of this test have been confirmed with hypothesis testing from the
    created distribution.

    This test uses the symmetric, fractional Held Karp solution.
    """
    import networkx.algorithms.approximation.traveling_salesman as tsp

    pytest.importorskip("numpy")
    pytest.importorskip("scipy")

    # Fractional Held-Karp solution from test_ascent_fractional_solution.
    z_star = {
        (0, 1): 5 / 12,
        (0, 2): 5 / 12,
        (0, 5): 5 / 6,
        (1, 0): 5 / 12,
        (1, 2): 1 / 3,
        (1, 4): 5 / 6,
        (2, 0): 5 / 12,
        (2, 1): 1 / 3,
        (2, 3): 5 / 6,
        (3, 2): 5 / 6,
        (3, 4): 1 / 3,
        (3, 5): 1 / 2,
        (4, 1): 5 / 6,
        (4, 3): 1 / 3,
        (4, 5): 1 / 2,
        (5, 0): 5 / 6,
        (5, 3): 1 / 2,
        (5, 4): 1 / 2,
    }

    # Expected gamma exponents (rounded to 4 places) for the distribution.
    solution_gamma = {
        (0, 1): -0.6383,
        (0, 2): -0.6827,
        (0, 5): 0,
        (1, 2): -1.0781,
        (1, 4): 0,
        (2, 3): 0,
        (5, 3): -0.2820,
        (5, 4): -0.3327,
        (4, 3): -0.9927,
    }

    # The undirected support of z_star
    G = nx.MultiGraph()
    for u, v in z_star:
        # z_star lists both directions of each edge; keep one per pair.
        if (u, v) in G.edges or (v, u) in G.edges:
            continue
        G.add_edge(u, v)

    gamma = tsp.spanning_tree_distribution(G, z_star)

    assert {key: round(gamma[key], 4) for key in gamma} == solution_gamma
| 706 |
+
|
| 707 |
+
def test_asadpour_tsp():
    """
    Test the complete asadpour tsp algorithm with the fractional, symmetric
    Held Karp solution. This test also uses an incomplete graph as input.
    """
    # This version of Figure 2 has all of the edge weights multiplied by 100
    # and the 0 weight edges have a weight of 1.
    pytest.importorskip("numpy")
    pytest.importorskip("scipy")

    edge_list = [
        (0, 1, 100),
        (0, 2, 100),
        (0, 5, 1),
        (1, 2, 100),
        (1, 4, 1),
        (2, 3, 1),
        (3, 4, 100),
        (3, 5, 100),
        (4, 5, 100),
        (1, 0, 100),
        (2, 0, 100),
        (5, 0, 1),
        (2, 1, 100),
        (4, 1, 1),
        (3, 2, 1),
        (4, 3, 100),
        (5, 3, 100),
        (5, 4, 100),
    ]

    G = nx.DiGraph()
    G.add_weighted_edges_from(edge_list)

    def fixed_asadpour(G, weight):
        # Fixed seed (19) makes the randomized algorithm deterministic.
        return nx_app.asadpour_atsp(G, weight, 19)

    tour = nx_app.traveling_salesman_problem(G, weight="weight", method=fixed_asadpour)

    # Check that the returned list is a valid tour. Because this is an
    # incomplete graph, the conditions are not as strict. We need the tour to
    #
    # Start and end at the same node
    # Pass through every vertex at least once
    # Have a total cost at most ln(6) / ln(ln(6)) = 3.0723 times the optimal
    #
    # For the second condition it is possible to have the tour pass through the
    # same vertex more than once. Imagine that the tour on the complete version
    # takes an edge not in the original graph. In the output this is substituted
    # with the shortest path between those vertices, allowing vertices to appear
    # more than once.
    #
    # However, we are using a fixed random number generator so we know what the
    # expected tour is.
    expected_tours = [[1, 4, 5, 0, 2, 3, 2, 1], [3, 2, 0, 1, 4, 5, 3]]

    assert tour in expected_tours
| 765 |
+
|
| 766 |
+
def test_asadpour_real_world():
    """
    This test uses airline prices between the six largest cities in the US.

    * New York City -> JFK
    * Los Angeles -> LAX
    * Chicago -> ORD
    * Houston -> IAH
    * Phoenix -> PHX
    * Philadelphia -> PHL

    Flight prices from August 2021 using Delta or American airlines to get
    nonstop flight. The brute force solution found the optimal tour to cost $872

    This test also uses the `source` keyword argument to ensure that the tour
    always starts at city 0.
    """
    np = pytest.importorskip("numpy")
    pytest.importorskip("scipy")

    G_array = np.array(
        [
            # JFK  LAX  ORD  IAH  PHX  PHL
            [0, 243, 199, 208, 169, 183],  # JFK
            [277, 0, 217, 123, 127, 252],  # LAX
            [297, 197, 0, 197, 123, 177],  # ORD
            [303, 169, 197, 0, 117, 117],  # IAH
            [257, 127, 160, 117, 0, 319],  # PHX
            [183, 332, 217, 117, 319, 0],  # PHL
        ]
    )

    node_map = {0: "JFK", 1: "LAX", 2: "ORD", 3: "IAH", 4: "PHX", 5: "PHL"}

    # Two tours are possible because the randomized rounding can produce
    # either direction around the optimum.
    expected_tours = [
        ["JFK", "LAX", "PHX", "ORD", "IAH", "PHL", "JFK"],
        ["JFK", "ORD", "PHX", "LAX", "IAH", "PHL", "JFK"],
    ]

    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
    nx.relabel_nodes(G, node_map, copy=False)

    def fixed_asadpour(G, weight):
        # Fixed seed (37) and a fixed starting city keep the result stable.
        return nx_app.asadpour_atsp(G, weight, 37, source="JFK")

    tour = nx_app.traveling_salesman_problem(G, weight="weight", method=fixed_asadpour)

    assert tour in expected_tours
| 815 |
+
|
| 816 |
+
def test_asadpour_real_world_path():
    """
    This test uses airline prices between the six largest cities in the US. This
    time using a path, not a cycle.

    * New York City -> JFK
    * Los Angeles -> LAX
    * Chicago -> ORD
    * Houston -> IAH
    * Phoenix -> PHX
    * Philadelphia -> PHL

    Flight prices from August 2021 using Delta or American airlines to get
    nonstop flight. The brute force solution found the optimal tour to cost $872
    """
    np = pytest.importorskip("numpy")
    pytest.importorskip("scipy")

    G_array = np.array(
        [
            # JFK  LAX  ORD  IAH  PHX  PHL
            [0, 243, 199, 208, 169, 183],  # JFK
            [277, 0, 217, 123, 127, 252],  # LAX
            [297, 197, 0, 197, 123, 177],  # ORD
            [303, 169, 197, 0, 117, 117],  # IAH
            [257, 127, 160, 117, 0, 319],  # PHX
            [183, 332, 217, 117, 319, 0],  # PHL
        ]
    )

    node_map = {0: "JFK", 1: "LAX", 2: "ORD", 3: "IAH", 4: "PHX", 5: "PHL"}

    # Either orientation of the optimal open path is acceptable.
    expected_paths = [
        ["ORD", "PHX", "LAX", "IAH", "PHL", "JFK"],
        ["JFK", "PHL", "IAH", "ORD", "PHX", "LAX"],
    ]

    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
    nx.relabel_nodes(G, node_map, copy=False)

    def fixed_asadpour(G, weight):
        # Fixed seed (56) keeps the randomized algorithm deterministic.
        return nx_app.asadpour_atsp(G, weight, 56)

    path = nx_app.traveling_salesman_problem(
        G, weight="weight", cycle=False, method=fixed_asadpour
    )

    assert path in expected_paths
|
| 865 |
+
|
| 866 |
+
def test_asadpour_disconnected_graph():
    """Ensure asadpour_atsp raises NetworkXError for a disconnected graph."""
    digraph = nx.complete_graph(4, create_using=nx.DiGraph)
    # Edge weights are required so that, if the exception were not raised,
    # the algorithm would run to completion and the test would still fail.
    nx.set_edge_attributes(digraph, 1, "weight")
    # An isolated extra node disconnects the otherwise complete digraph.
    digraph.add_node(5)

    with pytest.raises(nx.NetworkXError):
        nx_app.asadpour_atsp(digraph)
|
| 879 |
+
|
| 880 |
+
|
| 881 |
+
def test_asadpour_incomplete_graph():
    """Ensure asadpour_atsp raises NetworkXError for an incomplete graph."""
    digraph = nx.complete_graph(4, create_using=nx.DiGraph)
    # Edge weights are required so that, if the exception were not raised,
    # the algorithm would run to completion and the test would still fail.
    nx.set_edge_attributes(digraph, 1, "weight")
    # Dropping a single arc makes the digraph incomplete.
    digraph.remove_edge(0, 1)

    with pytest.raises(nx.NetworkXError):
        nx_app.asadpour_atsp(digraph)
|
| 894 |
+
|
| 895 |
+
|
| 896 |
+
def test_asadpour_empty_graph():
    """An empty digraph must make asadpour_atsp raise NetworkXError."""
    empty = nx.DiGraph()

    with pytest.raises(nx.NetworkXError):
        nx_app.asadpour_atsp(empty)
|
| 903 |
+
|
| 904 |
+
|
| 905 |
+
@pytest.mark.slow
def test_asadpour_integral_held_karp():
    """
    This test uses an integral Held-Karp solution, so the Held-Karp function
    will return a graph rather than a dict, bypassing most of the Asadpour
    algorithm.

    At first glance, this test probably doesn't look like it ensures that we
    skip the rest of the Asadpour algorithm, but it does. We are not fixing a
    seed for the random number generator, so if we sampled any spanning trees
    the approximation would be different basically every time this test is
    executed — but it is not, since Held-Karp is deterministic and we do not
    reach the portion of the code with the dependence on random numbers.
    """
    np = pytest.importorskip("numpy")

    # Weight matrix chosen so the Held-Karp relaxation has an integral optimum.
    G_array = np.array(
        [
            [0, 26, 63, 59, 69, 31, 41],
            [62, 0, 91, 53, 75, 87, 47],
            [47, 82, 0, 90, 15, 9, 18],
            [68, 19, 5, 0, 58, 34, 93],
            [11, 58, 53, 55, 0, 61, 79],
            [88, 75, 13, 76, 98, 0, 40],
            [41, 61, 55, 88, 46, 45, 0],
        ]
    )

    G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)

    # Run twice: with no seed fixed, an identical tour both times is evidence
    # that the randomized sampling stage was never reached.
    for _ in range(2):
        tour = nx_app.traveling_salesman_problem(G, method=nx_app.asadpour_atsp)

        assert [1, 3, 2, 5, 2, 6, 4, 0, 1] == tour
|
| 939 |
+
|
| 940 |
+
|
| 941 |
+
def test_directed_tsp_impossible():
    """Run the directed TSP on a graph without a Hamiltonian circuit."""
    pytest.importorskip("numpy")

    # Once the salesman leaves node 0 there is no arc leading back to it.
    weighted_arcs = [
        (0, 1, 10),
        (0, 2, 11),
        (0, 3, 12),
        (1, 2, 4),
        (1, 3, 6),
        (2, 1, 3),
        (2, 3, 2),
        (3, 1, 5),
        (3, 2, 1),
    ]

    digraph = nx.DiGraph()
    digraph.add_weighted_edges_from(weighted_arcs)

    with pytest.raises(nx.NetworkXError):
        nx_app.traveling_salesman_problem(digraph)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/traveling_salesman.py
ADDED
|
@@ -0,0 +1,1442 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
=================================
|
| 3 |
+
Travelling Salesman Problem (TSP)
|
| 4 |
+
=================================
|
| 5 |
+
|
| 6 |
+
Implementation of approximate algorithms
|
| 7 |
+
for solving and approximating the TSP problem.
|
| 8 |
+
|
| 9 |
+
Categories of algorithms which are implemented:
|
| 10 |
+
|
| 11 |
+
- Christofides (provides a 3/2-approximation of TSP)
|
| 12 |
+
- Greedy
|
| 13 |
+
- Simulated Annealing (SA)
|
| 14 |
+
- Threshold Accepting (TA)
|
| 15 |
+
- Asadpour Asymmetric Traveling Salesman Algorithm
|
| 16 |
+
|
| 17 |
+
The Travelling Salesman Problem tries to find, given the weight
|
| 18 |
+
(distance) between all points where a salesman has to visit, the
|
| 19 |
+
route so that:
|
| 20 |
+
|
| 21 |
+
- The total distance (cost) which the salesman travels is minimized.
|
| 22 |
+
- The salesman returns to the starting point.
|
| 23 |
+
- Note that for a complete graph, the salesman visits each point once.
|
| 24 |
+
|
| 25 |
+
The function `travelling_salesman_problem` allows for incomplete
|
| 26 |
+
graphs by finding all-pairs shortest paths, effectively converting
|
| 27 |
+
the problem to a complete graph problem. It calls one of the
|
| 28 |
+
approximate methods on that problem and then converts the result
|
| 29 |
+
back to the original graph using the previously found shortest paths.
|
| 30 |
+
|
| 31 |
+
TSP is an NP-hard problem in combinatorial optimization,
|
| 32 |
+
important in operations research and theoretical computer science.
|
| 33 |
+
|
| 34 |
+
http://en.wikipedia.org/wiki/Travelling_salesman_problem
|
| 35 |
+
"""
|
| 36 |
+
import math
|
| 37 |
+
|
| 38 |
+
import networkx as nx
|
| 39 |
+
from networkx.algorithms.tree.mst import random_spanning_tree
|
| 40 |
+
from networkx.utils import not_implemented_for, pairwise, py_random_state
|
| 41 |
+
|
| 42 |
+
__all__ = [
|
| 43 |
+
"traveling_salesman_problem",
|
| 44 |
+
"christofides",
|
| 45 |
+
"asadpour_atsp",
|
| 46 |
+
"greedy_tsp",
|
| 47 |
+
"simulated_annealing_tsp",
|
| 48 |
+
"threshold_accepting_tsp",
|
| 49 |
+
]
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def swap_two_nodes(soln, seed):
    """Swap two interior nodes of `soln` to give a neighbor solution.

    Parameters
    ----------
    soln : list of nodes
        Current cycle of nodes

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    list
        The solution after move is applied. (A neighbor solution.)

    Notes
    -----
    This function assumes that the incoming list `soln` is a cycle
    (that the first and last element are the same) and also that
    we don't want any move to change the first node in the list
    (and thus not the last node either).

    The input list is changed as well as returned. Make a copy if needed.

    See Also
    --------
    move_one_node
    """
    # Only interior positions are eligible: index 0 and the final index hold
    # the (identical) anchor node of the cycle and must stay put.
    i, j = seed.sample(range(1, len(soln) - 1), k=2)
    soln[i], soln[j] = soln[j], soln[i]
    return soln
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def move_one_node(soln, seed):
    """Move one node to another position to give a neighbor solution.

    The node to move and the position to move to are chosen randomly.
    The first and last nodes are left untouched as soln must be a cycle
    starting at that node.

    Parameters
    ----------
    soln : list of nodes
        Current cycle of nodes

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    list
        The solution after move is applied. (A neighbor solution.)

    Notes
    -----
    This function assumes that the incoming list `soln` is a cycle
    (that the first and last element are the same) and also that
    we don't want any move to change the first node in the list
    (and thus not the last node either).

    The input list is changed as well as returned. Make a copy if needed.

    See Also
    --------
    swap_two_nodes
    """
    # Draw a source and destination index from the interior of the cycle,
    # then relocate the source node to the destination position in place.
    src, dst = seed.sample(range(1, len(soln) - 1), k=2)
    moved = soln.pop(src)
    soln.insert(dst, moved)
    return soln
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
@not_implemented_for("directed")
@nx._dispatch(edge_attrs="weight")
def christofides(G, weight="weight", tree=None):
    """Approximate a solution of the traveling salesman problem

    Compute a 3/2-approximation of the traveling salesman problem
    in a complete undirected graph using Christofides [1]_ algorithm.

    Parameters
    ----------
    G : Graph
        `G` should be a complete weighted undirected graph.
        The distance between all pairs of nodes should be included.

    weight : string, optional (default="weight")
        Edge data key corresponding to the edge weight.
        If any edge does not have this attribute the weight is set to 1.

    tree : NetworkX graph or None (default: None)
        A minimum spanning tree of G. Or, if None, the minimum spanning
        tree is computed using :func:`networkx.minimum_spanning_tree`

    Returns
    -------
    list
        List of nodes in `G` along a cycle with a 3/2-approximation of
        the minimal Hamiltonian cycle.

    References
    ----------
    .. [1] Christofides, Nicos. "Worst-case analysis of a new heuristic for
       the travelling salesman problem." No. RR-388. Carnegie-Mellon Univ
       Pittsburgh Pa Management Sciences Research Group, 1976.
    """
    # Remove selfloops if necessary
    loop_nodes = nx.nodes_with_selfloops(G)
    try:
        # `loop_nodes` is a generator: `next` both tests whether any selfloop
        # exists and consumes its first element, so the `remove_edges_from`
        # below handles only the remaining selfloop nodes.
        node = next(loop_nodes)
    except StopIteration:
        pass
    else:
        # Copy before mutating so the caller's graph is left untouched.
        G = G.copy()
        G.remove_edge(node, node)
        G.remove_edges_from((n, n) for n in loop_nodes)
    # Check that G is a complete graph
    N = len(G) - 1
    # This check ignores selfloops which is what we want here.
    if any(len(nbrdict) != N for n, nbrdict in G.adj.items()):
        raise nx.NetworkXError("G must be a complete graph.")

    if tree is None:
        tree = nx.minimum_spanning_tree(G, weight=weight)
    # L keeps only the odd-degree vertices of the spanning tree; a minimum
    # weight matching on them makes every vertex degree even.
    L = G.copy()
    L.remove_nodes_from([v for v, degree in tree.degree if not (degree % 2)])
    MG = nx.MultiGraph()
    MG.add_edges_from(tree.edges)
    edges = nx.min_weight_matching(L, weight=weight)
    MG.add_edges_from(edges)
    # Tree + matching has all-even degrees, hence an Eulerian circuit exists;
    # shortcut repeated nodes in it to obtain the Hamiltonian cycle.
    return _shortcutting(nx.eulerian_circuit(MG))
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
def _shortcutting(circuit):
|
| 188 |
+
"""Remove duplicate nodes in the path"""
|
| 189 |
+
nodes = []
|
| 190 |
+
for u, v in circuit:
|
| 191 |
+
if v in nodes:
|
| 192 |
+
continue
|
| 193 |
+
if not nodes:
|
| 194 |
+
nodes.append(u)
|
| 195 |
+
nodes.append(v)
|
| 196 |
+
nodes.append(nodes[0])
|
| 197 |
+
return nodes
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
@nx._dispatch(edge_attrs="weight")
def traveling_salesman_problem(G, weight="weight", nodes=None, cycle=True, method=None):
    """Find the shortest path in `G` connecting specified nodes

    This function allows approximate solution to the traveling salesman
    problem on networks that are not complete graphs and/or where the
    salesman does not need to visit all nodes.

    This function proceeds in two steps. First, it creates a complete
    graph using the all-pairs shortest_paths between nodes in `nodes`.
    Edge weights in the new graph are the lengths of the paths
    between each pair of nodes in the original graph.
    Second, an algorithm (default: `christofides` for undirected and
    `asadpour_atsp` for directed) is used to approximate the minimal Hamiltonian
    cycle on this new graph. The available algorithms are:

     - christofides
     - greedy_tsp
     - simulated_annealing_tsp
     - threshold_accepting_tsp
     - asadpour_atsp

    Once the Hamiltonian Cycle is found, this function post-processes to
    accommodate the structure of the original graph. If `cycle` is ``False``,
    the biggest weight edge is removed to make a Hamiltonian path.
    Then each edge on the new complete graph used for that analysis is
    replaced by the shortest_path between those nodes on the original graph.

    Parameters
    ----------
    G : NetworkX graph
        A possibly weighted graph

    nodes : collection of nodes (default=G.nodes)
        collection (list, set, etc.) of nodes to visit

    weight : string, optional (default="weight")
        Edge data key corresponding to the edge weight.
        If any edge does not have this attribute the weight is set to 1.

    cycle : bool (default: True)
        Indicates whether a cycle should be returned, or a path.
        Note: the cycle is the approximate minimal cycle.
        The path simply removes the biggest edge in that cycle.

    method : function (default: None)
        A function that returns a cycle on all nodes and approximates
        the solution to the traveling salesman problem on a complete
        graph. The returned cycle is then used to find a corresponding
        solution on `G`. `method` should be callable; take inputs
        `G`, and `weight`; and return a list of nodes along the cycle.

        Provided options include :func:`christofides`, :func:`greedy_tsp`,
        :func:`simulated_annealing_tsp`, :func:`threshold_accepting_tsp`
        and :func:`asadpour_atsp`.

        If `method is None`: use :func:`christofides` for undirected `G` and
        :func:`asadpour_atsp` for directed `G`.

        To specify parameters for these provided functions, construct lambda
        functions that state the specific value. `method` must have 2 inputs.
        (See examples).

    Returns
    -------
    list
        List of nodes in `G` along a path with an approximation of the minimal
        path through `nodes`.


    Raises
    ------
    NetworkXError
        If `G` is a directed graph it has to be strongly connected or the
        complete version cannot be generated.

    Examples
    --------
    >>> tsp = nx.approximation.traveling_salesman_problem
    >>> G = nx.cycle_graph(9)
    >>> G[4][5]["weight"] = 5  # all other weights are 1
    >>> tsp(G, nodes=[3, 6])
    [3, 2, 1, 0, 8, 7, 6, 7, 8, 0, 1, 2, 3]
    >>> path = tsp(G, cycle=False)
    >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4])
    True

    Build (curry) your own function to provide parameter values to the methods.

    >>> SA_tsp = nx.approximation.simulated_annealing_tsp
    >>> method = lambda G, wt: SA_tsp(G, "greedy", weight=wt, temp=500)
    >>> path = tsp(G, cycle=False, method=method)
    >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4])
    True

    """
    if method is None:
        if G.is_directed():
            method = asadpour_atsp
        else:
            method = christofides
    if nodes is None:
        nodes = list(G.nodes)

    # All-pairs shortest path lengths (dist) and node sequences (path) in G.
    dist = {}
    path = {}
    for n, (d, p) in nx.all_pairs_dijkstra(G, weight=weight):
        dist[n] = d
        path[n] = p

    if G.is_directed():
        # If the graph is not strongly connected, raise an exception
        if not nx.is_strongly_connected(G):
            raise nx.NetworkXError("G is not strongly connected")
        GG = nx.DiGraph()
    else:
        GG = nx.Graph()
    # Build the complete graph on `nodes` weighted by shortest-path lengths.
    for u in nodes:
        for v in nodes:
            if u == v:
                continue
            GG.add_edge(u, v, weight=dist[u][v])
    best_GG = method(GG, weight)

    if not cycle:
        # Find and remove the biggest edge to turn the cycle into a path.
        (u, v) = max(pairwise(best_GG), key=lambda x: dist[x[0]][x[1]])
        pos = best_GG.index(u) + 1
        while best_GG[pos] != v:
            # BUG FIX: `best_GG[pos:].index(u)` is an offset relative to
            # `pos`, so it must be *added* to `pos`. The previous code
            # assigned the slice-relative index directly, which could scan
            # the wrong position (or loop forever) whenever `u` occurs more
            # than once in the returned cycle.
            pos += best_GG[pos:].index(u) + 1
        best_GG = best_GG[pos:-1] + best_GG[:pos]

    # Expand each edge of the solution back into the corresponding shortest
    # path of the original graph.
    best_path = []
    for u, v in pairwise(best_GG):
        best_path.extend(path[u][v][:-1])
    best_path.append(v)
    return best_path
|
| 336 |
+
|
| 337 |
+
|
| 338 |
+
@not_implemented_for("undirected")
@py_random_state(2)
@nx._dispatch(edge_attrs="weight")
def asadpour_atsp(G, weight="weight", seed=None, source=None):
    """
    Returns an approximate solution to the traveling salesman problem.

    This approximate solution is one of the best known approximations for the
    asymmetric traveling salesman problem developed by Asadpour et al,
    [1]_. The algorithm first solves the Held-Karp relaxation to find a lower
    bound for the weight of the cycle. Next, it constructs an exponential
    distribution of undirected spanning trees where the probability of an
    edge being in the tree corresponds to the weight of that edge using a
    maximum entropy rounding scheme. Next we sample that distribution
    $2 \\lceil \\ln n \\rceil$ times and save the minimum sampled tree once the
    direction of the arcs is added back to the edges. Finally, we augment
    then short circuit that graph to find the approximate tour for the
    salesman.

    Parameters
    ----------
    G : nx.DiGraph
        The graph should be a complete weighted directed graph. The
        distance between all pairs of nodes should be included and the triangle
        inequality should hold. That is, the direct edge between any two nodes
        should be the path of least cost.

    weight : string, optional (default="weight")
        Edge data key corresponding to the edge weight.
        If any edge does not have this attribute the weight is set to 1.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    source : node label (default=`None`)
        If given, return the cycle starting and ending at the given node.

    Returns
    -------
    cycle : list of nodes
        Returns the cycle (list of nodes) that a salesman can follow to minimize
        the total weight of the trip.

    Raises
    ------
    NetworkXError
        If `G` is not complete or has less than two nodes, the algorithm raises
        an exception.

    NetworkXError
        If `source` is not `None` and is not a node in `G`, the algorithm raises
        an exception.

    NetworkXNotImplemented
        If `G` is an undirected graph.

    References
    ----------
    .. [1] A. Asadpour, M. X. Goemans, A. Madry, S. O. Gharan, and A. Saberi,
       An o(log n/log log n)-approximation algorithm for the asymmetric
       traveling salesman problem, Operations research, 65 (2017),
       pp. 1043–1061

    Examples
    --------
    >>> import networkx as nx
    >>> import networkx.algorithms.approximation as approx
    >>> G = nx.complete_graph(3, create_using=nx.DiGraph)
    >>> nx.set_edge_attributes(G, {(0, 1): 2, (1, 2): 2, (2, 0): 2, (0, 2): 1, (2, 1): 1, (1, 0): 1}, "weight")
    >>> tour = approx.asadpour_atsp(G, source=0)
    >>> tour
    [0, 2, 1, 0]
    """
    from math import ceil, exp
    from math import log as ln

    # Check that G is a complete graph
    N = len(G) - 1
    if N < 2:
        raise nx.NetworkXError("G must have at least two nodes")
    # This check ignores selfloops which is what we want here.
    if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
        raise nx.NetworkXError("G is not a complete DiGraph")
    # Check that the source vertex, if given, is in the graph
    if source is not None and source not in G.nodes:
        raise nx.NetworkXError("Given source node not in G.")

    opt_hk, z_star = held_karp_ascent(G, weight)

    # Test to see if the ascent method found an integer solution or a fractional
    # solution. If it is integral then z_star is a nx.Graph, otherwise it is
    # a dict
    if not isinstance(z_star, dict):
        # Here we are using the shortcutting method to go from the list of edges
        # returned from eulerian_circuit to a list of nodes
        return _shortcutting(nx.eulerian_circuit(z_star, source=source))

    # Create the undirected support of z_star
    z_support = nx.MultiGraph()
    for u, v in z_star:
        if (u, v) not in z_support.edges:
            edge_weight = min(G[u][v][weight], G[v][u][weight])
            z_support.add_edge(u, v, **{weight: edge_weight})

    # Create the exponential distribution of spanning trees
    gamma = spanning_tree_distribution(z_support, z_star)

    # Write the lambda values to the edges of z_support
    z_support = nx.Graph(z_support)
    lambda_dict = {(u, v): exp(gamma[(u, v)]) for u, v in z_support.edges()}
    nx.set_edge_attributes(z_support, lambda_dict, "weight")
    del gamma, lambda_dict

    # Sample 2 * ceil( ln(n) ) spanning trees and record the minimum one
    minimum_sampled_tree = None
    minimum_sampled_tree_weight = math.inf
    for _ in range(2 * ceil(ln(G.number_of_nodes()))):
        sampled_tree = random_spanning_tree(z_support, "weight", seed=seed)
        sampled_tree_weight = sampled_tree.size(weight)
        if sampled_tree_weight < minimum_sampled_tree_weight:
            minimum_sampled_tree = sampled_tree.copy()
            minimum_sampled_tree_weight = sampled_tree_weight

    # Orient the edges in that tree to keep the cost of the tree the same.
    t_star = nx.MultiDiGraph()
    for u, v, d in minimum_sampled_tree.edges(data=weight):
        if d == G[u][v][weight]:
            t_star.add_edge(u, v, **{weight: d})
        else:
            t_star.add_edge(v, u, **{weight: d})

    # Find the node demands needed to neutralize the flow of t_star in G
    node_demands = {n: t_star.out_degree(n) - t_star.in_degree(n) for n in t_star}
    nx.set_node_attributes(G, node_demands, "demand")

    # Find the min_cost_flow
    flow_dict = nx.min_cost_flow(G, "demand")

    # Build the flow into t_star.
    # BUG FIX: the loop variable was previously named `source`, shadowing the
    # function's `source` parameter. That made the eulerian_circuit call below
    # start from the last key of flow_dict instead of the caller's requested
    # source node.
    for flow_src, flow_targets in flow_dict.items():
        for flow_dst in flow_targets:
            if (flow_src, flow_dst) not in t_star.edges and flow_targets[flow_dst] > 0:
                # If flow_targets[flow_dst] > 0 we have to add that many edges
                for _ in range(flow_targets[flow_dst]):
                    t_star.add_edge(flow_src, flow_dst)

    # Return the shortcut eulerian circuit
    circuit = nx.eulerian_circuit(t_star, source=source)
    return _shortcutting(circuit)
|
| 488 |
+
|
| 489 |
+
|
| 490 |
+
@nx._dispatch(edge_attrs="weight")
def held_karp_ascent(G, weight="weight"):
    """
    Minimizes the Held-Karp relaxation of the TSP for `G`

    Solves the Held-Karp relaxation of the input complete digraph and scales
    the output solution for use in the Asadpour [1]_ ASTP algorithm.

    The Held-Karp relaxation defines the lower bound for solutions to the
    ATSP, although it does return a fractional solution. This is used in the
    Asadpour algorithm as an initial solution which is later rounded to an
    integral tree within the spanning tree polytopes. This function solves
    the relaxation with the branch and bound method in [2]_.

    Parameters
    ----------
    G : nx.DiGraph
        The graph should be a complete weighted directed graph.
        The distance between all pairs of nodes should be included.

    weight : string, optional (default="weight")
        Edge data key corresponding to the edge weight.
        If any edge does not have this attribute the weight is set to 1.

    Returns
    -------
    OPT : float
        The cost for the optimal solution to the Held-Karp relaxation
    z : dict or nx.Graph
        A symmetrized and scaled version of the optimal solution to the
        Held-Karp relaxation for use in the Asadpour algorithm.

        If an integral solution is found, then that is an optimal solution for
        the ATSP problem and that is returned instead.

    References
    ----------
    .. [1] A. Asadpour, M. X. Goemans, A. Madry, S. O. Gharan, and A. Saberi,
       An o(log n/log log n)-approximation algorithm for the asymmetric
       traveling salesman problem, Operations research, 65 (2017),
       pp. 1043–1061

    .. [2] M. Held, R. M. Karp, The traveling-salesman problem and minimum
       spanning trees, Operations Research, 1970-11-01, Vol. 18 (6),
       pp.1138-1162
    """
    import numpy as np
    from scipy import optimize

    def k_pi():
        """
        Find the set of minimum 1-Arborescences for G at point pi.

        A 1-arborescence is a spanning arborescence on G minus one node
        ('node 1') plus one out-edge and one in-edge connecting that node.

        Returns
        -------
        Set
            The set of minimum 1-Arborescences
        """
        # Create a copy of G without vertex 1.
        G_1 = G.copy()
        minimum_1_arborescences = set()
        minimum_1_arborescence_weight = math.inf

        # n is node '1' in the Held and Karp paper
        n = next(G.__iter__())
        G_1.remove_node(n)

        # Iterate over the spanning arborescences of the graph until we know
        # that we have found the minimum 1-arborescences. The strategy
        # is to find the most expensive root to connect to from 'node 1' and
        # the least expensive one. We then iterate over arborescences until
        # the cost of the basic arborescence exceeds the cost of the minimum
        # one plus the difference between the most and least expensive roots;
        # beyond that point the cost of connecting 'node 1' cannot, by
        # definition, yield a new minimum.
        min_root = {"node": None, weight: math.inf}
        max_root = {"node": None, weight: -math.inf}
        for u, v, d in G.edges(n, data=True):
            if d[weight] < min_root[weight]:
                min_root = {"node": v, weight: d[weight]}
            if d[weight] > max_root[weight]:
                max_root = {"node": v, weight: d[weight]}

        # Both bounds include the cheapest way back into 'node 1'.
        min_in_edge = min(G.in_edges(n, data=True), key=lambda x: x[2][weight])
        min_root[weight] = min_root[weight] + min_in_edge[2][weight]
        max_root[weight] = max_root[weight] + min_in_edge[2][weight]

        min_arb_weight = math.inf
        for arb in nx.ArborescenceIterator(G_1):
            arb_weight = arb.size(weight)
            if min_arb_weight == math.inf:
                min_arb_weight = arb_weight
            elif arb_weight > min_arb_weight + max_root[weight] - min_root[weight]:
                break
            # We have to pick the root node of the arborescence for the out
            # edge of the first vertex as that is the only node without an
            # edge directed into it.
            for N, deg in arb.in_degree:
                if deg == 0:
                    # root found
                    arb.add_edge(n, N, **{weight: G[n][N][weight]})
                    arb_weight += G[n][N][weight]
                    break

            # We can pick the minimum weight in-edge for the vertex with
            # a cycle. If there are multiple edges with the same, minimum
            # weight, we need to add all of them.
            #
            # Delete the edge (N, n) so that we cannot pick it.
            edge_data = G[N][n]
            G.remove_edge(N, n)
            min_weight = min(G.in_edges(n, data=weight), key=lambda x: x[2])[2]
            min_edges = [
                (u, v, d) for u, v, d in G.in_edges(n, data=weight) if d == min_weight
            ]
            for u, v, d in min_edges:
                new_arb = arb.copy()
                new_arb.add_edge(u, v, **{weight: d})
                new_arb_weight = arb_weight + d
                # Check to see the weight of the arborescence, if it is a
                # new minimum, clear all of the old potential minimum
                # 1-arborescences and add this as the only one. If its
                # weight is above the known minimum, do not add it.
                if new_arb_weight < minimum_1_arborescence_weight:
                    minimum_1_arborescences.clear()
                    minimum_1_arborescence_weight = new_arb_weight
                # We have a 1-arborescence, add it to the set
                if new_arb_weight == minimum_1_arborescence_weight:
                    minimum_1_arborescences.add(new_arb)
            # Restore the temporarily deleted edge so G is unchanged.
            G.add_edge(N, n, **edge_data)

        return minimum_1_arborescences

    def direction_of_ascent():
        """
        Find the direction of ascent at point pi.

        See [1]_ for more information.

        Returns
        -------
        dict
            A mapping from the nodes of the graph which represents the direction
            of ascent.

        References
        ----------
        .. [1] M. Held, R. M. Karp, The traveling-salesman problem and minimum
           spanning trees, Operations Research, 1970-11-01, Vol. 18 (6),
           pp.1138-1162
        """
        # 1. Set d equal to the zero n-vector.
        d = {}
        for n in G:
            d[n] = 0
        del n
        # 2. Find a 1-Arborescence T^k such that k is in K(pi, d).
        minimum_1_arborescences = k_pi()
        while True:
            # Reduce K(pi) to K(pi, d)
            # Find the arborescence in K(pi) which increases the least in
            # direction d
            min_k_d_weight = math.inf
            min_k_d = None
            for arborescence in minimum_1_arborescences:
                weighted_cost = 0
                for n, deg in arborescence.degree:
                    weighted_cost += d[n] * (deg - 2)
                if weighted_cost < min_k_d_weight:
                    min_k_d_weight = weighted_cost
                    min_k_d = arborescence

            # 3. If sum of d_i * v_{i, k} is greater than zero, terminate
            if min_k_d_weight > 0:
                return d, min_k_d
            # 4. d_i = d_i + v_{i, k}
            for n, deg in min_k_d.degree:
                d[n] += deg - 2
            # Check that we do not need to terminate because the direction
            # of ascent does not exist. This is done with linear
            # programming.
            c = np.full(len(minimum_1_arborescences), -1, dtype=int)
            a_eq = np.empty((len(G) + 1, len(minimum_1_arborescences)), dtype=int)
            b_eq = np.zeros(len(G) + 1, dtype=int)
            b_eq[len(G)] = 1
            for arb_count, arborescence in enumerate(minimum_1_arborescences):
                n_count = len(G) - 1
                for n, deg in arborescence.degree:
                    a_eq[n_count][arb_count] = deg - 2
                    n_count -= 1
                a_eq[len(G)][arb_count] = 1
            program_result = optimize.linprog(c, A_eq=a_eq, b_eq=b_eq)
            # If the LP is feasible, then a direction of ascent does not exist
            if program_result.success:
                # There is no direction of ascent
                return None, minimum_1_arborescences

            # 5. GO TO 2

    def find_epsilon(k, d):
        """
        Given the direction of ascent at pi, find the maximum distance we can go
        in that direction.

        Parameters
        ----------
        k : nx.DiGraph
            The minimum 1-arborescence at point pi with the minimum rate of
            increase in the direction of ascent

        d : dict
            The direction of ascent

        Returns
        -------
        float
            The distance we can travel in direction `d`
        """
        min_epsilon = math.inf
        for e_u, e_v, e_w in G.edges(data=weight):
            if (e_u, e_v) in k.edges:
                continue
            # A condition which MUST be true for the edges to
            # be a valid substitute: the edge in the graph which is the
            # substitute is the one with the same terminal end. This can be
            # checked rather simply.
            #
            # Find the edge within k which is the substitute. Because k is a
            # 1-arborescence, we know that there is only one such edge
            # leading into every vertex.
            if len(k.in_edges(e_v, data=weight)) > 1:
                raise Exception
            sub_u, sub_v, sub_w = next(k.in_edges(e_v, data=weight).__iter__())
            k.add_edge(e_u, e_v, **{weight: e_w})
            k.remove_edge(sub_u, sub_v)
            # Only consider the swap if k is still a valid 1-arborescence:
            # in-degree at most 1 everywhere, |V| edges, weakly connected.
            if (
                max(d for n, d in k.in_degree()) <= 1
                and len(G) == k.number_of_edges()
                and nx.is_weakly_connected(k)
            ):
                # Ascent method calculation
                if d[sub_u] == d[e_u] or sub_w == e_w:
                    # Revert to the original graph
                    k.remove_edge(e_u, e_v)
                    k.add_edge(sub_u, sub_v, **{weight: sub_w})
                    continue
                epsilon = (sub_w - e_w) / (d[e_u] - d[sub_u])
                if 0 < epsilon < min_epsilon:
                    min_epsilon = epsilon
            # Revert to the original graph
            k.remove_edge(e_u, e_v)
            k.add_edge(sub_u, sub_v, **{weight: sub_w})

        return min_epsilon

    # The elements in pi must correspond to the correct elements
    # in the direction of ascent, even if the node labels are not integers.
    # Thus, dictionaries are used to make that mapping.
    pi_dict = {}
    for n in G:
        pi_dict[n] = 0
    del n
    original_edge_weights = {}
    for u, v, d in G.edges(data=True):
        original_edge_weights[(u, v)] = d[weight]
    dir_ascent, k_d = direction_of_ascent()
    # Ascend until no direction of ascent exists, re-weighting G's edges
    # with the updated pi values after each step.
    while dir_ascent is not None:
        max_distance = find_epsilon(k_d, dir_ascent)
        for n, v in dir_ascent.items():
            pi_dict[n] += max_distance * v
        for u, v, d in G.edges(data=True):
            d[weight] = original_edge_weights[(u, v)] + pi_dict[u]
        dir_ascent, k_d = direction_of_ascent()
    # k_d is no longer an individual 1-arborescence but rather a set of
    # minimal 1-arborescences at the maximum point of the polytope and should
    # be reflected as such
    k_max = k_d

    # Search for a cycle within k_max. If a cycle exists, return it as the
    # solution
    for k in k_max:
        if len([n for n in k if k.degree(n) == 2]) == G.order():
            # Tour found
            return k.size(weight), k

    # Write the original edge weights back to G and every member of k_max at
    # the maximum point. Also average the number of times that edge appears in
    # the set of minimal 1-arborescences.
    x_star = {}
    size_k_max = len(k_max)
    for u, v, d in G.edges(data=True):
        edge_count = 0
        d[weight] = original_edge_weights[(u, v)]
        for k in k_max:
            if (u, v) in k.edges():
                edge_count += 1
                k[u][v][weight] = original_edge_weights[(u, v)]
        x_star[(u, v)] = edge_count / size_k_max
    # Now symmetrize the edges in x_star and scale them according to (5) in
    # reference [1]
    z_star = {}
    scale_factor = (G.order() - 1) / G.order()
    for u, v in x_star:
        frequency = x_star[(u, v)] + x_star[(v, u)]
        if frequency > 0:
            z_star[(u, v)] = scale_factor * frequency
    del x_star
    # Return the optimal weight and the z dict
    return next(k_max.__iter__()).size(weight), z_star
|
| 799 |
+
|
| 800 |
+
|
| 801 |
+
@nx._dispatch
def spanning_tree_distribution(G, z):
    """
    Find the asadpour exponential distribution of spanning trees.

    Solves the Maximum Entropy Convex Program in the Asadpour algorithm [1]_
    using the approach in section 7 to build an exponential distribution of
    undirected spanning trees.

    This algorithm ensures that the probability of any edge in a spanning
    tree is proportional to the sum of the probabilities of the trees
    containing that edge over the sum of the probabilities of all spanning
    trees of the graph.

    Parameters
    ----------
    G : nx.MultiGraph
        The undirected support graph for the Held Karp relaxation

    z : dict
        The output of `held_karp_ascent()`, a scaled version of the Held-Karp
        solution.

    Returns
    -------
    gamma : dict
        The probability distribution which approximately preserves the marginal
        probabilities of `z`.
    """
    from math import exp
    from math import log as ln

    def q(e):
        """
        The value of q(e), as described in the Asadpour paper, is "the
        probability that edge e will be included in a spanning tree T that is
        chosen with probability proportional to exp(gamma(T))" which
        basically means that it is the total probability of the edge appearing
        across the whole distribution.

        Parameters
        ----------
        e : tuple
            The `(u, v)` tuple describing the edge we are interested in

        Returns
        -------
        float
            The probability that a spanning tree chosen according to the
            current values of gamma will include edge `e`.
        """
        # Create the laplacian matrices
        for u, v, d in G.edges(data=True):
            d[lambda_key] = exp(gamma[(u, v)])
        G_Kirchhoff = nx.total_spanning_tree_weight(G, lambda_key)
        G_e = nx.contracted_edge(G, e, self_loops=False)
        G_e_Kirchhoff = nx.total_spanning_tree_weight(G_e, lambda_key)

        # Multiply by the weight of the contracted edge since it is not included
        # in the total weight of the contracted graph.
        return exp(gamma[(e[0], e[1])]) * G_e_Kirchhoff / G_Kirchhoff

    # initialize gamma to the zero dict
    gamma = {}
    for u, v, _ in G.edges:
        gamma[(u, v)] = 0

    # set epsilon
    EPSILON = 0.2

    # pick an edge attribute name that is unlikely to be in the graph
    lambda_key = "spanning_tree_distribution's secret attribute name for lambda"

    while True:
        # We need to know that no values of q_e are greater than
        # (1 + epsilon) * z_e; however, changing one gamma value can increase
        # the value of a different q_e, so we have to complete the for loop
        # without changing anything for the condition to be met.
        in_range_count = 0
        # Search for an edge with q_e > (1 + epsilon) * z_e
        for u, v in gamma:
            e = (u, v)
            q_e = q(e)
            z_e = z[e]
            if q_e > (1 + EPSILON) * z_e:
                # Shrink gamma[e] so that q_e lands on (1 + epsilon/2) * z_e.
                delta = ln(
                    (q_e * (1 - (1 + EPSILON / 2) * z_e))
                    / ((1 - q_e) * (1 + EPSILON / 2) * z_e)
                )
                gamma[e] -= delta
                # Check that delta had the desired effect
                new_q_e = q(e)
                desired_q_e = (1 + EPSILON / 2) * z_e
                if round(new_q_e, 8) != round(desired_q_e, 8):
                    raise nx.NetworkXError(
                        f"Unable to modify probability for edge ({u}, {v})"
                    )
            else:
                in_range_count += 1
        # Check if the for loop terminated without changing any gamma
        if in_range_count == len(gamma):
            break

    # Remove the new edge attributes
    for _, _, d in G.edges(data=True):
        if lambda_key in d:
            del d[lambda_key]

    return gamma
|
| 910 |
+
|
| 911 |
+
|
| 912 |
+
@nx._dispatch(edge_attrs="weight")
def greedy_tsp(G, weight="weight", source=None):
    """Return a low cost cycle starting at `source` and its cost.

    Approximates a traveling-salesman tour with a nearest-neighbor greedy
    heuristic: beginning at `source`, repeatedly hop to the not-yet-visited
    node reachable at least cost, then close the cycle back at the start.

    Parameters
    ----------
    G : Graph
        A complete weighted (di)graph; every pair of distinct nodes must be
        joined by an edge.

    weight : string, optional (default="weight")
        Edge data key holding the edge weight. Edges missing this attribute
        are treated as having weight 1.

    source : node, optional (default: first node in list(G))
        Starting node. If None, defaults to ``next(iter(G))``

    Returns
    -------
    cycle : list of nodes
        The visiting order for the salesman, beginning and ending at
        `source`.

    Raises
    ------
    NetworkXError
        If `G` is not complete.

    Examples
    --------
    >>> from networkx.algorithms import approximation as approx
    >>> G = nx.DiGraph()
    >>> G.add_weighted_edges_from({
    ...     ("A", "B", 3), ("A", "C", 17), ("A", "D", 14), ("B", "A", 3),
    ...     ("B", "C", 12), ("B", "D", 16), ("C", "A", 13),("C", "B", 12),
    ...     ("C", "D", 4), ("D", "A", 14), ("D", "B", 15), ("D", "C", 2)
    ... })
    >>> cycle = approx.greedy_tsp(G, source="D")
    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
    >>> cycle
    ['D', 'C', 'B', 'A', 'D']
    >>> cost
    31

    Notes
    -----
    Greedy construction does not guarantee optimality, but the resulting
    feasible tour is a useful seed for improvement heuristics such as
    simulated annealing or threshold accepting.

    Time complexity: $O(|V|^2)$.
    """
    # Completeness check: ignoring self-loops, every node must be adjacent
    # to all len(G) - 1 other nodes.
    expected = len(G) - 1
    for node, neighbors in G.adj.items():
        if len(neighbors) - (node in neighbors) != expected:
            raise nx.NetworkXError("G must be a complete graph.")

    if source is None:
        source = nx.utils.arbitrary_element(G)

    # Two-node graphs admit exactly one tour; return it immediately.
    if G.number_of_nodes() == 2:
        other = next(G.neighbors(source))
        return [source, other, source]

    unvisited = set(G)
    unvisited.remove(source)
    tour = [source]
    current = source
    while unvisited:
        # Greedy step: cheapest edge from the current node to any
        # unvisited node (missing weight attribute counts as 1).
        choices = G[current]
        current = min(unvisited, key=lambda cand: choices[cand].get(weight, 1))
        tour.append(current)
        unvisited.remove(current)
    # Close the cycle back at the starting node.
    tour.append(tour[0])
    return tour
|
| 1002 |
+
|
| 1003 |
+
|
| 1004 |
+
@py_random_state(9)
@nx._dispatch(edge_attrs="weight")
def simulated_annealing_tsp(
    G,
    init_cycle,
    weight="weight",
    source=None,
    temp=100,
    move="1-1",
    max_iterations=10,
    N_inner=100,
    alpha=0.01,
    seed=None,
):
    """Returns an approximate solution to the traveling salesman problem.

    This function uses simulated annealing to approximate the minimal cost
    cycle through the nodes. Starting from a suboptimal solution, simulated
    annealing perturbs that solution, occasionally accepting changes that make
    the solution worse to escape from a locally optimal solution. The chance
    of accepting such changes decreases over the iterations to encourage
    an optimal result. In summary, the function returns a cycle starting
    at `source` for which the total cost is minimized.

    The chance of accepting a proposed change is related to a parameter called
    the temperature (annealing has a physical analogue of steel hardening
    as it cools). As the temperature is reduced, the chance of moves that
    increase cost goes down.

    Parameters
    ----------
    G : Graph
        `G` should be a complete weighted graph.
        The distance between all pairs of nodes should be included.

    init_cycle : list of all nodes or "greedy"
        The initial solution (a cycle through all nodes returning to the start).
        This argument has no default to make you think about it.
        If "greedy", use `greedy_tsp(G, weight)`.
        Other common starting cycles are `list(G) + [next(iter(G))]` or the final
        result of `simulated_annealing_tsp` when doing `threshold_accepting_tsp`.

    weight : string, optional (default="weight")
        Edge data key corresponding to the edge weight.
        If any edge does not have this attribute the weight is set to 1.

    source : node, optional (default: first node in list(G))
        Starting node.  If None, defaults to ``next(iter(G))``

    temp : int, optional (default=100)
        The algorithm's temperature parameter. It represents the initial
        value of temperature

    move : "1-1" or "1-0" or function, optional (default="1-1")
        Indicator of what move to use when finding new trial solutions.
        Strings indicate two special built-in moves:

        - "1-1": 1-1 exchange which transposes the position
          of two elements of the current solution.
          The function called is :func:`swap_two_nodes`.
          For example if we apply 1-1 exchange in the solution
          ``A = [3, 2, 1, 4, 3]``
          we can get the following by the transposition of 1 and 4 elements:
          ``A' = [3, 2, 4, 1, 3]``
        - "1-0": 1-0 exchange which moves a node in the solution
          to a new position.
          The function called is :func:`move_one_node`.
          For example if we apply 1-0 exchange in the solution
          ``A = [3, 2, 1, 4, 3]``
          we can transfer the fourth element to the second position:
          ``A' = [3, 4, 2, 1, 3]``

        You may provide your own functions to enact a move from
        one solution to a neighbor solution. The function must take
        the solution as input along with a `seed` input to control
        random number generation (see the `seed` input here).
        Your function should maintain the solution as a cycle with
        equal first and last node and all others appearing once.
        Your function should return the new solution.

    max_iterations : int, optional (default=10)
        Declared done when this number of consecutive iterations of
        the outer loop occurs without any change in the best cost solution.

    N_inner : int, optional (default=100)
        The number of iterations of the inner loop.

    alpha : float between (0, 1), optional (default=0.01)
        Percentage of temperature decrease in each iteration
        of outer loop

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    cycle : list of nodes
        Returns the cycle (list of nodes) that a salesman
        can follow to minimize total weight of the trip.

    Raises
    ------
    NetworkXError
        If `G` is not complete the algorithm raises an exception.

    Examples
    --------
    >>> from networkx.algorithms import approximation as approx
    >>> G = nx.DiGraph()
    >>> G.add_weighted_edges_from({
    ...     ("A", "B", 3), ("A", "C", 17), ("A", "D", 14), ("B", "A", 3),
    ...     ("B", "C", 12), ("B", "D", 16), ("C", "A", 13),("C", "B", 12),
    ...     ("C", "D", 4), ("D", "A", 14), ("D", "B", 15), ("D", "C", 2)
    ... })
    >>> cycle = approx.simulated_annealing_tsp(G, "greedy", source="D")
    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
    >>> cycle
    ['D', 'C', 'B', 'A', 'D']
    >>> cost
    31
    >>> incycle = ["D", "B", "A", "C", "D"]
    >>> cycle = approx.simulated_annealing_tsp(G, incycle, source="D")
    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
    >>> cycle
    ['D', 'C', 'B', 'A', 'D']
    >>> cost
    31

    Notes
    -----
    Simulated Annealing is a metaheuristic local search algorithm.
    The main characteristic of this algorithm is that it accepts
    even solutions which lead to the increase of the cost in order
    to escape from low quality local optimal solutions.

    This algorithm needs an initial solution. If not provided, it is
    constructed by a simple greedy algorithm. At every iteration, the
    algorithm selects thoughtfully a neighbor solution.
    Consider $c(x)$ cost of current solution and $c(x')$ cost of a
    neighbor solution.
    If $c(x') - c(x) <= 0$ then the neighbor solution becomes the current
    solution for the next iteration. Otherwise, the algorithm accepts
    the neighbor solution with probability $p = exp - ([c(x') - c(x)] / temp)$.
    Otherwise the current solution is retained.

    `temp` is a parameter of the algorithm and represents temperature.

    Time complexity:
    For $N_i$ iterations of the inner loop and $N_o$ iterations of the
    outer loop, this algorithm has running time $O(N_i * N_o * |V|)$.

    For more information and how the algorithm is inspired see:
    http://en.wikipedia.org/wiki/Simulated_annealing
    """
    # Resolve the string shorthands to the actual move callables.
    if move == "1-1":
        move = swap_two_nodes
    elif move == "1-0":
        move = move_one_node
    if init_cycle == "greedy":
        # Construct an initial solution using a greedy algorithm.
        cycle = greedy_tsp(G, weight=weight, source=source)
        if G.number_of_nodes() == 2:
            return cycle

    else:
        # A user-supplied cycle must start at `source`, return to its start,
        # and visit every node of G exactly once.
        cycle = list(init_cycle)
        if source is None:
            source = cycle[0]
        elif source != cycle[0]:
            raise nx.NetworkXError("source must be first node in init_cycle")
        if cycle[0] != cycle[-1]:
            raise nx.NetworkXError("init_cycle must be a cycle. (return to start)")

        if len(cycle) - 1 != len(G) or len(set(G.nbunch_iter(cycle))) != len(G):
            raise nx.NetworkXError("init_cycle should be a cycle over all nodes in G.")

        # Check that G is a complete graph
        N = len(G) - 1
        # This check ignores selfloops which is what we want here.
        if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
            raise nx.NetworkXError("G must be a complete graph.")

        if G.number_of_nodes() == 2:
            neighbor = next(G.neighbors(source))
            return [source, neighbor, source]

    # Find the cost of initial solution
    cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(cycle))

    count = 0
    best_cycle = cycle.copy()
    best_cost = cost
    # Outer loop: cool the temperature until `max_iterations` consecutive
    # rounds pass with no improvement of the best solution (or temp hits 0).
    while count <= max_iterations and temp > 0:
        count += 1
        for i in range(N_inner):
            # Propose a neighbor solution and evaluate its cost.
            adj_sol = move(cycle, seed)
            adj_cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(adj_sol))
            delta = adj_cost - cost
            if delta <= 0:
                # Set current solution the adjacent solution.
                cycle = adj_sol
                cost = adj_cost

                if cost < best_cost:
                    # New best found: reset the no-improvement counter.
                    count = 0
                    best_cycle = cycle.copy()
                    best_cost = cost
            else:
                # Accept even a worse solution with probability p.
                p = math.exp(-delta / temp)
                if p >= seed.random():
                    cycle = adj_sol
                    cost = adj_cost
        # Geometric cooling schedule.
        temp -= temp * alpha

    return best_cycle
|
| 1221 |
+
|
| 1222 |
+
|
| 1223 |
+
@py_random_state(9)
|
| 1224 |
+
@nx._dispatch(edge_attrs="weight")
|
| 1225 |
+
def threshold_accepting_tsp(
|
| 1226 |
+
G,
|
| 1227 |
+
init_cycle,
|
| 1228 |
+
weight="weight",
|
| 1229 |
+
source=None,
|
| 1230 |
+
threshold=1,
|
| 1231 |
+
move="1-1",
|
| 1232 |
+
max_iterations=10,
|
| 1233 |
+
N_inner=100,
|
| 1234 |
+
alpha=0.1,
|
| 1235 |
+
seed=None,
|
| 1236 |
+
):
|
| 1237 |
+
"""Returns an approximate solution to the traveling salesman problem.
|
| 1238 |
+
|
| 1239 |
+
This function uses threshold accepting methods to approximate the minimal cost
|
| 1240 |
+
cycle through the nodes. Starting from a suboptimal solution, threshold
|
| 1241 |
+
accepting methods perturb that solution, accepting any changes that make
|
| 1242 |
+
the solution no worse than increasing by a threshold amount. Improvements
|
| 1243 |
+
in cost are accepted, but so are changes leading to small increases in cost.
|
| 1244 |
+
This allows the solution to leave suboptimal local minima in solution space.
|
| 1245 |
+
The threshold is decreased slowly as iterations proceed helping to ensure
|
| 1246 |
+
an optimum. In summary, the function returns a cycle starting at `source`
|
| 1247 |
+
for which the total cost is minimized.
|
| 1248 |
+
|
| 1249 |
+
Parameters
|
| 1250 |
+
----------
|
| 1251 |
+
G : Graph
|
| 1252 |
+
`G` should be a complete weighted graph.
|
| 1253 |
+
The distance between all pairs of nodes should be included.
|
| 1254 |
+
|
| 1255 |
+
init_cycle : list or "greedy"
|
| 1256 |
+
The initial solution (a cycle through all nodes returning to the start).
|
| 1257 |
+
This argument has no default to make you think about it.
|
| 1258 |
+
If "greedy", use `greedy_tsp(G, weight)`.
|
| 1259 |
+
Other common starting cycles are `list(G) + [next(iter(G))]` or the final
|
| 1260 |
+
result of `simulated_annealing_tsp` when doing `threshold_accepting_tsp`.
|
| 1261 |
+
|
| 1262 |
+
weight : string, optional (default="weight")
|
| 1263 |
+
Edge data key corresponding to the edge weight.
|
| 1264 |
+
If any edge does not have this attribute the weight is set to 1.
|
| 1265 |
+
|
| 1266 |
+
source : node, optional (default: first node in list(G))
|
| 1267 |
+
Starting node. If None, defaults to ``next(iter(G))``
|
| 1268 |
+
|
| 1269 |
+
threshold : int, optional (default=1)
|
| 1270 |
+
The algorithm's threshold parameter. It represents the initial
|
| 1271 |
+
threshold's value
|
| 1272 |
+
|
| 1273 |
+
move : "1-1" or "1-0" or function, optional (default="1-1")
|
| 1274 |
+
Indicator of what move to use when finding new trial solutions.
|
| 1275 |
+
Strings indicate two special built-in moves:
|
| 1276 |
+
|
| 1277 |
+
- "1-1": 1-1 exchange which transposes the position
|
| 1278 |
+
of two elements of the current solution.
|
| 1279 |
+
The function called is :func:`swap_two_nodes`.
|
| 1280 |
+
For example if we apply 1-1 exchange in the solution
|
| 1281 |
+
``A = [3, 2, 1, 4, 3]``
|
| 1282 |
+
we can get the following by the transposition of 1 and 4 elements:
|
| 1283 |
+
``A' = [3, 2, 4, 1, 3]``
|
| 1284 |
+
- "1-0": 1-0 exchange which moves an node in the solution
|
| 1285 |
+
to a new position.
|
| 1286 |
+
The function called is :func:`move_one_node`.
|
| 1287 |
+
For example if we apply 1-0 exchange in the solution
|
| 1288 |
+
``A = [3, 2, 1, 4, 3]``
|
| 1289 |
+
we can transfer the fourth element to the second position:
|
| 1290 |
+
``A' = [3, 4, 2, 1, 3]``
|
| 1291 |
+
|
| 1292 |
+
You may provide your own functions to enact a move from
|
| 1293 |
+
one solution to a neighbor solution. The function must take
|
| 1294 |
+
the solution as input along with a `seed` input to control
|
| 1295 |
+
random number generation (see the `seed` input here).
|
| 1296 |
+
Your function should maintain the solution as a cycle with
|
| 1297 |
+
equal first and last node and all others appearing once.
|
| 1298 |
+
Your function should return the new solution.
|
| 1299 |
+
|
| 1300 |
+
max_iterations : int, optional (default=10)
|
| 1301 |
+
Declared done when this number of consecutive iterations of
|
| 1302 |
+
the outer loop occurs without any change in the best cost solution.
|
| 1303 |
+
|
| 1304 |
+
N_inner : int, optional (default=100)
|
| 1305 |
+
The number of iterations of the inner loop.
|
| 1306 |
+
|
| 1307 |
+
alpha : float between (0, 1), optional (default=0.1)
|
| 1308 |
+
Percentage of threshold decrease when there is at
|
| 1309 |
+
least one acceptance of a neighbor solution.
|
| 1310 |
+
If no inner loop moves are accepted the threshold remains unchanged.
|
| 1311 |
+
|
| 1312 |
+
seed : integer, random_state, or None (default)
|
| 1313 |
+
Indicator of random number generation state.
|
| 1314 |
+
See :ref:`Randomness<randomness>`.
|
| 1315 |
+
|
| 1316 |
+
Returns
|
| 1317 |
+
-------
|
| 1318 |
+
cycle : list of nodes
|
| 1319 |
+
Returns the cycle (list of nodes) that a salesman
|
| 1320 |
+
can follow to minimize total weight of the trip.
|
| 1321 |
+
|
| 1322 |
+
Raises
|
| 1323 |
+
------
|
| 1324 |
+
NetworkXError
|
| 1325 |
+
If `G` is not complete the algorithm raises an exception.
|
| 1326 |
+
|
| 1327 |
+
Examples
|
| 1328 |
+
--------
|
| 1329 |
+
>>> from networkx.algorithms import approximation as approx
|
| 1330 |
+
>>> G = nx.DiGraph()
|
| 1331 |
+
>>> G.add_weighted_edges_from({
|
| 1332 |
+
... ("A", "B", 3), ("A", "C", 17), ("A", "D", 14), ("B", "A", 3),
|
| 1333 |
+
... ("B", "C", 12), ("B", "D", 16), ("C", "A", 13),("C", "B", 12),
|
| 1334 |
+
... ("C", "D", 4), ("D", "A", 14), ("D", "B", 15), ("D", "C", 2)
|
| 1335 |
+
... })
|
| 1336 |
+
>>> cycle = approx.threshold_accepting_tsp(G, "greedy", source="D")
|
| 1337 |
+
>>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
|
| 1338 |
+
>>> cycle
|
| 1339 |
+
['D', 'C', 'B', 'A', 'D']
|
| 1340 |
+
>>> cost
|
| 1341 |
+
31
|
| 1342 |
+
>>> incycle = ["D", "B", "A", "C", "D"]
|
| 1343 |
+
>>> cycle = approx.threshold_accepting_tsp(G, incycle, source="D")
|
| 1344 |
+
>>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
|
| 1345 |
+
>>> cycle
|
| 1346 |
+
['D', 'C', 'B', 'A', 'D']
|
| 1347 |
+
>>> cost
|
| 1348 |
+
31
|
| 1349 |
+
|
| 1350 |
+
Notes
|
| 1351 |
+
-----
|
| 1352 |
+
Threshold Accepting is a metaheuristic local search algorithm.
|
| 1353 |
+
The main characteristic of this algorithm is that it accepts
|
| 1354 |
+
even solutions which lead to the increase of the cost in order
|
| 1355 |
+
to escape from low quality local optimal solutions.
|
| 1356 |
+
|
| 1357 |
+
This algorithm needs an initial solution. This solution can be
|
| 1358 |
+
constructed by a simple greedy algorithm. At every iteration, it
|
| 1359 |
+
selects thoughtfully a neighbor solution.
|
| 1360 |
+
Consider $c(x)$ cost of current solution and $c(x')$ cost of
|
| 1361 |
+
neighbor solution.
|
| 1362 |
+
If $c(x') - c(x) <= threshold$ then the neighbor solution becomes the current
|
| 1363 |
+
solution for the next iteration, where the threshold is named threshold.
|
| 1364 |
+
|
| 1365 |
+
In comparison to the Simulated Annealing algorithm, the Threshold
|
| 1366 |
+
Accepting algorithm does not accept very low quality solutions
|
| 1367 |
+
(due to the presence of the threshold value). In the case of
|
| 1368 |
+
Simulated Annealing, even a very low quality solution can
|
| 1369 |
+
be accepted with probability $p$.
|
| 1370 |
+
|
| 1371 |
+
Time complexity:
|
| 1372 |
+
It has a running time $O(m * n * |V|)$ where $m$ and $n$ are the number
|
| 1373 |
+
of times the outer and inner loop run respectively.
|
| 1374 |
+
|
| 1375 |
+
For more information and how algorithm is inspired see:
|
| 1376 |
+
https://doi.org/10.1016/0021-9991(90)90201-B
|
| 1377 |
+
|
| 1378 |
+
See Also
|
| 1379 |
+
--------
|
| 1380 |
+
simulated_annealing_tsp
|
| 1381 |
+
|
| 1382 |
+
"""
|
| 1383 |
+
if move == "1-1":
|
| 1384 |
+
move = swap_two_nodes
|
| 1385 |
+
elif move == "1-0":
|
| 1386 |
+
move = move_one_node
|
| 1387 |
+
if init_cycle == "greedy":
|
| 1388 |
+
# Construct an initial solution using a greedy algorithm.
|
| 1389 |
+
cycle = greedy_tsp(G, weight=weight, source=source)
|
| 1390 |
+
if G.number_of_nodes() == 2:
|
| 1391 |
+
return cycle
|
| 1392 |
+
|
| 1393 |
+
else:
|
| 1394 |
+
cycle = list(init_cycle)
|
| 1395 |
+
if source is None:
|
| 1396 |
+
source = cycle[0]
|
| 1397 |
+
elif source != cycle[0]:
|
| 1398 |
+
raise nx.NetworkXError("source must be first node in init_cycle")
|
| 1399 |
+
if cycle[0] != cycle[-1]:
|
| 1400 |
+
raise nx.NetworkXError("init_cycle must be a cycle. (return to start)")
|
| 1401 |
+
|
| 1402 |
+
if len(cycle) - 1 != len(G) or len(set(G.nbunch_iter(cycle))) != len(G):
|
| 1403 |
+
raise nx.NetworkXError("init_cycle is not all and only nodes.")
|
| 1404 |
+
|
| 1405 |
+
# Check that G is a complete graph
|
| 1406 |
+
N = len(G) - 1
|
| 1407 |
+
# This check ignores selfloops which is what we want here.
|
| 1408 |
+
if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
|
| 1409 |
+
raise nx.NetworkXError("G must be a complete graph.")
|
| 1410 |
+
|
| 1411 |
+
if G.number_of_nodes() == 2:
|
| 1412 |
+
neighbor = list(G.neighbors(source))[0]
|
| 1413 |
+
return [source, neighbor, source]
|
| 1414 |
+
|
| 1415 |
+
# Find the cost of initial solution
|
| 1416 |
+
cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(cycle))
|
| 1417 |
+
|
| 1418 |
+
count = 0
|
| 1419 |
+
best_cycle = cycle.copy()
|
| 1420 |
+
best_cost = cost
|
| 1421 |
+
while count <= max_iterations:
|
| 1422 |
+
count += 1
|
| 1423 |
+
accepted = False
|
| 1424 |
+
for i in range(N_inner):
|
| 1425 |
+
adj_sol = move(cycle, seed)
|
| 1426 |
+
adj_cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(adj_sol))
|
| 1427 |
+
delta = adj_cost - cost
|
| 1428 |
+
if delta <= threshold:
|
| 1429 |
+
accepted = True
|
| 1430 |
+
|
| 1431 |
+
# Set current solution the adjacent solution.
|
| 1432 |
+
cycle = adj_sol
|
| 1433 |
+
cost = adj_cost
|
| 1434 |
+
|
| 1435 |
+
if cost < best_cost:
|
| 1436 |
+
count = 0
|
| 1437 |
+
best_cycle = cycle.copy()
|
| 1438 |
+
best_cost = cost
|
| 1439 |
+
if accepted:
|
| 1440 |
+
threshold -= threshold * alpha
|
| 1441 |
+
|
| 1442 |
+
return best_cycle
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/vertex_cover.py
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for computing an approximate minimum weight vertex cover.
|
| 2 |
+
|
| 3 |
+
A |vertex cover|_ is a subset of nodes such that each edge in the graph
|
| 4 |
+
is incident to at least one node in the subset.
|
| 5 |
+
|
| 6 |
+
.. _vertex cover: https://en.wikipedia.org/wiki/Vertex_cover
|
| 7 |
+
.. |vertex cover| replace:: *vertex cover*
|
| 8 |
+
|
| 9 |
+
"""
|
| 10 |
+
import networkx as nx
|
| 11 |
+
|
| 12 |
+
__all__ = ["min_weighted_vertex_cover"]
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
@nx._dispatch(node_attrs="weight")
|
| 16 |
+
def min_weighted_vertex_cover(G, weight=None):
|
| 17 |
+
r"""Returns an approximate minimum weighted vertex cover.
|
| 18 |
+
|
| 19 |
+
The set of nodes returned by this function is guaranteed to be a
|
| 20 |
+
vertex cover, and the total weight of the set is guaranteed to be at
|
| 21 |
+
most twice the total weight of the minimum weight vertex cover. In
|
| 22 |
+
other words,
|
| 23 |
+
|
| 24 |
+
.. math::
|
| 25 |
+
|
| 26 |
+
w(S) \leq 2 * w(S^*),
|
| 27 |
+
|
| 28 |
+
where $S$ is the vertex cover returned by this function,
|
| 29 |
+
$S^*$ is the vertex cover of minimum weight out of all vertex
|
| 30 |
+
covers of the graph, and $w$ is the function that computes the
|
| 31 |
+
sum of the weights of each node in that given set.
|
| 32 |
+
|
| 33 |
+
Parameters
|
| 34 |
+
----------
|
| 35 |
+
G : NetworkX graph
|
| 36 |
+
|
| 37 |
+
weight : string, optional (default = None)
|
| 38 |
+
If None, every node has weight 1. If a string, use this node
|
| 39 |
+
attribute as the node weight. A node without this attribute is
|
| 40 |
+
assumed to have weight 1.
|
| 41 |
+
|
| 42 |
+
Returns
|
| 43 |
+
-------
|
| 44 |
+
min_weighted_cover : set
|
| 45 |
+
Returns a set of nodes whose weight sum is no more than twice
|
| 46 |
+
the weight sum of the minimum weight vertex cover.
|
| 47 |
+
|
| 48 |
+
Notes
|
| 49 |
+
-----
|
| 50 |
+
For a directed graph, a vertex cover has the same definition: a set
|
| 51 |
+
of nodes such that each edge in the graph is incident to at least
|
| 52 |
+
one node in the set. Whether the node is the head or tail of the
|
| 53 |
+
directed edge is ignored.
|
| 54 |
+
|
| 55 |
+
This is the local-ratio algorithm for computing an approximate
|
| 56 |
+
vertex cover. The algorithm greedily reduces the costs over edges,
|
| 57 |
+
iteratively building a cover. The worst-case runtime of this
|
| 58 |
+
implementation is $O(m \log n)$, where $n$ is the number
|
| 59 |
+
of nodes and $m$ the number of edges in the graph.
|
| 60 |
+
|
| 61 |
+
References
|
| 62 |
+
----------
|
| 63 |
+
.. [1] Bar-Yehuda, R., and Even, S. (1985). "A local-ratio theorem for
|
| 64 |
+
approximating the weighted vertex cover problem."
|
| 65 |
+
*Annals of Discrete Mathematics*, 25, 27–46
|
| 66 |
+
<http://www.cs.technion.ac.il/~reuven/PDF/vc_lr.pdf>
|
| 67 |
+
|
| 68 |
+
"""
|
| 69 |
+
cost = dict(G.nodes(data=weight, default=1))
|
| 70 |
+
# While there are uncovered edges, choose an uncovered and update
|
| 71 |
+
# the cost of the remaining edges.
|
| 72 |
+
cover = set()
|
| 73 |
+
for u, v in G.edges():
|
| 74 |
+
if u in cover or v in cover:
|
| 75 |
+
continue
|
| 76 |
+
if cost[u] <= cost[v]:
|
| 77 |
+
cover.add(u)
|
| 78 |
+
cost[v] -= cost[u]
|
| 79 |
+
else:
|
| 80 |
+
cover.add(v)
|
| 81 |
+
cost[u] -= cost[v]
|
| 82 |
+
return cover
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__init__.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for computing and measuring community structure.
|
| 2 |
+
|
| 3 |
+
The ``community`` subpackage can be accessed by using :mod:`networkx.community`, then accessing the
|
| 4 |
+
functions as attributes of ``community``. For example::
|
| 5 |
+
|
| 6 |
+
>>> import networkx as nx
|
| 7 |
+
>>> G = nx.barbell_graph(5, 1)
|
| 8 |
+
>>> communities_generator = nx.community.girvan_newman(G)
|
| 9 |
+
>>> top_level_communities = next(communities_generator)
|
| 10 |
+
>>> next_level_communities = next(communities_generator)
|
| 11 |
+
>>> sorted(map(sorted, next_level_communities))
|
| 12 |
+
[[0, 1, 2, 3, 4], [5], [6, 7, 8, 9, 10]]
|
| 13 |
+
|
| 14 |
+
"""
|
| 15 |
+
from networkx.algorithms.community.asyn_fluid import *
|
| 16 |
+
from networkx.algorithms.community.centrality import *
|
| 17 |
+
from networkx.algorithms.community.kclique import *
|
| 18 |
+
from networkx.algorithms.community.kernighan_lin import *
|
| 19 |
+
from networkx.algorithms.community.label_propagation import *
|
| 20 |
+
from networkx.algorithms.community.lukes import *
|
| 21 |
+
from networkx.algorithms.community.modularity_max import *
|
| 22 |
+
from networkx.algorithms.community.quality import *
|
| 23 |
+
from networkx.algorithms.community.community_utils import *
|
| 24 |
+
from networkx.algorithms.community.louvain import *
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/asyn_fluid.cpython-311.pyc
ADDED
|
Binary file (6.58 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/community_utils.cpython-311.pyc
ADDED
|
Binary file (1.78 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/label_propagation.cpython-311.pyc
ADDED
|
Binary file (15.5 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/modularity_max.cpython-311.pyc
ADDED
|
Binary file (20 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/asyn_fluid.py
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Asynchronous Fluid Communities algorithm for community detection."""
|
| 2 |
+
|
| 3 |
+
from collections import Counter
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.algorithms.components import is_connected
|
| 7 |
+
from networkx.exception import NetworkXError
|
| 8 |
+
from networkx.utils import groups, not_implemented_for, py_random_state
|
| 9 |
+
|
| 10 |
+
__all__ = ["asyn_fluidc"]
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
@not_implemented_for("directed", "multigraph")
|
| 14 |
+
@py_random_state(3)
|
| 15 |
+
@nx._dispatch
|
| 16 |
+
def asyn_fluidc(G, k, max_iter=100, seed=None):
|
| 17 |
+
"""Returns communities in `G` as detected by Fluid Communities algorithm.
|
| 18 |
+
|
| 19 |
+
The asynchronous fluid communities algorithm is described in
|
| 20 |
+
[1]_. The algorithm is based on the simple idea of fluids interacting
|
| 21 |
+
in an environment, expanding and pushing each other. Its initialization is
|
| 22 |
+
random, so found communities may vary on different executions.
|
| 23 |
+
|
| 24 |
+
The algorithm proceeds as follows. First each of the initial k communities
|
| 25 |
+
is initialized in a random vertex in the graph. Then the algorithm iterates
|
| 26 |
+
over all vertices in a random order, updating the community of each vertex
|
| 27 |
+
based on its own community and the communities of its neighbours. This
|
| 28 |
+
process is performed several times until convergence.
|
| 29 |
+
At all times, each community has a total density of 1, which is equally
|
| 30 |
+
distributed among the vertices it contains. If a vertex changes of
|
| 31 |
+
community, vertex densities of affected communities are adjusted
|
| 32 |
+
immediately. When a complete iteration over all vertices is done, such that
|
| 33 |
+
no vertex changes the community it belongs to, the algorithm has converged
|
| 34 |
+
and returns.
|
| 35 |
+
|
| 36 |
+
This is the original version of the algorithm described in [1]_.
|
| 37 |
+
Unfortunately, it does not support weighted graphs yet.
|
| 38 |
+
|
| 39 |
+
Parameters
|
| 40 |
+
----------
|
| 41 |
+
G : NetworkX graph
|
| 42 |
+
Graph must be simple and undirected.
|
| 43 |
+
|
| 44 |
+
k : integer
|
| 45 |
+
The number of communities to be found.
|
| 46 |
+
|
| 47 |
+
max_iter : integer
|
| 48 |
+
The number of maximum iterations allowed. By default 100.
|
| 49 |
+
|
| 50 |
+
seed : integer, random_state, or None (default)
|
| 51 |
+
Indicator of random number generation state.
|
| 52 |
+
See :ref:`Randomness<randomness>`.
|
| 53 |
+
|
| 54 |
+
Returns
|
| 55 |
+
-------
|
| 56 |
+
communities : iterable
|
| 57 |
+
Iterable of communities given as sets of nodes.
|
| 58 |
+
|
| 59 |
+
Notes
|
| 60 |
+
-----
|
| 61 |
+
k variable is not an optional argument.
|
| 62 |
+
|
| 63 |
+
References
|
| 64 |
+
----------
|
| 65 |
+
.. [1] Parés F., Garcia-Gasulla D. et al. "Fluid Communities: A
|
| 66 |
+
Competitive and Highly Scalable Community Detection Algorithm".
|
| 67 |
+
[https://arxiv.org/pdf/1703.09307.pdf].
|
| 68 |
+
"""
|
| 69 |
+
# Initial checks
|
| 70 |
+
if not isinstance(k, int):
|
| 71 |
+
raise NetworkXError("k must be an integer.")
|
| 72 |
+
if not k > 0:
|
| 73 |
+
raise NetworkXError("k must be greater than 0.")
|
| 74 |
+
if not is_connected(G):
|
| 75 |
+
raise NetworkXError("Fluid Communities require connected Graphs.")
|
| 76 |
+
if len(G) < k:
|
| 77 |
+
raise NetworkXError("k cannot be bigger than the number of nodes.")
|
| 78 |
+
# Initialization
|
| 79 |
+
max_density = 1.0
|
| 80 |
+
vertices = list(G)
|
| 81 |
+
seed.shuffle(vertices)
|
| 82 |
+
communities = {n: i for i, n in enumerate(vertices[:k])}
|
| 83 |
+
density = {}
|
| 84 |
+
com_to_numvertices = {}
|
| 85 |
+
for vertex in communities:
|
| 86 |
+
com_to_numvertices[communities[vertex]] = 1
|
| 87 |
+
density[communities[vertex]] = max_density
|
| 88 |
+
# Set up control variables and start iterating
|
| 89 |
+
iter_count = 0
|
| 90 |
+
cont = True
|
| 91 |
+
while cont:
|
| 92 |
+
cont = False
|
| 93 |
+
iter_count += 1
|
| 94 |
+
# Loop over all vertices in graph in a random order
|
| 95 |
+
vertices = list(G)
|
| 96 |
+
seed.shuffle(vertices)
|
| 97 |
+
for vertex in vertices:
|
| 98 |
+
# Updating rule
|
| 99 |
+
com_counter = Counter()
|
| 100 |
+
# Take into account self vertex community
|
| 101 |
+
try:
|
| 102 |
+
com_counter.update({communities[vertex]: density[communities[vertex]]})
|
| 103 |
+
except KeyError:
|
| 104 |
+
pass
|
| 105 |
+
# Gather neighbour vertex communities
|
| 106 |
+
for v in G[vertex]:
|
| 107 |
+
try:
|
| 108 |
+
com_counter.update({communities[v]: density[communities[v]]})
|
| 109 |
+
except KeyError:
|
| 110 |
+
continue
|
| 111 |
+
# Check which is the community with highest density
|
| 112 |
+
new_com = -1
|
| 113 |
+
if len(com_counter.keys()) > 0:
|
| 114 |
+
max_freq = max(com_counter.values())
|
| 115 |
+
best_communities = [
|
| 116 |
+
com
|
| 117 |
+
for com, freq in com_counter.items()
|
| 118 |
+
if (max_freq - freq) < 0.0001
|
| 119 |
+
]
|
| 120 |
+
# If actual vertex com in best communities, it is preserved
|
| 121 |
+
try:
|
| 122 |
+
if communities[vertex] in best_communities:
|
| 123 |
+
new_com = communities[vertex]
|
| 124 |
+
except KeyError:
|
| 125 |
+
pass
|
| 126 |
+
# If vertex community changes...
|
| 127 |
+
if new_com == -1:
|
| 128 |
+
# Set flag of non-convergence
|
| 129 |
+
cont = True
|
| 130 |
+
# Randomly chose a new community from candidates
|
| 131 |
+
new_com = seed.choice(best_communities)
|
| 132 |
+
# Update previous community status
|
| 133 |
+
try:
|
| 134 |
+
com_to_numvertices[communities[vertex]] -= 1
|
| 135 |
+
density[communities[vertex]] = (
|
| 136 |
+
max_density / com_to_numvertices[communities[vertex]]
|
| 137 |
+
)
|
| 138 |
+
except KeyError:
|
| 139 |
+
pass
|
| 140 |
+
# Update new community status
|
| 141 |
+
communities[vertex] = new_com
|
| 142 |
+
com_to_numvertices[communities[vertex]] += 1
|
| 143 |
+
density[communities[vertex]] = (
|
| 144 |
+
max_density / com_to_numvertices[communities[vertex]]
|
| 145 |
+
)
|
| 146 |
+
# If maximum iterations reached --> output actual results
|
| 147 |
+
if iter_count > max_iter:
|
| 148 |
+
break
|
| 149 |
+
# Return results by grouping communities as list of vertices
|
| 150 |
+
return iter(groups(communities).values())
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_kclique.cpython-311.pyc
ADDED
|
Binary file (5.61 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_utils.cpython-311.pyc
ADDED
|
Binary file (2.01 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/test_kclique.py
ADDED
|
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from itertools import combinations
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def test_overlapping_K5():
|
| 9 |
+
G = nx.Graph()
|
| 10 |
+
G.add_edges_from(combinations(range(5), 2)) # Add a five clique
|
| 11 |
+
G.add_edges_from(combinations(range(2, 7), 2)) # Add another five clique
|
| 12 |
+
c = list(nx.community.k_clique_communities(G, 4))
|
| 13 |
+
assert c == [frozenset(range(7))]
|
| 14 |
+
c = set(nx.community.k_clique_communities(G, 5))
|
| 15 |
+
assert c == {frozenset(range(5)), frozenset(range(2, 7))}
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def test_isolated_K5():
|
| 19 |
+
G = nx.Graph()
|
| 20 |
+
G.add_edges_from(combinations(range(5), 2)) # Add a five clique
|
| 21 |
+
G.add_edges_from(combinations(range(5, 10), 2)) # Add another five clique
|
| 22 |
+
c = set(nx.community.k_clique_communities(G, 5))
|
| 23 |
+
assert c == {frozenset(range(5)), frozenset(range(5, 10))}
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class TestZacharyKarateClub:
|
| 27 |
+
def setup_method(self):
|
| 28 |
+
self.G = nx.karate_club_graph()
|
| 29 |
+
|
| 30 |
+
def _check_communities(self, k, expected):
|
| 31 |
+
communities = set(nx.community.k_clique_communities(self.G, k))
|
| 32 |
+
assert communities == expected
|
| 33 |
+
|
| 34 |
+
def test_k2(self):
|
| 35 |
+
# clique percolation with k=2 is just connected components
|
| 36 |
+
expected = {frozenset(self.G)}
|
| 37 |
+
self._check_communities(2, expected)
|
| 38 |
+
|
| 39 |
+
def test_k3(self):
|
| 40 |
+
comm1 = [
|
| 41 |
+
0,
|
| 42 |
+
1,
|
| 43 |
+
2,
|
| 44 |
+
3,
|
| 45 |
+
7,
|
| 46 |
+
8,
|
| 47 |
+
12,
|
| 48 |
+
13,
|
| 49 |
+
14,
|
| 50 |
+
15,
|
| 51 |
+
17,
|
| 52 |
+
18,
|
| 53 |
+
19,
|
| 54 |
+
20,
|
| 55 |
+
21,
|
| 56 |
+
22,
|
| 57 |
+
23,
|
| 58 |
+
26,
|
| 59 |
+
27,
|
| 60 |
+
28,
|
| 61 |
+
29,
|
| 62 |
+
30,
|
| 63 |
+
31,
|
| 64 |
+
32,
|
| 65 |
+
33,
|
| 66 |
+
]
|
| 67 |
+
comm2 = [0, 4, 5, 6, 10, 16]
|
| 68 |
+
comm3 = [24, 25, 31]
|
| 69 |
+
expected = {frozenset(comm1), frozenset(comm2), frozenset(comm3)}
|
| 70 |
+
self._check_communities(3, expected)
|
| 71 |
+
|
| 72 |
+
def test_k4(self):
|
| 73 |
+
expected = {
|
| 74 |
+
frozenset([0, 1, 2, 3, 7, 13]),
|
| 75 |
+
frozenset([8, 32, 30, 33]),
|
| 76 |
+
frozenset([32, 33, 29, 23]),
|
| 77 |
+
}
|
| 78 |
+
self._check_communities(4, expected)
|
| 79 |
+
|
| 80 |
+
def test_k5(self):
|
| 81 |
+
expected = {frozenset([0, 1, 2, 3, 7, 13])}
|
| 82 |
+
self._check_communities(5, expected)
|
| 83 |
+
|
| 84 |
+
def test_k6(self):
|
| 85 |
+
expected = set()
|
| 86 |
+
self._check_communities(6, expected)
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
def test_bad_k():
|
| 90 |
+
with pytest.raises(nx.NetworkXError):
|
| 91 |
+
list(nx.community.k_clique_communities(nx.Graph(), 1))
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (443 Bytes). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/weakly_connected.cpython-311.pyc
ADDED
|
Binary file (5.86 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/attracting.py
ADDED
|
@@ -0,0 +1,114 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Attracting components."""
|
| 2 |
+
import networkx as nx
|
| 3 |
+
from networkx.utils.decorators import not_implemented_for
|
| 4 |
+
|
| 5 |
+
__all__ = [
|
| 6 |
+
"number_attracting_components",
|
| 7 |
+
"attracting_components",
|
| 8 |
+
"is_attracting_component",
|
| 9 |
+
]
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
@not_implemented_for("undirected")
|
| 13 |
+
@nx._dispatch
|
| 14 |
+
def attracting_components(G):
|
| 15 |
+
"""Generates the attracting components in `G`.
|
| 16 |
+
|
| 17 |
+
An attracting component in a directed graph `G` is a strongly connected
|
| 18 |
+
component with the property that a random walker on the graph will never
|
| 19 |
+
leave the component, once it enters the component.
|
| 20 |
+
|
| 21 |
+
The nodes in attracting components can also be thought of as recurrent
|
| 22 |
+
nodes. If a random walker enters the attractor containing the node, then
|
| 23 |
+
the node will be visited infinitely often.
|
| 24 |
+
|
| 25 |
+
To obtain induced subgraphs on each component use:
|
| 26 |
+
``(G.subgraph(c).copy() for c in attracting_components(G))``
|
| 27 |
+
|
| 28 |
+
Parameters
|
| 29 |
+
----------
|
| 30 |
+
G : DiGraph, MultiDiGraph
|
| 31 |
+
The graph to be analyzed.
|
| 32 |
+
|
| 33 |
+
Returns
|
| 34 |
+
-------
|
| 35 |
+
attractors : generator of sets
|
| 36 |
+
A generator of sets of nodes, one for each attracting component of G.
|
| 37 |
+
|
| 38 |
+
Raises
|
| 39 |
+
------
|
| 40 |
+
NetworkXNotImplemented
|
| 41 |
+
If the input graph is undirected.
|
| 42 |
+
|
| 43 |
+
See Also
|
| 44 |
+
--------
|
| 45 |
+
number_attracting_components
|
| 46 |
+
is_attracting_component
|
| 47 |
+
|
| 48 |
+
"""
|
| 49 |
+
scc = list(nx.strongly_connected_components(G))
|
| 50 |
+
cG = nx.condensation(G, scc)
|
| 51 |
+
for n in cG:
|
| 52 |
+
if cG.out_degree(n) == 0:
|
| 53 |
+
yield scc[n]
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
@not_implemented_for("undirected")
|
| 57 |
+
@nx._dispatch
|
| 58 |
+
def number_attracting_components(G):
|
| 59 |
+
"""Returns the number of attracting components in `G`.
|
| 60 |
+
|
| 61 |
+
Parameters
|
| 62 |
+
----------
|
| 63 |
+
G : DiGraph, MultiDiGraph
|
| 64 |
+
The graph to be analyzed.
|
| 65 |
+
|
| 66 |
+
Returns
|
| 67 |
+
-------
|
| 68 |
+
n : int
|
| 69 |
+
The number of attracting components in G.
|
| 70 |
+
|
| 71 |
+
Raises
|
| 72 |
+
------
|
| 73 |
+
NetworkXNotImplemented
|
| 74 |
+
If the input graph is undirected.
|
| 75 |
+
|
| 76 |
+
See Also
|
| 77 |
+
--------
|
| 78 |
+
attracting_components
|
| 79 |
+
is_attracting_component
|
| 80 |
+
|
| 81 |
+
"""
|
| 82 |
+
return sum(1 for ac in attracting_components(G))
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
@not_implemented_for("undirected")
@nx._dispatch
def is_attracting_component(G):
    """Returns True if `G` consists of a single attracting component.

    Parameters
    ----------
    G : DiGraph, MultiDiGraph
        The graph to be analyzed.

    Returns
    -------
    attracting : bool
        True if `G` has a single attracting component. Otherwise, False.

    Raises
    ------
    NetworkXNotImplemented
        If the input graph is undirected.

    See Also
    --------
    attracting_components
    number_attracting_components

    """
    components = list(attracting_components(G))
    # G is a single attracting component exactly when there is one
    # attracting component and it spans every node of G.
    return len(components) == 1 and len(components[0]) == len(G)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/strongly_connected.py
ADDED
|
@@ -0,0 +1,431 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Strongly connected components."""
|
| 2 |
+
import networkx as nx
|
| 3 |
+
from networkx.utils.decorators import not_implemented_for
|
| 4 |
+
|
| 5 |
+
__all__ = [
|
| 6 |
+
"number_strongly_connected_components",
|
| 7 |
+
"strongly_connected_components",
|
| 8 |
+
"is_strongly_connected",
|
| 9 |
+
"strongly_connected_components_recursive",
|
| 10 |
+
"kosaraju_strongly_connected_components",
|
| 11 |
+
"condensation",
|
| 12 |
+
]
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
@not_implemented_for("undirected")
@nx._dispatch
def strongly_connected_components(G):
    """Generate nodes in strongly connected components of graph.

    Parameters
    ----------
    G : NetworkX Graph
        A directed graph.

    Returns
    -------
    comp : generator of sets
        A generator of sets of nodes, one for each strongly connected
        component of G.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    Generate a sorted list of strongly connected components, largest first.

    >>> G = nx.cycle_graph(4, create_using=nx.DiGraph())
    >>> nx.add_cycle(G, [10, 11, 12])
    >>> [
    ...     len(c)
    ...     for c in sorted(nx.strongly_connected_components(G), key=len, reverse=True)
    ... ]
    [4, 3]

    If you only want the largest component, it's more efficient to
    use max instead of sort.

    >>> largest = max(nx.strongly_connected_components(G), key=len)

    See Also
    --------
    connected_components
    weakly_connected_components
    kosaraju_strongly_connected_components

    Notes
    -----
    Uses Tarjan's algorithm[1]_ with Nuutila's modifications[2]_.
    Nonrecursive version of algorithm.

    References
    ----------
    .. [1] Depth-first search and linear graph algorithms, R. Tarjan
       SIAM Journal of Computing 1(2):146-160, (1972).

    .. [2] On finding the strongly connected components in a directed graph.
       E. Nuutila and E. Soisalon-Soinen
       Information Processing Letters 49(1): 9-14, (1994)..

    """
    preorder = {}  # node -> DFS discovery index
    lowlink = {}  # node -> smallest preorder reachable from its subtree
    scc_found = set()  # nodes already emitted as part of a component
    scc_queue = []  # stack of nodes awaiting assignment to a component
    i = 0  # Preorder counter
    # One persistent iterator per node so DFS can resume where it left off
    # when a node is revisited on the explicit stack.
    neighbors = {v: iter(G[v]) for v in G}
    for source in G:
        if source not in scc_found:
            queue = [source]  # explicit DFS stack (avoids recursion limits)
            while queue:
                v = queue[-1]
                if v not in preorder:
                    i = i + 1
                    preorder[v] = i
                done = True
                for w in neighbors[v]:
                    if w not in preorder:
                        # Descend into the first undiscovered successor.
                        queue.append(w)
                        done = False
                        break
                if done:
                    # All successors of v explored: compute v's lowlink from
                    # its still-unassigned neighbors (Nuutila's modification).
                    lowlink[v] = preorder[v]
                    for w in G[v]:
                        if w not in scc_found:
                            if preorder[w] > preorder[v]:
                                # was: min([...]) — avoid building a throwaway list
                                lowlink[v] = min(lowlink[v], lowlink[w])
                            else:
                                lowlink[v] = min(lowlink[v], preorder[w])
                    queue.pop()
                    if lowlink[v] == preorder[v]:
                        # v is the root of an SCC: collect every queued node
                        # discovered after v into the component.
                        scc = {v}
                        while scc_queue and preorder[scc_queue[-1]] > preorder[v]:
                            k = scc_queue.pop()
                            scc.add(k)
                        scc_found.update(scc)
                        yield scc
                    else:
                        # v belongs to a component rooted earlier; defer it.
                        scc_queue.append(v)
|
| 113 |
+
|
| 114 |
+
@not_implemented_for("undirected")
@nx._dispatch
def kosaraju_strongly_connected_components(G, source=None):
    """Generate nodes in strongly connected components of graph.

    Parameters
    ----------
    G : NetworkX Graph
        A directed graph.

    Returns
    -------
    comp : generator of sets
        A generator of sets of nodes, one for each strongly connected
        component of G.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    Generate a sorted list of strongly connected components, largest first.

    >>> G = nx.cycle_graph(4, create_using=nx.DiGraph())
    >>> nx.add_cycle(G, [10, 11, 12])
    >>> [
    ...     len(c)
    ...     for c in sorted(
    ...         nx.kosaraju_strongly_connected_components(G), key=len, reverse=True
    ...     )
    ... ]
    [4, 3]

    If you only want the largest component, it's more efficient to
    use max instead of sort.

    >>> largest = max(nx.kosaraju_strongly_connected_components(G), key=len)

    See Also
    --------
    strongly_connected_components

    Notes
    -----
    Uses Kosaraju's algorithm.

    """
    # First pass: DFS postorder on the reversed graph (view, no copy).
    finish_order = list(nx.dfs_postorder_nodes(G.reverse(copy=False), source=source))

    # Second pass: DFS on G from each unvisited node, in decreasing
    # finishing time; each sweep yields exactly one component.
    visited = set()
    for root in reversed(finish_order):
        if root in visited:
            continue
        component = {v for v in nx.dfs_preorder_nodes(G, root) if v not in visited}
        visited.update(component)
        yield component
|
| 175 |
+
|
| 176 |
+
@not_implemented_for("undirected")
@nx._dispatch
def strongly_connected_components_recursive(G):
    """Generate nodes in strongly connected components of graph.

    .. deprecated:: 3.2

       This function is deprecated and will be removed in a future version of
       NetworkX. Use `strongly_connected_components` instead.

    Recursive version of algorithm.

    Parameters
    ----------
    G : NetworkX Graph
        A directed graph.

    Returns
    -------
    comp : generator of sets
        A generator of sets of nodes, one for each strongly connected
        component of G.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    Generate a sorted list of strongly connected components, largest first.

    >>> G = nx.cycle_graph(4, create_using=nx.DiGraph())
    >>> nx.add_cycle(G, [10, 11, 12])
    >>> [
    ...     len(c)
    ...     for c in sorted(
    ...         nx.strongly_connected_components_recursive(G), key=len, reverse=True
    ...     )
    ... ]
    [4, 3]

    If you only want the largest component, it's more efficient to
    use max instead of sort.

    >>> largest = max(nx.strongly_connected_components_recursive(G), key=len)

    To create the induced subgraph of the components use:

    >>> S = [G.subgraph(c).copy() for c in nx.strongly_connected_components(G)]

    See Also
    --------
    connected_components

    Notes
    -----
    Uses Tarjan's algorithm[1]_ with Nuutila's modifications[2]_.

    References
    ----------
    .. [1] Depth-first search and linear graph algorithms, R. Tarjan
       SIAM Journal of Computing 1(2):146-160, (1972).

    .. [2] On finding the strongly connected components in a directed graph.
       E. Nuutila and E. Soisalon-Soinen
       Information Processing Letters 49(1): 9-14, (1994)..

    """
    import warnings

    # Emit the deprecation warning eagerly, then delegate to the
    # nonrecursive implementation (identical output).
    msg = (
        "\n\nstrongly_connected_components_recursive is deprecated and will be\n"
        "removed in the future. Use strongly_connected_components instead."
    )
    warnings.warn(msg, category=DeprecationWarning, stacklevel=2)

    yield from strongly_connected_components(G)
|
| 257 |
+
|
| 258 |
+
@not_implemented_for("undirected")
@nx._dispatch
def number_strongly_connected_components(G):
    """Returns number of strongly connected components in graph.

    Parameters
    ----------
    G : NetworkX graph
        A directed graph.

    Returns
    -------
    n : integer
        Number of strongly connected components

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 0), (2, 3), (4, 5), (3, 4), (5, 6), (6, 3), (6, 7)])
    >>> nx.number_strongly_connected_components(G)
    3

    See Also
    --------
    strongly_connected_components
    number_connected_components
    number_weakly_connected_components

    Notes
    -----
    For directed graphs only.
    """
    # Consume the component generator, counting as we go.
    total = 0
    for _ in strongly_connected_components(G):
        total += 1
    return total
|
| 296 |
+
|
| 297 |
+
@not_implemented_for("undirected")
@nx._dispatch
def is_strongly_connected(G):
    """Test directed graph for strong connectivity.

    A directed graph is strongly connected if and only if every vertex in
    the graph is reachable from every other vertex.

    Parameters
    ----------
    G : NetworkX Graph
        A directed graph.

    Returns
    -------
    connected : bool
        True if the graph is strongly connected, False otherwise.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 0), (2, 4), (4, 2)])
    >>> nx.is_strongly_connected(G)
    True
    >>> G.remove_edge(2, 3)
    >>> nx.is_strongly_connected(G)
    False

    Raises
    ------
    NetworkXPointlessConcept
        If G is the null graph.

    NetworkXNotImplemented
        If G is undirected.

    See Also
    --------
    is_weakly_connected
    is_semiconnected
    is_connected
    is_biconnected
    strongly_connected_components

    Notes
    -----
    For directed graphs only.
    """
    if len(G) == 0:
        # Connectivity of the null graph is not defined.
        raise nx.NetworkXPointlessConcept(
            "Connectivity is undefined for the null graph."
        )

    # G is strongly connected iff the first SCC already covers every node.
    first_component = next(strongly_connected_components(G))
    return len(first_component) == len(G)
|
| 348 |
+
|
| 349 |
+
@not_implemented_for("undirected")
@nx._dispatch
def condensation(G, scc=None):
    """Returns the condensation of G.

    The condensation of G is the graph with each of the strongly connected
    components contracted into a single node.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed graph.

    scc: list or generator (optional, default=None)
        Strongly connected components. If provided, the elements in
        `scc` must partition the nodes in `G`. If not provided, it will be
        calculated as scc=nx.strongly_connected_components(G).

    Returns
    -------
    C : NetworkX DiGraph
        The condensation graph C of G. The node labels are integers
        corresponding to the index of the component in the list of
        strongly connected components of G. C has a graph attribute named
        'mapping' with a dictionary mapping the original nodes to the
        nodes in C to which they belong. Each node in C also has a node
        attribute 'members' with the set of original nodes in G that
        form the SCC that the node in C represents.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    Contracting two sets of strongly connected nodes into two distinct SCC
    using the barbell graph.

    >>> G = nx.barbell_graph(4, 0)
    >>> G.remove_edge(3, 4)
    >>> G = nx.DiGraph(G)
    >>> H = nx.condensation(G)
    >>> H.nodes.data()
    NodeDataView({0: {'members': {0, 1, 2, 3}}, 1: {'members': {4, 5, 6, 7}}})
    >>> H.graph['mapping']
    {0: 0, 1: 0, 2: 0, 3: 0, 4: 1, 5: 1, 6: 1, 7: 1}

    Contracting a complete graph into one single SCC.

    >>> G = nx.complete_graph(7, create_using=nx.DiGraph)
    >>> H = nx.condensation(G)
    >>> H.nodes
    NodeView((0,))
    >>> H.nodes.data()
    NodeDataView({0: {'members': {0, 1, 2, 3, 4, 5, 6}}})

    Notes
    -----
    After contracting all strongly connected components to a single node,
    the resulting graph is a directed acyclic graph.

    """
    if scc is None:
        scc = nx.strongly_connected_components(G)
    node_to_cnode = {}
    cnode_members = {}
    C = nx.DiGraph()
    # The same dict object is stored here and filled below, so the graph
    # attribute stays in sync with the computed mapping.
    C.graph["mapping"] = node_to_cnode
    if len(G) == 0:
        return C
    for cnode, component in enumerate(scc):
        cnode_members[cnode] = component
        for n in component:
            node_to_cnode[n] = cnode
    # `cnode` survives the loop; components are labeled 0..cnode.
    C.add_nodes_from(range(cnode + 1))
    C.add_edges_from(
        (node_to_cnode[u], node_to_cnode[v])
        for u, v in G.edges()
        if node_to_cnode[u] != node_to_cnode[v]
    )
    # Attach the set of original nodes to each condensed node.
    nx.set_node_attributes(C, cnode_members, "members")
    return C
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/__pycache__/test_attracting.cpython-311.pyc
ADDED
|
Binary file (5.26 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/__pycache__/test_biconnected.cpython-311.pyc
ADDED
|
Binary file (12.6 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/test_attracting.py
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx import NetworkXNotImplemented
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class TestAttractingComponents:
    """Tests for attracting_components and related query functions."""

    @classmethod
    def setup_class(cls):
        # G1: DAG-like digraph whose sinks {2}, {9}, {10} are the attractors.
        cls.G1 = nx.DiGraph()
        cls.G1.add_edges_from(
            [
                (5, 11),
                (11, 2),
                (11, 9),
                (11, 10),
                (7, 11),
                (7, 8),
                (8, 9),
                (3, 8),
                (3, 10),
            ]
        )
        # G2: single attractor {1, 2} (includes a self-loop on 1).
        cls.G2 = nx.DiGraph()
        cls.G2.add_edges_from([(0, 1), (0, 2), (1, 1), (1, 2), (2, 1)])

        # G3: two 2-cycles {1, 2} and {3, 4}, both attracting.
        cls.G3 = nx.DiGraph()
        cls.G3.add_edges_from([(0, 1), (1, 2), (2, 1), (0, 3), (3, 4), (4, 3)])

        # G4: empty graph — no attracting components at all.
        cls.G4 = nx.DiGraph()

    def test_attracting_components(self):
        ac = list(nx.attracting_components(self.G1))
        assert {2} in ac
        assert {9} in ac
        assert {10} in ac

        ac = list(nx.attracting_components(self.G2))
        ac = [tuple(sorted(x)) for x in ac]
        assert ac == [(1, 2)]

        ac = list(nx.attracting_components(self.G3))
        ac = [tuple(sorted(x)) for x in ac]
        assert (1, 2) in ac
        assert (3, 4) in ac
        assert len(ac) == 2

        # Empty graph yields no components.
        ac = list(nx.attracting_components(self.G4))
        assert ac == []

    def test_number_attacting_components(self):
        assert nx.number_attracting_components(self.G1) == 3
        assert nx.number_attracting_components(self.G2) == 1
        assert nx.number_attracting_components(self.G3) == 2
        assert nx.number_attracting_components(self.G4) == 0

    def test_is_attracting_component(self):
        # None of the full graphs is itself a single attracting component.
        assert not nx.is_attracting_component(self.G1)
        assert not nx.is_attracting_component(self.G2)
        assert not nx.is_attracting_component(self.G3)
        # But the induced subgraph on one attractor of G3 is.
        g2 = self.G3.subgraph([1, 2])
        assert nx.is_attracting_component(g2)
        assert not nx.is_attracting_component(self.G4)

    def test_connected_raise(self):
        # All three functions reject undirected input.
        G = nx.Graph()
        with pytest.raises(NetworkXNotImplemented):
            next(nx.attracting_components(G))
        pytest.raises(NetworkXNotImplemented, nx.number_attracting_components, G)
        pytest.raises(NetworkXNotImplemented, nx.is_attracting_component, G)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/test_semiconnected.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from itertools import chain
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class TestIsSemiconnected:
    """Tests for nx.is_semiconnected on paths, cycles, trees, and edge cases."""

    def test_undirected(self):
        # Semiconnectivity is only defined for directed graphs.
        pytest.raises(nx.NetworkXNotImplemented, nx.is_semiconnected, nx.Graph())
        pytest.raises(nx.NetworkXNotImplemented, nx.is_semiconnected, nx.MultiGraph())

    def test_empty(self):
        # The null graph has no meaningful connectivity.
        pytest.raises(nx.NetworkXPointlessConcept, nx.is_semiconnected, nx.DiGraph())
        pytest.raises(
            nx.NetworkXPointlessConcept, nx.is_semiconnected, nx.MultiDiGraph()
        )

    def test_single_node_graph(self):
        G = nx.DiGraph()
        G.add_node(0)
        assert nx.is_semiconnected(G)

    def test_path(self):
        # A directed path is semiconnected; adding a branch off the end breaks it.
        G = nx.path_graph(100, create_using=nx.DiGraph())
        assert nx.is_semiconnected(G)
        G.add_edge(100, 99)
        assert not nx.is_semiconnected(G)

    def test_cycle(self):
        # A cycle (strongly connected) is semiconnected, as is a path
        # with a back edge forming a cycle.
        G = nx.cycle_graph(100, create_using=nx.DiGraph())
        assert nx.is_semiconnected(G)
        G = nx.path_graph(100, create_using=nx.DiGraph())
        G.add_edge(0, 99)
        assert nx.is_semiconnected(G)

    def test_tree(self):
        # A binary out-tree has incomparable sibling branches.
        G = nx.DiGraph()
        G.add_edges_from(
            chain.from_iterable([(i, 2 * i + 1), (i, 2 * i + 2)] for i in range(100))
        )
        assert not nx.is_semiconnected(G)

    def test_dumbbell(self):
        # Two disjoint cycles are not semiconnected until bridged.
        G = nx.cycle_graph(100, create_using=nx.DiGraph())
        G.add_edges_from((i + 100, (i + 1) % 100 + 100) for i in range(100))
        assert not nx.is_semiconnected(G)  # G is disconnected.
        G.add_edge(100, 99)
        assert nx.is_semiconnected(G)

    def test_alternating_path(self):
        # Edges alternate direction along the path, so no node pair
        # far apart is ordered by reachability.
        G = nx.DiGraph(
            chain.from_iterable([(i, i - 1), (i, i + 1)] for i in range(0, 100, 2))
        )
        assert not nx.is_semiconnected(G)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/test_strongly_connected.py
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx import NetworkXNotImplemented
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class TestStronglyConnected:
    """Tests for SCC algorithms (Tarjan, Kosaraju, recursive) and condensation."""

    @classmethod
    def setup_class(cls):
        # cls.gc holds (graph, expected_components) pairs used by all tests.
        cls.gc = []
        G = nx.DiGraph()
        G.add_edges_from(
            [
                (1, 2),
                (2, 3),
                (2, 8),
                (3, 4),
                (3, 7),
                (4, 5),
                (5, 3),
                (5, 6),
                (7, 4),
                (7, 6),
                (8, 1),
                (8, 7),
            ]
        )
        C = {frozenset([3, 4, 5, 7]), frozenset([1, 2, 8]), frozenset([6])}
        cls.gc.append((G, C))

        G = nx.DiGraph()
        G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)])
        C = {frozenset([2, 3, 4]), frozenset([1])}
        cls.gc.append((G, C))

        G = nx.DiGraph()
        G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)])
        C = {frozenset([1, 2, 3])}
        cls.gc.append((G, C))

        # Eppstein's tests
        G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []})
        C = {
            frozenset([0]),
            frozenset([1]),
            frozenset([2]),
            frozenset([3]),
            frozenset([4]),
            frozenset([5]),
            frozenset([6]),
        }
        cls.gc.append((G, C))

        G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]})
        C = {frozenset([0, 1, 2]), frozenset([3, 4])}
        cls.gc.append((G, C))

    def test_tarjan(self):
        scc = nx.strongly_connected_components
        for G, C in self.gc:
            assert {frozenset(g) for g in scc(G)} == C

    def test_tarjan_recursive(self):
        # The recursive variant is deprecated but must still be correct.
        scc = nx.strongly_connected_components_recursive
        for G, C in self.gc:
            with pytest.deprecated_call():
                assert {frozenset(g) for g in scc(G)} == C

    def test_kosaraju(self):
        scc = nx.kosaraju_strongly_connected_components
        for G, C in self.gc:
            assert {frozenset(g) for g in scc(G)} == C

    def test_number_strongly_connected_components(self):
        ncc = nx.number_strongly_connected_components
        for G, C in self.gc:
            assert ncc(G) == len(C)

    def test_is_strongly_connected(self):
        # Strongly connected iff there is exactly one component.
        for G, C in self.gc:
            if len(C) == 1:
                assert nx.is_strongly_connected(G)
            else:
                assert not nx.is_strongly_connected(G)

    def test_contract_scc1(self):
        G = nx.DiGraph()
        G.add_edges_from(
            [
                (1, 2),
                (2, 3),
                (2, 11),
                (2, 12),
                (3, 4),
                (4, 3),
                (4, 5),
                (5, 6),
                (6, 5),
                (6, 7),
                (7, 8),
                (7, 9),
                (7, 10),
                (8, 9),
                (9, 7),
                (10, 6),
                (11, 2),
                (11, 4),
                (11, 6),
                (12, 6),
                (12, 11),
            ]
        )
        scc = list(nx.strongly_connected_components(G))
        cG = nx.condensation(G, scc)
        # DAG
        assert nx.is_directed_acyclic_graph(cG)
        # nodes
        assert sorted(cG.nodes()) == [0, 1, 2, 3]
        # edges
        mapping = {}
        for i, component in enumerate(scc):
            for n in component:
                mapping[n] = i
        edge = (mapping[2], mapping[3])
        assert cG.has_edge(*edge)
        edge = (mapping[2], mapping[5])
        assert cG.has_edge(*edge)
        edge = (mapping[3], mapping[5])
        assert cG.has_edge(*edge)

    def test_contract_scc_isolate(self):
        # Bug found and fixed in [1687].
        G = nx.DiGraph()
        G.add_edge(1, 2)
        G.add_edge(2, 1)
        scc = list(nx.strongly_connected_components(G))
        cG = nx.condensation(G, scc)
        assert list(cG.nodes()) == [0]
        assert list(cG.edges()) == []

    def test_contract_scc_edge(self):
        G = nx.DiGraph()
        G.add_edge(1, 2)
        G.add_edge(2, 1)
        G.add_edge(2, 3)
        G.add_edge(3, 4)
        G.add_edge(4, 3)
        scc = list(nx.strongly_connected_components(G))
        cG = nx.condensation(G, scc)
        assert sorted(cG.nodes()) == [0, 1]
        # Component labels depend on discovery order, so derive the
        # expected edge direction from the actual scc list.
        if 1 in scc[0]:
            edge = (0, 1)
        else:
            edge = (1, 0)
        assert list(cG.edges()) == [edge]

    def test_condensation_mapping_and_members(self):
        G, C = self.gc[1]
        C = sorted(C, key=len, reverse=True)
        cG = nx.condensation(G)
        mapping = cG.graph["mapping"]
        assert all(n in G for n in mapping)
        assert all(0 == cN for n, cN in mapping.items() if n in C[0])
        assert all(1 == cN for n, cN in mapping.items() if n in C[1])
        for n, d in cG.nodes(data=True):
            assert set(C[n]) == cG.nodes[n]["members"]

    def test_null_graph(self):
        # Generators yield nothing on the null graph; is_strongly_connected raises.
        G = nx.DiGraph()
        assert list(nx.strongly_connected_components(G)) == []
        assert list(nx.kosaraju_strongly_connected_components(G)) == []
        with pytest.deprecated_call():
            assert list(nx.strongly_connected_components_recursive(G)) == []
        assert len(nx.condensation(G)) == 0
        pytest.raises(
            nx.NetworkXPointlessConcept, nx.is_strongly_connected, nx.DiGraph()
        )

    def test_connected_raise(self):
        # Undirected input is rejected by every SCC entry point.
        G = nx.Graph()
        with pytest.raises(NetworkXNotImplemented):
            next(nx.strongly_connected_components(G))
        with pytest.raises(NetworkXNotImplemented):
            next(nx.kosaraju_strongly_connected_components(G))
        with pytest.raises(NetworkXNotImplemented):
            with pytest.deprecated_call():
                next(nx.strongly_connected_components_recursive(G))
        pytest.raises(NetworkXNotImplemented, nx.is_strongly_connected, G)
        pytest.raises(
            nx.NetworkXPointlessConcept, nx.is_strongly_connected, nx.DiGraph()
        )
        pytest.raises(NetworkXNotImplemented, nx.condensation, G)

    strong_cc_methods = (
        nx.strongly_connected_components,
        nx.kosaraju_strongly_connected_components,
    )

    @pytest.mark.parametrize("get_components", strong_cc_methods)
    def test_connected_mutability(self, get_components):
        # Yielded sets must be fresh objects: clearing one must not
        # corrupt the generator's internal state.
        DG = nx.path_graph(5, create_using=nx.DiGraph)
        G = nx.disjoint_union(DG, DG)
        seen = set()
        for component in get_components(G):
            assert len(seen & component) == 0
            seen.update(component)
            component.clear()
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/flow/__pycache__/boykovkolmogorov.cpython-311.pyc
ADDED
|
Binary file (16 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/flow/__pycache__/mincost.cpython-311.pyc
ADDED
|
Binary file (13.7 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/flow/__pycache__/shortestaugmentingpath.cpython-311.pyc
ADDED
|
Binary file (11.3 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/flow/tests/__pycache__/test_maxflow.cpython-311.pyc
ADDED
|
Binary file (32.6 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/flow/tests/test_gomory_hu.py
ADDED
|
@@ -0,0 +1,128 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from itertools import combinations
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.algorithms.flow import (
|
| 7 |
+
boykov_kolmogorov,
|
| 8 |
+
dinitz,
|
| 9 |
+
edmonds_karp,
|
| 10 |
+
preflow_push,
|
| 11 |
+
shortest_augmenting_path,
|
| 12 |
+
)
|
| 13 |
+
|
| 14 |
+
# Every max-flow implementation exercised against gomory_hu_tree below.
flow_funcs = [
    boykov_kolmogorov,
    dinitz,
    edmonds_karp,
    preflow_push,
    shortest_augmenting_path,
]
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class TestGomoryHuTree:
    """Validate ``nx.gomory_hu_tree``: for every node pair, the minimum
    edge weight on the tree path must equal the minimum s-t cut value in
    the original graph."""

    def minimum_edge_weight(self, T, u, v):
        """Return ``(weight, edge)`` of the lightest edge on the u-v path in T."""
        path = nx.shortest_path(T, u, v, weight="weight")
        # zip(path, path[1:]) walks consecutive edges along the path.
        # (Renamed loop variables: the original shadowed u and v here.)
        return min((T[a][b]["weight"], (a, b)) for a, b in zip(path, path[1:]))

    def compute_cutset(self, G, T_orig, edge):
        """Return the G-edges crossing the bipartition produced by
        removing *edge* from the tree ``T_orig``."""
        T = T_orig.copy()
        T.remove_edge(*edge)
        U, V = list(nx.connected_components(T))
        cutset = set()
        for x, nbrs in ((n, G[n]) for n in U):
            cutset.update((x, y) for y in nbrs if y in V)
        return cutset

    def _check_gomory_hu_tree(self, G, flow_func=None, capacity=None):
        """Build the Gomory-Hu tree of ``G`` and verify the min-cut
        property for all node pairs.  Shared by the dataset tests below;
        pass ``flow_func``/``capacity`` to forward them to networkx."""
        kwargs = {}
        if flow_func is not None:
            kwargs["flow_func"] = flow_func
        if capacity is not None:
            kwargs["capacity"] = capacity
        T = nx.gomory_hu_tree(G, **kwargs)
        assert nx.is_tree(T)
        cut_kwargs = {} if capacity is None else {"capacity": capacity}
        for u, v in combinations(G, 2):
            cut_value, edge = self.minimum_edge_weight(T, u, v)
            assert nx.minimum_cut_value(G, u, v, **cut_kwargs) == cut_value

    def test_default_flow_function_karate_club_graph(self):
        G = nx.karate_club_graph()
        nx.set_edge_attributes(G, 1, "capacity")
        self._check_gomory_hu_tree(G)

    def test_karate_club_graph(self):
        G = nx.karate_club_graph()
        nx.set_edge_attributes(G, 1, "capacity")
        for flow_func in flow_funcs:
            self._check_gomory_hu_tree(G, flow_func)

    def test_davis_southern_women_graph(self):
        G = nx.davis_southern_women_graph()
        nx.set_edge_attributes(G, 1, "capacity")
        for flow_func in flow_funcs:
            self._check_gomory_hu_tree(G, flow_func)

    def test_florentine_families_graph(self):
        G = nx.florentine_families_graph()
        nx.set_edge_attributes(G, 1, "capacity")
        for flow_func in flow_funcs:
            self._check_gomory_hu_tree(G, flow_func)

    @pytest.mark.slow
    def test_les_miserables_graph_cutset(self):
        G = nx.les_miserables_graph()
        nx.set_edge_attributes(G, 1, "capacity")
        for flow_func in flow_funcs:
            self._check_gomory_hu_tree(G, flow_func)

    def test_karate_club_graph_cutset(self):
        """The tree-path minimum edge also identifies a concrete cutset of
        matching cardinality (unit capacities)."""
        G = nx.karate_club_graph()
        nx.set_edge_attributes(G, 1, "capacity")
        T = nx.gomory_hu_tree(G)
        assert nx.is_tree(T)
        u, v = 0, 33
        cut_value, edge = self.minimum_edge_weight(T, u, v)
        cutset = self.compute_cutset(G, T, edge)
        assert cut_value == len(cutset)

    def test_wikipedia_example(self):
        # Example from https://en.wikipedia.org/wiki/Gomory%E2%80%93Hu_tree
        G = nx.Graph()
        G.add_weighted_edges_from(
            (
                (0, 1, 1),
                (0, 2, 7),
                (1, 2, 1),
                (1, 3, 3),
                (1, 4, 2),
                (2, 4, 4),
                (3, 4, 1),
                (3, 5, 6),
                (4, 5, 2),
            )
        )
        for flow_func in flow_funcs:
            self._check_gomory_hu_tree(G, flow_func, capacity="weight")

    def test_directed_raises(self):
        """Directed graphs are not supported."""
        with pytest.raises(nx.NetworkXNotImplemented):
            nx.gomory_hu_tree(nx.DiGraph())

    def test_empty_raises(self):
        """The empty graph has no Gomory-Hu tree."""
        with pytest.raises(nx.NetworkXError):
            nx.gomory_hu_tree(nx.empty_graph())
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/flow/tests/test_maxflow.py
ADDED
|
@@ -0,0 +1,560 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Maximum flow algorithms test suite.
|
| 2 |
+
"""
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.algorithms.flow import (
|
| 7 |
+
boykov_kolmogorov,
|
| 8 |
+
build_flow_dict,
|
| 9 |
+
build_residual_network,
|
| 10 |
+
dinitz,
|
| 11 |
+
edmonds_karp,
|
| 12 |
+
preflow_push,
|
| 13 |
+
shortest_augmenting_path,
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
# All max-flow algorithm implementations under test.
flow_funcs = {
    boykov_kolmogorov,
    dinitz,
    edmonds_karp,
    preflow_push,
    shortest_augmenting_path,
}

# High-level interface functions, grouped by return style.
max_min_funcs = {nx.maximum_flow, nx.minimum_cut}
flow_value_funcs = {nx.maximum_flow_value, nx.minimum_cut_value}
# BUG FIX: these must be unions ("|"), not intersections ("&").  The two
# operand sets are disjoint, so "&" produced empty sets and every test
# loop over interface_funcs / all_funcs silently iterated zero times.
interface_funcs = max_min_funcs | flow_value_funcs
all_funcs = flow_funcs | interface_funcs
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def compute_cutset(G, partition):
    """Return the set of edges of ``G`` crossing from the reachable side
    of *partition* to the non-reachable side.

    ``partition`` is a ``(reachable, non_reachable)`` pair of node
    collections; ``G[n]`` must yield the neighbors of ``n``.
    """
    reachable, non_reachable = partition
    crossing = set()
    for node in reachable:
        for nbr in G[node]:
            if nbr in non_reachable:
                crossing.add((node, nbr))
    return crossing
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def validate_flows(G, s, t, flowDict, solnValue, capacity, flow_func):
    """Assert that ``flowDict`` is a feasible s-t flow on ``G`` with value
    ``solnValue``.

    Checks, in order: the flow dict covers exactly the nodes and edges of
    ``G``; every edge flow respects its capacity (when present) and is
    non-negative; and flow is conserved — net excess is ``-solnValue`` at
    ``s``, ``+solnValue`` at ``t`` and zero elsewhere.  Raises
    ``AssertionError`` (tagged with ``flow_func``'s name) on violation.
    """
    errmsg = f"Assertion failed in function: {flow_func.__name__}"
    assert set(G) == set(flowDict), errmsg
    for u in G:
        assert set(G[u]) == set(flowDict[u]), errmsg
    excess = {u: 0 for u in flowDict}
    for u in flowDict:
        for v, flow in flowDict[u].items():
            if capacity in G[u][v]:
                # errmsg added here for consistency: this was the only
                # assertion in the helper without a diagnostic message.
                assert flow <= G[u][v][capacity], errmsg
            assert flow >= 0, errmsg
            excess[u] -= flow
            excess[v] += flow
    for u, exc in excess.items():
        if u == s:
            assert exc == -solnValue, errmsg
        elif u == t:
            assert exc == solnValue, errmsg
        else:
            assert exc == 0, errmsg
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def validate_cuts(G, s, t, solnValue, partition, capacity, flow_func):
    """Assert that *partition* is a valid s-t cut of ``G`` with value
    ``solnValue``: both sides contain only graph nodes, the crossing
    edges sum to the cut value, and removing them disconnects the graph
    (strongly, for directed ``G``)."""
    errmsg = f"Assertion failed in function: {flow_func.__name__}"
    side_s, side_t = partition
    assert all(n in G for n in side_s), errmsg
    assert all(n in G for n in side_t), errmsg
    cutset = compute_cutset(G, partition)
    assert all(G.has_edge(u, v) for u, v in cutset), errmsg
    assert solnValue == sum(G[u][v][capacity] for u, v in cutset), errmsg
    H = G.copy()
    H.remove_edges_from(cutset)
    if G.is_directed():
        assert not nx.is_strongly_connected(H), errmsg
    else:
        assert not nx.is_connected(H), errmsg
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def compare_flows_and_cuts(G, s, t, solnFlows, solnValue, capacity="capacity"):
    """Run every algorithm in ``flow_funcs`` on ``G`` and validate both
    the maximum flow and the corresponding minimum cut against the
    expected value ``solnValue``.

    ``solnFlows`` is a reference flow dict kept for signature
    compatibility with the call sites (feasibility of the *computed*
    flow is what is actually validated).
    """
    for flow_func in flow_funcs:
        errmsg = f"Assertion failed in function: {flow_func.__name__}"
        R = flow_func(G, s, t, capacity)
        # Maximum flow: value and feasibility of the residual's flow dict.
        flow_value = R.graph["flow_value"]
        flow_dict = build_flow_dict(G, R)
        assert flow_value == solnValue, errmsg
        validate_flows(G, s, t, flow_dict, solnValue, capacity, flow_func)
        # Minimum cut: by max-flow/min-cut duality its value must equal
        # the flow value — previously computed but never checked.
        cut_value, partition = nx.minimum_cut(
            G, s, t, capacity=capacity, flow_func=flow_func
        )
        assert cut_value == solnValue, errmsg
        validate_cuts(G, s, t, solnValue, partition, capacity, flow_func)
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
class TestMaxflowMinCutCommon:
    """Data-driven max-flow / min-cut cases shared by all algorithms.

    Each test builds a graph with known maximum flow and hands it to
    ``compare_flows_and_cuts`` together with the expected flow dict and
    flow value.
    """

    def test_graph1(self):
        # Trivial undirected graph: one unit-capacity edge.
        G = nx.Graph()
        G.add_edge(1, 2, capacity=1.0)

        solnFlows = {1: {2: 1.0}, 2: {1: 1.0}}

        compare_flows_and_cuts(G, 1, 2, solnFlows, 1.0)

    def test_graph2(self):
        # A more complex undirected graph
        # adapted from https://web.archive.org/web/20220815055650/https://www.topcoder.com/thrive/articles/Maximum%20Flow:%20Part%20One
        G = nx.Graph()
        G.add_edge("x", "a", capacity=3.0)
        G.add_edge("x", "b", capacity=1.0)
        G.add_edge("a", "c", capacity=3.0)
        G.add_edge("b", "c", capacity=5.0)
        G.add_edge("b", "d", capacity=4.0)
        G.add_edge("d", "e", capacity=2.0)
        G.add_edge("c", "y", capacity=2.0)
        G.add_edge("e", "y", capacity=3.0)

        H = {
            "x": {"a": 3, "b": 1},
            "a": {"c": 3, "x": 3},
            "b": {"c": 1, "d": 2, "x": 1},
            "c": {"a": 3, "b": 1, "y": 2},
            "d": {"b": 2, "e": 2},
            "e": {"d": 2, "y": 2},
            "y": {"c": 2, "e": 2},
        }

        compare_flows_and_cuts(G, "x", "y", H, 4.0)

    def test_digraph1(self):
        # The classic directed graph example
        G = nx.DiGraph()
        G.add_edge("a", "b", capacity=1000.0)
        G.add_edge("a", "c", capacity=1000.0)
        G.add_edge("b", "c", capacity=1.0)
        G.add_edge("b", "d", capacity=1000.0)
        G.add_edge("c", "d", capacity=1000.0)

        H = {
            "a": {"b": 1000.0, "c": 1000.0},
            "b": {"c": 0, "d": 1000.0},
            "c": {"d": 1000.0},
            "d": {},
        }

        compare_flows_and_cuts(G, "a", "d", H, 2000.0)

    def test_digraph2(self):
        # An example in which some edges end up with zero flow.
        G = nx.DiGraph()
        G.add_edge("s", "b", capacity=2)
        G.add_edge("s", "c", capacity=1)
        G.add_edge("c", "d", capacity=1)
        G.add_edge("d", "a", capacity=1)
        G.add_edge("b", "a", capacity=2)
        G.add_edge("a", "t", capacity=2)

        H = {
            "s": {"b": 2, "c": 0},
            "c": {"d": 0},
            "d": {"a": 0},
            "b": {"a": 2},
            "a": {"t": 2},
            "t": {},
        }

        compare_flows_and_cuts(G, "s", "t", H, 2)

    def test_digraph3(self):
        # A directed graph example from Cormen et al.
        G = nx.DiGraph()
        G.add_edge("s", "v1", capacity=16.0)
        G.add_edge("s", "v2", capacity=13.0)
        G.add_edge("v1", "v2", capacity=10.0)
        G.add_edge("v2", "v1", capacity=4.0)
        G.add_edge("v1", "v3", capacity=12.0)
        G.add_edge("v3", "v2", capacity=9.0)
        G.add_edge("v2", "v4", capacity=14.0)
        G.add_edge("v4", "v3", capacity=7.0)
        G.add_edge("v3", "t", capacity=20.0)
        G.add_edge("v4", "t", capacity=4.0)

        H = {
            "s": {"v1": 12.0, "v2": 11.0},
            "v2": {"v1": 0, "v4": 11.0},
            "v1": {"v2": 0, "v3": 12.0},
            "v3": {"v2": 0, "t": 19.0},
            "v4": {"v3": 7.0, "t": 4.0},
            "t": {},
        }

        compare_flows_and_cuts(G, "s", "t", H, 23.0)

    def test_digraph4(self):
        # A more complex directed graph
        # from https://web.archive.org/web/20220815055650/https://www.topcoder.com/thrive/articles/Maximum%20Flow:%20Part%20One
        G = nx.DiGraph()
        G.add_edge("x", "a", capacity=3.0)
        G.add_edge("x", "b", capacity=1.0)
        G.add_edge("a", "c", capacity=3.0)
        G.add_edge("b", "c", capacity=5.0)
        G.add_edge("b", "d", capacity=4.0)
        G.add_edge("d", "e", capacity=2.0)
        G.add_edge("c", "y", capacity=2.0)
        G.add_edge("e", "y", capacity=3.0)

        H = {
            "x": {"a": 2.0, "b": 1.0},
            "a": {"c": 2.0},
            "b": {"c": 0, "d": 1.0},
            "c": {"y": 2.0},
            "d": {"e": 1.0},
            "e": {"y": 1.0},
            "y": {},
        }

        compare_flows_and_cuts(G, "x", "y", H, 3.0)

    def test_wikipedia_dinitz_example(self):
        # Nice example from https://en.wikipedia.org/wiki/Dinic's_algorithm
        G = nx.DiGraph()
        G.add_edge("s", 1, capacity=10)
        G.add_edge("s", 2, capacity=10)
        G.add_edge(1, 3, capacity=4)
        G.add_edge(1, 4, capacity=8)
        G.add_edge(1, 2, capacity=2)
        G.add_edge(2, 4, capacity=9)
        G.add_edge(3, "t", capacity=10)
        G.add_edge(4, 3, capacity=6)
        G.add_edge(4, "t", capacity=10)

        solnFlows = {
            1: {2: 0, 3: 4, 4: 6},
            2: {4: 9},
            3: {"t": 9},
            4: {3: 5, "t": 10},
            "s": {1: 10, 2: 9},
            "t": {},
        }

        compare_flows_and_cuts(G, "s", "t", solnFlows, 19)

    def test_optional_capacity(self):
        # A non-default edge attribute ("spam") can carry the capacities.
        G = nx.DiGraph()
        G.add_edge("x", "a", spam=3.0)
        G.add_edge("x", "b", spam=1.0)
        G.add_edge("a", "c", spam=3.0)
        G.add_edge("b", "c", spam=5.0)
        G.add_edge("b", "d", spam=4.0)
        G.add_edge("d", "e", spam=2.0)
        G.add_edge("c", "y", spam=2.0)
        G.add_edge("e", "y", spam=3.0)

        solnFlows = {
            "x": {"a": 2.0, "b": 1.0},
            "a": {"c": 2.0},
            "b": {"c": 0, "d": 1.0},
            "c": {"y": 2.0},
            "d": {"e": 1.0},
            "e": {"y": 1.0},
            "y": {},
        }
        solnValue = 3.0
        s = "x"
        t = "y"

        compare_flows_and_cuts(G, s, t, solnFlows, solnValue, capacity="spam")

    def test_digraph_infcap_edges(self):
        # DiGraph with infinite capacity edges (no "capacity" attribute).
        G = nx.DiGraph()
        G.add_edge("s", "a")
        G.add_edge("s", "b", capacity=30)
        G.add_edge("a", "c", capacity=25)
        G.add_edge("b", "c", capacity=12)
        G.add_edge("a", "t", capacity=60)
        G.add_edge("c", "t")

        H = {
            "s": {"a": 85, "b": 12},
            "a": {"c": 25, "t": 60},
            "b": {"c": 12},
            "c": {"t": 37},
            "t": {},
        }

        compare_flows_and_cuts(G, "s", "t", H, 97)

        # DiGraph with an infinite-capacity digon (a <-> c).
        G = nx.DiGraph()
        G.add_edge("s", "a", capacity=85)
        G.add_edge("s", "b", capacity=30)
        G.add_edge("a", "c")
        G.add_edge("c", "a")
        G.add_edge("b", "c", capacity=12)
        G.add_edge("a", "t", capacity=60)
        G.add_edge("c", "t", capacity=37)

        H = {
            "s": {"a": 85, "b": 12},
            "a": {"c": 25, "t": 60},
            "c": {"a": 0, "t": 37},
            "b": {"c": 12},
            "t": {},
        }

        compare_flows_and_cuts(G, "s", "t", H, 97)

    def test_digraph_infcap_path(self):
        # A graph with an all-infinite-capacity (s, t)-path is unbounded.
        G = nx.DiGraph()
        G.add_edge("s", "a")
        G.add_edge("s", "b", capacity=30)
        G.add_edge("a", "c")
        G.add_edge("b", "c", capacity=12)
        G.add_edge("a", "t", capacity=60)
        G.add_edge("c", "t")

        for flow_func in all_funcs:
            pytest.raises(nx.NetworkXUnbounded, flow_func, G, "s", "t")

    def test_graph_infcap_edges(self):
        # Undirected graph with infinite capacity edges.
        G = nx.Graph()
        G.add_edge("s", "a")
        G.add_edge("s", "b", capacity=30)
        G.add_edge("a", "c", capacity=25)
        G.add_edge("b", "c", capacity=12)
        G.add_edge("a", "t", capacity=60)
        G.add_edge("c", "t")

        H = {
            "s": {"a": 85, "b": 12},
            "a": {"c": 25, "s": 85, "t": 60},
            "b": {"c": 12, "s": 12},
            "c": {"a": 25, "b": 12, "t": 37},
            "t": {"a": 60, "c": 37},
        }

        compare_flows_and_cuts(G, "s", "t", H, 97)

    def test_digraph5(self):
        # From ticket #429 by mfrasca.
        G = nx.DiGraph()
        G.add_edge("s", "a", capacity=2)
        G.add_edge("s", "b", capacity=2)
        G.add_edge("a", "b", capacity=5)
        G.add_edge("a", "t", capacity=1)
        G.add_edge("b", "a", capacity=1)
        G.add_edge("b", "t", capacity=3)
        flowSoln = {
            "a": {"b": 1, "t": 1},
            "b": {"a": 0, "t": 3},
            "s": {"a": 2, "b": 2},
            "t": {},
        }
        compare_flows_and_cuts(G, "s", "t", flowSoln, 4)

    def test_disconnected(self):
        # Removing node 1 disconnects source 0 from sink 3: zero flow.
        G = nx.Graph()
        G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity")
        G.remove_node(1)
        assert nx.maximum_flow_value(G, 0, 3) == 0
        flowSoln = {0: {}, 2: {3: 0}, 3: {2: 0}}
        compare_flows_and_cuts(G, 0, 3, flowSoln, 0)

    def test_source_target_not_in_graph(self):
        # A missing source or target must raise NetworkXError.
        G = nx.Graph()
        G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity")
        G.remove_node(0)
        for flow_func in all_funcs:
            pytest.raises(nx.NetworkXError, flow_func, G, 0, 3)
        G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity")
        G.remove_node(3)
        for flow_func in all_funcs:
            pytest.raises(nx.NetworkXError, flow_func, G, 0, 3)

    def test_source_target_coincide(self):
        G = nx.Graph()
        G.add_node(0)
        for flow_func in all_funcs:
            pytest.raises(nx.NetworkXError, flow_func, G, 0, 0)

    def test_multigraphs_raise(self):
        # Multigraphs are not supported by the flow functions.
        G = nx.MultiGraph()
        G.add_edges_from([(0, 1), (1, 0)], capacity=True)
        for flow_func in all_funcs:
            pytest.raises(nx.NetworkXError, flow_func, G, 0, 0)
        # The MultiDiGraph was previously constructed but never exercised;
        # it must be rejected as well.
        M = nx.MultiDiGraph()
        M.add_edges_from([(0, 1), (1, 0)], capacity=True)
        for flow_func in all_funcs:
            pytest.raises(nx.NetworkXError, flow_func, M, 0, 0)
|
| 387 |
+
|
| 388 |
+
|
| 389 |
+
class TestMaxFlowMinCutInterface:
    """Tests of the high-level maximum_flow / minimum_cut interface."""

    def setup_method(self):
        # Directed example with max-flow value 3.0 from "x" to "y".
        G = nx.DiGraph()
        G.add_edge("x", "a", capacity=3.0)
        G.add_edge("x", "b", capacity=1.0)
        G.add_edge("a", "c", capacity=3.0)
        G.add_edge("b", "c", capacity=5.0)
        G.add_edge("b", "d", capacity=4.0)
        G.add_edge("d", "e", capacity=2.0)
        G.add_edge("c", "y", capacity=2.0)
        G.add_edge("e", "y", capacity=3.0)
        self.G = G
        # Simple unit-capacity chain 0 -> 1 -> 2.
        H = nx.DiGraph()
        H.add_edge(0, 1, capacity=1.0)
        H.add_edge(1, 2, capacity=1.0)
        self.H = H

    def test_flow_func_not_callable(self):
        """Passing a non-callable flow_func raises NetworkXError."""
        elements = ["this_should_be_callable", 10, {1, 2, 3}]
        G = nx.Graph()
        G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity")
        for interface_func in interface_funcs:
            for element in elements:
                # The original issued this identical call twice in a row;
                # once per (function, element) pair is sufficient.
                pytest.raises(
                    nx.NetworkXError, interface_func, G, 0, 1, flow_func=element
                )

    def test_flow_func_parameters(self):
        """Every algorithm plugged into every interface function yields
        the known flow value 3.0."""
        G = self.G
        fv = 3.0
        for interface_func in interface_funcs:
            for flow_func in flow_funcs:
                errmsg = (
                    f"Assertion failed in function: {flow_func.__name__} "
                    f"in interface {interface_func.__name__}"
                )
                result = interface_func(G, "x", "y", flow_func=flow_func)
                if interface_func in max_min_funcs:
                    # These return (value, partition/flow_dict) tuples.
                    result = result[0]
                assert fv == result, errmsg

    def test_minimum_cut_no_cutoff(self):
        """minimum_cut rejects the cutoff parameter (meaningless for cuts)."""
        G = self.G
        pytest.raises(
            nx.NetworkXError,
            nx.minimum_cut,
            G,
            "x",
            "y",
            flow_func=preflow_push,
            cutoff=1.0,
        )
        pytest.raises(
            nx.NetworkXError,
            nx.minimum_cut_value,
            G,
            "x",
            "y",
            flow_func=preflow_push,
            cutoff=1.0,
        )

    def test_kwargs(self):
        """Algorithm-specific keyword arguments are forwarded through the
        interface functions."""
        G = self.H
        fv = 1.0
        to_test = (
            (shortest_augmenting_path, {"two_phase": True}),
            (preflow_push, {"global_relabel_freq": 5}),
        )
        for interface_func in interface_funcs:
            for flow_func, kwargs in to_test:
                errmsg = (
                    f"Assertion failed in function: {flow_func.__name__} "
                    f"in interface {interface_func.__name__}"
                )
                result = interface_func(G, 0, 2, flow_func=flow_func, **kwargs)
                if interface_func in max_min_funcs:
                    result = result[0]
                assert fv == result, errmsg

    def test_kwargs_default_flow_func(self):
        """Algorithm-specific kwargs are rejected when no flow_func is given."""
        G = self.H
        for interface_func in interface_funcs:
            pytest.raises(
                nx.NetworkXError, interface_func, G, 0, 1, global_relabel_freq=2
            )

    def test_reusing_residual(self):
        """A prebuilt residual network can be reused across repeated calls
        without changing the result."""
        G = self.G
        fv = 3.0
        s, t = "x", "y"
        R = build_residual_network(G, "capacity")
        for interface_func in interface_funcs:
            for flow_func in flow_funcs:
                errmsg = (
                    f"Assertion failed in function: {flow_func.__name__} "
                    f"in interface {interface_func.__name__}"
                )
                for i in range(3):
                    # s/t were previously bound but the literals repeated.
                    result = interface_func(G, s, t, flow_func=flow_func, residual=R)
                    if interface_func in max_min_funcs:
                        result = result[0]
                    assert fv == result, errmsg
|
| 493 |
+
|
| 494 |
+
|
| 495 |
+
# Tests specific to one algorithm
|
| 496 |
+
def test_preflow_push_global_relabel_freq():
    """preflow_push accepts global_relabel_freq=None and rejects a
    negative frequency with NetworkXError."""
    G = nx.DiGraph()
    G.add_edge(1, 2, capacity=1)
    R = preflow_push(G, 1, 2, global_relabel_freq=None)
    assert R.graph["flow_value"] == 1
    pytest.raises(nx.NetworkXError, preflow_push, G, 1, 2, global_relabel_freq=-1)
|
| 502 |
+
|
| 503 |
+
|
| 504 |
+
def test_preflow_push_makes_enough_space():
    """Regression test for ticket #1542: preflow_push with
    value_only=False must size its internal structures correctly."""
    # Two capacity-1 paths, 0->1->3 and 1->2->3, sharing node 1.
    G = nx.DiGraph()
    nx.add_path(G, [0, 1, 3], capacity=1)
    nx.add_path(G, [1, 2, 3], capacity=1)
    R = preflow_push(G, 0, 3, value_only=False)
    assert R.graph["flow_value"] == 1
|
| 511 |
+
|
| 512 |
+
|
| 513 |
+
def test_shortest_augmenting_path_two_phase():
    """Both phases of shortest_augmenting_path find the same maximum flow
    on k disjoint unit-capacity s-t paths of length p (flow value k)."""
    k = 5  # number of disjoint s-t paths
    p = 1000  # length of each path
    G = nx.DiGraph()
    for i in range(k):
        G.add_edge("s", (i, 0), capacity=1)
        nx.add_path(G, ((i, j) for j in range(p)), capacity=1)
        G.add_edge((i, p - 1), "t", capacity=1)
    R = shortest_augmenting_path(G, "s", "t", two_phase=True)
    assert R.graph["flow_value"] == k
    R = shortest_augmenting_path(G, "s", "t", two_phase=False)
    assert R.graph["flow_value"] == k
|
| 525 |
+
|
| 526 |
+
|
| 527 |
+
class TestCutoff:
    """Behavior of the ``cutoff`` parameter: algorithms may stop early
    once the flow reaches the cutoff value."""

    def test_cutoff(self):
        """On k disjoint capacity-2 paths (max flow 2k), a cutoff of k
        bounds the returned value between k and 2k."""
        k = 5  # number of disjoint s-t paths
        p = 1000  # length of each path
        G = nx.DiGraph()
        for i in range(k):
            G.add_edge("s", (i, 0), capacity=2)
            nx.add_path(G, ((i, j) for j in range(p)), capacity=2)
            G.add_edge((i, p - 1), "t", capacity=2)
        # Same calls as before, expressed as a (function, kwargs) table.
        attempts = (
            (shortest_augmenting_path, {"two_phase": True}),
            (shortest_augmenting_path, {"two_phase": False}),
            (edmonds_karp, {}),
            (dinitz, {}),
            (boykov_kolmogorov, {}),
        )
        for flow_func, kwargs in attempts:
            R = flow_func(G, "s", "t", cutoff=k, **kwargs)
            assert k <= R.graph["flow_value"] <= 2 * k

    def test_complete_graph_cutoff(self):
        """On K5 with unit capacities, maximum_flow_value honors the
        cutoff exactly for every supporting algorithm."""
        G = nx.complete_graph(5)
        nx.set_edge_attributes(G, 1, "capacity")
        cutoff_funcs = (
            shortest_augmenting_path,
            edmonds_karp,
            dinitz,
            boykov_kolmogorov,
        )
        for flow_func in cutoff_funcs:
            for cutoff in (3, 2, 1):
                value = nx.maximum_flow_value(
                    G, 0, 4, flow_func=flow_func, cutoff=cutoff
                )
                assert value == cutoff, f"cutoff error in {flow_func.__name__}"
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorph.cpython-311.pyc
ADDED
|
Binary file (10.9 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py
ADDED
|
@@ -0,0 +1,1060 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
*************
|
| 3 |
+
VF2 Algorithm
|
| 4 |
+
*************
|
| 5 |
+
|
| 6 |
+
An implementation of VF2 algorithm for graph isomorphism testing.
|
| 7 |
+
|
| 8 |
+
The simplest interface to use this module is to call networkx.is_isomorphic().
|
| 9 |
+
|
| 10 |
+
Introduction
|
| 11 |
+
------------
|
| 12 |
+
|
| 13 |
+
The GraphMatcher and DiGraphMatcher are responsible for matching
|
| 14 |
+
graphs or directed graphs in a predetermined manner. This
|
| 15 |
+
usually means a check for an isomorphism, though other checks
|
| 16 |
+
are also possible. For example, a subgraph of one graph
|
| 17 |
+
can be checked for isomorphism to a second graph.
|
| 18 |
+
|
| 19 |
+
Matching is done via syntactic feasibility. It is also possible
|
| 20 |
+
to check for semantic feasibility. Feasibility, then, is defined
|
| 21 |
+
as the logical AND of the two functions.
|
| 22 |
+
|
| 23 |
+
To include a semantic check, the (Di)GraphMatcher class should be
|
| 24 |
+
subclassed, and the semantic_feasibility() function should be
|
| 25 |
+
redefined. By default, the semantic feasibility function always
|
| 26 |
+
returns True. The effect of this is that semantics are not
|
| 27 |
+
considered in the matching of G1 and G2.
|
| 28 |
+
|
| 29 |
+
Examples
|
| 30 |
+
--------
|
| 31 |
+
|
| 32 |
+
Suppose G1 and G2 are isomorphic graphs. Verification is as follows:
|
| 33 |
+
|
| 34 |
+
>>> from networkx.algorithms import isomorphism
|
| 35 |
+
>>> G1 = nx.path_graph(4)
|
| 36 |
+
>>> G2 = nx.path_graph(4)
|
| 37 |
+
>>> GM = isomorphism.GraphMatcher(G1, G2)
|
| 38 |
+
>>> GM.is_isomorphic()
|
| 39 |
+
True
|
| 40 |
+
|
| 41 |
+
GM.mapping stores the isomorphism mapping from G1 to G2.
|
| 42 |
+
|
| 43 |
+
>>> GM.mapping
|
| 44 |
+
{0: 0, 1: 1, 2: 2, 3: 3}
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
Suppose G1 and G2 are isomorphic directed graphs.
|
| 48 |
+
Verification is as follows:
|
| 49 |
+
|
| 50 |
+
>>> G1 = nx.path_graph(4, create_using=nx.DiGraph())
|
| 51 |
+
>>> G2 = nx.path_graph(4, create_using=nx.DiGraph())
|
| 52 |
+
>>> DiGM = isomorphism.DiGraphMatcher(G1, G2)
|
| 53 |
+
>>> DiGM.is_isomorphic()
|
| 54 |
+
True
|
| 55 |
+
|
| 56 |
+
DiGM.mapping stores the isomorphism mapping from G1 to G2.
|
| 57 |
+
|
| 58 |
+
>>> DiGM.mapping
|
| 59 |
+
{0: 0, 1: 1, 2: 2, 3: 3}
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
Subgraph Isomorphism
|
| 64 |
+
--------------------
|
| 65 |
+
Graph theory literature can be ambiguous about the meaning of the
|
| 66 |
+
above statement, and we seek to clarify it now.
|
| 67 |
+
|
| 68 |
+
In the VF2 literature, a mapping M is said to be a graph-subgraph
|
| 69 |
+
isomorphism iff M is an isomorphism between G2 and a subgraph of G1.
|
| 70 |
+
Thus, to say that G1 and G2 are graph-subgraph isomorphic is to say
|
| 71 |
+
that a subgraph of G1 is isomorphic to G2.
|
| 72 |
+
|
| 73 |
+
Other literature uses the phrase 'subgraph isomorphic' as in 'G1 does
|
| 74 |
+
not have a subgraph isomorphic to G2'. Another use is as an in adverb
|
| 75 |
+
for isomorphic. Thus, to say that G1 and G2 are subgraph isomorphic
|
| 76 |
+
is to say that a subgraph of G1 is isomorphic to G2.
|
| 77 |
+
|
| 78 |
+
Finally, the term 'subgraph' can have multiple meanings. In this
|
| 79 |
+
context, 'subgraph' always means a 'node-induced subgraph'. Edge-induced
|
| 80 |
+
subgraph isomorphisms are not directly supported, but one should be
|
| 81 |
+
able to perform the check by making use of nx.line_graph(). For
|
| 82 |
+
subgraphs which are not induced, the term 'monomorphism' is preferred
|
| 83 |
+
over 'isomorphism'.
|
| 84 |
+
|
| 85 |
+
Let G=(N,E) be a graph with a set of nodes N and set of edges E.
|
| 86 |
+
|
| 87 |
+
If G'=(N',E') is a subgraph, then:
|
| 88 |
+
N' is a subset of N
|
| 89 |
+
E' is a subset of E
|
| 90 |
+
|
| 91 |
+
If G'=(N',E') is a node-induced subgraph, then:
|
| 92 |
+
N' is a subset of N
|
| 93 |
+
E' is the subset of edges in E relating nodes in N'
|
| 94 |
+
|
| 95 |
+
If G'=(N',E') is an edge-induced subgraph, then:
|
| 96 |
+
N' is the subset of nodes in N related by edges in E'
|
| 97 |
+
E' is a subset of E
|
| 98 |
+
|
| 99 |
+
If G'=(N',E') is a monomorphism, then:
|
| 100 |
+
N' is a subset of N
|
| 101 |
+
E' is a subset of the set of edges in E relating nodes in N'
|
| 102 |
+
|
| 103 |
+
Note that if G' is a node-induced subgraph of G, then it is always a
|
| 104 |
+
subgraph monomorphism of G, but the opposite is not always true, as a
|
| 105 |
+
monomorphism can have fewer edges.
|
| 106 |
+
|
| 107 |
+
References
|
| 108 |
+
----------
|
| 109 |
+
[1] Luigi P. Cordella, Pasquale Foggia, Carlo Sansone, Mario Vento,
|
| 110 |
+
"A (Sub)Graph Isomorphism Algorithm for Matching Large Graphs",
|
| 111 |
+
IEEE Transactions on Pattern Analysis and Machine Intelligence,
|
| 112 |
+
vol. 26, no. 10, pp. 1367-1372, Oct., 2004.
|
| 113 |
+
http://ieeexplore.ieee.org/iel5/34/29305/01323804.pdf
|
| 114 |
+
|
| 115 |
+
[2] L. P. Cordella, P. Foggia, C. Sansone, M. Vento, "An Improved
|
| 116 |
+
Algorithm for Matching Large Graphs", 3rd IAPR-TC15 Workshop
|
| 117 |
+
on Graph-based Representations in Pattern Recognition, Cuen,
|
| 118 |
+
pp. 149-159, 2001.
|
| 119 |
+
https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs
|
| 120 |
+
|
| 121 |
+
See Also
|
| 122 |
+
--------
|
| 123 |
+
syntactic_feasibility(), semantic_feasibility()
|
| 124 |
+
|
| 125 |
+
Notes
|
| 126 |
+
-----
|
| 127 |
+
|
| 128 |
+
The implementation handles both directed and undirected graphs as well
|
| 129 |
+
as multigraphs.
|
| 130 |
+
|
| 131 |
+
In general, the subgraph isomorphism problem is NP-complete whereas the
|
| 132 |
+
graph isomorphism problem is most likely not NP-complete (although no
|
| 133 |
+
polynomial-time algorithm is known to exist).
|
| 134 |
+
|
| 135 |
+
"""
|
| 136 |
+
|
| 137 |
+
# This work was originally coded by Christopher Ellison
|
| 138 |
+
# as part of the Computational Mechanics Python (CMPy) project.
|
| 139 |
+
# James P. Crutchfield, principal investigator.
|
| 140 |
+
# Complexity Sciences Center and Physics Department, UC Davis.
|
| 141 |
+
|
| 142 |
+
import sys
|
| 143 |
+
|
| 144 |
+
__all__ = ["GraphMatcher", "DiGraphMatcher"]
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
class GraphMatcher:
|
| 148 |
+
"""Implementation of VF2 algorithm for matching undirected graphs.
|
| 149 |
+
|
| 150 |
+
Suitable for Graph and MultiGraph instances.
|
| 151 |
+
"""
|
| 152 |
+
|
| 153 |
+
def __init__(self, G1, G2):
|
| 154 |
+
"""Initialize GraphMatcher.
|
| 155 |
+
|
| 156 |
+
Parameters
|
| 157 |
+
----------
|
| 158 |
+
G1,G2: NetworkX Graph or MultiGraph instances.
|
| 159 |
+
The two graphs to check for isomorphism or monomorphism.
|
| 160 |
+
|
| 161 |
+
Examples
|
| 162 |
+
--------
|
| 163 |
+
To create a GraphMatcher which checks for syntactic feasibility:
|
| 164 |
+
|
| 165 |
+
>>> from networkx.algorithms import isomorphism
|
| 166 |
+
>>> G1 = nx.path_graph(4)
|
| 167 |
+
>>> G2 = nx.path_graph(4)
|
| 168 |
+
>>> GM = isomorphism.GraphMatcher(G1, G2)
|
| 169 |
+
"""
|
| 170 |
+
self.G1 = G1
|
| 171 |
+
self.G2 = G2
|
| 172 |
+
self.G1_nodes = set(G1.nodes())
|
| 173 |
+
self.G2_nodes = set(G2.nodes())
|
| 174 |
+
self.G2_node_order = {n: i for i, n in enumerate(G2)}
|
| 175 |
+
|
| 176 |
+
# Set recursion limit.
|
| 177 |
+
self.old_recursion_limit = sys.getrecursionlimit()
|
| 178 |
+
expected_max_recursion_level = len(self.G2)
|
| 179 |
+
if self.old_recursion_limit < 1.5 * expected_max_recursion_level:
|
| 180 |
+
# Give some breathing room.
|
| 181 |
+
sys.setrecursionlimit(int(1.5 * expected_max_recursion_level))
|
| 182 |
+
|
| 183 |
+
# Declare that we will be searching for a graph-graph isomorphism.
|
| 184 |
+
self.test = "graph"
|
| 185 |
+
|
| 186 |
+
# Initialize state
|
| 187 |
+
self.initialize()
|
| 188 |
+
|
| 189 |
+
def reset_recursion_limit(self):
|
| 190 |
+
"""Restores the recursion limit."""
|
| 191 |
+
# TODO:
|
| 192 |
+
# Currently, we use recursion and set the recursion level higher.
|
| 193 |
+
# It would be nice to restore the level, but because the
|
| 194 |
+
# (Di)GraphMatcher classes make use of cyclic references, garbage
|
| 195 |
+
# collection will never happen when we define __del__() to
|
| 196 |
+
# restore the recursion level. The result is a memory leak.
|
| 197 |
+
# So for now, we do not automatically restore the recursion level,
|
| 198 |
+
# and instead provide a method to do this manually. Eventually,
|
| 199 |
+
# we should turn this into a non-recursive implementation.
|
| 200 |
+
sys.setrecursionlimit(self.old_recursion_limit)
|
| 201 |
+
|
| 202 |
+
def candidate_pairs_iter(self):
|
| 203 |
+
"""Iterator over candidate pairs of nodes in G1 and G2."""
|
| 204 |
+
|
| 205 |
+
# All computations are done using the current state!
|
| 206 |
+
|
| 207 |
+
G1_nodes = self.G1_nodes
|
| 208 |
+
G2_nodes = self.G2_nodes
|
| 209 |
+
min_key = self.G2_node_order.__getitem__
|
| 210 |
+
|
| 211 |
+
# First we compute the inout-terminal sets.
|
| 212 |
+
T1_inout = [node for node in self.inout_1 if node not in self.core_1]
|
| 213 |
+
T2_inout = [node for node in self.inout_2 if node not in self.core_2]
|
| 214 |
+
|
| 215 |
+
# If T1_inout and T2_inout are both nonempty.
|
| 216 |
+
# P(s) = T1_inout x {min T2_inout}
|
| 217 |
+
if T1_inout and T2_inout:
|
| 218 |
+
node_2 = min(T2_inout, key=min_key)
|
| 219 |
+
for node_1 in T1_inout:
|
| 220 |
+
yield node_1, node_2
|
| 221 |
+
|
| 222 |
+
else:
|
| 223 |
+
# If T1_inout and T2_inout were both empty....
|
| 224 |
+
# P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
|
| 225 |
+
# if not (T1_inout or T2_inout): # as suggested by [2], incorrect
|
| 226 |
+
if 1: # as inferred from [1], correct
|
| 227 |
+
# First we determine the candidate node for G2
|
| 228 |
+
other_node = min(G2_nodes - set(self.core_2), key=min_key)
|
| 229 |
+
for node in self.G1:
|
| 230 |
+
if node not in self.core_1:
|
| 231 |
+
yield node, other_node
|
| 232 |
+
|
| 233 |
+
# For all other cases, we don't have any candidate pairs.
|
| 234 |
+
|
| 235 |
+
def initialize(self):
|
| 236 |
+
"""Reinitializes the state of the algorithm.
|
| 237 |
+
|
| 238 |
+
This method should be redefined if using something other than GMState.
|
| 239 |
+
If only subclassing GraphMatcher, a redefinition is not necessary.
|
| 240 |
+
|
| 241 |
+
"""
|
| 242 |
+
|
| 243 |
+
# core_1[n] contains the index of the node paired with n, which is m,
|
| 244 |
+
# provided n is in the mapping.
|
| 245 |
+
# core_2[m] contains the index of the node paired with m, which is n,
|
| 246 |
+
# provided m is in the mapping.
|
| 247 |
+
self.core_1 = {}
|
| 248 |
+
self.core_2 = {}
|
| 249 |
+
|
| 250 |
+
# See the paper for definitions of M_x and T_x^{y}
|
| 251 |
+
|
| 252 |
+
# inout_1[n] is non-zero if n is in M_1 or in T_1^{inout}
|
| 253 |
+
# inout_2[m] is non-zero if m is in M_2 or in T_2^{inout}
|
| 254 |
+
#
|
| 255 |
+
# The value stored is the depth of the SSR tree when the node became
|
| 256 |
+
# part of the corresponding set.
|
| 257 |
+
self.inout_1 = {}
|
| 258 |
+
self.inout_2 = {}
|
| 259 |
+
# Practically, these sets simply store the nodes in the subgraph.
|
| 260 |
+
|
| 261 |
+
self.state = GMState(self)
|
| 262 |
+
|
| 263 |
+
# Provide a convenient way to access the isomorphism mapping.
|
| 264 |
+
self.mapping = self.core_1.copy()
|
| 265 |
+
|
| 266 |
+
def is_isomorphic(self):
|
| 267 |
+
"""Returns True if G1 and G2 are isomorphic graphs."""
|
| 268 |
+
|
| 269 |
+
# Let's do two very quick checks!
|
| 270 |
+
# QUESTION: Should we call faster_graph_could_be_isomorphic(G1,G2)?
|
| 271 |
+
# For now, I just copy the code.
|
| 272 |
+
|
| 273 |
+
# Check global properties
|
| 274 |
+
if self.G1.order() != self.G2.order():
|
| 275 |
+
return False
|
| 276 |
+
|
| 277 |
+
# Check local properties
|
| 278 |
+
d1 = sorted(d for n, d in self.G1.degree())
|
| 279 |
+
d2 = sorted(d for n, d in self.G2.degree())
|
| 280 |
+
if d1 != d2:
|
| 281 |
+
return False
|
| 282 |
+
|
| 283 |
+
try:
|
| 284 |
+
x = next(self.isomorphisms_iter())
|
| 285 |
+
return True
|
| 286 |
+
except StopIteration:
|
| 287 |
+
return False
|
| 288 |
+
|
| 289 |
+
def isomorphisms_iter(self):
|
| 290 |
+
"""Generator over isomorphisms between G1 and G2."""
|
| 291 |
+
# Declare that we are looking for a graph-graph isomorphism.
|
| 292 |
+
self.test = "graph"
|
| 293 |
+
self.initialize()
|
| 294 |
+
yield from self.match()
|
| 295 |
+
|
| 296 |
+
def match(self):
|
| 297 |
+
"""Extends the isomorphism mapping.
|
| 298 |
+
|
| 299 |
+
This function is called recursively to determine if a complete
|
| 300 |
+
isomorphism can be found between G1 and G2. It cleans up the class
|
| 301 |
+
variables after each recursive call. If an isomorphism is found,
|
| 302 |
+
we yield the mapping.
|
| 303 |
+
|
| 304 |
+
"""
|
| 305 |
+
if len(self.core_1) == len(self.G2):
|
| 306 |
+
# Save the final mapping, otherwise garbage collection deletes it.
|
| 307 |
+
self.mapping = self.core_1.copy()
|
| 308 |
+
# The mapping is complete.
|
| 309 |
+
yield self.mapping
|
| 310 |
+
else:
|
| 311 |
+
for G1_node, G2_node in self.candidate_pairs_iter():
|
| 312 |
+
if self.syntactic_feasibility(G1_node, G2_node):
|
| 313 |
+
if self.semantic_feasibility(G1_node, G2_node):
|
| 314 |
+
# Recursive call, adding the feasible state.
|
| 315 |
+
newstate = self.state.__class__(self, G1_node, G2_node)
|
| 316 |
+
yield from self.match()
|
| 317 |
+
|
| 318 |
+
# restore data structures
|
| 319 |
+
newstate.restore()
|
| 320 |
+
|
| 321 |
+
def semantic_feasibility(self, G1_node, G2_node):
|
| 322 |
+
"""Returns True if adding (G1_node, G2_node) is semantically feasible.
|
| 323 |
+
|
| 324 |
+
The semantic feasibility function should return True if it is
|
| 325 |
+
acceptable to add the candidate pair (G1_node, G2_node) to the current
|
| 326 |
+
partial isomorphism mapping. The logic should focus on semantic
|
| 327 |
+
information contained in the edge data or a formalized node class.
|
| 328 |
+
|
| 329 |
+
By acceptable, we mean that the subsequent mapping can still become a
|
| 330 |
+
complete isomorphism mapping. Thus, if adding the candidate pair
|
| 331 |
+
definitely makes it so that the subsequent mapping cannot become a
|
| 332 |
+
complete isomorphism mapping, then this function must return False.
|
| 333 |
+
|
| 334 |
+
The default semantic feasibility function always returns True. The
|
| 335 |
+
effect is that semantics are not considered in the matching of G1
|
| 336 |
+
and G2.
|
| 337 |
+
|
| 338 |
+
The semantic checks might differ based on the what type of test is
|
| 339 |
+
being performed. A keyword description of the test is stored in
|
| 340 |
+
self.test. Here is a quick description of the currently implemented
|
| 341 |
+
tests::
|
| 342 |
+
|
| 343 |
+
test='graph'
|
| 344 |
+
Indicates that the graph matcher is looking for a graph-graph
|
| 345 |
+
isomorphism.
|
| 346 |
+
|
| 347 |
+
test='subgraph'
|
| 348 |
+
Indicates that the graph matcher is looking for a subgraph-graph
|
| 349 |
+
isomorphism such that a subgraph of G1 is isomorphic to G2.
|
| 350 |
+
|
| 351 |
+
test='mono'
|
| 352 |
+
Indicates that the graph matcher is looking for a subgraph-graph
|
| 353 |
+
monomorphism such that a subgraph of G1 is monomorphic to G2.
|
| 354 |
+
|
| 355 |
+
Any subclass which redefines semantic_feasibility() must maintain
|
| 356 |
+
the above form to keep the match() method functional. Implementations
|
| 357 |
+
should consider multigraphs.
|
| 358 |
+
"""
|
| 359 |
+
return True
|
| 360 |
+
|
| 361 |
+
def subgraph_is_isomorphic(self):
|
| 362 |
+
"""Returns True if a subgraph of G1 is isomorphic to G2."""
|
| 363 |
+
try:
|
| 364 |
+
x = next(self.subgraph_isomorphisms_iter())
|
| 365 |
+
return True
|
| 366 |
+
except StopIteration:
|
| 367 |
+
return False
|
| 368 |
+
|
| 369 |
+
def subgraph_is_monomorphic(self):
|
| 370 |
+
"""Returns True if a subgraph of G1 is monomorphic to G2."""
|
| 371 |
+
try:
|
| 372 |
+
x = next(self.subgraph_monomorphisms_iter())
|
| 373 |
+
return True
|
| 374 |
+
except StopIteration:
|
| 375 |
+
return False
|
| 376 |
+
|
| 377 |
+
# subgraph_is_isomorphic.__doc__ += "\n" + subgraph.replace('\n','\n'+indent)
|
| 378 |
+
|
| 379 |
+
def subgraph_isomorphisms_iter(self):
|
| 380 |
+
"""Generator over isomorphisms between a subgraph of G1 and G2."""
|
| 381 |
+
# Declare that we are looking for graph-subgraph isomorphism.
|
| 382 |
+
self.test = "subgraph"
|
| 383 |
+
self.initialize()
|
| 384 |
+
yield from self.match()
|
| 385 |
+
|
| 386 |
+
def subgraph_monomorphisms_iter(self):
|
| 387 |
+
"""Generator over monomorphisms between a subgraph of G1 and G2."""
|
| 388 |
+
# Declare that we are looking for graph-subgraph monomorphism.
|
| 389 |
+
self.test = "mono"
|
| 390 |
+
self.initialize()
|
| 391 |
+
yield from self.match()
|
| 392 |
+
|
| 393 |
+
# subgraph_isomorphisms_iter.__doc__ += "\n" + subgraph.replace('\n','\n'+indent)
|
| 394 |
+
|
| 395 |
+
def syntactic_feasibility(self, G1_node, G2_node):
|
| 396 |
+
"""Returns True if adding (G1_node, G2_node) is syntactically feasible.
|
| 397 |
+
|
| 398 |
+
This function returns True if it is adding the candidate pair
|
| 399 |
+
to the current partial isomorphism/monomorphism mapping is allowable.
|
| 400 |
+
The addition is allowable if the inclusion of the candidate pair does
|
| 401 |
+
not make it impossible for an isomorphism/monomorphism to be found.
|
| 402 |
+
"""
|
| 403 |
+
|
| 404 |
+
# The VF2 algorithm was designed to work with graphs having, at most,
|
| 405 |
+
# one edge connecting any two nodes. This is not the case when
|
| 406 |
+
# dealing with an MultiGraphs.
|
| 407 |
+
#
|
| 408 |
+
# Basically, when we test the look-ahead rules R_neighbor, we will
|
| 409 |
+
# make sure that the number of edges are checked. We also add
|
| 410 |
+
# a R_self check to verify that the number of selfloops is acceptable.
|
| 411 |
+
#
|
| 412 |
+
# Users might be comparing Graph instances with MultiGraph instances.
|
| 413 |
+
# So the generic GraphMatcher class must work with MultiGraphs.
|
| 414 |
+
# Care must be taken since the value in the innermost dictionary is a
|
| 415 |
+
# singlet for Graph instances. For MultiGraphs, the value in the
|
| 416 |
+
# innermost dictionary is a list.
|
| 417 |
+
|
| 418 |
+
###
|
| 419 |
+
# Test at each step to get a return value as soon as possible.
|
| 420 |
+
###
|
| 421 |
+
|
| 422 |
+
# Look ahead 0
|
| 423 |
+
|
| 424 |
+
# R_self
|
| 425 |
+
|
| 426 |
+
# The number of selfloops for G1_node must equal the number of
|
| 427 |
+
# self-loops for G2_node. Without this check, we would fail on
|
| 428 |
+
# R_neighbor at the next recursion level. But it is good to prune the
|
| 429 |
+
# search tree now.
|
| 430 |
+
|
| 431 |
+
if self.test == "mono":
|
| 432 |
+
if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges(
|
| 433 |
+
G2_node, G2_node
|
| 434 |
+
):
|
| 435 |
+
return False
|
| 436 |
+
else:
|
| 437 |
+
if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(
|
| 438 |
+
G2_node, G2_node
|
| 439 |
+
):
|
| 440 |
+
return False
|
| 441 |
+
|
| 442 |
+
# R_neighbor
|
| 443 |
+
|
| 444 |
+
# For each neighbor n' of n in the partial mapping, the corresponding
|
| 445 |
+
# node m' is a neighbor of m, and vice versa. Also, the number of
|
| 446 |
+
# edges must be equal.
|
| 447 |
+
if self.test != "mono":
|
| 448 |
+
for neighbor in self.G1[G1_node]:
|
| 449 |
+
if neighbor in self.core_1:
|
| 450 |
+
if self.core_1[neighbor] not in self.G2[G2_node]:
|
| 451 |
+
return False
|
| 452 |
+
elif self.G1.number_of_edges(
|
| 453 |
+
neighbor, G1_node
|
| 454 |
+
) != self.G2.number_of_edges(self.core_1[neighbor], G2_node):
|
| 455 |
+
return False
|
| 456 |
+
|
| 457 |
+
for neighbor in self.G2[G2_node]:
|
| 458 |
+
if neighbor in self.core_2:
|
| 459 |
+
if self.core_2[neighbor] not in self.G1[G1_node]:
|
| 460 |
+
return False
|
| 461 |
+
elif self.test == "mono":
|
| 462 |
+
if self.G1.number_of_edges(
|
| 463 |
+
self.core_2[neighbor], G1_node
|
| 464 |
+
) < self.G2.number_of_edges(neighbor, G2_node):
|
| 465 |
+
return False
|
| 466 |
+
else:
|
| 467 |
+
if self.G1.number_of_edges(
|
| 468 |
+
self.core_2[neighbor], G1_node
|
| 469 |
+
) != self.G2.number_of_edges(neighbor, G2_node):
|
| 470 |
+
return False
|
| 471 |
+
|
| 472 |
+
if self.test != "mono":
|
| 473 |
+
# Look ahead 1
|
| 474 |
+
|
| 475 |
+
# R_terminout
|
| 476 |
+
# The number of neighbors of n in T_1^{inout} is equal to the
|
| 477 |
+
# number of neighbors of m that are in T_2^{inout}, and vice versa.
|
| 478 |
+
num1 = 0
|
| 479 |
+
for neighbor in self.G1[G1_node]:
|
| 480 |
+
if (neighbor in self.inout_1) and (neighbor not in self.core_1):
|
| 481 |
+
num1 += 1
|
| 482 |
+
num2 = 0
|
| 483 |
+
for neighbor in self.G2[G2_node]:
|
| 484 |
+
if (neighbor in self.inout_2) and (neighbor not in self.core_2):
|
| 485 |
+
num2 += 1
|
| 486 |
+
if self.test == "graph":
|
| 487 |
+
if num1 != num2:
|
| 488 |
+
return False
|
| 489 |
+
else: # self.test == 'subgraph'
|
| 490 |
+
if not (num1 >= num2):
|
| 491 |
+
return False
|
| 492 |
+
|
| 493 |
+
# Look ahead 2
|
| 494 |
+
|
| 495 |
+
# R_new
|
| 496 |
+
|
| 497 |
+
# The number of neighbors of n that are neither in the core_1 nor
|
| 498 |
+
# T_1^{inout} is equal to the number of neighbors of m
|
| 499 |
+
# that are neither in core_2 nor T_2^{inout}.
|
| 500 |
+
num1 = 0
|
| 501 |
+
for neighbor in self.G1[G1_node]:
|
| 502 |
+
if neighbor not in self.inout_1:
|
| 503 |
+
num1 += 1
|
| 504 |
+
num2 = 0
|
| 505 |
+
for neighbor in self.G2[G2_node]:
|
| 506 |
+
if neighbor not in self.inout_2:
|
| 507 |
+
num2 += 1
|
| 508 |
+
if self.test == "graph":
|
| 509 |
+
if num1 != num2:
|
| 510 |
+
return False
|
| 511 |
+
else: # self.test == 'subgraph'
|
| 512 |
+
if not (num1 >= num2):
|
| 513 |
+
return False
|
| 514 |
+
|
| 515 |
+
# Otherwise, this node pair is syntactically feasible!
|
| 516 |
+
return True
|
| 517 |
+
|
| 518 |
+
|
| 519 |
+
class DiGraphMatcher(GraphMatcher):
|
| 520 |
+
"""Implementation of VF2 algorithm for matching directed graphs.
|
| 521 |
+
|
| 522 |
+
Suitable for DiGraph and MultiDiGraph instances.
|
| 523 |
+
"""
|
| 524 |
+
|
| 525 |
+
def __init__(self, G1, G2):
|
| 526 |
+
"""Initialize DiGraphMatcher.
|
| 527 |
+
|
| 528 |
+
G1 and G2 should be nx.Graph or nx.MultiGraph instances.
|
| 529 |
+
|
| 530 |
+
Examples
|
| 531 |
+
--------
|
| 532 |
+
To create a GraphMatcher which checks for syntactic feasibility:
|
| 533 |
+
|
| 534 |
+
>>> from networkx.algorithms import isomorphism
|
| 535 |
+
>>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
|
| 536 |
+
>>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
|
| 537 |
+
>>> DiGM = isomorphism.DiGraphMatcher(G1, G2)
|
| 538 |
+
"""
|
| 539 |
+
super().__init__(G1, G2)
|
| 540 |
+
|
| 541 |
+
def candidate_pairs_iter(self):
|
| 542 |
+
"""Iterator over candidate pairs of nodes in G1 and G2."""
|
| 543 |
+
|
| 544 |
+
# All computations are done using the current state!
|
| 545 |
+
|
| 546 |
+
G1_nodes = self.G1_nodes
|
| 547 |
+
G2_nodes = self.G2_nodes
|
| 548 |
+
min_key = self.G2_node_order.__getitem__
|
| 549 |
+
|
| 550 |
+
# First we compute the out-terminal sets.
|
| 551 |
+
T1_out = [node for node in self.out_1 if node not in self.core_1]
|
| 552 |
+
T2_out = [node for node in self.out_2 if node not in self.core_2]
|
| 553 |
+
|
| 554 |
+
# If T1_out and T2_out are both nonempty.
|
| 555 |
+
# P(s) = T1_out x {min T2_out}
|
| 556 |
+
if T1_out and T2_out:
|
| 557 |
+
node_2 = min(T2_out, key=min_key)
|
| 558 |
+
for node_1 in T1_out:
|
| 559 |
+
yield node_1, node_2
|
| 560 |
+
|
| 561 |
+
# If T1_out and T2_out were both empty....
|
| 562 |
+
# We compute the in-terminal sets.
|
| 563 |
+
|
| 564 |
+
# elif not (T1_out or T2_out): # as suggested by [2], incorrect
|
| 565 |
+
else: # as suggested by [1], correct
|
| 566 |
+
T1_in = [node for node in self.in_1 if node not in self.core_1]
|
| 567 |
+
T2_in = [node for node in self.in_2 if node not in self.core_2]
|
| 568 |
+
|
| 569 |
+
# If T1_in and T2_in are both nonempty.
|
| 570 |
+
# P(s) = T1_out x {min T2_out}
|
| 571 |
+
if T1_in and T2_in:
|
| 572 |
+
node_2 = min(T2_in, key=min_key)
|
| 573 |
+
for node_1 in T1_in:
|
| 574 |
+
yield node_1, node_2
|
| 575 |
+
|
| 576 |
+
# If all terminal sets are empty...
|
| 577 |
+
# P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
|
| 578 |
+
|
| 579 |
+
# elif not (T1_in or T2_in): # as suggested by [2], incorrect
|
| 580 |
+
else: # as inferred from [1], correct
|
| 581 |
+
node_2 = min(G2_nodes - set(self.core_2), key=min_key)
|
| 582 |
+
for node_1 in G1_nodes:
|
| 583 |
+
if node_1 not in self.core_1:
|
| 584 |
+
yield node_1, node_2
|
| 585 |
+
|
| 586 |
+
# For all other cases, we don't have any candidate pairs.
|
| 587 |
+
|
| 588 |
+
def initialize(self):
|
| 589 |
+
"""Reinitializes the state of the algorithm.
|
| 590 |
+
|
| 591 |
+
This method should be redefined if using something other than DiGMState.
|
| 592 |
+
If only subclassing GraphMatcher, a redefinition is not necessary.
|
| 593 |
+
"""
|
| 594 |
+
|
| 595 |
+
# core_1[n] contains the index of the node paired with n, which is m,
|
| 596 |
+
# provided n is in the mapping.
|
| 597 |
+
# core_2[m] contains the index of the node paired with m, which is n,
|
| 598 |
+
# provided m is in the mapping.
|
| 599 |
+
self.core_1 = {}
|
| 600 |
+
self.core_2 = {}
|
| 601 |
+
|
| 602 |
+
# See the paper for definitions of M_x and T_x^{y}
|
| 603 |
+
|
| 604 |
+
# in_1[n] is non-zero if n is in M_1 or in T_1^{in}
|
| 605 |
+
# out_1[n] is non-zero if n is in M_1 or in T_1^{out}
|
| 606 |
+
#
|
| 607 |
+
# in_2[m] is non-zero if m is in M_2 or in T_2^{in}
|
| 608 |
+
# out_2[m] is non-zero if m is in M_2 or in T_2^{out}
|
| 609 |
+
#
|
| 610 |
+
# The value stored is the depth of the search tree when the node became
|
| 611 |
+
# part of the corresponding set.
|
| 612 |
+
self.in_1 = {}
|
| 613 |
+
self.in_2 = {}
|
| 614 |
+
self.out_1 = {}
|
| 615 |
+
self.out_2 = {}
|
| 616 |
+
|
| 617 |
+
self.state = DiGMState(self)
|
| 618 |
+
|
| 619 |
+
# Provide a convenient way to access the isomorphism mapping.
|
| 620 |
+
self.mapping = self.core_1.copy()
|
| 621 |
+
|
| 622 |
+
    def syntactic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is syntactically feasible.

        This function returns True if adding the candidate pair
        to the current partial isomorphism/monomorphism mapping is allowable.
        The addition is allowable if the inclusion of the candidate pair does
        not make it impossible for an isomorphism/monomorphism to be found.

        Each rule below short-circuits with ``return False`` as soon as the
        pair is ruled out; reaching the end means the pair is feasible.
        ``self.test`` selects the matching mode: 'graph', 'subgraph', or
        'mono' (monomorphism).
        """

        # The VF2 algorithm was designed to work with graphs having, at most,
        # one edge connecting any two nodes.  This is not the case when
        # dealing with an MultiGraphs.
        #
        # Basically, when we test the look-ahead rules R_pred and R_succ, we
        # will make sure that the number of edges are checked.  We also add
        # a R_self check to verify that the number of selfloops is acceptable.

        # Users might be comparing DiGraph instances with MultiDiGraph
        # instances.  So the generic DiGraphMatcher class must work with
        # MultiDiGraphs.  Care must be taken since the value in the innermost
        # dictionary is a singlet for DiGraph instances.  For MultiDiGraphs,
        # the value in the innermost dictionary is a list.

        ###
        # Test at each step to get a return value as soon as possible.
        ###

        # Look ahead 0

        # R_self

        # The number of selfloops for G1_node must equal the number of
        # self-loops for G2_node.  Without this check, we would fail on R_pred
        # at the next recursion level.  This should prune the tree even further.
        # For monomorphisms G1 only needs *at least* as many selfloops.
        if self.test == "mono":
            if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges(
                G2_node, G2_node
            ):
                return False
        else:
            if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(
                G2_node, G2_node
            ):
                return False

        # R_pred

        # For each predecessor n' of n in the partial mapping, the
        # corresponding node m' is a predecessor of m, and vice versa.  Also,
        # the number of edges must be equal.
        # (The G1-side scan is skipped for monomorphisms: extra G1 edges
        # without G2 counterparts are allowed there.)
        if self.test != "mono":
            for predecessor in self.G1.pred[G1_node]:
                if predecessor in self.core_1:
                    if self.core_1[predecessor] not in self.G2.pred[G2_node]:
                        return False
                    elif self.G1.number_of_edges(
                        predecessor, G1_node
                    ) != self.G2.number_of_edges(self.core_1[predecessor], G2_node):
                        return False

        for predecessor in self.G2.pred[G2_node]:
            if predecessor in self.core_2:
                if self.core_2[predecessor] not in self.G1.pred[G1_node]:
                    return False
                elif self.test == "mono":
                    if self.G1.number_of_edges(
                        self.core_2[predecessor], G1_node
                    ) < self.G2.number_of_edges(predecessor, G2_node):
                        return False
                else:
                    if self.G1.number_of_edges(
                        self.core_2[predecessor], G1_node
                    ) != self.G2.number_of_edges(predecessor, G2_node):
                        return False

        # R_succ

        # For each successor n' of n in the partial mapping, the corresponding
        # node m' is a successor of m, and vice versa.  Also, the number of
        # edges must be equal.
        if self.test != "mono":
            for successor in self.G1[G1_node]:
                if successor in self.core_1:
                    if self.core_1[successor] not in self.G2[G2_node]:
                        return False
                    elif self.G1.number_of_edges(
                        G1_node, successor
                    ) != self.G2.number_of_edges(G2_node, self.core_1[successor]):
                        return False

        for successor in self.G2[G2_node]:
            if successor in self.core_2:
                if self.core_2[successor] not in self.G1[G1_node]:
                    return False
                elif self.test == "mono":
                    if self.G1.number_of_edges(
                        G1_node, self.core_2[successor]
                    ) < self.G2.number_of_edges(G2_node, successor):
                        return False
                else:
                    if self.G1.number_of_edges(
                        G1_node, self.core_2[successor]
                    ) != self.G2.number_of_edges(G2_node, successor):
                        return False

        # The look-ahead counting rules below do not apply to monomorphisms.
        if self.test != "mono":
            # Look ahead 1

            # R_termin
            # The number of predecessors of n that are in T_1^{in} is equal to the
            # number of predecessors of m that are in T_2^{in}.
            num1 = 0
            for predecessor in self.G1.pred[G1_node]:
                if (predecessor in self.in_1) and (predecessor not in self.core_1):
                    num1 += 1
            num2 = 0
            for predecessor in self.G2.pred[G2_node]:
                if (predecessor in self.in_2) and (predecessor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # The number of successors of n that are in T_1^{in} is equal to the
            # number of successors of m that are in T_2^{in}.
            num1 = 0
            for successor in self.G1[G1_node]:
                if (successor in self.in_1) and (successor not in self.core_1):
                    num1 += 1
            num2 = 0
            for successor in self.G2[G2_node]:
                if (successor in self.in_2) and (successor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # R_termout

            # The number of predecessors of n that are in T_1^{out} is equal to the
            # number of predecessors of m that are in T_2^{out}.
            num1 = 0
            for predecessor in self.G1.pred[G1_node]:
                if (predecessor in self.out_1) and (predecessor not in self.core_1):
                    num1 += 1
            num2 = 0
            for predecessor in self.G2.pred[G2_node]:
                if (predecessor in self.out_2) and (predecessor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # The number of successors of n that are in T_1^{out} is equal to the
            # number of successors of m that are in T_2^{out}.
            num1 = 0
            for successor in self.G1[G1_node]:
                if (successor in self.out_1) and (successor not in self.core_1):
                    num1 += 1
            num2 = 0
            for successor in self.G2[G2_node]:
                if (successor in self.out_2) and (successor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # Look ahead 2

            # R_new

            # The number of predecessors of n that are neither in the core_1 nor
            # T_1^{in} nor T_1^{out} is equal to the number of predecessors of m
            # that are neither in core_2 nor T_2^{in} nor T_2^{out}.
            num1 = 0
            for predecessor in self.G1.pred[G1_node]:
                if (predecessor not in self.in_1) and (predecessor not in self.out_1):
                    num1 += 1
            num2 = 0
            for predecessor in self.G2.pred[G2_node]:
                if (predecessor not in self.in_2) and (predecessor not in self.out_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # The number of successors of n that are neither in the core_1 nor
            # T_1^{in} nor T_1^{out} is equal to the number of successors of m
            # that are neither in core_2 nor T_2^{in} nor T_2^{out}.
            num1 = 0
            for successor in self.G1[G1_node]:
                if (successor not in self.in_1) and (successor not in self.out_1):
                    num1 += 1
            num2 = 0
            for successor in self.G2[G2_node]:
                if (successor not in self.in_2) and (successor not in self.out_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

        # Otherwise, this node pair is syntactically feasible!
        return True
class GMState:
    """Internal representation of state for the GraphMatcher class.

    Used only by GraphMatcher to store state-specific data: the node
    pair added at one level of the VF2 depth-first search, plus enough
    bookkeeping to undo the addition.  At most G2.order() of these
    objects exist in memory at a time.
    """

    def __init__(self, GM, G1_node=None, G2_node=None):
        """Initializes GMState object.

        Pass in the GraphMatcher to which this GMState belongs and the
        new node pair that will be added to the GraphMatcher's current
        isomorphism mapping.
        """
        self.GM = GM

        # Last stored node pair (stays None for the root/reset state).
        self.G1_node = None
        self.G2_node = None
        self.depth = len(GM.core_1)

        if G1_node is None or G2_node is None:
            # Root state: reset the matcher's shared vectors.
            GM.core_1 = {}
            GM.core_2 = {}
            GM.inout_1 = {}
            GM.inout_2 = {}

        # Watch out! G1_node == 0 is a real node, hence explicit None tests.
        if G1_node is not None and G2_node is not None:
            # Record the pair in both directions of the mapping.
            GM.core_1[G1_node] = G2_node
            GM.core_2[G2_node] = G1_node
            self.G1_node = G1_node
            self.G2_node = G2_node

            # Depth of the search tree after the addition; entries added
            # below are tagged with it so restore() can remove them.
            self.depth = len(GM.core_1)

            # Mark the newly mapped nodes themselves (only if not already
            # marked at a shallower depth)...
            GM.inout_1.setdefault(G1_node, self.depth)
            GM.inout_2.setdefault(G2_node, self.depth)

            # ...then every unmapped neighbor of a mapped node: T_1^{inout}.
            frontier = {
                nbr
                for mapped in GM.core_1
                for nbr in GM.G1[mapped]
                if nbr not in GM.core_1
            }
            for nbr in frontier:
                GM.inout_1.setdefault(nbr, self.depth)

            # Same for T_2^{inout}.
            frontier = {
                nbr
                for mapped in GM.core_2
                for nbr in GM.G2[mapped]
                if nbr not in GM.core_2
            }
            for nbr in frontier:
                GM.inout_2.setdefault(nbr, self.depth)

    def restore(self):
        """Deletes the GMState object and restores the class variables."""
        # Remove the stored pair from the core vectors.
        # Watch out! G1_node == 0 is a real node, hence explicit None tests.
        if self.G1_node is not None and self.G2_node is not None:
            del self.GM.core_1[self.G1_node]
            del self.GM.core_2[self.G2_node]

        # Revert the inout vectors: drop every entry tagged at this depth.
        for vector in (self.GM.inout_1, self.GM.inout_2):
            for node in [n for n, d in vector.items() if d == self.depth]:
                del vector[node]
|
| 933 |
+
class DiGMState:
    """Internal representation of state for the DiGraphMatcher class.

    Used only by DiGraphMatcher to store state-specific data: the node
    pair added at one level of the VF2 depth-first search, plus enough
    bookkeeping to undo the addition.  At most G2.order() of these
    objects exist in memory at a time.
    """

    def __init__(self, GM, G1_node=None, G2_node=None):
        """Initializes DiGMState object.

        Pass in the DiGraphMatcher to which this DiGMState belongs and
        the new node pair that will be added to the matcher's current
        isomorphism mapping.
        """
        self.GM = GM

        # Last stored node pair (stays None for the root/reset state).
        self.G1_node = None
        self.G2_node = None
        self.depth = len(GM.core_1)

        if G1_node is None or G2_node is None:
            # Root state: reset the matcher's shared vectors.
            GM.core_1 = {}
            GM.core_2 = {}
            GM.in_1 = {}
            GM.in_2 = {}
            GM.out_1 = {}
            GM.out_2 = {}

        # Watch out! G1_node == 0 is a real node, hence explicit None tests.
        if G1_node is not None and G2_node is not None:
            # Record the pair in both directions of the mapping.
            GM.core_1[G1_node] = G2_node
            GM.core_2[G2_node] = G1_node
            self.G1_node = G1_node
            self.G2_node = G2_node

            # Depth of the search tree after the addition; entries added
            # below are tagged with it so restore() can remove them.
            self.depth = len(GM.core_1)

            # Mark the newly mapped nodes themselves (only if not already
            # marked at a shallower depth)...
            for vector in (GM.in_1, GM.out_1):
                vector.setdefault(G1_node, self.depth)
            for vector in (GM.in_2, GM.out_2):
                vector.setdefault(G2_node, self.depth)

            # ...then the terminal sets.  T_1^{in}: unmapped predecessors
            # of mapped G1 nodes.
            frontier = {
                pred
                for mapped in GM.core_1
                for pred in GM.G1.predecessors(mapped)
                if pred not in GM.core_1
            }
            for node in frontier:
                GM.in_1.setdefault(node, self.depth)

            # T_2^{in}
            frontier = {
                pred
                for mapped in GM.core_2
                for pred in GM.G2.predecessors(mapped)
                if pred not in GM.core_2
            }
            for node in frontier:
                GM.in_2.setdefault(node, self.depth)

            # T_1^{out}: unmapped successors of mapped G1 nodes.
            frontier = {
                succ
                for mapped in GM.core_1
                for succ in GM.G1.successors(mapped)
                if succ not in GM.core_1
            }
            for node in frontier:
                GM.out_1.setdefault(node, self.depth)

            # T_2^{out}
            frontier = {
                succ
                for mapped in GM.core_2
                for succ in GM.G2.successors(mapped)
                if succ not in GM.core_2
            }
            for node in frontier:
                GM.out_2.setdefault(node, self.depth)

    def restore(self):
        """Deletes the DiGMState object and restores the class variables."""
        # Remove the stored pair from the core vectors.
        # Watch out! G1_node == 0 is a real node, hence explicit None tests.
        if self.G1_node is not None and self.G2_node is not None:
            del self.GM.core_1[self.G1_node]
            del self.GM.core_2[self.G2_node]

        # Revert the four terminal-set vectors: drop every entry tagged
        # at this depth.
        for vector in (self.GM.in_1, self.GM.in_2, self.GM.out_1, self.GM.out_2):
            for node in [n for n, d in vector.items() if d == self.depth]:
                del vector[node]
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/__init__.py
ADDED
|
File without changes
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphism.cpython-311.pyc
ADDED
|
Binary file (4.09 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphvf2.cpython-311.pyc
ADDED
|
Binary file (19.1 kB). View file
|
|
|