Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/distance_measures.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/ramsey.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/treewidth.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/vertex_cover.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/distance_measures.py +141 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/maxcut.py +113 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/steinertree.py +220 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/__init__.py +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_connectivity.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_steinertree.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_vertex_cover.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_connectivity.py +199 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_dominating_set.py +78 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/treewidth.py +252 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/__pycache__/basic.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/__pycache__/cluster.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/__pycache__/edgelist.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/__pycache__/matrix.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/__pycache__/projection.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/basic.py +321 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/extendability.py +105 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/matching.py +589 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/redundancy.py +111 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/spectral.py +68 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/betweenness.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/betweenness_subset.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/degree_alg.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/eigenvector.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/flow_matrix.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/group.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/harmonic.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/load.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/reaching.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/subgraph_alg.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/centrality.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/kclique.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/louvain.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/lukes.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/quality.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/community_utils.py +29 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/kclique.py +79 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/kernighan_lin.py +139 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/label_propagation.py +337 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_kernighan_lin.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_label_propagation.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_lukes.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_modularity_max.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/test_asyn_fluid.py +129 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/test_centrality.py +84 -0
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/distance_measures.cpython-311.pyc
ADDED
|
Binary file (6.11 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/ramsey.cpython-311.pyc
ADDED
|
Binary file (2.54 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/treewidth.cpython-311.pyc
ADDED
|
Binary file (9.61 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/vertex_cover.cpython-311.pyc
ADDED
|
Binary file (3.44 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/distance_measures.py
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Distance measures approximated metrics."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.utils.decorators import py_random_state
|
| 5 |
+
|
| 6 |
+
__all__ = ["diameter"]
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@py_random_state(1)
@nx._dispatch(name="approximate_diameter")
def diameter(G, seed=None):
    """Return a lower bound on the diameter of the graph ``G``.

    For an undirected graph the `2-sweep` heuristic [1]_ is used: the node
    farthest from a random source is found and its eccentricity reported.
    For a directed graph the `2-dSweep` variant [2]_ is used instead: from a
    random source a forward and a backward BFS select two far nodes, and the
    best of their (opposite-direction) eccentricities is returned.  Either
    way the running time is linear in the size of ``G``.

    Parameters
    ----------
    G : NetworkX graph

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    d : integer
        Lower bound on the diameter of ``G``.

    Raises
    ------
    NetworkXError
        If the graph is empty, or
        if the graph is undirected and not connected, or
        if the graph is directed and not strongly connected.

    See Also
    --------
    networkx.algorithms.distance_measures.diameter

    References
    ----------
    .. [1] Magnien, Clémence, Matthieu Latapy, and Michel Habib.
       *Fast computation of empirically tight bounds for the diameter of
       massive graphs.* Journal of Experimental Algorithmics (JEA), 2009.
       https://arxiv.org/pdf/0904.2728.pdf
    .. [2] Crescenzi, Pierluigi, Roberto Grossi, Leonardo Lanzi, and Andrea
       Marino. *On computing the diameter of real-world directed (weighted)
       graphs.* International Symposium on Experimental Algorithms.
       Springer, Berlin, Heidelberg, 2012.
       https://courses.cs.ut.ee/MTAT.03.238/2014_fall/uploads/Main/diameter.pdf
    """
    if not G:
        raise nx.NetworkXError("Expected non-empty NetworkX graph!")
    if G.number_of_nodes() == 1:
        # A single node trivially has diameter zero.
        return 0
    # Dispatch on directedness; each helper validates connectivity itself.
    sweep = _two_sweep_directed if G.is_directed() else _two_sweep_undirected
    return sweep(G, seed)
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def _two_sweep_undirected(G, seed):
    """Lower-bound the diameter of an undirected graph with one 2-sweep.

    A random source is chosen, the node visited last by a BFS from it (i.e.
    one of the farthest nodes) is selected, and that node's eccentricity is
    returned.

    ``G`` is a NetworkX undirected graph.

    .. note::

        ``seed`` is a random.Random or numpy.random.RandomState instance
    """
    start = seed.choice(list(G))
    # BFS distances from the random source; keys appear in nondecreasing
    # distance order.
    dist = nx.shortest_path_length(G, start)
    if len(dist) != len(G):
        # Some node was never reached from the source.
        raise nx.NetworkXError("Graph not connected.")
    # The last key yielded is (one of) the farthest node(s) from the source.
    farthest = next(reversed(dist))
    return nx.eccentricity(G, farthest)
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def _two_sweep_directed(G, seed):
    """Lower-bound the diameter of a directed graph with the 2-dSweep heuristic.

    From a random source ``s``:

    1. a forward BFS selects a node ``a_1`` at maximum distance from ``s``;
    2. a backward BFS selects a node ``a_2`` at maximum distance from ``s``;
    3. the result is the larger of the backward eccentricity of ``a_1`` and
       the forward eccentricity of ``a_2``.

    ``G`` is a NetworkX directed graph.

    .. note::

        ``seed`` is a random.Random or numpy.random.RandomState instance
    """
    # Digraph with every edge flipped, used for all backward computations.
    G_rev = G.reverse()
    start = seed.choice(list(G))
    fwd = nx.shortest_path_length(G, start)
    bwd = nx.shortest_path_length(G_rev, start)
    # Strong connectivity requires the source to reach, and be reached by,
    # every node.
    n = len(G)
    if len(fwd) != n or len(bwd) != n:
        raise nx.NetworkXError("DiGraph not strongly connected.")
    # The last key yielded by each BFS is a farthest node in that direction.
    a_1 = next(reversed(fwd))
    a_2 = next(reversed(bwd))
    return max(nx.eccentricity(G_rev, a_1), nx.eccentricity(G, a_2))
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/maxcut.py
ADDED
|
@@ -0,0 +1,113 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import networkx as nx
|
| 2 |
+
from networkx.utils.decorators import not_implemented_for, py_random_state
|
| 3 |
+
|
| 4 |
+
__all__ = ["randomized_partitioning", "one_exchange"]
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
@not_implemented_for("directed", "multigraph")
@py_random_state(1)
@nx._dispatch(edge_attrs="weight")
def randomized_partitioning(G, seed=None, p=0.5, weight=None):
    """Compute a random partitioning of the graph nodes and its cut value.

    Every node independently joins the first side of the partition with
    probability ``p``; the function returns the resulting cut value (total
    weight of edges crossing between the two sides) and the partition.

    Parameters
    ----------
    G : NetworkX graph

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    p : scalar
        Probability for each node to be part of the first partition.
        Should be in [0,1]

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    cut_size : scalar
        Value of the cut.

    partition : pair of node sets
        A partitioning of the nodes defining the cut.
    """
    first_side = set()
    for node in G.nodes():
        # One RNG draw per node, in node order.
        if seed.random() < p:
            first_side.add(node)
    value = nx.algorithms.cut_size(G, first_side, weight=weight)
    return value, (first_side, G.nodes - first_side)
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def _swap_node_partition(cut, node):
|
| 49 |
+
return cut - {node} if node in cut else cut.union({node})
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
@not_implemented_for("directed", "multigraph")
@py_random_state(2)
@nx._dispatch(edge_attrs="weight")
def one_exchange(G, initial_cut=None, seed=None, weight=None):
    """Compute a partitioning of the graphs nodes and the corresponding cut value.

    Greedy one-exchange local search for maximum cut: starting from
    ``initial_cut`` (empty by default), repeatedly toggle the single node
    whose move raises the cut value the most, and stop as soon as no toggle
    improves the cut.

    Parameters
    ----------
    G : networkx Graph
        Graph to find a maximum cut for.

    initial_cut : set
        Cut to use as a starting point. If not supplied the algorithm
        starts with an empty cut.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    cut_value : scalar
        Value of the maximum cut.

    partition : pair of node sets
        A partitioning of the nodes that defines a maximum cut.
    """
    cut = set() if initial_cut is None else set(initial_cut)
    best_size = nx.algorithms.cut_size(G, cut, weight=weight)

    improved = True
    while improved:
        improved = False
        candidates = list(G.nodes())
        # Shuffle so that ties in the following max are broken randomly.
        seed.shuffle(candidates)
        chosen = max(
            candidates,
            key=lambda v: nx.algorithms.cut_size(
                G, _swap_node_partition(cut, v), weight=weight
            ),
            default=None,
        )
        swapped = _swap_node_partition(cut, chosen)
        swapped_size = nx.algorithms.cut_size(G, swapped, weight=weight)
        if swapped_size > best_size:
            cut, best_size = swapped, swapped_size
            improved = True

    return best_size, (cut, G.nodes - cut)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/steinertree.py
ADDED
|
@@ -0,0 +1,220 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from itertools import chain
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.utils import not_implemented_for, pairwise
|
| 5 |
+
|
| 6 |
+
__all__ = ["metric_closure", "steiner_tree"]
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@not_implemented_for("directed")
@nx._dispatch(edge_attrs="weight")
def metric_closure(G, weight="weight"):
    """Return the metric closure of a graph.

    The metric closure of a graph *G* is the complete graph in which each
    edge is weighted by the shortest path distance between its endpoints
    in *G*.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    NetworkX graph
        Metric closure of the graph `G`.

    """
    closure = nx.Graph()
    remaining = set(G)
    sssp_iter = nx.all_pairs_dijkstra(G, weight=weight)

    # The first source doubles as a connectivity check: if its Dijkstra run
    # misses any node, G cannot be connected.
    first, (dist, paths) = next(sssp_iter)
    if remaining - set(dist):
        msg = "G is not a connected graph. metric_closure is not defined."
        raise nx.NetworkXError(msg)
    remaining.discard(first)
    for other in remaining:
        closure.add_edge(first, other, distance=dist[other], path=paths[other])

    # Remaining sources: pair each one only with nodes not yet used as a
    # source, so every unordered pair is added exactly once.
    for src, (dist, paths) in sssp_iter:
        remaining.discard(src)
        for other in remaining:
            closure.add_edge(src, other, distance=dist[other], path=paths[other])

    return closure
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def _mehlhorn_steiner_tree(G, terminal_nodes, weight):
    """Approximate a minimum Steiner tree using Mehlhorn's 1988 algorithm.

    Mehlhorn speeds up Kou et al.'s approach by replacing the full metric
    closure with a smaller complete graph over the terminals only, built
    from each node's nearest terminal.

    Parameters
    ----------
    G : NetworkX graph
    terminal_nodes : iterable of nodes
        The nodes the tree must span.
    weight : string
        Edge attribute to use as weight; missing attributes default to 1.

    Returns
    -------
    EdgeView of the edges of the approximate Steiner tree in ``G``.
    """
    # For every node, the weighted distance to -- and path from -- its
    # nearest terminal.  (Fix: the original used unweighted hop counts,
    # ``len(paths[v]) - 1``, and ignored ``weight`` here, which skews the
    # terminal-graph weights on weighted inputs.)
    distances, paths = nx.multi_source_dijkstra(G, terminal_nodes, weight=weight)

    d_1 = {}  # d_1[(v, s[v])]: weighted distance from v to its nearest terminal
    s = {}  # s[v]: the terminal nearest to v
    for v in G.nodes():
        s[v] = paths[v][0]
        d_1[(v, s[v])] = distances[v]

    # G1-G4 names match those from the Mehlhorn 1988 paper.
    G_1_prime = nx.Graph()
    for u, v, data in G.edges(data=True):
        su, sv = s[u], s[v]
        weight_here = d_1[(u, su)] + data.get(weight, 1) + d_1[(v, sv)]
        # Edges of G_1_prime always store their weight under the literal
        # "weight" key (see add_edge below), so the lookup must use that
        # key as well.  (Fix: the original indexed with the ``weight``
        # variable, raising KeyError whenever weight != "weight".)
        if not G_1_prime.has_edge(su, sv):
            G_1_prime.add_edge(su, sv, weight=weight_here)
        else:
            new_weight = min(weight_here, G_1_prime[su][sv]["weight"])
            G_1_prime.add_edge(su, sv, weight=new_weight)

    # MST over the terminal graph.
    G_2 = nx.minimum_spanning_edges(G_1_prime, data=True)

    # Expand every MST edge back into a shortest path of G.
    G_3 = nx.Graph()
    for u, v, d in G_2:
        path = nx.shortest_path(G, u, v, weight)
        for n1, n2 in pairwise(path):
            G_3.add_edge(n1, n2)

    G_3_mst = list(nx.minimum_spanning_edges(G_3, data=False))
    if G.is_multigraph():
        # Keep the minimum-weight parallel edge for each selected pair.
        G_3_mst = (
            (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight])) for u, v in G_3_mst
        )
    G_4 = G.edge_subgraph(G_3_mst).copy()
    _remove_nonterminal_leaves(G_4, terminal_nodes)
    return G_4.edges()
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
def _kou_steiner_tree(G, terminal_nodes, weight):
    """Approximate a minimum Steiner tree with the Kou-Markowsky-Berman algorithm.

    Parameters
    ----------
    G : NetworkX graph
    terminal_nodes : iterable of nodes
        The nodes the tree must span.
    weight : string
        Edge attribute to use as weight; missing attributes default to 1.

    Returns
    -------
    EdgeView of the edges of the approximate Steiner tree in ``G``.
    """
    # H is the subgraph induced by terminal_nodes in the metric closure M of G.
    M = metric_closure(G, weight=weight)
    H = M.subgraph(terminal_nodes)

    # Use the 'distance' attribute of each edge provided by M.
    mst_edges = nx.minimum_spanning_edges(H, weight="distance", data=True)

    # Create an iterator over each edge in each shortest path; repeats are okay
    mst_all_edges = chain.from_iterable(pairwise(d["path"]) for u, v, d in mst_edges)
    if G.is_multigraph():
        mst_all_edges = (
            (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight]))
            for u, v in mst_all_edges
        )

    # Find the MST again, over this new set of edges.  Use the
    # caller-supplied weight attribute here; the original hard-coded
    # "weight", silently using unit weights for custom weight keys.
    G_S = G.edge_subgraph(mst_all_edges)
    T_S = nx.minimum_spanning_edges(G_S, weight=weight, data=False)

    # Leaf nodes that are not terminal might still remain; remove them here
    T_H = G.edge_subgraph(T_S).copy()
    _remove_nonterminal_leaves(T_H, terminal_nodes)

    return T_H.edges()
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def _remove_nonterminal_leaves(G, terminals):
    """Remove all leaf nodes of ``G`` that are not terminals, in place.

    Pruning must repeat until a fixed point: deleting a non-terminal leaf
    can turn its neighbor into a new non-terminal leaf (e.g. a dangling
    chain of non-terminals).  The original single pass over
    ``list(G.nodes)`` could leave such chains behind.

    Parameters
    ----------
    G : NetworkX graph
        Modified in place.
    terminals : iterable of nodes
        Nodes that must never be pruned.
    """
    terminals_set = set(terminals)
    while True:
        prunable = [n for n in G if n not in terminals_set and G.degree(n) == 1]
        if not prunable:
            return
        G.remove_nodes_from(prunable)
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
# Dispatch table mapping the public ``method`` keyword of ``steiner_tree``
# to the private implementation functions.
ALGORITHMS = {
    "kou": _kou_steiner_tree,
    "mehlhorn": _mehlhorn_steiner_tree,
}
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
@not_implemented_for("directed")
@nx._dispatch(edge_attrs="weight")
def steiner_tree(G, terminal_nodes, weight="weight", method=None):
    r"""Return an approximation to the minimum Steiner tree of a graph.

    The minimum Steiner tree of `G` w.r.t a set of `terminal_nodes` (also *S*)
    is a tree within `G` that spans those nodes and has minimum size (sum of
    edge weights) among all such trees.

    The approximation algorithm is specified with the `method` keyword
    argument.  Each available algorithm produces a tree whose weight is
    within a ``(2 - (2 / l))`` factor of the weight of the optimal Steiner
    tree, where ``l`` is the minimum number of leaf nodes across all
    possible Steiner trees.

    * ``"kou"`` [2]_ (runtime $O(|S| |V|^2)$) computes the minimum spanning
      tree of the subgraph of the metric closure of *G* induced by the
      terminal nodes, where the metric closure of *G* is the complete graph
      in which each edge is weighted by the shortest path distance between
      the nodes in *G*.

    * ``"mehlhorn"`` [3]_ (runtime $O(|E|+|V|\log|V|)$) modifies Kou et
      al.'s algorithm, beginning by finding the closest terminal node for
      each non-terminal.  This data is used to create a complete graph
      containing only the terminal nodes, in which edge is weighted with the
      shortest path distance between them.  The algorithm then proceeds in
      the same way as Kou et al..

    Parameters
    ----------
    G : NetworkX graph

    terminal_nodes : list
        A list of terminal nodes for which minimum steiner tree is
        to be found.

    weight : string (default = 'weight')
        Use the edge attribute specified by this string as the edge weight.
        Any edge attribute not present defaults to 1.

    method : string, optional (default = 'kou')
        The algorithm to use to approximate the Steiner tree.
        Supported options: 'kou', 'mehlhorn'.
        Other inputs produce a ValueError.

    Returns
    -------
    NetworkX graph
        Approximation to the minimum steiner tree of `G` induced by
        `terminal_nodes` .

    Notes
    -----
    For multigraphs, the edge between two nodes with minimum weight is the
    edge put into the Steiner tree.

    References
    ----------
    .. [1] Steiner_tree_problem on Wikipedia.
       https://en.wikipedia.org/wiki/Steiner_tree_problem
    .. [2] Kou, L., G. Markowsky, and L. Berman. 1981.
       ‘A Fast Algorithm for Steiner Trees’.
       Acta Informatica 15 (2): 141–45.
       https://doi.org/10.1007/BF00288961.
    .. [3] Mehlhorn, Kurt. 1988.
       ‘A Faster Approximation Algorithm for the Steiner Problem in Graphs’.
       Information Processing Letters 27 (3): 125–28.
       https://doi.org/10.1016/0020-0190(88)90066-X.
    """
    if method is None:
        import warnings

        warnings.warn(
            "steiner_tree will change default method from 'kou' to 'mehlhorn' "
            "in version 3.2.\nSet the `method` kwarg to remove this warning.",
            FutureWarning,
            stacklevel=4,
        )
        method = "kou"

    try:
        algorithm = ALGORITHMS[method]
    except KeyError as err:
        raise ValueError(f"{method} is not a valid choice for an algorithm.") from err

    tree_edges = algorithm(G, terminal_nodes, weight)
    if G.is_multigraph():
        # For multigraphs, attach the minimal-weight key to every (u, v) pair.
        tree_edges = (
            (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight]))
            for u, v in tree_edges
        )
    return G.edge_subgraph(tree_edges)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/__init__.py
ADDED
|
File without changes
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_connectivity.cpython-311.pyc
ADDED
|
Binary file (11.5 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_steinertree.cpython-311.pyc
ADDED
|
Binary file (10.2 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_vertex_cover.cpython-311.pyc
ADDED
|
Binary file (5.22 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_connectivity.py
ADDED
|
@@ -0,0 +1,199 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.algorithms import approximation as approx
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def test_global_node_connectivity():
|
| 8 |
+
# Figure 1 chapter on Connectivity
|
| 9 |
+
G = nx.Graph()
|
| 10 |
+
G.add_edges_from(
|
| 11 |
+
[
|
| 12 |
+
(1, 2),
|
| 13 |
+
(1, 3),
|
| 14 |
+
(1, 4),
|
| 15 |
+
(1, 5),
|
| 16 |
+
(2, 3),
|
| 17 |
+
(2, 6),
|
| 18 |
+
(3, 4),
|
| 19 |
+
(3, 6),
|
| 20 |
+
(4, 6),
|
| 21 |
+
(4, 7),
|
| 22 |
+
(5, 7),
|
| 23 |
+
(6, 8),
|
| 24 |
+
(6, 9),
|
| 25 |
+
(7, 8),
|
| 26 |
+
(7, 10),
|
| 27 |
+
(8, 11),
|
| 28 |
+
(9, 10),
|
| 29 |
+
(9, 11),
|
| 30 |
+
(10, 11),
|
| 31 |
+
]
|
| 32 |
+
)
|
| 33 |
+
assert 2 == approx.local_node_connectivity(G, 1, 11)
|
| 34 |
+
assert 2 == approx.node_connectivity(G)
|
| 35 |
+
assert 2 == approx.node_connectivity(G, 1, 11)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def test_white_harary1():
|
| 39 |
+
# Figure 1b white and harary (2001)
|
| 40 |
+
# A graph with high adhesion (edge connectivity) and low cohesion
|
| 41 |
+
# (node connectivity)
|
| 42 |
+
G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4))
|
| 43 |
+
G.remove_node(7)
|
| 44 |
+
for i in range(4, 7):
|
| 45 |
+
G.add_edge(0, i)
|
| 46 |
+
G = nx.disjoint_union(G, nx.complete_graph(4))
|
| 47 |
+
G.remove_node(G.order() - 1)
|
| 48 |
+
for i in range(7, 10):
|
| 49 |
+
G.add_edge(0, i)
|
| 50 |
+
assert 1 == approx.node_connectivity(G)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def test_complete_graphs():
|
| 54 |
+
for n in range(5, 25, 5):
|
| 55 |
+
G = nx.complete_graph(n)
|
| 56 |
+
assert n - 1 == approx.node_connectivity(G)
|
| 57 |
+
assert n - 1 == approx.node_connectivity(G, 0, 3)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def test_empty_graphs():
|
| 61 |
+
for k in range(5, 25, 5):
|
| 62 |
+
G = nx.empty_graph(k)
|
| 63 |
+
assert 0 == approx.node_connectivity(G)
|
| 64 |
+
assert 0 == approx.node_connectivity(G, 0, 3)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def test_petersen():
|
| 68 |
+
G = nx.petersen_graph()
|
| 69 |
+
assert 3 == approx.node_connectivity(G)
|
| 70 |
+
assert 3 == approx.node_connectivity(G, 0, 5)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
# Approximation fails with tutte graph
|
| 74 |
+
# def test_tutte():
|
| 75 |
+
# G = nx.tutte_graph()
|
| 76 |
+
# assert_equal(3, approx.node_connectivity(G))
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
def test_dodecahedral():
|
| 80 |
+
G = nx.dodecahedral_graph()
|
| 81 |
+
assert 3 == approx.node_connectivity(G)
|
| 82 |
+
assert 3 == approx.node_connectivity(G, 0, 5)
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def test_octahedral():
|
| 86 |
+
G = nx.octahedral_graph()
|
| 87 |
+
assert 4 == approx.node_connectivity(G)
|
| 88 |
+
assert 4 == approx.node_connectivity(G, 0, 5)
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
# Approximation can fail with icosahedral graph depending
|
| 92 |
+
# on iteration order.
|
| 93 |
+
# def test_icosahedral():
|
| 94 |
+
# G=nx.icosahedral_graph()
|
| 95 |
+
# assert_equal(5, approx.node_connectivity(G))
|
| 96 |
+
# assert_equal(5, approx.node_connectivity(G, 0, 5))
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def test_only_source():
|
| 100 |
+
G = nx.complete_graph(5)
|
| 101 |
+
pytest.raises(nx.NetworkXError, approx.node_connectivity, G, s=0)
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def test_only_target():
|
| 105 |
+
G = nx.complete_graph(5)
|
| 106 |
+
pytest.raises(nx.NetworkXError, approx.node_connectivity, G, t=0)
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
def test_missing_source():
|
| 110 |
+
G = nx.path_graph(4)
|
| 111 |
+
pytest.raises(nx.NetworkXError, approx.node_connectivity, G, 10, 1)
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def test_missing_target():
|
| 115 |
+
G = nx.path_graph(4)
|
| 116 |
+
pytest.raises(nx.NetworkXError, approx.node_connectivity, G, 1, 10)
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def test_source_equals_target():
|
| 120 |
+
G = nx.complete_graph(5)
|
| 121 |
+
pytest.raises(nx.NetworkXError, approx.local_node_connectivity, G, 0, 0)
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
def test_directed_node_connectivity():
|
| 125 |
+
G = nx.cycle_graph(10, create_using=nx.DiGraph()) # only one direction
|
| 126 |
+
D = nx.cycle_graph(10).to_directed() # 2 reciprocal edges
|
| 127 |
+
assert 1 == approx.node_connectivity(G)
|
| 128 |
+
assert 1 == approx.node_connectivity(G, 1, 4)
|
| 129 |
+
assert 2 == approx.node_connectivity(D)
|
| 130 |
+
assert 2 == approx.node_connectivity(D, 1, 4)
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
class TestAllPairsNodeConnectivityApprox:
|
| 134 |
+
@classmethod
|
| 135 |
+
def setup_class(cls):
|
| 136 |
+
cls.path = nx.path_graph(7)
|
| 137 |
+
cls.directed_path = nx.path_graph(7, create_using=nx.DiGraph())
|
| 138 |
+
cls.cycle = nx.cycle_graph(7)
|
| 139 |
+
cls.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph())
|
| 140 |
+
cls.gnp = nx.gnp_random_graph(30, 0.1)
|
| 141 |
+
cls.directed_gnp = nx.gnp_random_graph(30, 0.1, directed=True)
|
| 142 |
+
cls.K20 = nx.complete_graph(20)
|
| 143 |
+
cls.K10 = nx.complete_graph(10)
|
| 144 |
+
cls.K5 = nx.complete_graph(5)
|
| 145 |
+
cls.G_list = [
|
| 146 |
+
cls.path,
|
| 147 |
+
cls.directed_path,
|
| 148 |
+
cls.cycle,
|
| 149 |
+
cls.directed_cycle,
|
| 150 |
+
cls.gnp,
|
| 151 |
+
cls.directed_gnp,
|
| 152 |
+
cls.K10,
|
| 153 |
+
cls.K5,
|
| 154 |
+
cls.K20,
|
| 155 |
+
]
|
| 156 |
+
|
| 157 |
+
def test_cycles(self):
|
| 158 |
+
K_undir = approx.all_pairs_node_connectivity(self.cycle)
|
| 159 |
+
for source in K_undir:
|
| 160 |
+
for target, k in K_undir[source].items():
|
| 161 |
+
assert k == 2
|
| 162 |
+
K_dir = approx.all_pairs_node_connectivity(self.directed_cycle)
|
| 163 |
+
for source in K_dir:
|
| 164 |
+
for target, k in K_dir[source].items():
|
| 165 |
+
assert k == 1
|
| 166 |
+
|
| 167 |
+
def test_complete(self):
|
| 168 |
+
for G in [self.K10, self.K5, self.K20]:
|
| 169 |
+
K = approx.all_pairs_node_connectivity(G)
|
| 170 |
+
for source in K:
|
| 171 |
+
for target, k in K[source].items():
|
| 172 |
+
assert k == len(G) - 1
|
| 173 |
+
|
| 174 |
+
def test_paths(self):
|
| 175 |
+
K_undir = approx.all_pairs_node_connectivity(self.path)
|
| 176 |
+
for source in K_undir:
|
| 177 |
+
for target, k in K_undir[source].items():
|
| 178 |
+
assert k == 1
|
| 179 |
+
K_dir = approx.all_pairs_node_connectivity(self.directed_path)
|
| 180 |
+
for source in K_dir:
|
| 181 |
+
for target, k in K_dir[source].items():
|
| 182 |
+
if source < target:
|
| 183 |
+
assert k == 1
|
| 184 |
+
else:
|
| 185 |
+
assert k == 0
|
| 186 |
+
|
| 187 |
+
def test_cutoff(self):
|
| 188 |
+
for G in [self.K10, self.K5, self.K20]:
|
| 189 |
+
for mp in [2, 3, 4]:
|
| 190 |
+
paths = approx.all_pairs_node_connectivity(G, cutoff=mp)
|
| 191 |
+
for source in paths:
|
| 192 |
+
for target, K in paths[source].items():
|
| 193 |
+
assert K == mp
|
| 194 |
+
|
| 195 |
+
def test_all_pairs_connectivity_nbunch(self):
|
| 196 |
+
G = nx.complete_graph(5)
|
| 197 |
+
nbunch = [0, 2, 3]
|
| 198 |
+
C = approx.all_pairs_node_connectivity(G, nbunch=nbunch)
|
| 199 |
+
assert len(C) == len(nbunch)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_dominating_set.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.algorithms.approximation import (
|
| 5 |
+
min_edge_dominating_set,
|
| 6 |
+
min_weighted_dominating_set,
|
| 7 |
+
)
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class TestMinWeightDominatingSet:
|
| 11 |
+
def test_min_weighted_dominating_set(self):
|
| 12 |
+
graph = nx.Graph()
|
| 13 |
+
graph.add_edge(1, 2)
|
| 14 |
+
graph.add_edge(1, 5)
|
| 15 |
+
graph.add_edge(2, 3)
|
| 16 |
+
graph.add_edge(2, 5)
|
| 17 |
+
graph.add_edge(3, 4)
|
| 18 |
+
graph.add_edge(3, 6)
|
| 19 |
+
graph.add_edge(5, 6)
|
| 20 |
+
|
| 21 |
+
vertices = {1, 2, 3, 4, 5, 6}
|
| 22 |
+
# due to ties, this might be hard to test tight bounds
|
| 23 |
+
dom_set = min_weighted_dominating_set(graph)
|
| 24 |
+
for vertex in vertices - dom_set:
|
| 25 |
+
neighbors = set(graph.neighbors(vertex))
|
| 26 |
+
assert len(neighbors & dom_set) > 0, "Non dominating set found!"
|
| 27 |
+
|
| 28 |
+
def test_star_graph(self):
|
| 29 |
+
"""Tests that an approximate dominating set for the star graph,
|
| 30 |
+
even when the center node does not have the smallest integer
|
| 31 |
+
label, gives just the center node.
|
| 32 |
+
|
| 33 |
+
For more information, see #1527.
|
| 34 |
+
|
| 35 |
+
"""
|
| 36 |
+
# Create a star graph in which the center node has the highest
|
| 37 |
+
# label instead of the lowest.
|
| 38 |
+
G = nx.star_graph(10)
|
| 39 |
+
G = nx.relabel_nodes(G, {0: 9, 9: 0})
|
| 40 |
+
assert min_weighted_dominating_set(G) == {9}
|
| 41 |
+
|
| 42 |
+
def test_null_graph(self):
|
| 43 |
+
"""Tests that the unique dominating set for the null graph is an empty set"""
|
| 44 |
+
G = nx.Graph()
|
| 45 |
+
assert min_weighted_dominating_set(G) == set()
|
| 46 |
+
|
| 47 |
+
def test_min_edge_dominating_set(self):
|
| 48 |
+
graph = nx.path_graph(5)
|
| 49 |
+
dom_set = min_edge_dominating_set(graph)
|
| 50 |
+
|
| 51 |
+
# this is a crappy way to test, but good enough for now.
|
| 52 |
+
for edge in graph.edges():
|
| 53 |
+
if edge in dom_set:
|
| 54 |
+
continue
|
| 55 |
+
else:
|
| 56 |
+
u, v = edge
|
| 57 |
+
found = False
|
| 58 |
+
for dom_edge in dom_set:
|
| 59 |
+
found |= u == dom_edge[0] or u == dom_edge[1]
|
| 60 |
+
assert found, "Non adjacent edge found!"
|
| 61 |
+
|
| 62 |
+
graph = nx.complete_graph(10)
|
| 63 |
+
dom_set = min_edge_dominating_set(graph)
|
| 64 |
+
|
| 65 |
+
# this is a crappy way to test, but good enough for now.
|
| 66 |
+
for edge in graph.edges():
|
| 67 |
+
if edge in dom_set:
|
| 68 |
+
continue
|
| 69 |
+
else:
|
| 70 |
+
u, v = edge
|
| 71 |
+
found = False
|
| 72 |
+
for dom_edge in dom_set:
|
| 73 |
+
found |= u == dom_edge[0] or u == dom_edge[1]
|
| 74 |
+
assert found, "Non adjacent edge found!"
|
| 75 |
+
|
| 76 |
+
graph = nx.Graph() # empty Networkx graph
|
| 77 |
+
with pytest.raises(ValueError, match="Expected non-empty NetworkX graph!"):
|
| 78 |
+
min_edge_dominating_set(graph)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/treewidth.py
ADDED
|
@@ -0,0 +1,252 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for computing treewidth decomposition.
|
| 2 |
+
|
| 3 |
+
Treewidth of an undirected graph is a number associated with the graph.
|
| 4 |
+
It can be defined as the size of the largest vertex set (bag) in a tree
|
| 5 |
+
decomposition of the graph minus one.
|
| 6 |
+
|
| 7 |
+
`Wikipedia: Treewidth <https://en.wikipedia.org/wiki/Treewidth>`_
|
| 8 |
+
|
| 9 |
+
The notions of treewidth and tree decomposition have gained their
|
| 10 |
+
attractiveness partly because many graph and network problems that are
|
| 11 |
+
intractable (e.g., NP-hard) on arbitrary graphs become efficiently
|
| 12 |
+
solvable (e.g., with a linear time algorithm) when the treewidth of the
|
| 13 |
+
input graphs is bounded by a constant [1]_ [2]_.
|
| 14 |
+
|
| 15 |
+
There are two different functions for computing a tree decomposition:
|
| 16 |
+
:func:`treewidth_min_degree` and :func:`treewidth_min_fill_in`.
|
| 17 |
+
|
| 18 |
+
.. [1] Hans L. Bodlaender and Arie M. C. A. Koster. 2010. "Treewidth
|
| 19 |
+
computations I.Upper bounds". Inf. Comput. 208, 3 (March 2010),259-275.
|
| 20 |
+
http://dx.doi.org/10.1016/j.ic.2009.03.008
|
| 21 |
+
|
| 22 |
+
.. [2] Hans L. Bodlaender. "Discovering Treewidth". Institute of Information
|
| 23 |
+
and Computing Sciences, Utrecht University.
|
| 24 |
+
Technical Report UU-CS-2005-018.
|
| 25 |
+
http://www.cs.uu.nl
|
| 26 |
+
|
| 27 |
+
.. [3] K. Wang, Z. Lu, and J. Hicks *Treewidth*.
|
| 28 |
+
https://web.archive.org/web/20210507025929/http://web.eecs.utk.edu/~cphill25/cs594_spring2015_projects/treewidth.pdf
|
| 29 |
+
|
| 30 |
+
"""
|
| 31 |
+
|
| 32 |
+
import itertools
|
| 33 |
+
import sys
|
| 34 |
+
from heapq import heapify, heappop, heappush
|
| 35 |
+
|
| 36 |
+
import networkx as nx
|
| 37 |
+
from networkx.utils import not_implemented_for
|
| 38 |
+
|
| 39 |
+
__all__ = ["treewidth_min_degree", "treewidth_min_fill_in"]
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
@not_implemented_for("directed")
|
| 43 |
+
@not_implemented_for("multigraph")
|
| 44 |
+
@nx._dispatch
|
| 45 |
+
def treewidth_min_degree(G):
|
| 46 |
+
"""Returns a treewidth decomposition using the Minimum Degree heuristic.
|
| 47 |
+
|
| 48 |
+
The heuristic chooses the nodes according to their degree, i.e., first
|
| 49 |
+
the node with the lowest degree is chosen, then the graph is updated
|
| 50 |
+
and the corresponding node is removed. Next, a new node with the lowest
|
| 51 |
+
degree is chosen, and so on.
|
| 52 |
+
|
| 53 |
+
Parameters
|
| 54 |
+
----------
|
| 55 |
+
G : NetworkX graph
|
| 56 |
+
|
| 57 |
+
Returns
|
| 58 |
+
-------
|
| 59 |
+
Treewidth decomposition : (int, Graph) tuple
|
| 60 |
+
2-tuple with treewidth and the corresponding decomposed tree.
|
| 61 |
+
"""
|
| 62 |
+
deg_heuristic = MinDegreeHeuristic(G)
|
| 63 |
+
return treewidth_decomp(G, lambda graph: deg_heuristic.best_node(graph))
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
@not_implemented_for("directed")
|
| 67 |
+
@not_implemented_for("multigraph")
|
| 68 |
+
@nx._dispatch
|
| 69 |
+
def treewidth_min_fill_in(G):
|
| 70 |
+
"""Returns a treewidth decomposition using the Minimum Fill-in heuristic.
|
| 71 |
+
|
| 72 |
+
The heuristic chooses a node from the graph, where the number of edges
|
| 73 |
+
added turning the neighbourhood of the chosen node into clique is as
|
| 74 |
+
small as possible.
|
| 75 |
+
|
| 76 |
+
Parameters
|
| 77 |
+
----------
|
| 78 |
+
G : NetworkX graph
|
| 79 |
+
|
| 80 |
+
Returns
|
| 81 |
+
-------
|
| 82 |
+
Treewidth decomposition : (int, Graph) tuple
|
| 83 |
+
2-tuple with treewidth and the corresponding decomposed tree.
|
| 84 |
+
"""
|
| 85 |
+
return treewidth_decomp(G, min_fill_in_heuristic)
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
class MinDegreeHeuristic:
|
| 89 |
+
"""Implements the Minimum Degree heuristic.
|
| 90 |
+
|
| 91 |
+
The heuristic chooses the nodes according to their degree
|
| 92 |
+
(number of neighbours), i.e., first the node with the lowest degree is
|
| 93 |
+
chosen, then the graph is updated and the corresponding node is
|
| 94 |
+
removed. Next, a new node with the lowest degree is chosen, and so on.
|
| 95 |
+
"""
|
| 96 |
+
|
| 97 |
+
def __init__(self, graph):
|
| 98 |
+
self._graph = graph
|
| 99 |
+
|
| 100 |
+
# nodes that have to be updated in the heap before each iteration
|
| 101 |
+
self._update_nodes = []
|
| 102 |
+
|
| 103 |
+
self._degreeq = [] # a heapq with 3-tuples (degree,unique_id,node)
|
| 104 |
+
self.count = itertools.count()
|
| 105 |
+
|
| 106 |
+
# build heap with initial degrees
|
| 107 |
+
for n in graph:
|
| 108 |
+
self._degreeq.append((len(graph[n]), next(self.count), n))
|
| 109 |
+
heapify(self._degreeq)
|
| 110 |
+
|
| 111 |
+
def best_node(self, graph):
|
| 112 |
+
# update nodes in self._update_nodes
|
| 113 |
+
for n in self._update_nodes:
|
| 114 |
+
# insert changed degrees into degreeq
|
| 115 |
+
heappush(self._degreeq, (len(graph[n]), next(self.count), n))
|
| 116 |
+
|
| 117 |
+
# get the next valid (minimum degree) node
|
| 118 |
+
while self._degreeq:
|
| 119 |
+
(min_degree, _, elim_node) = heappop(self._degreeq)
|
| 120 |
+
if elim_node not in graph or len(graph[elim_node]) != min_degree:
|
| 121 |
+
# outdated entry in degreeq
|
| 122 |
+
continue
|
| 123 |
+
elif min_degree == len(graph) - 1:
|
| 124 |
+
# fully connected: abort condition
|
| 125 |
+
return None
|
| 126 |
+
|
| 127 |
+
# remember to update nodes in the heap before getting the next node
|
| 128 |
+
self._update_nodes = graph[elim_node]
|
| 129 |
+
return elim_node
|
| 130 |
+
|
| 131 |
+
# the heap is empty: abort
|
| 132 |
+
return None
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def min_fill_in_heuristic(graph):
|
| 136 |
+
"""Implements the Minimum Degree heuristic.
|
| 137 |
+
|
| 138 |
+
Returns the node from the graph, where the number of edges added when
|
| 139 |
+
turning the neighbourhood of the chosen node into clique is as small as
|
| 140 |
+
possible. This algorithm chooses the nodes using the Minimum Fill-In
|
| 141 |
+
heuristic. The running time of the algorithm is :math:`O(V^3)` and it uses
|
| 142 |
+
additional constant memory."""
|
| 143 |
+
|
| 144 |
+
if len(graph) == 0:
|
| 145 |
+
return None
|
| 146 |
+
|
| 147 |
+
min_fill_in_node = None
|
| 148 |
+
|
| 149 |
+
min_fill_in = sys.maxsize
|
| 150 |
+
|
| 151 |
+
# sort nodes by degree
|
| 152 |
+
nodes_by_degree = sorted(graph, key=lambda x: len(graph[x]))
|
| 153 |
+
min_degree = len(graph[nodes_by_degree[0]])
|
| 154 |
+
|
| 155 |
+
# abort condition (handle complete graph)
|
| 156 |
+
if min_degree == len(graph) - 1:
|
| 157 |
+
return None
|
| 158 |
+
|
| 159 |
+
for node in nodes_by_degree:
|
| 160 |
+
num_fill_in = 0
|
| 161 |
+
nbrs = graph[node]
|
| 162 |
+
for nbr in nbrs:
|
| 163 |
+
# count how many nodes in nbrs current nbr is not connected to
|
| 164 |
+
# subtract 1 for the node itself
|
| 165 |
+
num_fill_in += len(nbrs - graph[nbr]) - 1
|
| 166 |
+
if num_fill_in >= 2 * min_fill_in:
|
| 167 |
+
break
|
| 168 |
+
|
| 169 |
+
num_fill_in /= 2 # divide by 2 because of double counting
|
| 170 |
+
|
| 171 |
+
if num_fill_in < min_fill_in: # update min-fill-in node
|
| 172 |
+
if num_fill_in == 0:
|
| 173 |
+
return node
|
| 174 |
+
min_fill_in = num_fill_in
|
| 175 |
+
min_fill_in_node = node
|
| 176 |
+
|
| 177 |
+
return min_fill_in_node
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
@nx._dispatch
|
| 181 |
+
def treewidth_decomp(G, heuristic=min_fill_in_heuristic):
|
| 182 |
+
"""Returns a treewidth decomposition using the passed heuristic.
|
| 183 |
+
|
| 184 |
+
Parameters
|
| 185 |
+
----------
|
| 186 |
+
G : NetworkX graph
|
| 187 |
+
heuristic : heuristic function
|
| 188 |
+
|
| 189 |
+
Returns
|
| 190 |
+
-------
|
| 191 |
+
Treewidth decomposition : (int, Graph) tuple
|
| 192 |
+
2-tuple with treewidth and the corresponding decomposed tree.
|
| 193 |
+
"""
|
| 194 |
+
|
| 195 |
+
# make dict-of-sets structure
|
| 196 |
+
graph = {n: set(G[n]) - {n} for n in G}
|
| 197 |
+
|
| 198 |
+
# stack containing nodes and neighbors in the order from the heuristic
|
| 199 |
+
node_stack = []
|
| 200 |
+
|
| 201 |
+
# get first node from heuristic
|
| 202 |
+
elim_node = heuristic(graph)
|
| 203 |
+
while elim_node is not None:
|
| 204 |
+
# connect all neighbours with each other
|
| 205 |
+
nbrs = graph[elim_node]
|
| 206 |
+
for u, v in itertools.permutations(nbrs, 2):
|
| 207 |
+
if v not in graph[u]:
|
| 208 |
+
graph[u].add(v)
|
| 209 |
+
|
| 210 |
+
# push node and its current neighbors on stack
|
| 211 |
+
node_stack.append((elim_node, nbrs))
|
| 212 |
+
|
| 213 |
+
# remove node from graph
|
| 214 |
+
for u in graph[elim_node]:
|
| 215 |
+
graph[u].remove(elim_node)
|
| 216 |
+
|
| 217 |
+
del graph[elim_node]
|
| 218 |
+
elim_node = heuristic(graph)
|
| 219 |
+
|
| 220 |
+
# the abort condition is met; put all remaining nodes into one bag
|
| 221 |
+
decomp = nx.Graph()
|
| 222 |
+
first_bag = frozenset(graph.keys())
|
| 223 |
+
decomp.add_node(first_bag)
|
| 224 |
+
|
| 225 |
+
treewidth = len(first_bag) - 1
|
| 226 |
+
|
| 227 |
+
while node_stack:
|
| 228 |
+
# get node and its neighbors from the stack
|
| 229 |
+
(curr_node, nbrs) = node_stack.pop()
|
| 230 |
+
|
| 231 |
+
# find a bag all neighbors are in
|
| 232 |
+
old_bag = None
|
| 233 |
+
for bag in decomp.nodes:
|
| 234 |
+
if nbrs <= bag:
|
| 235 |
+
old_bag = bag
|
| 236 |
+
break
|
| 237 |
+
|
| 238 |
+
if old_bag is None:
|
| 239 |
+
# no old_bag was found: just connect to the first_bag
|
| 240 |
+
old_bag = first_bag
|
| 241 |
+
|
| 242 |
+
# create new node for decomposition
|
| 243 |
+
nbrs.add(curr_node)
|
| 244 |
+
new_bag = frozenset(nbrs)
|
| 245 |
+
|
| 246 |
+
# update treewidth
|
| 247 |
+
treewidth = max(treewidth, len(new_bag) - 1)
|
| 248 |
+
|
| 249 |
+
# add edge to decomposition (implicitly also adds the new node)
|
| 250 |
+
decomp.add_edge(old_bag, new_bag)
|
| 251 |
+
|
| 252 |
+
return treewidth, decomp
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/__pycache__/basic.cpython-311.pyc
ADDED
|
Binary file (11.3 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/__pycache__/cluster.cpython-311.pyc
ADDED
|
Binary file (9.66 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/__pycache__/edgelist.cpython-311.pyc
ADDED
|
Binary file (13.9 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/__pycache__/matrix.cpython-311.pyc
ADDED
|
Binary file (8.49 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/__pycache__/projection.cpython-311.pyc
ADDED
|
Binary file (23.5 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/basic.py
ADDED
|
@@ -0,0 +1,321 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
==========================
|
| 3 |
+
Bipartite Graph Algorithms
|
| 4 |
+
==========================
|
| 5 |
+
"""
|
| 6 |
+
import networkx as nx
|
| 7 |
+
from networkx.algorithms.components import connected_components
|
| 8 |
+
from networkx.exception import AmbiguousSolution
|
| 9 |
+
|
| 10 |
+
__all__ = [
|
| 11 |
+
"is_bipartite",
|
| 12 |
+
"is_bipartite_node_set",
|
| 13 |
+
"color",
|
| 14 |
+
"sets",
|
| 15 |
+
"density",
|
| 16 |
+
"degrees",
|
| 17 |
+
]
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
@nx._dispatch
|
| 21 |
+
def color(G):
|
| 22 |
+
"""Returns a two-coloring of the graph.
|
| 23 |
+
|
| 24 |
+
Raises an exception if the graph is not bipartite.
|
| 25 |
+
|
| 26 |
+
Parameters
|
| 27 |
+
----------
|
| 28 |
+
G : NetworkX graph
|
| 29 |
+
|
| 30 |
+
Returns
|
| 31 |
+
-------
|
| 32 |
+
color : dictionary
|
| 33 |
+
A dictionary keyed by node with a 1 or 0 as data for each node color.
|
| 34 |
+
|
| 35 |
+
Raises
|
| 36 |
+
------
|
| 37 |
+
NetworkXError
|
| 38 |
+
If the graph is not two-colorable.
|
| 39 |
+
|
| 40 |
+
Examples
|
| 41 |
+
--------
|
| 42 |
+
>>> from networkx.algorithms import bipartite
|
| 43 |
+
>>> G = nx.path_graph(4)
|
| 44 |
+
>>> c = bipartite.color(G)
|
| 45 |
+
>>> print(c)
|
| 46 |
+
{0: 1, 1: 0, 2: 1, 3: 0}
|
| 47 |
+
|
| 48 |
+
You can use this to set a node attribute indicating the bipartite set:
|
| 49 |
+
|
| 50 |
+
>>> nx.set_node_attributes(G, c, "bipartite")
|
| 51 |
+
>>> print(G.nodes[0]["bipartite"])
|
| 52 |
+
1
|
| 53 |
+
>>> print(G.nodes[1]["bipartite"])
|
| 54 |
+
0
|
| 55 |
+
"""
|
| 56 |
+
if G.is_directed():
|
| 57 |
+
import itertools
|
| 58 |
+
|
| 59 |
+
def neighbors(v):
|
| 60 |
+
return itertools.chain.from_iterable([G.predecessors(v), G.successors(v)])
|
| 61 |
+
|
| 62 |
+
else:
|
| 63 |
+
neighbors = G.neighbors
|
| 64 |
+
|
| 65 |
+
color = {}
|
| 66 |
+
for n in G: # handle disconnected graphs
|
| 67 |
+
if n in color or len(G[n]) == 0: # skip isolates
|
| 68 |
+
continue
|
| 69 |
+
queue = [n]
|
| 70 |
+
color[n] = 1 # nodes seen with color (1 or 0)
|
| 71 |
+
while queue:
|
| 72 |
+
v = queue.pop()
|
| 73 |
+
c = 1 - color[v] # opposite color of node v
|
| 74 |
+
for w in neighbors(v):
|
| 75 |
+
if w in color:
|
| 76 |
+
if color[w] == color[v]:
|
| 77 |
+
raise nx.NetworkXError("Graph is not bipartite.")
|
| 78 |
+
else:
|
| 79 |
+
color[w] = c
|
| 80 |
+
queue.append(w)
|
| 81 |
+
# color isolates with 0
|
| 82 |
+
color.update(dict.fromkeys(nx.isolates(G), 0))
|
| 83 |
+
return color
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
@nx._dispatch
|
| 87 |
+
def is_bipartite(G):
|
| 88 |
+
"""Returns True if graph G is bipartite, False if not.
|
| 89 |
+
|
| 90 |
+
Parameters
|
| 91 |
+
----------
|
| 92 |
+
G : NetworkX graph
|
| 93 |
+
|
| 94 |
+
Examples
|
| 95 |
+
--------
|
| 96 |
+
>>> from networkx.algorithms import bipartite
|
| 97 |
+
>>> G = nx.path_graph(4)
|
| 98 |
+
>>> print(bipartite.is_bipartite(G))
|
| 99 |
+
True
|
| 100 |
+
|
| 101 |
+
See Also
|
| 102 |
+
--------
|
| 103 |
+
color, is_bipartite_node_set
|
| 104 |
+
"""
|
| 105 |
+
try:
|
| 106 |
+
color(G)
|
| 107 |
+
return True
|
| 108 |
+
except nx.NetworkXError:
|
| 109 |
+
return False
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
@nx._dispatch
|
| 113 |
+
def is_bipartite_node_set(G, nodes):
|
| 114 |
+
"""Returns True if nodes and G/nodes are a bipartition of G.
|
| 115 |
+
|
| 116 |
+
Parameters
|
| 117 |
+
----------
|
| 118 |
+
G : NetworkX graph
|
| 119 |
+
|
| 120 |
+
nodes: list or container
|
| 121 |
+
Check if nodes are a one of a bipartite set.
|
| 122 |
+
|
| 123 |
+
Examples
|
| 124 |
+
--------
|
| 125 |
+
>>> from networkx.algorithms import bipartite
|
| 126 |
+
>>> G = nx.path_graph(4)
|
| 127 |
+
>>> X = set([1, 3])
|
| 128 |
+
>>> bipartite.is_bipartite_node_set(G, X)
|
| 129 |
+
True
|
| 130 |
+
|
| 131 |
+
Notes
|
| 132 |
+
-----
|
| 133 |
+
An exception is raised if the input nodes are not distinct, because in this
|
| 134 |
+
case some bipartite algorithms will yield incorrect results.
|
| 135 |
+
For connected graphs the bipartite sets are unique. This function handles
|
| 136 |
+
disconnected graphs.
|
| 137 |
+
"""
|
| 138 |
+
S = set(nodes)
|
| 139 |
+
|
| 140 |
+
if len(S) < len(nodes):
|
| 141 |
+
# this should maybe just return False?
|
| 142 |
+
raise AmbiguousSolution(
|
| 143 |
+
"The input node set contains duplicates.\n"
|
| 144 |
+
"This may lead to incorrect results when using it in bipartite algorithms.\n"
|
| 145 |
+
"Consider using set(nodes) as the input"
|
| 146 |
+
)
|
| 147 |
+
|
| 148 |
+
for CC in (G.subgraph(c).copy() for c in connected_components(G)):
|
| 149 |
+
X, Y = sets(CC)
|
| 150 |
+
if not (
|
| 151 |
+
(X.issubset(S) and Y.isdisjoint(S)) or (Y.issubset(S) and X.isdisjoint(S))
|
| 152 |
+
):
|
| 153 |
+
return False
|
| 154 |
+
return True
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
@nx._dispatch
def sets(G, top_nodes=None):
    """Returns bipartite node sets of graph G.

    Raises an exception if the graph is not bipartite or if the input
    graph is disconnected and thus more than one valid solution exists.
    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    Parameters
    ----------
    G : NetworkX graph

    top_nodes : container, optional
        Container with all nodes in one bipartite node set. If not supplied
        it will be computed. But if more than one solution exists an exception
        will be raised.

    Returns
    -------
    X : set
        Nodes from one side of the bipartite graph.
    Y : set
        Nodes from the other side.

    Raises
    ------
    AmbiguousSolution
        Raised if the input bipartite graph is disconnected and no container
        with all nodes in one bipartite set is provided. When determining
        the nodes in each bipartite set more than one valid solution is
        possible if the input graph is disconnected.
    NetworkXError
        Raised if the input graph is not bipartite.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> X, Y = bipartite.sets(G)
    >>> list(X)
    [0, 2]
    >>> list(Y)
    [1, 3]

    See Also
    --------
    color

    """
    # For directed graphs, weak connectivity is the relevant notion.
    if G.is_directed():
        connectivity_test = nx.is_weakly_connected
    else:
        connectivity_test = nx.is_connected
    if top_nodes is None:
        # Without a hint, the bipartition is well-defined only when the graph
        # is connected; otherwise each component's sides could be swapped.
        if not connectivity_test(G):
            msg = "Disconnected graph: Ambiguous solution for bipartite sets."
            raise nx.AmbiguousSolution(msg)
        node_color = color(G)
        X = {node for node, is_top in node_color.items() if is_top}
        Y = {node for node, is_top in node_color.items() if not is_top}
    else:
        X = set(top_nodes)
        Y = set(G) - X
    return (X, Y)
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
@nx._dispatch(graphs="B")
def density(B, nodes):
    """Returns density of bipartite graph B.

    Parameters
    ----------
    B : NetworkX graph

    nodes: list or container
        Nodes in one node set of the bipartite graph.

    Returns
    -------
    d : float
        The bipartite density

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.complete_bipartite_graph(3, 2)
    >>> X = set([0, 1, 2])
    >>> bipartite.density(G, X)
    1.0
    >>> Y = set([3, 4])
    >>> bipartite.density(G, Y)
    1.0

    Notes
    -----
    The container of nodes passed as argument must contain all nodes
    in one of the two bipartite node sets to avoid ambiguity in the
    case of disconnected graphs.
    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    color
    """
    total_nodes = len(B)
    edge_count = nx.number_of_edges(B)
    if edge_count == 0:  # includes cases n==0 and n==1
        return 0.0
    bottom_count = len(nodes)
    top_count = total_nodes - bottom_count
    # A directed graph can realize twice as many edges between the two sets
    # (one in each direction), so the denominator doubles.
    max_edges = bottom_count * top_count
    if B.is_directed():
        max_edges *= 2
    return edge_count / max_edges
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
@nx._dispatch(graphs="B", edge_attrs="weight")
def degrees(B, nodes, weight=None):
    """Returns the degrees of the two node sets in the bipartite graph B.

    Parameters
    ----------
    B : NetworkX graph

    nodes: list or container
        Nodes in one node set of the bipartite graph.

    weight : string or None, optional (default=None)
        The edge attribute that holds the numerical value used as a weight.
        If None, then each edge has weight 1.
        The degree is the sum of the edge weights adjacent to the node.

    Returns
    -------
    (degX,degY) : tuple of dictionaries
        The degrees of the two bipartite sets as dictionaries keyed by node.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.complete_bipartite_graph(3, 2)
    >>> Y = set([3, 4])
    >>> degX, degY = bipartite.degrees(G, Y)
    >>> dict(degX)
    {0: 2, 1: 2, 2: 2}

    Notes
    -----
    The container of nodes passed as argument must contain all nodes
    in one of the two bipartite node sets to avoid ambiguity in the
    case of disconnected graphs.
    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    color, density
    """
    bottom_set = set(nodes)
    # The complementary set of nodes forms the other (top) side.
    top_set = set(B) - bottom_set
    return (B.degree(top_set, weight), B.degree(bottom_set, weight))
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/extendability.py
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
""" Provides a function for computing the extendability of a graph which is
|
| 2 |
+
undirected, simple, connected and bipartite and contains at least one perfect matching."""
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.utils import not_implemented_for
|
| 7 |
+
|
| 8 |
+
__all__ = ["maximal_extendability"]
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
def maximal_extendability(G):
    """Computes the extendability of a graph.

    The extendability of a graph is defined as the maximum $k$ for which `G`
    is $k$-extendable. Graph `G` is $k$-extendable if and only if `G` has a
    perfect matching and every set of $k$ independent edges can be extended
    to a perfect matching in `G`.

    Parameters
    ----------
    G : NetworkX Graph
        A fully-connected bipartite graph without self-loops

    Returns
    -------
    extendability : int

    Raises
    ------
    NetworkXError
        If the graph `G` is disconnected.
        If the graph `G` is not bipartite.
        If the graph `G` does not contain a perfect matching.
        If the residual graph of `G` is not strongly connected.

    Notes
    -----
    Definition:
    Let `G` be a simple, connected, undirected and bipartite graph with a perfect
    matching M and bipartition (U,V). The residual graph of `G`, denoted by $G_M$,
    is the graph obtained from G by directing the edges of M from V to U and the
    edges that do not belong to M from U to V.

    Lemma [1]_ :
    Let M be a perfect matching of `G`. `G` is $k$-extendable if and only if its residual
    graph $G_M$ is strongly connected and there are $k$ vertex-disjoint directed
    paths between every vertex of U and every vertex of V.

    Assuming that input graph `G` is undirected, simple, connected, bipartite and contains
    a perfect matching M, this function constructs the residual graph $G_M$ of G and
    returns the minimum value among the maximum vertex-disjoint directed paths between
    every vertex of U and every vertex of V in $G_M$. By combining the definitions
    and the lemma, this value represents the extendability of the graph `G`.

    Time complexity O($n^3$ $m^2$)) where $n$ is the number of vertices
    and $m$ is the number of edges.

    References
    ----------
    .. [1] "A polynomial algorithm for the extendability problem in bipartite graphs",
       J. Lakhal, L. Litzler, Information Processing Letters, 1998.
    .. [2] "On n-extendible graphs", M. D. Plummer, Discrete Mathematics, 31:201–210, 1980
       https://doi.org/10.1016/0012-365X(80)90037-0

    """
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph G is not connected")

    if not nx.bipartite.is_bipartite(G):
        raise nx.NetworkXError("Graph G is not bipartite")

    U, V = nx.bipartite.sets(G)

    maximum_matching = nx.bipartite.hopcroft_karp_matching(G)

    if not nx.is_perfect_matching(G, maximum_matching):
        raise nx.NetworkXError("Graph G does not contain a perfect matching")

    # Set of edges in the perfect matching, directed from V to U. A set
    # (rather than a list) makes the per-edge membership tests below O(1),
    # avoiding O(n*m) total work when orienting the edges.
    pm = {(node, maximum_matching[node]) for node in V & maximum_matching.keys()}

    # Direct all the edges of G, from V to U if in matching, else from U to V
    directed_edges = [
        (x, y) if (x in V and (x, y) in pm) or (x in U and (y, x) not in pm) else (y, x)
        for x, y in G.edges
    ]

    # Construct the residual graph of G
    residual_G = nx.DiGraph()
    residual_G.add_nodes_from(G)
    residual_G.add_edges_from(directed_edges)

    if not nx.is_strongly_connected(residual_G):
        raise nx.NetworkXError("The residual graph of G is not strongly connected")

    # For node-pairs between V & U, keep min of max number of node-disjoint paths
    # Variable $k$ stands for the extendability of graph G
    k = float("Inf")
    for u in U:
        for v in V:
            num_paths = sum(1 for _ in nx.node_disjoint_paths(residual_G, u, v))
            k = min(k, num_paths)
    return k
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/matching.py
ADDED
|
@@ -0,0 +1,589 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This module uses material from the Wikipedia article Hopcroft--Karp algorithm
|
| 2 |
+
# <https://en.wikipedia.org/wiki/Hopcroft%E2%80%93Karp_algorithm>, accessed on
|
| 3 |
+
# January 3, 2015, which is released under the Creative Commons
|
| 4 |
+
# Attribution-Share-Alike License 3.0
|
| 5 |
+
# <http://creativecommons.org/licenses/by-sa/3.0/>. That article includes
|
| 6 |
+
# pseudocode, which has been translated into the corresponding Python code.
|
| 7 |
+
#
|
| 8 |
+
# Portions of this module use code from David Eppstein's Python Algorithms and
|
| 9 |
+
# Data Structures (PADS) library, which is dedicated to the public domain (for
|
| 10 |
+
# proof, see <http://www.ics.uci.edu/~eppstein/PADS/ABOUT-PADS.txt>).
|
| 11 |
+
"""Provides functions for computing maximum cardinality matchings and minimum
|
| 12 |
+
weight full matchings in a bipartite graph.
|
| 13 |
+
|
| 14 |
+
If you don't care about the particular implementation of the maximum matching
|
| 15 |
+
algorithm, simply use the :func:`maximum_matching`. If you do care, you can
|
| 16 |
+
import one of the named maximum matching algorithms directly.
|
| 17 |
+
|
| 18 |
+
For example, to find a maximum matching in the complete bipartite graph with
|
| 19 |
+
two vertices on the left and three vertices on the right:
|
| 20 |
+
|
| 21 |
+
>>> G = nx.complete_bipartite_graph(2, 3)
|
| 22 |
+
>>> left, right = nx.bipartite.sets(G)
|
| 23 |
+
>>> list(left)
|
| 24 |
+
[0, 1]
|
| 25 |
+
>>> list(right)
|
| 26 |
+
[2, 3, 4]
|
| 27 |
+
>>> nx.bipartite.maximum_matching(G)
|
| 28 |
+
{0: 2, 1: 3, 2: 0, 3: 1}
|
| 29 |
+
|
| 30 |
+
The dictionary returned by :func:`maximum_matching` includes a mapping for
|
| 31 |
+
vertices in both the left and right vertex sets.
|
| 32 |
+
|
| 33 |
+
Similarly, :func:`minimum_weight_full_matching` produces, for a complete
|
| 34 |
+
weighted bipartite graph, a matching whose cardinality is the cardinality of
|
| 35 |
+
the smaller of the two partitions, and for which the sum of the weights of the
|
| 36 |
+
edges included in the matching is minimal.
|
| 37 |
+
|
| 38 |
+
"""
|
| 39 |
+
import collections
|
| 40 |
+
import itertools
|
| 41 |
+
|
| 42 |
+
import networkx as nx
|
| 43 |
+
from networkx.algorithms.bipartite import sets as bipartite_sets
|
| 44 |
+
from networkx.algorithms.bipartite.matrix import biadjacency_matrix
|
| 45 |
+
|
| 46 |
+
__all__ = [
|
| 47 |
+
"maximum_matching",
|
| 48 |
+
"hopcroft_karp_matching",
|
| 49 |
+
"eppstein_matching",
|
| 50 |
+
"to_vertex_cover",
|
| 51 |
+
"minimum_weight_full_matching",
|
| 52 |
+
]
|
| 53 |
+
|
| 54 |
+
INFINITY = float("inf")
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
@nx._dispatch
def hopcroft_karp_matching(G, top_nodes=None):
    """Returns the maximum cardinality matching of the bipartite graph `G`.

    A matching is a set of edges that do not share any nodes. A maximum
    cardinality matching is a matching with the most edges possible. It
    is not always unique. Finding a matching in a bipartite graph can be
    treated as a networkx flow problem.

    The functions ``hopcroft_karp_matching`` and ``maximum_matching``
    are aliases of the same function.

    Parameters
    ----------
    G : NetworkX graph

      Undirected bipartite graph

    top_nodes : container of nodes

      Container with all nodes in one bipartite node set. If not supplied
      it will be computed. But if more than one solution exists an exception
      will be raised.

    Returns
    -------
    matches : dictionary

      The matching is returned as a dictionary, `matches`, such that
      ``matches[v] == w`` if node `v` is matched to node `w`. Unmatched
      nodes do not occur as a key in `matches`.

    Raises
    ------
    AmbiguousSolution
      Raised if the input bipartite graph is disconnected and no container
      with all nodes in one bipartite set is provided. When determining
      the nodes in each bipartite set more than one valid solution is
      possible if the input graph is disconnected.

    Notes
    -----
    This function is implemented with the `Hopcroft--Karp matching algorithm
    <https://en.wikipedia.org/wiki/Hopcroft%E2%80%93Karp_algorithm>`_ for
    bipartite graphs.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    maximum_matching
    hopcroft_karp_matching
    eppstein_matching

    References
    ----------
    .. [1] John E. Hopcroft and Richard M. Karp. "An n^{5 / 2} Algorithm for
       Maximum Matchings in Bipartite Graphs" In: **SIAM Journal of Computing**
       2.4 (1973), pp. 225--231. <https://doi.org/10.1137/0202019>.

    """

    # First we define some auxiliary search functions.
    #
    # If you are a human reading these auxiliary search functions, the "global"
    # variables `leftmatches`, `rightmatches`, `distances`, etc. are defined
    # below the functions, so that they are initialized close to the initial
    # invocation of the search functions.
    def breadth_first_search():
        # Layer the graph by alternating-path length: free left vertices get
        # distance 0; each step crosses an unmatched edge followed by a
        # matched edge. `distances[None]` ends up holding the length of the
        # shortest augmenting path, if one exists.
        for v in left:
            if leftmatches[v] is None:
                distances[v] = 0
                queue.append(v)
            else:
                distances[v] = INFINITY
        distances[None] = INFINITY
        while queue:
            v = queue.popleft()
            if distances[v] < distances[None]:
                for u in G[v]:
                    # `is INFINITY` works as an "unvisited" test because
                    # INFINITY is a single module-level float object, whereas
                    # finite distances are freshly computed ints.
                    if distances[rightmatches[u]] is INFINITY:
                        distances[rightmatches[u]] = distances[v] + 1
                        queue.append(rightmatches[u])
        return distances[None] is not INFINITY

    def depth_first_search(v):
        # Try to extend an augmenting path from `v` strictly along the BFS
        # layers; on success, flip the matching along the path. `v is None`
        # means we walked off a free right vertex, i.e. found a full path.
        if v is not None:
            for u in G[v]:
                if distances[rightmatches[u]] == distances[v] + 1:
                    if depth_first_search(rightmatches[u]):
                        rightmatches[u] = v
                        leftmatches[v] = u
                        return True
            distances[v] = INFINITY
            return False
        return True

    # Initialize the "global" variables that maintain state during the search.
    left, right = bipartite_sets(G, top_nodes)
    leftmatches = {v: None for v in left}
    rightmatches = {v: None for v in right}
    distances = {}
    queue = collections.deque()

    # Repeatedly augment along shortest augmenting paths until none remain.
    # (The original code also counted matched pairs here, but the counter was
    # never read anywhere, so the dead code has been removed.)
    while breadth_first_search():
        for v in left:
            if leftmatches[v] is None:
                depth_first_search(v)

    # Strip the entries matched to `None`.
    leftmatches = {k: v for k, v in leftmatches.items() if v is not None}
    rightmatches = {k: v for k, v in rightmatches.items() if v is not None}

    # At this point, the left matches and the right matches are inverses of one
    # another. In other words,
    #
    #     leftmatches == {v, k for k, v in rightmatches.items()}
    #
    # Finally, we combine both the left matches and right matches.
    return dict(itertools.chain(leftmatches.items(), rightmatches.items()))
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
@nx._dispatch
def eppstein_matching(G, top_nodes=None):
    """Returns the maximum cardinality matching of the bipartite graph `G`.

    Parameters
    ----------
    G : NetworkX graph

      Undirected bipartite graph

    top_nodes : container

      Container with all nodes in one bipartite node set. If not supplied
      it will be computed. But if more than one solution exists an exception
      will be raised.

    Returns
    -------
    matches : dictionary

      The matching is returned as a dictionary, `matching`, such that
      ``matching[v] == w`` if node `v` is matched to node `w`. Unmatched
      nodes do not occur as a key in `matching`.

    Raises
    ------
    AmbiguousSolution
      Raised if the input bipartite graph is disconnected and no container
      with all nodes in one bipartite set is provided. When determining
      the nodes in each bipartite set more than one valid solution is
      possible if the input graph is disconnected.

    Notes
    -----
    This function is implemented with David Eppstein's version of the algorithm
    Hopcroft--Karp algorithm (see :func:`hopcroft_karp_matching`), which
    originally appeared in the `Python Algorithms and Data Structures library
    (PADS) <http://www.ics.uci.edu/~eppstein/PADS/ABOUT-PADS.txt>`_.

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------

    hopcroft_karp_matching

    """
    # Due to its original implementation, a directed graph is needed
    # so that the two sets of bipartite nodes can be distinguished
    left, right = bipartite_sets(G, top_nodes)
    G = nx.DiGraph(G.edges(left))
    # initialize greedy matching (redundant, but faster than full search)
    matching = {}
    for u in G:
        for v in G[u]:
            if v not in matching:
                matching[v] = u
                break
    # Each pass builds a BFS layering, then augments along it. The loop exits
    # from inside the body once a layering pass finds no unmatched vertex.
    while True:
        # structure residual graph into layers
        # pred[u] gives the neighbor in the previous layer for u in U
        # preds[v] gives a list of neighbors in the previous layer for v in V
        # unmatched gives a list of unmatched vertices in final layer of V,
        # and is also used as a flag value for pred[u] when u is in the first
        # layer
        preds = {}
        unmatched = []
        # NOTE: the `unmatched` list object itself is the sentinel here;
        # `pred[u] is unmatched` (identity, not equality) later marks u as a
        # first-layer vertex. Do not replace it with an equality comparison.
        pred = {u: unmatched for u in G}
        for v in matching:
            del pred[matching[v]]
        layer = list(pred)

        # repeatedly extend layering structure by another pair of layers
        while layer and not unmatched:
            newLayer = {}
            for u in layer:
                for v in G[u]:
                    if v not in preds:
                        newLayer.setdefault(v, []).append(u)
            layer = []
            for v in newLayer:
                preds[v] = newLayer[v]
                if v in matching:
                    # v is matched: its partner becomes part of the next layer
                    layer.append(matching[v])
                    pred[matching[v]] = v
                else:
                    # v is free: an augmenting path ends here
                    unmatched.append(v)

        # did we finish layering without finding any alternating paths?
        if not unmatched:
            # TODO - The lines between --- were unused and were thus commented
            # out. This whole commented chunk should be reviewed to determine
            # whether it should be built upon or completely removed.
            # ---
            # unlayered = {}
            # for u in G:
            #     # TODO Why is extra inner loop necessary?
            #     for v in G[u]:
            #         if v not in preds:
            #             unlayered[v] = None
            # ---
            # TODO Originally, this function returned a three-tuple:
            #
            #     return (matching, list(pred), list(unlayered))
            #
            # For some reason, the documentation for this function
            # indicated that the second and third elements of the returned
            # three-tuple would be the vertices in the left and right vertex
            # sets, respectively, that are also in the maximum independent set.
            # However, what I think the author meant was that the second
            # element is the list of vertices that were unmatched and the third
            # element was the list of vertices that were matched. Since that
            # seems to be the case, they don't really need to be returned,
            # since that information can be inferred from the matching
            # dictionary.

            # All the matched nodes must be a key in the dictionary
            for key in matching.copy():
                matching[matching[key]] = key
            return matching

        # recursively search backward through layers to find alternating paths
        # recursion returns true if found path, false otherwise
        def recurse(v):
            if v in preds:
                L = preds.pop(v)
                for u in L:
                    if u in pred:
                        pu = pred.pop(u)
                        # `pu is unmatched` means u is in the first layer
                        # (see the sentinel note above), so the path is done.
                        if pu is unmatched or recurse(pu):
                            matching[v] = u
                            return True
            return False

        for v in unmatched:
            recurse(v)
|
| 321 |
+
|
| 322 |
+
|
| 323 |
+
def _is_connected_by_alternating_path(G, v, matched_edges, unmatched_edges, targets):
|
| 324 |
+
"""Returns True if and only if the vertex `v` is connected to one of
|
| 325 |
+
the target vertices by an alternating path in `G`.
|
| 326 |
+
|
| 327 |
+
An *alternating path* is a path in which every other edge is in the
|
| 328 |
+
specified maximum matching (and the remaining edges in the path are not in
|
| 329 |
+
the matching). An alternating path may have matched edges in the even
|
| 330 |
+
positions or in the odd positions, as long as the edges alternate between
|
| 331 |
+
'matched' and 'unmatched'.
|
| 332 |
+
|
| 333 |
+
`G` is an undirected bipartite NetworkX graph.
|
| 334 |
+
|
| 335 |
+
`v` is a vertex in `G`.
|
| 336 |
+
|
| 337 |
+
`matched_edges` is a set of edges present in a maximum matching in `G`.
|
| 338 |
+
|
| 339 |
+
`unmatched_edges` is a set of edges not present in a maximum
|
| 340 |
+
matching in `G`.
|
| 341 |
+
|
| 342 |
+
`targets` is a set of vertices.
|
| 343 |
+
|
| 344 |
+
"""
|
| 345 |
+
|
| 346 |
+
def _alternating_dfs(u, along_matched=True):
|
| 347 |
+
"""Returns True if and only if `u` is connected to one of the
|
| 348 |
+
targets by an alternating path.
|
| 349 |
+
|
| 350 |
+
`u` is a vertex in the graph `G`.
|
| 351 |
+
|
| 352 |
+
If `along_matched` is True, this step of the depth-first search
|
| 353 |
+
will continue only through edges in the given matching. Otherwise, it
|
| 354 |
+
will continue only through edges *not* in the given matching.
|
| 355 |
+
|
| 356 |
+
"""
|
| 357 |
+
visited = set()
|
| 358 |
+
# Follow matched edges when depth is even,
|
| 359 |
+
# and follow unmatched edges when depth is odd.
|
| 360 |
+
initial_depth = 0 if along_matched else 1
|
| 361 |
+
stack = [(u, iter(G[u]), initial_depth)]
|
| 362 |
+
while stack:
|
| 363 |
+
parent, children, depth = stack[-1]
|
| 364 |
+
valid_edges = matched_edges if depth % 2 else unmatched_edges
|
| 365 |
+
try:
|
| 366 |
+
child = next(children)
|
| 367 |
+
if child not in visited:
|
| 368 |
+
if (parent, child) in valid_edges or (child, parent) in valid_edges:
|
| 369 |
+
if child in targets:
|
| 370 |
+
return True
|
| 371 |
+
visited.add(child)
|
| 372 |
+
stack.append((child, iter(G[child]), depth + 1))
|
| 373 |
+
except StopIteration:
|
| 374 |
+
stack.pop()
|
| 375 |
+
return False
|
| 376 |
+
|
| 377 |
+
# Check for alternating paths starting with edges in the matching, then
|
| 378 |
+
# check for alternating paths starting with edges not in the
|
| 379 |
+
# matching.
|
| 380 |
+
return _alternating_dfs(v, along_matched=True) or _alternating_dfs(
|
| 381 |
+
v, along_matched=False
|
| 382 |
+
)
|
| 383 |
+
|
| 384 |
+
|
| 385 |
+
def _connected_by_alternating_paths(G, matching, targets):
    """Returns the set of vertices that are connected to one of the target
    vertices by an alternating path in `G` or are themselves a target.

    An *alternating path* is a path whose edges alternate between being in
    and out of the specified maximum matching; the path may begin with
    either a matched or an unmatched edge.

    `G` is an undirected bipartite NetworkX graph.

    `matching` is a dictionary representing a maximum matching in `G`, as
    returned by, for example, :func:`maximum_matching`.

    `targets` is a set of vertices.

    """
    # Canonicalize each matched edge to one orientation.  Frozensets serve
    # as the intermediate form so nodes do not need to be orderable.
    matched_keys = {frozenset((u, v)) for u, v in matching.items()}
    matched_edges = {tuple(edge) for edge in matched_keys}
    unmatched_edges = {
        (u, v) for (u, v) in G.edges() if frozenset((u, v)) not in matched_keys
    }

    connected = set()
    for node in G:
        if node in targets or _is_connected_by_alternating_path(
            G, node, matched_edges, unmatched_edges, targets
        ):
            connected.add(node)
    return connected
|
| 421 |
+
|
| 422 |
+
|
| 423 |
+
@nx._dispatch
def to_vertex_cover(G, matching, top_nodes=None):
    """Returns the minimum vertex cover corresponding to the given maximum
    matching of the bipartite graph `G`.

    Parameters
    ----------
    G : NetworkX graph

        Undirected bipartite graph

    matching : dictionary

        A dictionary whose keys are vertices in `G` and whose values are the
        distinct neighbors comprising the maximum matching for `G`, as
        returned by, for example, :func:`maximum_matching`. The dictionary
        *must* represent the maximum matching.

    top_nodes : container

        Container with all nodes in one bipartite node set. If not supplied
        it will be computed. But if more than one solution exists an
        exception will be raised.

    Returns
    -------
    vertex_cover : :class:`set`

        The minimum vertex cover in `G`.

    Raises
    ------
    AmbiguousSolution
        Raised if the input bipartite graph is disconnected and no container
        with all nodes in one bipartite set is provided. When determining
        the nodes in each bipartite set more than one valid solution is
        possible if the input graph is disconnected.

    Notes
    -----
    This function applies the constructive procedure behind `Konig's theorem
    <https://en.wikipedia.org/wiki/K%C3%B6nig%27s_theorem_%28graph_theory%29>`_,
    which proves an equivalence between a maximum matching and a minimum
    vertex cover in bipartite graphs.

    Since a minimum vertex cover is the complement of a maximum independent
    set for any graph, one can compute the maximum independent set of a
    bipartite graph this way:

    >>> G = nx.complete_bipartite_graph(2, 3)
    >>> matching = nx.bipartite.maximum_matching(G)
    >>> vertex_cover = nx.bipartite.to_vertex_cover(G, matching)
    >>> independent_set = set(G) - vertex_cover
    >>> print(list(independent_set))
    [2, 3, 4]

    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    """
    # Python implementation of the constructive proof described at
    # <https://en.wikipedia.org/wiki/K%C3%B6nig%27s_theorem_%28graph_theory%29#Proof>.
    left, right = bipartite_sets(G, top_nodes)
    # U: vertices of the left set left unmatched by the matching.
    exposed_left = (set(G) - set(matching)) & left
    # Z: U together with every vertex reachable from U by alternating paths.
    reachable = _connected_by_alternating_paths(G, matching, exposed_left)
    # Every edge now has a right endpoint in Z or a left endpoint outside Z,
    # so the union below is a vertex cover.
    return (left - reachable) | (right & reachable)
|
| 496 |
+
|
| 497 |
+
|
| 498 |
+
#: Returns the maximum cardinality matching in the given bipartite graph.
#:
#: This function is simply an alias for :func:`hopcroft_karp_matching`;
#: see that function for the accepted parameters and the structure of the
#: returned dictionary.
maximum_matching = hopcroft_karp_matching
|
| 502 |
+
|
| 503 |
+
|
| 504 |
+
@nx._dispatch(edge_attrs="weight")
def minimum_weight_full_matching(G, top_nodes=None, weight="weight"):
    r"""Returns a minimum weight full matching of the bipartite graph `G`.

    Let :math:`G = ((U, V), E)` be a weighted bipartite graph with real weights
    :math:`w : E \to \mathbb{R}`. This function then produces a matching
    :math:`M \subseteq E` with cardinality

    .. math::
       \lvert M \rvert = \min(\lvert U \rvert, \lvert V \rvert),

    which minimizes the sum of the weights of the edges included in the
    matching, :math:`\sum_{e \in M} w(e)`, or raises an error if no such
    matching exists.

    When :math:`\lvert U \rvert = \lvert V \rvert`, this is commonly
    referred to as a perfect matching; here, since we allow
    :math:`\lvert U \rvert` and :math:`\lvert V \rvert` to differ, we
    follow Karp [1]_ and refer to the matching as *full*.

    Parameters
    ----------
    G : NetworkX graph

        Undirected bipartite graph

    top_nodes : container

        Container with all nodes in one bipartite node set. If not supplied
        it will be computed.

    weight : string, optional (default='weight')

        The edge data key used to provide each value in the matrix.
        If None, then each edge has weight 1.

    Returns
    -------
    matches : dictionary

        The matching is returned as a dictionary, `matches`, such that
        ``matches[v] == w`` if node `v` is matched to node `w`. Unmatched
        nodes do not occur as a key in `matches`.

    Raises
    ------
    ValueError
        Raised if no full matching exists.

    ImportError
        Raised if SciPy is not available.

    Notes
    -----
    The problem of determining a minimum weight full matching is also known as
    the rectangular linear assignment problem. This implementation defers the
    calculation of the assignment to SciPy.

    References
    ----------
    .. [1] Richard Manning Karp:
       An algorithm to Solve the m x n Assignment Problem in Expected Time
       O(mn log n).
       Networks, 10(2):143–152, 1980.

    """
    import numpy as np
    import scipy as sp

    row_part, col_part = nx.bipartite.sets(G, top_nodes)
    row_order = list(row_part)
    col_order = list(col_part)
    # Build a dense cost matrix with +inf marking absent edges so the
    # assignment solver can never pick a non-edge (toarray() on the sparse
    # matrix would silently put zeros there instead).
    sparse_costs = biadjacency_matrix(
        G, row_order=row_order, column_order=col_order, weight=weight, format="coo"
    )
    costs = np.full(sparse_costs.shape, np.inf)
    costs[sparse_costs.row, sparse_costs.col] = sparse_costs.data
    row_idx, col_idx = sp.optimize.linear_sum_assignment(costs)
    matches = {row_order[r]: col_order[c] for r, c in zip(row_idx, col_idx)}
    # Mirror the pairs so the matching can be looked up from either side
    # of the bipartition.
    matches.update({w: v for v, w in matches.items()})
    return matches
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/redundancy.py
ADDED
|
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Node redundancy for bipartite graphs."""
|
| 2 |
+
from itertools import combinations
|
| 3 |
+
|
| 4 |
+
import networkx as nx
|
| 5 |
+
from networkx import NetworkXError
|
| 6 |
+
|
| 7 |
+
__all__ = ["node_redundancy"]
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@nx._dispatch
def node_redundancy(G, nodes=None):
    r"""Computes the node redundancy coefficients for the nodes in the bipartite
    graph `G`.

    The redundancy coefficient of a node `v` is the fraction of pairs of
    neighbors of `v` that are both linked to other nodes. In a one-mode
    projection these nodes would be linked together even if `v` were
    not there.

    More formally, for any vertex `v`, the *redundancy coefficient of `v`* is
    defined by

    .. math::

        rc(v) = \frac{|\{\{u, w\} \subseteq N(v),
        \: \exists v' \neq  v,\: (v',u) \in E\:
        \mathrm{and}\: (v',w) \in E\}|}{ \frac{|N(v)|(|N(v)|-1)}{2}},

    where `N(v)` is the set of neighbors of `v` in `G`.

    Parameters
    ----------
    G : graph
        A bipartite graph

    nodes : list or iterable (optional)
        Compute redundancy for these nodes. The default is all nodes in G.

    Returns
    -------
    redundancy : dictionary
        A dictionary keyed by node with the node redundancy value.

    Examples
    --------
    Compute the redundancy coefficient of each node in a graph::

        >>> from networkx.algorithms import bipartite
        >>> G = nx.cycle_graph(4)
        >>> rc = bipartite.node_redundancy(G)
        >>> rc[0]
        1.0

    Compute the average redundancy for the graph::

        >>> from networkx.algorithms import bipartite
        >>> G = nx.cycle_graph(4)
        >>> rc = bipartite.node_redundancy(G)
        >>> sum(rc.values()) / len(G)
        1.0

    Raises
    ------
    NetworkXError
        If any of the nodes in the graph (or in `nodes`, if specified) has
        (out-)degree less than two (which would result in division by zero,
        according to the definition of the redundancy coefficient).

    References
    ----------
    .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
       Basic notions for the analysis of large two-mode networks.
       Social Networks 30(1), 31--48.

    """
    if nodes is None:
        nodes = G
    else:
        # Materialize `nodes` so that a single-pass iterator is not exhausted
        # by the validation pass below, which would otherwise make the final
        # dict comprehension silently produce an empty result.
        nodes = list(nodes)
    if any(len(G[v]) < 2 for v in nodes):
        raise NetworkXError(
            "Cannot compute redundancy coefficient for a node"
            " that has fewer than two neighbors."
        )
    # TODO This can be trivially parallelized.
    return {v: _node_redundancy(G, v) for v in nodes}
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def _node_redundancy(G, v):
|
| 97 |
+
"""Returns the redundancy of the node `v` in the bipartite graph `G`.
|
| 98 |
+
|
| 99 |
+
If `G` is a graph with `n` nodes, the redundancy of a node is the ratio
|
| 100 |
+
of the "overlap" of `v` to the maximum possible overlap of `v`
|
| 101 |
+
according to its degree. The overlap of `v` is the number of pairs of
|
| 102 |
+
neighbors that have mutual neighbors themselves, other than `v`.
|
| 103 |
+
|
| 104 |
+
`v` must have at least two neighbors in `G`.
|
| 105 |
+
|
| 106 |
+
"""
|
| 107 |
+
n = len(G[v])
|
| 108 |
+
overlap = sum(
|
| 109 |
+
1 for (u, w) in combinations(G[v], 2) if (set(G[u]) & set(G[w])) - {v}
|
| 110 |
+
)
|
| 111 |
+
return (2 * overlap) / (n * (n - 1))
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/bipartite/spectral.py
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Spectral bipartivity measure.
|
| 3 |
+
"""
|
| 4 |
+
import networkx as nx
|
| 5 |
+
|
| 6 |
+
__all__ = ["spectral_bipartivity"]
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@nx._dispatch(edge_attrs="weight")
def spectral_bipartivity(G, nodes=None, weight="weight"):
    """Returns the spectral bipartivity.

    Parameters
    ----------
    G : NetworkX graph

    nodes : list or container optional(default is all nodes)
        Nodes to return value of spectral bipartivity contribution.

    weight : string or None optional (default = 'weight')
        Edge data key to use for edge weights. If None, weights set to 1.

    Returns
    -------
    sb : float or dict
        A single number if the keyword nodes is not specified, or
        a dictionary keyed by node with the spectral bipartivity contribution
        of that node as the value.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> bipartite.spectral_bipartivity(G)
    1.0

    Notes
    -----
    This implementation uses Numpy (dense) matrices which are not efficient
    for storing large sparse graphs.

    See Also
    --------
    color

    References
    ----------
    .. [1] E. Estrada and J. A. Rodríguez-Velázquez, "Spectral measures of
       bipartivity in complex networks", PhysRev E 72, 046105 (2005)
    """
    import scipy as sp

    ordering = list(G)  # fixes the node -> matrix-row correspondence
    adjacency = nx.to_numpy_array(G, ordering, weight=weight)
    exp_pos = sp.linalg.expm(adjacency)
    exp_neg = sp.linalg.expm(-adjacency)
    cosh = 0.5 * (exp_pos + exp_neg)
    if nodes is None:
        # Whole-graph measure: ratio of the diagonal traces.
        return cosh.diagonal().sum() / exp_pos.diagonal().sum()
    # Per-node contributions for the requested nodes only.
    position = {node: idx for idx, node in enumerate(ordering)}
    contributions = {}
    for node in nodes:
        idx = position[node]
        contributions[node] = cosh[idx, idx] / exp_pos[idx, idx]
    return contributions
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/betweenness.cpython-311.pyc
ADDED
|
Binary file (17.4 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/betweenness_subset.cpython-311.pyc
ADDED
|
Binary file (10.7 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness.cpython-311.pyc
ADDED
|
Binary file (16 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/degree_alg.cpython-311.pyc
ADDED
|
Binary file (5.97 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/eigenvector.cpython-311.pyc
ADDED
|
Binary file (15 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/flow_matrix.cpython-311.pyc
ADDED
|
Binary file (9.16 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/group.cpython-311.pyc
ADDED
|
Binary file (32.4 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/harmonic.cpython-311.pyc
ADDED
|
Binary file (3.51 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/load.cpython-311.pyc
ADDED
|
Binary file (8.21 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/reaching.cpython-311.pyc
ADDED
|
Binary file (8.95 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/subgraph_alg.cpython-311.pyc
ADDED
|
Binary file (11.5 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/centrality.cpython-311.pyc
ADDED
|
Binary file (7.33 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/kclique.cpython-311.pyc
ADDED
|
Binary file (3.5 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/louvain.cpython-311.pyc
ADDED
|
Binary file (18.8 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/lukes.cpython-311.pyc
ADDED
|
Binary file (12.2 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/quality.cpython-311.pyc
ADDED
|
Binary file (15 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/community_utils.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Helper functions for community-finding algorithms."""
|
| 2 |
+
import networkx as nx
|
| 3 |
+
|
| 4 |
+
__all__ = ["is_partition"]
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
@nx._dispatch
def is_partition(G, communities):
    """Returns *True* if `communities` is a partition of the nodes of `G`.

    A partition of a universe set is a family of pairwise disjoint sets
    whose union is the entire universe set.

    Parameters
    ----------
    G : NetworkX graph.

    communities : list or iterable of sets of nodes
        If not a list, the iterable is converted internally to a list.
        If it is an iterator it is exhausted.

    """
    groups = communities if isinstance(communities, list) else list(communities)
    # Nodes of G that appear in at least one community...
    covered = {node for group in groups for node in group if node in G}
    # ...must account for every node of G, and the community sizes must sum
    # to |G| exactly (no overlaps between communities, no outside nodes).
    return len(G) == len(covered) == sum(len(group) for group in groups)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/kclique.py
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from collections import defaultdict
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
__all__ = ["k_clique_communities"]
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
@nx._dispatch
def k_clique_communities(G, k, cliques=None):
    """Find k-clique communities in graph using the percolation method.

    A k-clique community is the union of all cliques of size k that
    can be reached through adjacent (sharing k-1 nodes) k-cliques.

    Parameters
    ----------
    G : NetworkX graph

    k : int
        Size of smallest clique

    cliques: list or generator
        Precomputed cliques (use networkx.find_cliques(G))

    Returns
    -------
    Yields sets of nodes, one for each k-clique community.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> K5 = nx.convert_node_labels_to_integers(G, first_label=2)
    >>> G.add_edges_from(K5.edges())
    >>> c = list(nx.community.k_clique_communities(G, 4))
    >>> sorted(list(c[0]))
    [0, 1, 2, 3, 4, 5, 6]
    >>> list(nx.community.k_clique_communities(G, 6))
    []

    References
    ----------
    .. [1] Gergely Palla, Imre Derényi, Illés Farkas1, and Tamás Vicsek,
       Uncovering the overlapping community structure of complex networks
       in nature and society Nature 435, 814-818, 2005,
       doi:10.1038/nature03607
    """
    if k < 2:
        raise nx.NetworkXError(f"k={k}, k must be greater than 1.")
    if cliques is None:
        cliques = nx.find_cliques(G)
    large_cliques = [frozenset(c) for c in cliques if len(c) >= k]

    # Index: which large cliques contain each node.
    node_cliques = defaultdict(list)
    for cl in large_cliques:
        for node in cl:
            node_cliques[node].append(cl)

    # Percolation graph: cliques are vertices; two cliques are linked when
    # they share at least k-1 nodes.
    percolation = nx.Graph()
    percolation.add_nodes_from(large_cliques)
    for cl in large_cliques:
        for neighbor in _get_adjacent_cliques(cl, node_cliques):
            if len(cl & neighbor) >= k - 1:
                percolation.add_edge(cl, neighbor)

    # Each connected component of the percolation graph is one community:
    # the union of the member cliques.
    for component in nx.connected_components(percolation):
        yield frozenset.union(*component)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def _get_adjacent_cliques(clique, membership_dict):
|
| 74 |
+
adjacent_cliques = set()
|
| 75 |
+
for n in clique:
|
| 76 |
+
for adj_clique in membership_dict[n]:
|
| 77 |
+
if clique != adj_clique:
|
| 78 |
+
adjacent_cliques.add(adj_clique)
|
| 79 |
+
return adjacent_cliques
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/kernighan_lin.py
ADDED
|
@@ -0,0 +1,139 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for computing the Kernighan–Lin bipartition algorithm."""
|
| 2 |
+
|
| 3 |
+
from itertools import count
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.algorithms.community.community_utils import is_partition
|
| 7 |
+
from networkx.utils import BinaryHeap, not_implemented_for, py_random_state
|
| 8 |
+
|
| 9 |
+
__all__ = ["kernighan_lin_bisection"]
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def _kernighan_lin_sweep(edges, side):
|
| 13 |
+
"""
|
| 14 |
+
This is a modified form of Kernighan-Lin, which moves single nodes at a
|
| 15 |
+
time, alternating between sides to keep the bisection balanced. We keep
|
| 16 |
+
two min-heaps of swap costs to make optimal-next-move selection fast.
|
| 17 |
+
"""
|
| 18 |
+
costs0, costs1 = costs = BinaryHeap(), BinaryHeap()
|
| 19 |
+
for u, side_u, edges_u in zip(count(), side, edges):
|
| 20 |
+
cost_u = sum(w if side[v] else -w for v, w in edges_u)
|
| 21 |
+
costs[side_u].insert(u, cost_u if side_u else -cost_u)
|
| 22 |
+
|
| 23 |
+
def _update_costs(costs_x, x):
|
| 24 |
+
for y, w in edges[x]:
|
| 25 |
+
costs_y = costs[side[y]]
|
| 26 |
+
cost_y = costs_y.get(y)
|
| 27 |
+
if cost_y is not None:
|
| 28 |
+
cost_y += 2 * (-w if costs_x is costs_y else w)
|
| 29 |
+
costs_y.insert(y, cost_y, True)
|
| 30 |
+
|
| 31 |
+
i = 0
|
| 32 |
+
totcost = 0
|
| 33 |
+
while costs0 and costs1:
|
| 34 |
+
u, cost_u = costs0.pop()
|
| 35 |
+
_update_costs(costs0, u)
|
| 36 |
+
v, cost_v = costs1.pop()
|
| 37 |
+
_update_costs(costs1, v)
|
| 38 |
+
totcost += cost_u + cost_v
|
| 39 |
+
i += 1
|
| 40 |
+
yield totcost, i, (u, v)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
@not_implemented_for("directed")
@py_random_state(4)
@nx._dispatch(edge_attrs="weight")
def kernighan_lin_bisection(G, partition=None, max_iter=10, weight="weight", seed=None):
    """Partition a graph into two blocks using the Kernighan–Lin
    algorithm.

    This algorithm partitions a network into two sets by iteratively
    swapping pairs of nodes to reduce the edge cut between the two sets. The
    pairs are chosen according to a modified form of Kernighan-Lin [1]_, which
    moves node individually, alternating between sides to keep the bisection
    balanced.

    Parameters
    ----------
    G : NetworkX graph
        Graph must be undirected.

    partition : tuple
        Pair of iterables containing an initial partition. If not
        specified, a random balanced partition is used.

    max_iter : int
        Maximum number of times to attempt swaps to find an
        improvement before giving up.

    weight : key
        Edge data key to use as weight. If None, the weights are all
        set to one.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
        Only used if partition is None

    Returns
    -------
    partition : tuple
        A pair of sets of nodes representing the bipartition.

    Raises
    ------
    NetworkXError
        If partition is not a valid partition of the nodes of the graph.

    References
    ----------
    .. [1] Kernighan, B. W.; Lin, Shen (1970).
       "An efficient heuristic procedure for partitioning graphs."
       *Bell Systems Technical Journal* 49: 291--307.
       Oxford University Press 2011.

    """
    n = len(G)
    # Work on integer ids 0..n-1; shuffling gives a seeded random labelling.
    labels = list(G)
    seed.shuffle(labels)
    index = {v: i for i, v in enumerate(labels)}

    if partition is None:
        # Random balanced split: first half on side 0, rest on side 1.
        side = [0] * (n // 2) + [1] * ((n + 1) // 2)
    else:
        try:
            A, B = partition
        except (TypeError, ValueError) as err:
            raise nx.NetworkXError("partition must be two sets") from err
        if not is_partition(G, (A, B)):
            raise nx.NetworkXError("partition invalid")
        # NOTE: nodes of A are assigned side 1 here; the final return maps
        # side 0 back to the first returned set.
        side = [0] * n
        for a in A:
            side[index[a]] = 1

    # Adjacency as (neighbor-index, weight) lists, parallel to ``labels``.
    # For multigraphs, parallel edges are collapsed by summing their weights.
    if G.is_multigraph():
        edges = [
            [
                (index[u], sum(e.get(weight, 1) for e in d.values()))
                for u, d in G[v].items()
            ]
            for v in labels
        ]
    else:
        edges = [
            [(index[u], e.get(weight, 1)) for u, e in G[v].items()] for v in labels
        ]

    for i in range(max_iter):
        # One full sweep of alternating single-node moves; each entry is
        # (cumulative cost, move count, (node moved 0->1, node moved 1->0)).
        costs = list(_kernighan_lin_sweep(edges, side))
        min_cost, min_i, _ = min(costs)
        if min_cost >= 0:
            # No prefix of the sweep improves the cut; we are done.
            break

        # Commit the first ``min_i`` paired moves of the sweep.
        for _, _, (u, v) in costs[:min_i]:
            side[u] = 1
            side[v] = 0

    A = {u for u, s in zip(labels, side) if s == 0}
    B = {u for u, s in zip(labels, side) if s == 1}
    return A, B
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/label_propagation.py
ADDED
|
@@ -0,0 +1,337 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Label propagation community detection algorithms.
|
| 3 |
+
"""
|
| 4 |
+
from collections import Counter, defaultdict, deque
|
| 5 |
+
|
| 6 |
+
import networkx as nx
|
| 7 |
+
from networkx.utils import groups, not_implemented_for, py_random_state
|
| 8 |
+
|
| 9 |
+
__all__ = [
|
| 10 |
+
"label_propagation_communities",
|
| 11 |
+
"asyn_lpa_communities",
|
| 12 |
+
"fast_label_propagation_communities",
|
| 13 |
+
]
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@py_random_state("seed")
@nx._dispatch(edge_attrs="weight")
def fast_label_propagation_communities(G, *, weight=None, seed=None):
    """Returns communities in `G` as detected by fast label propagation.

    The fast label propagation algorithm is described in [1]_. The algorithm is
    probabilistic and the found communities may vary in different executions.

    The algorithm operates as follows. First, the community label of each node is
    set to a unique label. The algorithm then repeatedly updates the labels of
    the nodes to the most frequent label in their neighborhood. In case of ties,
    a random label is chosen from the most frequent labels.

    The algorithm maintains a queue of nodes that still need to be processed.
    Initially, all nodes are added to the queue in a random order. Then the nodes
    are removed from the queue one by one and processed. If a node updates its label,
    all its neighbors that have a different label are added to the queue (if not
    already in the queue). The algorithm stops when the queue is empty.

    Parameters
    ----------
    G : Graph, DiGraph, MultiGraph, or MultiDiGraph
        Any NetworkX graph.

    weight : string, or None (default)
        The edge attribute representing a non-negative weight of an edge. If None,
        each edge is assumed to have weight one. The weight of an edge is used in
        determining the frequency with which a label appears among the neighbors of
        a node (edge with weight `w` is equivalent to `w` unweighted edges).

    seed : integer, random_state, or None (default)
        Indicator of random number generation state. See :ref:`Randomness<randomness>`.

    Returns
    -------
    communities : iterable
        Iterable of communities given as sets of nodes.

    Notes
    -----
    Edge directions are ignored for directed graphs.
    Edge weights must be non-negative numbers.

    This is a generator; communities are produced lazily once the label
    propagation has converged.

    References
    ----------
    .. [1] Vincent A. Traag & Lovro Šubelj. "Large network community detection by
       fast label propagation." Scientific Reports 13 (2023): 2701.
       https://doi.org/10.1038/s41598-023-29610-z
    """

    # Queue of nodes to be processed.
    # (``random.shuffle`` accepts any mutable sequence, so shuffling the
    # deque in place is valid.)
    nodes_queue = deque(G)
    seed.shuffle(nodes_queue)

    # Set of nodes in the queue; mirrors the queue contents so membership
    # tests below are O(1).
    nodes_set = set(G)

    # Assign unique label to each node.
    comms = {node: i for i, node in enumerate(G)}

    while nodes_queue:
        # Remove next node from the queue to process.
        node = nodes_queue.popleft()
        nodes_set.remove(node)

        # Isolated nodes retain their initial label.
        if G.degree(node) > 0:
            # Compute frequency of labels in node's neighborhood.
            label_freqs = _fast_label_count(G, comms, node, weight)
            max_freq = max(label_freqs.values())

            # Always sample new label from most frequent labels.
            # (The candidate list preserves dict insertion order, so results
            # are reproducible for a fixed seed.)
            comm = seed.choice(
                [comm for comm in label_freqs if label_freqs[comm] == max_freq]
            )

            if comms[node] != comm:
                comms[node] = comm

                # Add neighbors that have different label to the queue.
                for nbr in nx.all_neighbors(G, node):
                    if comms[nbr] != comm and nbr not in nodes_set:
                        nodes_queue.append(nbr)
                        nodes_set.add(nbr)

    yield from groups(comms).values()
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def _fast_label_count(G, comms, node, weight=None):
    """Tally the community labels occurring among the neighbors of `node`.

    Returns a mapping from each label to its (possibly weighted) frequency.
    Edge directions are ignored: for directed graphs both successors and
    predecessors contribute.
    """

    if weight is not None:
        # Weighted graph, simple or multi: accumulate edge weights per label.
        freqs = defaultdict(float)
        for _, nbr, w in G.edges(node, data=weight, default=1):
            freqs[comms[nbr]] += w
        if G.is_directed():
            # Fold in incoming edges as well.
            for nbr, _, w in G.in_edges(node, data=weight, default=1):
                freqs[comms[nbr]] += w
    elif G.is_multigraph():
        # Unweighted multigraph: each parallel edge counts once.
        freqs = defaultdict(int)
        for nbr in G[node]:
            freqs[comms[nbr]] += len(G[node][nbr])
        if G.is_directed():
            for nbr in G.pred[node]:
                freqs[comms[nbr]] += len(G.pred[node][nbr])
    else:
        # Unweighted simple graph: one count per (in- or out-)neighbor.
        freqs = Counter(map(comms.get, nx.all_neighbors(G, node)))

    return freqs
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
@py_random_state(2)
@nx._dispatch(edge_attrs="weight")
def asyn_lpa_communities(G, weight=None, seed=None):
    """Returns communities in `G` as detected by asynchronous label
    propagation.

    The asynchronous label propagation algorithm is described in
    [1]_. The algorithm is probabilistic and the found communities may
    vary on different executions.

    The algorithm proceeds as follows. After initializing each node with
    a unique label, the algorithm repeatedly sets the label of a node to
    be the label that appears most frequently among that nodes
    neighbors. The algorithm halts when each node has the label that
    appears most frequently among its neighbors. The algorithm is
    asynchronous because each node is updated without waiting for
    updates on the remaining nodes.

    This generalized version of the algorithm in [1]_ accepts edge
    weights.

    Parameters
    ----------
    G : Graph

    weight : string
        The edge attribute representing the weight of an edge.
        If None, each edge is assumed to have weight one. In this
        algorithm, the weight of an edge is used in determining the
        frequency with which a label appears among the neighbors of a
        node: a higher weight means the label appears more often.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    communities : iterable
        Iterable of communities given as sets of nodes.

    Notes
    -----
    Edge weight attributes must be numerical.

    References
    ----------
    .. [1] Raghavan, Usha Nandini, Réka Albert, and Soundar Kumara. "Near
       linear time algorithm to detect community structures in large-scale
       networks." Physical Review E 76.3 (2007): 036106.
    """

    # Every node starts in its own singleton community.
    labels = {n: i for i, n in enumerate(G)}
    cont = True

    while cont:
        cont = False
        # A fresh random processing order on every sweep.
        nodes = list(G)
        seed.shuffle(nodes)

        for node in nodes:
            # Nodes with no neighbors keep their initial label.
            if not G[node]:
                continue

            # Get label frequencies among adjacent nodes.
            # Depending on the order they are processed in,
            # some nodes will be in iteration t and others in t-1,
            # making the algorithm asynchronous.
            if weight is None:
                # initialising a Counter from an iterator of labels is
                # faster for getting unweighted label frequencies
                label_freq = Counter(map(labels.get, G[node]))
            else:
                # updating a defaultdict is substantially faster
                # for getting weighted label frequencies
                label_freq = defaultdict(float)
                for _, v, wt in G.edges(node, data=weight, default=1):
                    label_freq[labels[v]] += wt

            # Get the labels that appear with maximum frequency.
            max_freq = max(label_freq.values())
            best_labels = [
                label for label, freq in label_freq.items() if freq == max_freq
            ]

            # If the node does not have one of the maximum frequency labels,
            # randomly choose one of them and update the node's label.
            # Continue the iteration as long as at least one node
            # doesn't have a maximum frequency label.
            if labels[node] not in best_labels:
                labels[node] = seed.choice(best_labels)
                cont = True

    yield from groups(labels).values()
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
@not_implemented_for("directed")
@nx._dispatch
def label_propagation_communities(G):
    """Generates community sets determined by label propagation

    Finds communities in `G` using a semi-synchronous label propagation
    method [1]_. This method combines the advantages of both the synchronous
    and asynchronous models. Not implemented for directed graphs.

    Parameters
    ----------
    G : graph
        An undirected NetworkX graph.

    Returns
    -------
    communities : iterable
        A dict_values object that contains a set of nodes for each community.

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed

    References
    ----------
    .. [1] Cordasco, G., & Gargano, L. (2010, December). Community detection
       via semi-synchronous label propagation algorithms. In Business
       Applications of Social Network Analysis (BASNA), 2010 IEEE International
       Workshop on (pp. 1-8). IEEE.
    """
    coloring = _color_network(G)
    # Create a unique label for each node in the graph
    labeling = {v: k for k, v in enumerate(G)}
    while not _labeling_complete(labeling, G):
        # Update the labels of every node with the same color.
        # Only the color classes matter here, not the color keys themselves.
        for nodes in coloring.values():
            for n in nodes:
                _update_label(n, labeling, G)

    # Group nodes by their final label, consistent with the sibling
    # label-propagation functions in this module which use ``groups``.
    return groups(labeling).values()
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
def _color_network(G):
    """Colors the network so that neighboring nodes all have distinct colors.

    Returns a dict keyed by color to a set of nodes with that color.
    """
    node_colors = nx.coloring.greedy_color(G)

    # Invert the node -> color mapping into color -> {nodes}.
    coloring = {}  # color => set(node)
    for node, color in node_colors.items():
        coloring.setdefault(color, set()).add(node)
    return coloring
|
| 294 |
+
|
| 295 |
+
|
| 296 |
+
def _labeling_complete(labeling, G):
    """Determines whether or not LPA is done.

    Label propagation is complete when every node's label is among the
    highest-frequency labels of its neighbors.

    Nodes with no neighbors are considered complete.
    """
    for v in G:
        # Isolated nodes are always complete.
        if not G[v]:
            continue
        if labeling[v] not in _most_frequent_labels(v, labeling, G):
            return False
    return True
|
| 307 |
+
|
| 308 |
+
|
| 309 |
+
def _most_frequent_labels(node, labeling, G):
|
| 310 |
+
"""Returns a set of all labels with maximum frequency in `labeling`.
|
| 311 |
+
|
| 312 |
+
Input `labeling` should be a dict keyed by node to labels.
|
| 313 |
+
"""
|
| 314 |
+
if not G[node]:
|
| 315 |
+
# Nodes with no neighbors are themselves a community and are labeled
|
| 316 |
+
# accordingly, hence the immediate if statement.
|
| 317 |
+
return {labeling[node]}
|
| 318 |
+
|
| 319 |
+
# Compute the frequencies of all neighbours of node
|
| 320 |
+
freqs = Counter(labeling[q] for q in G[node])
|
| 321 |
+
max_freq = max(freqs.values())
|
| 322 |
+
return {label for label, freq in freqs.items() if freq == max_freq}
|
| 323 |
+
|
| 324 |
+
|
| 325 |
+
def _update_label(node, labeling, G):
    """Updates the label of a node using the Prec-Max tie breaking algorithm

    The algorithm is explained in: 'Community Detection via Semi-Synchronous
    Label Propagation Algorithms' Cordasco and Gargano, 2011
    """
    best = _most_frequent_labels(node, labeling, G)
    if len(best) > 1 and labeling[node] in best:
        # Tie that already includes the current label: keep it (Prec).
        return
    # Either a unique winner, or a tie excluding the current label:
    # take the maximum (Max).  For a singleton set this is its element.
    labeling[node] = max(best)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_kernighan_lin.cpython-311.pyc
ADDED
|
Binary file (5.56 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_label_propagation.cpython-311.pyc
ADDED
|
Binary file (19 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_lukes.cpython-311.pyc
ADDED
|
Binary file (6.93 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_modularity_max.cpython-311.pyc
ADDED
|
Binary file (14.4 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/test_asyn_fluid.py
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx import Graph, NetworkXError
|
| 5 |
+
from networkx.algorithms.community import asyn_fluidc
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def test_exceptions():
    """Invalid community counts must raise NetworkXError."""
    test = Graph()
    test.add_node("a")
    # Non-integer, negative, and too-large k all fail on a 1-node graph.
    for bad_k in ("hi", -1, 3):
        pytest.raises(NetworkXError, asyn_fluidc, test, bad_k)
    # NOTE(review): with a second isolated node the graph is presumably
    # rejected as disconnected even for k=1 — confirm against asyn_fluidc.
    test.add_node("b")
    pytest.raises(NetworkXError, asyn_fluidc, test, 1)
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def test_single_node():
    """A lone node forms exactly one community containing itself."""
    test = Graph()
    test.add_node("a")

    expected = {frozenset(["a"])}
    found = {frozenset(c) for c in asyn_fluidc(test, 1)}
    assert found == expected
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def test_two_nodes():
    """Two connected nodes split into two singleton communities for k=2."""
    test = Graph()
    test.add_edge("a", "b")

    expected = {frozenset(["a"]), frozenset(["b"])}
    found = {frozenset(c) for c in asyn_fluidc(test, 2)}
    assert found == expected
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def test_two_clique_communities():
    """Two triangles joined by a single bridge form two communities."""
    test = Graph()

    # First triangle (c1).
    test.add_edges_from([("a", "b"), ("a", "c"), ("b", "c")])

    # Bridge between the two cliques.
    test.add_edge("c", "d")

    # Second triangle (c2).
    test.add_edges_from([("d", "e"), ("d", "f"), ("f", "e")])

    # ground truth
    ground_truth = {frozenset(["a", "c", "b"]), frozenset(["e", "d", "f"])}

    communities = asyn_fluidc(test, 2, seed=7)
    result = {frozenset(c) for c in communities}
    assert result == ground_truth
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def test_five_clique_ring():
    """Five 4-cliques joined in a ring each form their own community.

    The original test spelled out all 35 ``add_edge`` calls by hand; the
    loops below build the identical graph (same edges, same insertion
    order) without the repetition, so the seeded result is unchanged.
    """
    test = Graph()

    # Five 4-cliques labelled "1".."5", each with members a-d.  Edges are
    # added in enumerate order, matching the original hand-written order
    # (Xa-Xb, Xa-Xc, Xa-Xd, Xb-Xc, Xb-Xd, Xc-Xd).
    cliques = {c: [c + s for s in "abcd"] for c in "12345"}
    for members in cliques.values():
        for i, u in enumerate(members):
            for v in members[i + 1 :]:
                test.add_edge(u, v)

    # Ring connections between consecutive cliques (5 wraps around to 1).
    for c, nxt in zip("12345", "23451"):
        test.add_edge(c + "a", nxt + "c")

    # ground truth: one community per clique
    ground_truth = {frozenset(members) for members in cliques.values()}

    communities = asyn_fluidc(test, 5, seed=9)
    result = {frozenset(c) for c in communities}
    assert result == ground_truth
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/test_centrality.py
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Unit tests for the :mod:`networkx.algorithms.community.centrality`
|
| 2 |
+
module.
|
| 3 |
+
|
| 4 |
+
"""
|
| 5 |
+
from operator import itemgetter
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def set_of_sets(iterable):
    """Freeze each inner collection so the outer set is hashable."""
    return {frozenset(inner) for inner in iterable}
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def validate_communities(result, expected):
    """Assert that two community collections are equal, ignoring order."""
    got = set_of_sets(result)
    want = set_of_sets(expected)
    assert got == want
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def validate_possible_communities(result, *expected):
    """Assert that `result` equals at least one of the expected partitions."""
    actual = set_of_sets(result)
    assert any(actual == set_of_sets(candidate) for candidate in expected)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class TestGirvanNewman:
    """Unit tests for the
    :func:`networkx.algorithms.community.centrality.girvan_newman`
    function.

    """

    def test_no_edges(self):
        # With no edges, the only "partition" is all singletons, produced once.
        G = nx.empty_graph(3)
        communities = list(nx.community.girvan_newman(G))
        assert len(communities) == 1
        validate_communities(communities[0], [{0}, {1}, {2}])

    def test_undirected(self):
        # Start with the graph .-.-.-.
        G = nx.path_graph(4)
        communities = list(nx.community.girvan_newman(G))
        assert len(communities) == 3
        # After one removal, we get the graph .-. .-.
        validate_communities(communities[0], [{0, 1}, {2, 3}])
        # After the next, we get the graph .-. . ., but there are two
        # symmetric possible versions.
        validate_possible_communities(
            communities[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}]
        )
        # After the last removal, we always get the empty graph.
        validate_communities(communities[2], [{0}, {1}, {2}, {3}])

    def test_directed(self):
        # A directed path should decompose the same way as the undirected one.
        G = nx.DiGraph(nx.path_graph(4))
        communities = list(nx.community.girvan_newman(G))
        assert len(communities) == 3
        validate_communities(communities[0], [{0, 1}, {2, 3}])
        validate_possible_communities(
            communities[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}]
        )
        validate_communities(communities[2], [{0}, {1}, {2}, {3}])

    def test_selfloops(self):
        # Self-loops must not change the sequence of partitions.
        G = nx.path_graph(4)
        G.add_edge(0, 0)
        G.add_edge(2, 2)
        communities = list(nx.community.girvan_newman(G))
        assert len(communities) == 3
        validate_communities(communities[0], [{0, 1}, {2, 3}])
        validate_possible_communities(
            communities[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}]
        )
        validate_communities(communities[2], [{0}, {1}, {2}, {3}])

    def test_most_valuable_edge(self):
        # A custom edge-selection function overrides the default
        # betweenness-based choice.
        G = nx.Graph()
        G.add_weighted_edges_from([(0, 1, 3), (1, 2, 2), (2, 3, 1)])
        # Let the most valuable edge be the one with the highest weight.

        def heaviest(G):
            return max(G.edges(data="weight"), key=itemgetter(2))[:2]

        communities = list(nx.community.girvan_newman(G, heaviest))
        assert len(communities) == 3
        validate_communities(communities[0], [{0}, {1, 2, 3}])
        validate_communities(communities[1], [{0}, {1}, {2, 3}])
        validate_communities(communities[2], [{0}, {1}, {2}, {3}])
|