Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__init__.py +5 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/connectivity.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/correlation.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/mixing.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/neighbor_degree.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/pairs.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/connectivity.py +122 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/correlation.py +302 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/mixing.py +255 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/neighbor_degree.py +160 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/pairs.py +127 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__init__.py +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__pycache__/base_test.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_connectivity.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_correlation.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_mixing.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_neighbor_degree.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_pairs.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/base_test.py +81 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/test_connectivity.py +143 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/test_correlation.py +123 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/test_mixing.py +176 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/test_neighbor_degree.py +108 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/test_pairs.py +87 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/betweenness.py +436 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/closeness.py +282 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/current_flow_betweenness_subset.py +227 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/current_flow_closeness.py +96 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/eigenvector.py +357 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/flow_matrix.py +130 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/group.py +787 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/katz.py +331 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/percolation.py +128 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/subgraph_alg.py +340 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/coloring/__init__.py +4 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/coloring/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/coloring/__pycache__/equitable_coloring.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/coloring/__pycache__/greedy_coloring.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/coloring/equitable_coloring.py +505 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/coloring/greedy_coloring.py +565 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/coloring/tests/__init__.py +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/coloring/tests/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/coloring/tests/__pycache__/test_coloring.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/coloring/tests/test_coloring.py +863 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/tests/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_kcomponents.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_stoer_wagner.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/shortest_paths/__init__.py +5 -0
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__init__.py
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from networkx.algorithms.assortativity.connectivity import *
|
| 2 |
+
from networkx.algorithms.assortativity.correlation import *
|
| 3 |
+
from networkx.algorithms.assortativity.mixing import *
|
| 4 |
+
from networkx.algorithms.assortativity.neighbor_degree import *
|
| 5 |
+
from networkx.algorithms.assortativity.pairs import *
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (538 Bytes). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/connectivity.cpython-311.pyc
ADDED
|
Binary file (5.71 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/correlation.cpython-311.pyc
ADDED
|
Binary file (12.4 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/mixing.cpython-311.pyc
ADDED
|
Binary file (8.78 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/neighbor_degree.cpython-311.pyc
ADDED
|
Binary file (6.6 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/__pycache__/pairs.cpython-311.pyc
ADDED
|
Binary file (5.09 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/connectivity.py
ADDED
|
@@ -0,0 +1,122 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from collections import defaultdict
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
__all__ = ["average_degree_connectivity"]
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
@nx._dispatchable(edge_attrs="weight")
def average_degree_connectivity(
    G, source="in+out", target="in+out", nodes=None, weight=None
):
    r"""Compute the average degree connectivity of graph.

    The average degree connectivity is the average nearest neighbor degree of
    nodes with degree k. For weighted graphs, an analogous measure can
    be computed using the weighted average neighbors degree defined in
    [1]_, for a node `i`, as

    .. math::

        k_{nn,i}^{w} = \frac{1}{s_i} \sum_{j \in N(i)} w_{ij} k_j

    where `s_i` is the weighted degree of node `i`,
    `w_{ij}` is the weight of the edge that links `i` and `j`,
    and `N(i)` are the neighbors of node `i`.

    Parameters
    ----------
    G : NetworkX graph

    source : "in"|"out"|"in+out" (default:"in+out")
       Directed graphs only. Use "in"- or "out"-degree for source node.

    target : "in"|"out"|"in+out" (default:"in+out")
       Directed graphs only. Use "in"- or "out"-degree for target node.

    nodes : list or iterable (optional)
        Compute neighbor connectivity for these nodes. The default is all
        nodes.

    weight : string or None, optional (default=None)
       The edge attribute that holds the numerical value used as a weight.
       If None, then each edge has weight 1.

    Returns
    -------
    d : dict
       A dictionary keyed by degree k with the value of average connectivity.

    Raises
    ------
    NetworkXError
        If either `source` or `target` are not one of 'in',
        'out', or 'in+out'.
        If either `source` or `target` is passed for an undirected graph.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> G.edges[1, 2]["weight"] = 3
    >>> nx.average_degree_connectivity(G)
    {1: 2.0, 2: 1.5}
    >>> nx.average_degree_connectivity(G, weight="weight")
    {1: 2.0, 2: 1.75}

    See Also
    --------
    average_neighbor_degree

    References
    ----------
    .. [1] A. Barrat, M. Barthélemy, R. Pastor-Satorras, and A. Vespignani,
       "The architecture of complex weighted networks".
       PNAS 101 (11): 3747–3752 (2004).
    """
    # First, determine the type of neighbors and the type of degree to use.
    if G.is_directed():
        if source not in ("in", "out", "in+out"):
            raise nx.NetworkXError('source must be one of "in", "out", or "in+out"')
        if target not in ("in", "out", "in+out"):
            raise nx.NetworkXError('target must be one of "in", "out", or "in+out"')
        direction = {"out": G.out_degree, "in": G.in_degree, "in+out": G.degree}
        neighbor_funcs = {
            "out": G.successors,
            "in": G.predecessors,
            "in+out": G.neighbors,
        }
        source_degree = direction[source]
        target_degree = direction[target]
        neighbors = neighbor_funcs[source]
        # `reverse` indicates whether to look at the in-edge when
        # computing the weight of an edge.
        reverse = source == "in"
    else:
        if source != "in+out" or target != "in+out":
            # NOTE: fixed an f-string that carried no placeholders.
            raise nx.NetworkXError(
                "source and target arguments are only supported for directed graphs"
            )
        source_degree = G.degree
        target_degree = G.degree
        neighbors = G.neighbors
        reverse = False
    dsum = defaultdict(int)
    dnorm = defaultdict(int)
    # Check if `nodes` is actually a single node in the graph; if so, wrap it
    # so the loop below still sees (node, degree) pairs.
    source_nodes = source_degree(nodes)
    if nodes in G:
        source_nodes = [(nodes, source_degree(nodes))]
    for n, k in source_nodes:
        nbrdeg = target_degree(neighbors(n))
        if weight is None:
            s = sum(d for n, d in nbrdeg)
        else:  # weight nbr degree by weight of (n,nbr) edge
            if reverse:
                s = sum(G[nbr][n].get(weight, 1) * d for nbr, d in nbrdeg)
            else:
                s = sum(G[n][nbr].get(weight, 1) * d for nbr, d in nbrdeg)
        dnorm[k] += source_degree(n, weight=weight)
        dsum[k] += s

    # normalize; a zero norm (isolated degree class) passes the raw sum through
    return {k: avg if dnorm[k] == 0 else avg / dnorm[k] for k, avg in dsum.items()}
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/correlation.py
ADDED
|
@@ -0,0 +1,302 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Node assortativity coefficients and correlation measures."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.algorithms.assortativity.mixing import (
|
| 5 |
+
attribute_mixing_matrix,
|
| 6 |
+
degree_mixing_matrix,
|
| 7 |
+
)
|
| 8 |
+
from networkx.algorithms.assortativity.pairs import node_degree_xy
|
| 9 |
+
|
| 10 |
+
__all__ = [
|
| 11 |
+
"degree_pearson_correlation_coefficient",
|
| 12 |
+
"degree_assortativity_coefficient",
|
| 13 |
+
"attribute_assortativity_coefficient",
|
| 14 |
+
"numeric_assortativity_coefficient",
|
| 15 |
+
]
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
@nx._dispatchable(edge_attrs="weight")
def degree_assortativity_coefficient(G, x="out", y="in", weight=None, nodes=None):
    """Compute degree assortativity of graph.

    Assortativity measures the similarity of connections in the graph
    with respect to the node degree.

    Parameters
    ----------
    G : NetworkX graph

    x : string ('in', 'out')
        The degree type for the source node (directed graphs only).

    y : string ('in', 'out')
        The degree type for the target node (directed graphs only).

    weight : string or None, optional (default=None)
        The edge attribute holding the numerical value used as a weight.
        If None, each edge has weight 1.  The degree is the sum of the
        edge weights adjacent to the node.

    nodes : list or iterable (optional)
        Compute degree assortativity only for nodes in container.
        The default is all nodes.

    Returns
    -------
    r : float
        Assortativity of graph by degree.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> r = nx.degree_assortativity_coefficient(G)
    >>> print(f"{r:3.1f}")
    -0.5

    See Also
    --------
    attribute_assortativity_coefficient
    numeric_assortativity_coefficient
    degree_mixing_dict
    degree_mixing_matrix

    Notes
    -----
    This computes Eq. (21) in Ref. [1]_ , where e is the joint
    probability distribution (mixing matrix) of the degrees. If G is
    directed than the matrix e is the joint probability of the
    user-specified degree type for the source and target.

    References
    ----------
    .. [1] M. E. J. Newman, Mixing patterns in networks,
       Physical Review E, 67 026126, 2003
    .. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M.
       Edge direction and the structure of networks, PNAS 107, 10815-20 (2010).
    """
    if nodes is None:
        nodes = G.nodes

    # Gather every degree value that can appear on either axis of the
    # mixing matrix; the degree types consulted depend on `x` and `y`.
    if G.is_directed():
        degrees = set()
        if "in" in (x, y):
            degrees |= {d for _, d in G.in_degree(nodes, weight=weight)}
        if "out" in (x, y):
            degrees |= {d for _, d in G.out_degree(nodes, weight=weight)}
    else:
        degrees = {d for _, d in G.degree(nodes, weight=weight)}

    # Assign each degree value a row/column index in the mixing matrix.
    mapping = {deg: idx for idx, deg in enumerate(degrees)}
    M = degree_mixing_matrix(G, x=x, y=y, nodes=nodes, weight=weight, mapping=mapping)

    return _numeric_ac(M, mapping=mapping)
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
@nx._dispatchable(edge_attrs="weight")
def degree_pearson_correlation_coefficient(G, x="out", y="in", weight=None, nodes=None):
    """Compute degree assortativity of graph.

    Assortativity measures the similarity of connections in the graph
    with respect to the node degree.  This computes the same quantity
    as ``degree_assortativity_coefficient`` but delegates to the
    potentially faster ``scipy.stats.pearsonr`` routine.

    Parameters
    ----------
    G : NetworkX graph

    x : string ('in', 'out')
        The degree type for the source node (directed graphs only).

    y : string ('in', 'out')
        The degree type for the target node (directed graphs only).

    weight : string or None, optional (default=None)
        The edge attribute holding the numerical value used as a weight.
        If None, each edge has weight 1.  The degree is the sum of the
        edge weights adjacent to the node.

    nodes : list or iterable (optional)
        Compute the Pearson correlation of degrees only for specified
        nodes.  The default is all nodes.

    Returns
    -------
    r : float
        Assortativity of graph by degree.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> r = nx.degree_pearson_correlation_coefficient(G)
    >>> print(f"{r:3.1f}")
    -0.5

    Notes
    -----
    This calls scipy.stats.pearsonr.

    References
    ----------
    .. [1] M. E. J. Newman, Mixing patterns in networks
       Physical Review E, 67 026126, 2003
    .. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M.
       Edge direction and the structure of networks, PNAS 107, 10815-20 (2010).
    """
    # Lazy import: scipy is only needed when this function actually runs.
    import scipy as sp

    # One (source degree, target degree) pair per edge; split the pairs
    # into two parallel sequences and correlate them.
    pairs = node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight)
    src_degrees, tgt_degrees = zip(*pairs)
    r, _pvalue = sp.stats.pearsonr(src_degrees, tgt_degrees)
    return float(r)
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
@nx._dispatchable(node_attrs="attribute")
def attribute_assortativity_coefficient(G, attribute, nodes=None):
    """Compute assortativity for node attributes.

    Assortativity measures the similarity of connections in the graph
    with respect to the given attribute.

    Parameters
    ----------
    G : NetworkX graph

    attribute : string
        Node attribute key

    nodes : list or iterable (optional)
        Compute attribute assortativity for nodes in container.
        The default is all nodes.

    Returns
    -------
    r : float
        Assortativity of graph for given attribute

    Examples
    --------
    >>> G = nx.Graph()
    >>> G.add_nodes_from([0, 1], color="red")
    >>> G.add_nodes_from([2, 3], color="blue")
    >>> G.add_edges_from([(0, 1), (2, 3)])
    >>> print(nx.attribute_assortativity_coefficient(G, "color"))
    1.0

    Notes
    -----
    This computes Eq. (2) in Ref. [1]_ , (trace(M)-sum(M^2))/(1-sum(M^2)),
    where M is the joint probability distribution (mixing matrix)
    of the specified attribute.

    References
    ----------
    .. [1] M. E. J. Newman, Mixing patterns in networks,
       Physical Review E, 67 026126, 2003
    """
    # Build the joint attribute-value distribution, then reduce it to
    # Newman's scalar coefficient.
    mixing = attribute_mixing_matrix(G, attribute, nodes)
    return attribute_ac(mixing)
|
| 207 |
+
|
| 208 |
+
|
| 209 |
+
@nx._dispatchable(node_attrs="attribute")
def numeric_assortativity_coefficient(G, attribute, nodes=None):
    """Compute assortativity for numerical node attributes.

    Assortativity measures the similarity of connections in the graph
    with respect to the given numeric attribute.

    Parameters
    ----------
    G : NetworkX graph

    attribute : string
        Node attribute key.

    nodes : list or iterable (optional)
        Compute numeric assortativity only for attributes of nodes in
        container.  The default is all nodes.

    Returns
    -------
    r : float
        Assortativity of graph for given attribute

    Examples
    --------
    >>> G = nx.Graph()
    >>> G.add_nodes_from([0, 1], size=2)
    >>> G.add_nodes_from([2, 3], size=3)
    >>> G.add_edges_from([(0, 1), (2, 3)])
    >>> print(nx.numeric_assortativity_coefficient(G, "size"))
    1.0

    Notes
    -----
    This computes Eq. (21) in Ref. [1]_ , which is the Pearson correlation
    coefficient of the specified (scalar valued) attribute across edges.

    References
    ----------
    .. [1] M. E. J. Newman, Mixing patterns in networks
       Physical Review E, 67 026126, 2003
    """
    node_set = G.nodes if nodes is None else nodes
    # Index every distinct attribute value so it gets a row/column in the
    # mixing matrix.
    values = {G.nodes[n][attribute] for n in node_set}
    index = {val: i for i, val in enumerate(values)}
    M = attribute_mixing_matrix(G, attribute, node_set, index)
    return _numeric_ac(M, index)
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
def attribute_ac(M):
    """Compute assortativity for attribute matrix M.

    Parameters
    ----------
    M : numpy.ndarray
        2D ndarray representing the attribute mixing matrix.

    Notes
    -----
    This computes Eq. (2) in Ref. [1]_ , (trace(e)-sum(e^2))/(1-sum(e^2)),
    where e is the joint probability distribution (mixing matrix)
    of the specified attribute.

    References
    ----------
    .. [1] M. E. J. Newman, Mixing patterns in networks,
       Physical Review E, 67 026126, 2003
    """
    # If M holds raw counts, rescale it into a joint probability matrix.
    total = M.sum()
    if total != 1.0:
        M = M / total
    sum_sq = (M @ M).sum()  # sum of entries of e^2
    diag_mass = M.trace()  # probability mass on the diagonal
    return float((diag_mass - sum_sq) / (1 - sum_sq))
|
| 284 |
+
|
| 285 |
+
|
| 286 |
+
def _numeric_ac(M, mapping):
    """Pearson correlation coefficient computed from a mixing matrix.

    `M` is a 2D numpy array (joint distribution or raw counts) and
    `mapping` takes each numeric value to its row/column index in `M`.
    The x and y margins share the same support (the mapping keys).
    """
    import numpy as np

    # Work with a joint probability distribution.
    total = M.sum()
    if total != 1.0:
        M = M / total
    support = np.array(list(mapping.keys()))  # values on both axes
    order = list(mapping.values())  # matrix indices for those values
    col_marginal = M.sum(axis=0)[order]
    row_marginal = M.sum(axis=1)[order]
    # Variances of the two marginal distributions: E[v^2] - (E[v])^2.
    var_x = (col_marginal * support**2).sum() - ((col_marginal * support).sum()) ** 2
    var_y = (row_marginal * support**2).sum() - ((row_marginal * support).sum()) ** 2
    # Covariance term: sum over pairs of x*y*(joint - product of marginals).
    cov = (
        np.outer(support, support) * (M - np.outer(col_marginal, row_marginal))
    ).sum()
    return float(cov / np.sqrt(var_x * var_y))
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/mixing.py
ADDED
|
@@ -0,0 +1,255 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Mixing matrices for node attributes and degree.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.algorithms.assortativity.pairs import node_attribute_xy, node_degree_xy
|
| 7 |
+
from networkx.utils import dict_to_numpy_array
|
| 8 |
+
|
| 9 |
+
__all__ = [
|
| 10 |
+
"attribute_mixing_matrix",
|
| 11 |
+
"attribute_mixing_dict",
|
| 12 |
+
"degree_mixing_matrix",
|
| 13 |
+
"degree_mixing_dict",
|
| 14 |
+
"mixing_dict",
|
| 15 |
+
]
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
@nx._dispatchable(node_attrs="attribute")
def attribute_mixing_dict(G, attribute, nodes=None, normalized=False):
    """Returns dictionary representation of mixing matrix for attribute.

    Parameters
    ----------
    G : graph
       NetworkX graph object.

    attribute : string
       Node attribute key.

    nodes: list or iterable (optional)
        Use nodes in container to build the dict. The default is all nodes.

    normalized : bool (default=False)
       Return counts if False or probabilities if True.

    Examples
    --------
    >>> G = nx.Graph()
    >>> G.add_nodes_from([0, 1], color="red")
    >>> G.add_nodes_from([2, 3], color="blue")
    >>> G.add_edge(1, 3)
    >>> d = nx.attribute_mixing_dict(G, "color")
    >>> print(d["red"]["blue"])
    1
    >>> print(d["blue"]["red"])  # d symmetric for undirected graphs
    1

    Returns
    -------
    d : dictionary
       Counts or joint probability of occurrence of attribute pairs.
    """
    # One (attribute, attribute) pair per edge, folded into a nested dict.
    xy_iter = node_attribute_xy(G, attribute, nodes)
    return mixing_dict(xy_iter, normalized=normalized)
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
@nx._dispatchable(node_attrs="attribute")
def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, normalized=True):
    """Returns mixing matrix for attribute.

    Parameters
    ----------
    G : graph
       NetworkX graph object.

    attribute : string
       Node attribute key.

    nodes: list or iterable (optional)
        Use only nodes in container to build the matrix. The default is
        all nodes.

    mapping : dictionary, optional
       Mapping from node attribute to integer index in matrix.
       If not specified, an arbitrary ordering will be used.

    normalized : bool (default=True)
       Return counts if False or probabilities if True.

    Returns
    -------
    m: numpy array
       Counts or joint probability of occurrence of attribute pairs.

    Notes
    -----
    If each node has a unique attribute value, the unnormalized mixing matrix
    will be equal to the adjacency matrix. To get a denser mixing matrix,
    the rounding can be performed to form groups of nodes with equal values.
    For example, the exact height of persons in cm (180.79155222, 163.9080892,
    163.30095355, 167.99016217, 168.21590163, ...) can be rounded to (180, 163,
    163, 168, 168, ...).

    Definitions of attribute mixing matrix vary on whether the matrix
    should include rows for attribute values that don't arise. Here we
    do not include such empty-rows. But you can force them to appear
    by inputting a `mapping` that includes those values.

    Examples
    --------
    >>> G = nx.path_graph(3)
    >>> gender = {0: "male", 1: "female", 2: "female"}
    >>> nx.set_node_attributes(G, gender, "gender")
    >>> mapping = {"male": 0, "female": 1}
    >>> mix_mat = nx.attribute_mixing_matrix(G, "gender", mapping=mapping)
    >>> mix_mat
    array([[0.  , 0.25],
           [0.25, 0.5 ]])
    """
    # Count attribute pairs per edge, then lay the nested dict out as an
    # array whose rows/columns follow `mapping` (or an arbitrary order).
    counts = attribute_mixing_dict(G, attribute, nodes)
    matrix = dict_to_numpy_array(counts, mapping=mapping)
    if not normalized:
        return matrix
    return matrix / matrix.sum()
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
@nx._dispatchable(edge_attrs="weight")
def degree_mixing_dict(G, x="out", y="in", weight=None, nodes=None, normalized=False):
    """Returns dictionary representation of mixing matrix for degree.

    Parameters
    ----------
    G : graph
        NetworkX graph object.

    x : string ('in', 'out')
        The degree type for the source node (directed graphs only).

    y : string ('in', 'out')
        The degree type for the target node (directed graphs only).

    weight : string or None, optional (default=None)
        The edge attribute holding the numerical value used as a weight.
        If None, each edge has weight 1.  The degree is the sum of the
        edge weights adjacent to the node.

    normalized : bool (default=False)
        Return counts if False or probabilities if True.

    Returns
    -------
    d : dictionary
        Counts or joint probability of occurrence of degree pairs.
    """
    # One (source degree, target degree) pair per edge, folded into a
    # nested dict keyed by the two degrees.
    degree_pairs = node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight)
    return mixing_dict(degree_pairs, normalized=normalized)
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
@nx._dispatchable(edge_attrs="weight")
def degree_mixing_matrix(
    G, x="out", y="in", weight=None, nodes=None, normalized=True, mapping=None
):
    """Returns mixing matrix for degree.

    Parameters
    ----------
    G : graph
       NetworkX graph object.

    x: string ('in','out')
       The degree type for source node (directed graphs only).

    y: string ('in','out')
       The degree type for target node (directed graphs only).

    nodes: list or iterable (optional)
        Build the matrix using only nodes in container.
        The default is all nodes.

    weight: string or None, optional (default=None)
       The edge attribute that holds the numerical value used
       as a weight.  If None, then each edge has weight 1.
       The degree is the sum of the edge weights adjacent to the node.

    normalized : bool (default=True)
       Return counts if False or probabilities if True.

    mapping : dictionary, optional
       Mapping from node degree to integer index in matrix.
       If not specified, an arbitrary ordering will be used.

    Returns
    -------
    m: numpy array
       Counts, or joint probability, of occurrence of node degree.

    Notes
    -----
    Definitions of degree mixing matrix vary on whether the matrix
    should include rows for degree values that don't arise. Here we
    do not include such empty-rows. But you can force them to appear
    by inputting a `mapping` that includes those values. See examples.

    Examples
    --------
    >>> G = nx.star_graph(3)
    >>> mix_mat = nx.degree_mixing_matrix(G)
    >>> mix_mat
    array([[0. , 0.5],
           [0.5, 0. ]])

    If you want every possible degree to appear as a row, even if no nodes
    have that degree, use `mapping` as follows,

    >>> max_degree = max(deg for n, deg in G.degree)
    >>> mapping = {x: x for x in range(max_degree + 1)}  # identity mapping
    >>> mix_mat = nx.degree_mixing_matrix(G, mapping=mapping)
    >>> mix_mat
    array([[0. , 0. , 0. , 0. ],
           [0. , 0. , 0. , 0.5],
           [0. , 0. , 0. , 0. ],
           [0. , 0.5, 0. , 0. ]])
    """
    d = degree_mixing_dict(G, x=x, y=y, nodes=nodes, weight=weight)
    a = dict_to_numpy_array(d, mapping=mapping)
    if normalized:
        # Convert raw counts to joint probabilities of degree pairs.
        a = a / a.sum()
    return a
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
def mixing_dict(xy, normalized=False):
    """Returns a dictionary representation of mixing matrix.

    Parameters
    ----------
    xy : list or container of two-tuples
       Pairs of (x,y) items.

    normalized : bool (default=False)
       Return counts if False or probabilities if True.

    Returns
    -------
    d: dictionary
       Counts or Joint probability of occurrence of values in xy.
    """
    d = {}
    psum = 0.0
    for x, y in xy:
        # Create a row for every value seen in either position so that the
        # nested dict is "square" (same key set for rows and columns).
        if x not in d:
            d[x] = {}
        if y not in d:
            d[y] = {}
        v = d[x].get(y, 0)
        d[x][y] = v + 1
        psum += 1

    if normalized:
        # Divide each count by the total number of pairs; if xy was empty,
        # d has no entries and this loop (and the division) never runs.
        for _, jdict in d.items():
            for j in jdict:
                jdict[j] /= psum
    return d
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/neighbor_degree.py
ADDED
|
@@ -0,0 +1,160 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import networkx as nx
|
| 2 |
+
|
| 3 |
+
__all__ = ["average_neighbor_degree"]
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
@nx._dispatchable(edge_attrs="weight")
def average_neighbor_degree(G, source="out", target="out", nodes=None, weight=None):
    r"""Returns the average degree of the neighborhood of each node.

    In an undirected graph, the neighborhood `N(i)` of node `i` contains the
    nodes that are connected to `i` by an edge.

    For directed graphs, `N(i)` is defined according to the parameter `source`:

        - if source is 'in', then `N(i)` consists of predecessors of node `i`.
        - if source is 'out', then `N(i)` consists of successors of node `i`.
        - if source is 'in+out', then `N(i)` is both predecessors and successors.

    The average neighborhood degree of a node `i` is

    .. math::

        k_{nn,i} = \frac{1}{|N(i)|} \sum_{j \in N(i)} k_j

    where `N(i)` are the neighbors of node `i` and `k_j` is
    the degree of node `j` which belongs to `N(i)`. For weighted
    graphs, an analogous measure can be defined [1]_,

    .. math::

        k_{nn,i}^{w} = \frac{1}{s_i} \sum_{j \in N(i)} w_{ij} k_j

    where `s_i` is the weighted degree of node `i`, `w_{ij}`
    is the weight of the edge that links `i` and `j` and
    `N(i)` are the neighbors of node `i`.

    Parameters
    ----------
    G : NetworkX graph

    source : string ("in"|"out"|"in+out"), optional (default="out")
       Directed graphs only.
       Use "in"- or "out"-neighbors of source node.

    target : string ("in"|"out"|"in+out"), optional (default="out")
       Directed graphs only.
       Use "in"- or "out"-degree for target node.

    nodes : list or iterable, optional (default=G.nodes)
        Compute neighbor degree only for specified nodes.

    weight : string or None, optional (default=None)
       The edge attribute that holds the numerical value used as a weight.
       If None, then each edge has weight 1.

    Returns
    -------
    d: dict
       A dictionary keyed by node to the average degree of its neighbors.

    Raises
    ------
    NetworkXError
        If either `source` or `target` are not one of 'in', 'out', or 'in+out'.
        If either `source` or `target` is passed for an undirected graph.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> G.edges[0, 1]["weight"] = 5
    >>> G.edges[2, 3]["weight"] = 3

    >>> nx.average_neighbor_degree(G)
    {0: 2.0, 1: 1.5, 2: 1.5, 3: 2.0}
    >>> nx.average_neighbor_degree(G, weight="weight")
    {0: 2.0, 1: 1.1666666666666667, 2: 1.25, 3: 2.0}

    >>> G = nx.DiGraph()
    >>> nx.add_path(G, [0, 1, 2, 3])
    >>> nx.average_neighbor_degree(G, source="in", target="in")
    {0: 0.0, 1: 0.0, 2: 1.0, 3: 1.0}

    >>> nx.average_neighbor_degree(G, source="out", target="out")
    {0: 1.0, 1: 1.0, 2: 0.0, 3: 0.0}

    See Also
    --------
    average_degree_connectivity

    References
    ----------
    .. [1] A. Barrat, M. Barthélemy, R. Pastor-Satorras, and A. Vespignani,
       "The architecture of complex weighted networks".
       PNAS 101 (11): 3747–3752 (2004).
    """
    if G.is_directed():
        if source == "in":
            source_degree = G.in_degree
        elif source == "out":
            source_degree = G.out_degree
        elif source == "in+out":
            source_degree = G.degree
        else:
            raise nx.NetworkXError(
                f"source argument {source} must be 'in', 'out' or 'in+out'"
            )

        if target == "in":
            target_degree = G.in_degree
        elif target == "out":
            target_degree = G.out_degree
        elif target == "in+out":
            target_degree = G.degree
        else:
            raise nx.NetworkXError(
                f"target argument {target} must be 'in', 'out' or 'in+out'"
            )
    else:
        if source != "out" or target != "out":
            # Plain string: the message has no placeholders (was a stray
            # f-string prefix).
            raise nx.NetworkXError(
                "source and target arguments are only supported for directed graphs"
            )
        source_degree = target_degree = G.degree

    # precompute target degrees -- should *not* be weighted degree
    t_deg = dict(target_degree())

    # Set up both predecessor and successor neighbor dicts leaving empty if not needed
    G_P = G_S = {n: {} for n in G}
    if G.is_directed():
        # "in" or "in+out" cases: G_P contains predecessors
        if "in" in source:
            G_P = G.pred
        # "out" or "in+out" cases: G_S contains successors
        if "out" in source:
            G_S = G.succ
    else:
        # undirected leave G_P empty but G_S is the adjacency
        G_S = G.adj

    # Main loop: Compute average degree of neighbors
    avg = {}
    for n, deg in source_degree(nodes, weight=weight):
        # handle degree zero average
        if deg == 0:
            avg[n] = 0.0
            continue

        # we sum over both G_P and G_S, but one of the two is usually empty.
        if weight is None:
            avg[n] = (
                sum(t_deg[nbr] for nbr in G_S[n]) + sum(t_deg[nbr] for nbr in G_P[n])
            ) / deg
        else:
            avg[n] = (
                sum(dd.get(weight, 1) * t_deg[nbr] for nbr, dd in G_S[n].items())
                + sum(dd.get(weight, 1) * t_deg[nbr] for nbr, dd in G_P[n].items())
            ) / deg
    return avg
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/pairs.py
ADDED
|
@@ -0,0 +1,127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Generators of x-y pairs of node data."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
__all__ = ["node_attribute_xy", "node_degree_xy"]
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
@nx._dispatchable(node_attrs="attribute")
def node_attribute_xy(G, attribute, nodes=None):
    """Yields 2-tuples of node attribute values for all edges in `G`.

    This generator yields, for each edge in `G` incident to a node in `nodes`,
    a 2-tuple of form ``(attribute value, attribute value)`` for the parameter
    specified node-attribute.

    Parameters
    ----------
    G: NetworkX graph

    attribute: key
        The node attribute key.

    nodes: list or iterable (optional)
        Use only edges that are incident to specified nodes.
        The default is all nodes.

    Yields
    ------
    (x, y): 2-tuple
        Generates 2-tuple of (attribute, attribute) values.

    Examples
    --------
    >>> G = nx.DiGraph()
    >>> G.add_node(1, color="red")
    >>> G.add_node(2, color="blue")
    >>> G.add_node(3, color="green")
    >>> G.add_edge(1, 2)
    >>> list(nx.node_attribute_xy(G, "color"))
    [('red', 'blue')]

    Notes
    -----
    For undirected graphs, each edge is produced twice, once for each edge
    representation (u, v) and (v, u), with the exception of self-loop edges
    which only appear once.
    """
    node_set = set(G) if nodes is None else set(nodes)
    node_data = G.nodes
    is_multi = G.is_multigraph()
    for u, nbrs in G.adjacency():
        if u not in node_set:
            continue
        u_value = node_data[u].get(attribute, None)
        for v, edge_data in nbrs.items():
            v_value = node_data[v].get(attribute, None)
            if is_multi:
                # Emit one pair per parallel edge between u and v.
                for _ in edge_data:
                    yield (u_value, v_value)
            else:
                yield (u_value, v_value)
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
@nx._dispatchable(edge_attrs="weight")
def node_degree_xy(G, x="out", y="in", weight=None, nodes=None):
    """Yields 2-tuples of ``(degree, degree)`` values for edges in `G`.

    This generator yields, for each edge in `G` incident to a node in `nodes`,
    a 2-tuple of form ``(degree, degree)``. The node degrees are weighted
    when a `weight` attribute is specified.

    Parameters
    ----------
    G: NetworkX graph

    x: string ('in','out')
       The degree type for source node (directed graphs only).

    y: string ('in','out')
       The degree type for target node (directed graphs only).

    weight: string or None, optional (default=None)
       The edge attribute that holds the numerical value used
       as a weight.  If None, then each edge has weight 1.
       The degree is the sum of the edge weights adjacent to the node.

    nodes: list or iterable (optional)
        Use only edges that are adjacent to specified nodes.
        The default is all nodes.

    Yields
    ------
    (x, y): 2-tuple
        Generates 2-tuple of (degree, degree) values.

    Examples
    --------
    >>> G = nx.DiGraph()
    >>> G.add_edge(1, 2)
    >>> list(nx.node_degree_xy(G, x="out", y="in"))
    [(1, 1)]
    >>> list(nx.node_degree_xy(G, x="in", y="out"))
    [(0, 0)]

    Notes
    -----
    For undirected graphs, each edge is produced twice, once for each edge
    representation (u, v) and (v, u), with the exception of self-loop edges
    which only appear once.
    """
    node_set = set(G) if nodes is None else set(nodes)
    if not G.is_directed():
        xdeg = ydeg = G.degree
    else:
        degree_funcs = {"out": G.out_degree, "in": G.in_degree}
        xdeg = degree_funcs[x]
        ydeg = degree_funcs[y]

    for u, udeg in xdeg(node_set, weight=weight):
        # Iterate G.edges(u) rather than the adjacency dict so that
        # parallel edges in multigraphs are each counted.
        targets = (v for _, v in G.edges(u) if v in node_set)
        for _, vdeg in ydeg(targets, weight=weight):
            yield udeg, vdeg
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__init__.py
ADDED
|
File without changes
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (212 Bytes). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__pycache__/base_test.cpython-311.pyc
ADDED
|
Binary file (6.06 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_connectivity.cpython-311.pyc
ADDED
|
Binary file (8.87 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_correlation.cpython-311.pyc
ADDED
|
Binary file (11.3 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_mixing.cpython-311.pyc
ADDED
|
Binary file (13.2 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_neighbor_degree.cpython-311.pyc
ADDED
|
Binary file (7.54 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_pairs.cpython-311.pyc
ADDED
|
Binary file (5.9 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/base_test.py
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import networkx as nx
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
class BaseTestAttributeMixing:
    """Shared graph fixtures for attribute-mixing tests.

    Builds small graphs whose nodes carry a categorical ``fish`` or a
    numeric ``margin`` attribute; subclasses exercise the assortativity
    and mixing functions against them.
    """

    @classmethod
    def setup_class(cls):
        # G: undirected graph with categorical "fish" attribute.
        G = nx.Graph()
        G.add_nodes_from([0, 1], fish="one")
        G.add_nodes_from([2, 3], fish="two")
        G.add_nodes_from([4], fish="red")
        G.add_nodes_from([5], fish="blue")
        G.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
        cls.G = G

        # D: directed graph with the same attributes and edge layout as G.
        D = nx.DiGraph()
        D.add_nodes_from([0, 1], fish="one")
        D.add_nodes_from([2, 3], fish="two")
        D.add_nodes_from([4], fish="red")
        D.add_nodes_from([5], fish="blue")
        D.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
        cls.D = D

        # M: multigraph with a parallel (0, 1) edge; nodes 4 and 5 isolated.
        M = nx.MultiGraph()
        M.add_nodes_from([0, 1], fish="one")
        M.add_nodes_from([2, 3], fish="two")
        M.add_nodes_from([4], fish="red")
        M.add_nodes_from([5], fish="blue")
        M.add_edges_from([(0, 1), (0, 1), (2, 3)])
        cls.M = M

        # S: graph whose only edges are self-loops on nodes 0 and 2.
        S = nx.Graph()
        S.add_nodes_from([0, 1], fish="one")
        S.add_nodes_from([2, 3], fish="two")
        S.add_nodes_from([4], fish="red")
        S.add_nodes_from([5], fish="blue")
        S.add_edge(0, 0)
        S.add_edge(2, 2)
        cls.S = S

        # N: same layout as G but with a negative numeric "margin" attribute.
        N = nx.Graph()
        N.add_nodes_from([0, 1], margin=-2)
        N.add_nodes_from([2, 3], margin=-2)
        N.add_nodes_from([4], margin=-3)
        N.add_nodes_from([5], margin=-4)
        N.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
        cls.N = N

        # F: weighted graph where "margin" is each node's weighted degree.
        F = nx.Graph()
        F.add_edges_from([(0, 3), (1, 3), (2, 3)], weight=0.5)
        F.add_edge(0, 2, weight=1)
        nx.set_node_attributes(F, dict(F.degree(weight="weight")), "margin")
        cls.F = F

        # K: small graph with mixed-sign integer "margin" values.
        K = nx.Graph()
        K.add_nodes_from([1, 2], margin=-1)
        K.add_nodes_from([3], margin=1)
        K.add_nodes_from([4], margin=2)
        K.add_edges_from([(3, 4), (1, 2), (1, 3)])
        cls.K = K
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
class BaseTestDegreeMixing:
    """Shared graph fixtures for degree-mixing tests."""

    @classmethod
    def setup_class(cls):
        # P4: undirected path 0-1-2-3.
        cls.P4 = nx.path_graph(4)
        # D: small directed graph.
        cls.D = nx.DiGraph()
        cls.D.add_edges_from([(0, 2), (0, 3), (1, 3), (2, 3)])
        # D2: directed graph whose in/out degree sets differ from the
        # total-degree set.
        cls.D2 = nx.DiGraph()
        cls.D2.add_edges_from([(0, 3), (1, 0), (1, 2), (2, 4), (4, 1), (4, 3), (4, 2)])
        # M: multigraph path with a parallel (0, 1) edge.
        cls.M = nx.MultiGraph()
        nx.add_path(cls.M, range(4))
        cls.M.add_edge(0, 1)
        # S: graph whose only edges are self-loops.
        cls.S = nx.Graph()
        cls.S.add_edges_from([(0, 0), (1, 1)])
        # W: small weighted graph.
        cls.W = nx.Graph()
        cls.W.add_edges_from([(0, 3), (1, 3), (2, 3)], weight=0.5)
        cls.W.add_edge(0, 2, weight=1)
        # DS: two 4-stars joined by an edge between node 4 (a leaf of the
        # first star) and node 5 (the hub of the second, after relabeling).
        S1 = nx.star_graph(4)
        S2 = nx.star_graph(4)
        cls.DS = nx.disjoint_union(S1, S2)
        cls.DS.add_edge(4, 5)
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/test_connectivity.py
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from itertools import permutations
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class TestNeighborConnectivity:
    """Tests for ``nx.average_degree_connectivity``."""

    def test_degree_p4(self):
        # Unweighted path graph: endpoints (degree 1) neighbor a degree-2
        # node; interior nodes average 1.5.
        G = nx.path_graph(4)
        answer = {1: 2.0, 2: 1.5}
        nd = nx.average_degree_connectivity(G)
        assert nd == answer

        # Directed version doubles each degree (in + out).
        D = G.to_directed()
        answer = {2: 2.0, 4: 1.5}
        nd = nx.average_degree_connectivity(D)
        assert nd == answer

        answer = {1: 2.0, 2: 1.5}
        D = G.to_directed()
        nd = nx.average_degree_connectivity(D, source="in", target="in")
        assert nd == answer

        # NOTE(review): duplicate of the previous in/in assertion.
        D = G.to_directed()
        nd = nx.average_degree_connectivity(D, source="in", target="in")
        assert nd == answer

    def test_degree_p4_weighted(self):
        G = nx.path_graph(4)
        G[1][2]["weight"] = 4
        answer = {1: 2.0, 2: 1.8}
        nd = nx.average_degree_connectivity(G, weight="weight")
        assert nd == answer
        # Without the weight keyword the result matches the unweighted case.
        answer = {1: 2.0, 2: 1.5}
        nd = nx.average_degree_connectivity(G)
        assert nd == answer

        D = G.to_directed()
        answer = {2: 2.0, 4: 1.8}
        nd = nx.average_degree_connectivity(D, weight="weight")
        assert nd == answer

        answer = {1: 2.0, 2: 1.8}
        D = G.to_directed()
        nd = nx.average_degree_connectivity(
            D, weight="weight", source="in", target="in"
        )
        assert nd == answer

        D = G.to_directed()
        nd = nx.average_degree_connectivity(
            D, source="in", target="out", weight="weight"
        )
        assert nd == answer

    def test_weight_keyword(self):
        # Any attribute name can serve as the weight key, not just "weight".
        G = nx.path_graph(4)
        G[1][2]["other"] = 4
        answer = {1: 2.0, 2: 1.8}
        nd = nx.average_degree_connectivity(G, weight="other")
        assert nd == answer
        answer = {1: 2.0, 2: 1.5}
        nd = nx.average_degree_connectivity(G, weight=None)
        assert nd == answer

        D = G.to_directed()
        answer = {2: 2.0, 4: 1.8}
        nd = nx.average_degree_connectivity(D, weight="other")
        assert nd == answer

        answer = {1: 2.0, 2: 1.8}
        D = G.to_directed()
        nd = nx.average_degree_connectivity(D, weight="other", source="in", target="in")
        assert nd == answer

        # NOTE(review): duplicate of the previous in/in assertion.
        D = G.to_directed()
        nd = nx.average_degree_connectivity(D, weight="other", source="in", target="in")
        assert nd == answer

    def test_degree_barrat(self):
        # Weighted measure from Barrat et al. (PNAS 2004).
        G = nx.star_graph(5)
        G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)])
        G[0][5]["weight"] = 5
        nd = nx.average_degree_connectivity(G)[5]
        assert nd == 1.8
        nd = nx.average_degree_connectivity(G, weight="weight")[5]
        assert nd == pytest.approx(3.222222, abs=1e-5)

    def test_zero_deg(self):
        # Nodes with zero source-degree should map to 0, not raise.
        G = nx.DiGraph()
        G.add_edge(1, 2)
        G.add_edge(1, 3)
        G.add_edge(1, 4)
        c = nx.average_degree_connectivity(G)
        assert c == {1: 0, 3: 1}
        c = nx.average_degree_connectivity(G, source="in", target="in")
        assert c == {0: 0, 1: 0}
        c = nx.average_degree_connectivity(G, source="in", target="out")
        assert c == {0: 0, 1: 3}
        c = nx.average_degree_connectivity(G, source="in", target="in+out")
        assert c == {0: 0, 1: 3}
        c = nx.average_degree_connectivity(G, source="out", target="out")
        assert c == {0: 0, 3: 0}
        c = nx.average_degree_connectivity(G, source="out", target="in")
        assert c == {0: 0, 3: 1}
        c = nx.average_degree_connectivity(G, source="out", target="in+out")
        assert c == {0: 0, 3: 1}

    def test_in_out_weight(self):
        # With all weights equal to 1, weighted and unweighted results agree
        # for every source/target combination.
        G = nx.DiGraph()
        G.add_edge(1, 2, weight=1)
        G.add_edge(1, 3, weight=1)
        G.add_edge(3, 1, weight=1)
        for s, t in permutations(["in", "out", "in+out"], 2):
            c = nx.average_degree_connectivity(G, source=s, target=t)
            cw = nx.average_degree_connectivity(G, source=s, target=t, weight="weight")
            assert c == cw

    def test_invalid_source(self):
        with pytest.raises(nx.NetworkXError):
            G = nx.DiGraph()
            nx.average_degree_connectivity(G, source="bogus")

    def test_invalid_target(self):
        with pytest.raises(nx.NetworkXError):
            G = nx.DiGraph()
            nx.average_degree_connectivity(G, target="bogus")

    def test_invalid_undirected_graph(self):
        # source/target keywords are rejected outright for undirected graphs.
        G = nx.Graph()
        with pytest.raises(nx.NetworkXError):
            nx.average_degree_connectivity(G, target="bogus")
        with pytest.raises(nx.NetworkXError):
            nx.average_degree_connectivity(G, source="bogus")

    def test_single_node(self):
        # TODO Is this really the intended behavior for providing a
        # single node as the argument `nodes`? Shouldn't the function
        # just return the connectivity value itself?
        G = nx.trivial_graph()
        conn = nx.average_degree_connectivity(G, nodes=0)
        assert conn == {0: 0}
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/test_correlation.py
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
np = pytest.importorskip("numpy")
|
| 4 |
+
pytest.importorskip("scipy")
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
from networkx.algorithms.assortativity.correlation import attribute_ac
|
| 9 |
+
|
| 10 |
+
from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class TestDegreeMixingCorrelation(BaseTestDegreeMixing):
    """Tests for degree assortativity and Pearson correlation coefficients,
    run against the graph fixtures from ``BaseTestDegreeMixing``."""

    def test_degree_assortativity_undirected(self):
        r = nx.degree_assortativity_coefficient(self.P4)
        np.testing.assert_almost_equal(r, -1.0 / 2, decimal=4)

    def test_degree_assortativity_node_kwargs(self):
        # Restricting `nodes` changes which edges contribute.
        G = nx.Graph()
        edges = [(0, 1), (0, 3), (1, 2), (1, 3), (1, 4), (5, 9), (9, 0)]
        G.add_edges_from(edges)
        r = nx.degree_assortativity_coefficient(G, nodes=[1, 2, 4])
        np.testing.assert_almost_equal(r, -1.0, decimal=4)

    def test_degree_assortativity_directed(self):
        r = nx.degree_assortativity_coefficient(self.D)
        np.testing.assert_almost_equal(r, -0.57735, decimal=4)

    def test_degree_assortativity_directed2(self):
        """Test degree assortativity for a directed graph where the set of
        in/out degree does not equal the total degree."""
        r = nx.degree_assortativity_coefficient(self.D2)
        np.testing.assert_almost_equal(r, 0.14852, decimal=4)

    def test_degree_assortativity_multigraph(self):
        r = nx.degree_assortativity_coefficient(self.M)
        np.testing.assert_almost_equal(r, -1.0 / 7.0, decimal=4)

    def test_degree_pearson_assortativity_undirected(self):
        # Pearson variant should agree with the matrix-based coefficient.
        r = nx.degree_pearson_correlation_coefficient(self.P4)
        np.testing.assert_almost_equal(r, -1.0 / 2, decimal=4)

    def test_degree_pearson_assortativity_directed(self):
        r = nx.degree_pearson_correlation_coefficient(self.D)
        np.testing.assert_almost_equal(r, -0.57735, decimal=4)

    def test_degree_pearson_assortativity_directed2(self):
        """Test degree assortativity with Pearson for a directed graph where
        the set of in/out degree does not equal the total degree."""
        r = nx.degree_pearson_correlation_coefficient(self.D2)
        np.testing.assert_almost_equal(r, 0.14852, decimal=4)

    def test_degree_pearson_assortativity_multigraph(self):
        r = nx.degree_pearson_correlation_coefficient(self.M)
        np.testing.assert_almost_equal(r, -1.0 / 7.0, decimal=4)

    def test_degree_assortativity_weighted(self):
        r = nx.degree_assortativity_coefficient(self.W, weight="weight")
        np.testing.assert_almost_equal(r, -0.1429, decimal=4)

    def test_degree_assortativity_double_star(self):
        r = nx.degree_assortativity_coefficient(self.DS)
        np.testing.assert_almost_equal(r, -0.9339, decimal=4)
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
class TestAttributeMixingCorrelation(BaseTestAttributeMixing):
    """Tests for attribute/numeric assortativity coefficients.

    Covers both the graph-level API (``nx.attribute_assortativity_coefficient``,
    ``nx.numeric_assortativity_coefficient``) and the low-level
    ``attribute_ac`` helper, which computes the coefficient directly from a
    (normalized or raw) mixing matrix.  Fixture graphs come from
    ``BaseTestAttributeMixing`` in ``base_test``.
    """

    def test_attribute_assortativity_undirected(self):
        # Exact rational result, so plain == comparison is intentional.
        r = nx.attribute_assortativity_coefficient(self.G, "fish")
        assert r == 6.0 / 22.0

    def test_attribute_assortativity_directed(self):
        r = nx.attribute_assortativity_coefficient(self.D, "fish")
        assert r == 1.0 / 3.0

    def test_attribute_assortativity_multigraph(self):
        # Perfectly assortative fixture: coefficient is exactly 1.
        r = nx.attribute_assortativity_coefficient(self.M, "fish")
        assert r == 1.0

    def test_attribute_assortativity_coefficient(self):
        # Mixing matrix and expected value 0.623 taken from
        # M. E. J. Newman, "Mixing patterns in networks".
        # fmt: off
        a = np.array([[0.258, 0.016, 0.035, 0.013],
                      [0.012, 0.157, 0.058, 0.019],
                      [0.013, 0.023, 0.306, 0.035],
                      [0.005, 0.007, 0.024, 0.016]])
        # fmt: on
        r = attribute_ac(a)
        np.testing.assert_almost_equal(r, 0.623, decimal=3)

    def test_attribute_assortativity_coefficient2(self):
        # fmt: off
        a = np.array([[0.18, 0.02, 0.01, 0.03],
                      [0.02, 0.20, 0.03, 0.02],
                      [0.01, 0.03, 0.16, 0.01],
                      [0.03, 0.02, 0.01, 0.22]])
        # fmt: on
        r = attribute_ac(a)
        np.testing.assert_almost_equal(r, 0.68, decimal=2)

    def test_attribute_assortativity(self):
        # Raw (unnormalized) counts are accepted too; attribute_ac
        # normalizes internally.
        a = np.array([[50, 50, 0], [50, 50, 0], [0, 0, 2]])
        r = attribute_ac(a)
        np.testing.assert_almost_equal(r, 0.029, decimal=3)

    def test_attribute_assortativity_negative(self):
        # Numeric attribute values may be negative.
        r = nx.numeric_assortativity_coefficient(self.N, "margin")
        np.testing.assert_almost_equal(r, -0.2903, decimal=4)

    def test_assortativity_node_kwargs(self):
        # Restricting to nodes=[0, 3] keeps both (0, 1) and (2, 3) edges
        # in play (each edge has one endpoint in the node set).
        G = nx.Graph()
        G.add_nodes_from([0, 1], size=2)
        G.add_nodes_from([2, 3], size=3)
        G.add_edges_from([(0, 1), (2, 3)])
        r = nx.numeric_assortativity_coefficient(G, "size", nodes=[0, 3])
        np.testing.assert_almost_equal(r, 1.0, decimal=4)

    def test_attribute_assortativity_float(self):
        # Float-valued numeric attributes.
        r = nx.numeric_assortativity_coefficient(self.F, "margin")
        np.testing.assert_almost_equal(r, -0.1429, decimal=4)

    def test_attribute_assortativity_mixed(self):
        # Mixed int/float attribute values.
        r = nx.numeric_assortativity_coefficient(self.K, "margin")
        np.testing.assert_almost_equal(r, 0.4340, decimal=4)
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/test_mixing.py
ADDED
|
@@ -0,0 +1,176 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
np = pytest.importorskip("numpy")
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
import networkx as nx
|
| 7 |
+
|
| 8 |
+
from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class TestDegreeMixingDict(BaseTestDegreeMixing):
    """Tests for ``nx.degree_mixing_dict`` on the shared degree fixtures.

    Each test compares the returned two-level ``{x_degree: {y_degree: count}}``
    mapping against a hand-computed expected dictionary.
    """

    def test_degree_mixing_dict_undirected(self):
        d = nx.degree_mixing_dict(self.P4)
        d_result = {1: {2: 2}, 2: {1: 2, 2: 2}}
        assert d == d_result

    def test_degree_mixing_dict_undirected_normalized(self):
        # normalized=True divides counts by the total, so entries sum to 1.
        d = nx.degree_mixing_dict(self.P4, normalized=True)
        d_result = {1: {2: 1.0 / 3}, 2: {1: 1.0 / 3, 2: 1.0 / 3}}
        assert d == d_result

    def test_degree_mixing_dict_directed(self):
        # Fix: removed a stray debug ``print(d)`` left over from development;
        # it added noise to captured test output and served no purpose.
        d = nx.degree_mixing_dict(self.D)
        d_result = {1: {3: 2}, 2: {1: 1, 3: 1}, 3: {}}
        assert d == d_result

    def test_degree_mixing_dict_multigraph(self):
        # Parallel edges each contribute to the counts.
        d = nx.degree_mixing_dict(self.M)
        d_result = {1: {2: 1}, 2: {1: 1, 3: 3}, 3: {2: 3}}
        assert d == d_result

    def test_degree_mixing_dict_weighted(self):
        # With weight="weight" the keys are weighted degrees (here floats).
        d = nx.degree_mixing_dict(self.W, weight="weight")
        d_result = {0.5: {1.5: 1}, 1.5: {1.5: 6, 0.5: 1}}
        assert d == d_result
|
| 38 |
+
|
| 39 |
+
class TestDegreeMixingMatrix(BaseTestDegreeMixing):
    """Tests for ``nx.degree_mixing_matrix``.

    Pattern used throughout: check the raw (normalized=False) count matrix
    against a hand-built array, then check that the default normalized
    matrix equals the raw matrix divided by its total.
    """

    def test_degree_mixing_matrix_undirected(self):
        # fmt: off
        a_result = np.array([[0, 2],
                             [2, 2]]
                            )
        # fmt: on
        a = nx.degree_mixing_matrix(self.P4, normalized=False)
        np.testing.assert_equal(a, a_result)
        a = nx.degree_mixing_matrix(self.P4)
        np.testing.assert_equal(a, a_result / a_result.sum())

    def test_degree_mixing_matrix_directed(self):
        # Directed: rows index source degree, columns target degree, so the
        # matrix need not be symmetric.
        # fmt: off
        a_result = np.array([[0, 0, 2],
                             [1, 0, 1],
                             [0, 0, 0]]
                            )
        # fmt: on
        a = nx.degree_mixing_matrix(self.D, normalized=False)
        np.testing.assert_equal(a, a_result)
        a = nx.degree_mixing_matrix(self.D)
        np.testing.assert_equal(a, a_result / a_result.sum())

    def test_degree_mixing_matrix_multigraph(self):
        # fmt: off
        a_result = np.array([[0, 1, 0],
                             [1, 0, 3],
                             [0, 3, 0]]
                            )
        # fmt: on
        a = nx.degree_mixing_matrix(self.M, normalized=False)
        np.testing.assert_equal(a, a_result)
        a = nx.degree_mixing_matrix(self.M)
        np.testing.assert_equal(a, a_result / a_result.sum())

    def test_degree_mixing_matrix_selfloop(self):
        # Self-loop fixture collapses to a single degree class.
        # fmt: off
        a_result = np.array([[2]])
        # fmt: on
        a = nx.degree_mixing_matrix(self.S, normalized=False)
        np.testing.assert_equal(a, a_result)
        a = nx.degree_mixing_matrix(self.S)
        np.testing.assert_equal(a, a_result / a_result.sum())

    def test_degree_mixing_matrix_weighted(self):
        a_result = np.array([[0.0, 1.0], [1.0, 6.0]])
        a = nx.degree_mixing_matrix(self.W, weight="weight", normalized=False)
        np.testing.assert_equal(a, a_result)
        a = nx.degree_mixing_matrix(self.W, weight="weight")
        np.testing.assert_equal(a, a_result / float(a_result.sum()))

    def test_degree_mixing_matrix_mapping(self):
        # An explicit mapping controls which matrix row/column each degree
        # value lands in (here deliberately reversed vs. sorted order).
        a_result = np.array([[6.0, 1.0], [1.0, 0.0]])
        mapping = {0.5: 1, 1.5: 0}
        a = nx.degree_mixing_matrix(
            self.W, weight="weight", normalized=False, mapping=mapping
        )
        np.testing.assert_equal(a, a_result)
|
| 99 |
+
|
| 100 |
+
class TestAttributeMixingDict(BaseTestAttributeMixing):
    """Tests for ``nx.attribute_mixing_dict`` over the attribute fixtures.

    The function returns ``{x_value: {y_value: count}}`` of attribute
    pairings over edges; expected mappings are written out literally.
    """

    def test_attribute_mixing_dict_undirected(self):
        expected = {
            "one": {"one": 2, "red": 1},
            "two": {"two": 2, "blue": 1},
            "red": {"one": 1},
            "blue": {"two": 1},
        }
        mixing = nx.attribute_mixing_dict(self.G, "fish")
        assert mixing == expected

    def test_attribute_mixing_dict_directed(self):
        # Directed counting: sink-only values appear with empty inner dicts.
        expected = {
            "one": {"one": 1, "red": 1},
            "two": {"two": 1, "blue": 1},
            "red": {},
            "blue": {},
        }
        mixing = nx.attribute_mixing_dict(self.D, "fish")
        assert mixing == expected

    def test_attribute_mixing_dict_multigraph(self):
        expected = {"one": {"one": 4}, "two": {"two": 2}}
        mixing = nx.attribute_mixing_dict(self.M, "fish")
        assert mixing == expected
|
| 126 |
+
|
| 127 |
+
class TestAttributeMixingMatrix(BaseTestAttributeMixing):
    """Tests for ``nx.attribute_mixing_matrix``.

    Each test supplies an explicit ``mapping`` from attribute value to
    matrix index, checks the raw count matrix, then checks the normalized
    matrix equals raw / raw.sum().
    """

    def test_attribute_mixing_matrix_undirected(self):
        mapping = {"one": 0, "two": 1, "red": 2, "blue": 3}
        a_result = np.array([[2, 0, 1, 0], [0, 2, 0, 1], [1, 0, 0, 0], [0, 1, 0, 0]])
        a = nx.attribute_mixing_matrix(
            self.G, "fish", mapping=mapping, normalized=False
        )
        np.testing.assert_equal(a, a_result)
        a = nx.attribute_mixing_matrix(self.G, "fish", mapping=mapping)
        np.testing.assert_equal(a, a_result / a_result.sum())

    def test_attribute_mixing_matrix_directed(self):
        # Directed: matrix is not symmetrized, so sink-only rows are zero.
        mapping = {"one": 0, "two": 1, "red": 2, "blue": 3}
        a_result = np.array([[1, 0, 1, 0], [0, 1, 0, 1], [0, 0, 0, 0], [0, 0, 0, 0]])
        a = nx.attribute_mixing_matrix(
            self.D, "fish", mapping=mapping, normalized=False
        )
        np.testing.assert_equal(a, a_result)
        a = nx.attribute_mixing_matrix(self.D, "fish", mapping=mapping)
        np.testing.assert_equal(a, a_result / a_result.sum())

    def test_attribute_mixing_matrix_multigraph(self):
        mapping = {"one": 0, "two": 1, "red": 2, "blue": 3}
        a_result = np.array([[4, 0, 0, 0], [0, 2, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]])
        a = nx.attribute_mixing_matrix(
            self.M, "fish", mapping=mapping, normalized=False
        )
        np.testing.assert_equal(a, a_result)
        a = nx.attribute_mixing_matrix(self.M, "fish", mapping=mapping)
        np.testing.assert_equal(a, a_result / a_result.sum())

    def test_attribute_mixing_matrix_negative(self):
        # Negative attribute values are valid mapping keys.
        mapping = {-2: 0, -3: 1, -4: 2}
        a_result = np.array([[4.0, 1.0, 1.0], [1.0, 0.0, 0.0], [1.0, 0.0, 0.0]])
        a = nx.attribute_mixing_matrix(
            self.N, "margin", mapping=mapping, normalized=False
        )
        np.testing.assert_equal(a, a_result)
        a = nx.attribute_mixing_matrix(self.N, "margin", mapping=mapping)
        np.testing.assert_equal(a, a_result / float(a_result.sum()))

    def test_attribute_mixing_matrix_float(self):
        # Float attribute values, with a mapping that reverses sorted order.
        mapping = {0.5: 1, 1.5: 0}
        a_result = np.array([[6.0, 1.0], [1.0, 0.0]])
        a = nx.attribute_mixing_matrix(
            self.F, "margin", mapping=mapping, normalized=False
        )
        np.testing.assert_equal(a, a_result)
        a = nx.attribute_mixing_matrix(self.F, "margin", mapping=mapping)
        np.testing.assert_equal(a, a_result / a_result.sum())
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/test_neighbor_degree.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class TestAverageNeighbor:
    """Tests for ``nx.average_neighbor_degree``.

    Self-contained (builds its own graphs, no shared fixtures).  The
    directed tests exercise the ``source``/``target`` degree-direction
    options ("in", "out", "in+out") in both positional and keyword form.
    """

    def test_degree_p4(self):
        G = nx.path_graph(4)
        answer = {0: 2, 1: 1.5, 2: 1.5, 3: 2}
        nd = nx.average_neighbor_degree(G)
        assert nd == answer

        # to_directed() keeps both edge directions, so results match the
        # undirected graph.
        D = G.to_directed()
        nd = nx.average_neighbor_degree(D)
        assert nd == answer

        # DiGraph built from the undirected edge list keeps only one
        # direction per edge, so the answers differ.
        D = nx.DiGraph(G.edges(data=True))
        nd = nx.average_neighbor_degree(D)
        assert nd == {0: 1, 1: 1, 2: 0, 3: 0}
        nd = nx.average_neighbor_degree(D, "in", "out")
        assert nd == {0: 0, 1: 1, 2: 1, 3: 1}
        nd = nx.average_neighbor_degree(D, "out", "in")
        assert nd == {0: 1, 1: 1, 2: 1, 3: 0}
        nd = nx.average_neighbor_degree(D, "in", "in")
        assert nd == {0: 0, 1: 0, 2: 1, 3: 1}

    def test_degree_p4_weighted(self):
        G = nx.path_graph(4)
        G[1][2]["weight"] = 4
        answer = {0: 2, 1: 1.8, 2: 1.8, 3: 2}
        nd = nx.average_neighbor_degree(G, weight="weight")
        assert nd == answer

        D = G.to_directed()
        nd = nx.average_neighbor_degree(D, weight="weight")
        assert nd == answer

        D = nx.DiGraph(G.edges(data=True))
        # NOTE(review): leftover debug print; harmless under pytest's
        # capture but could be removed.
        print(D.edges(data=True))
        nd = nx.average_neighbor_degree(D, weight="weight")
        assert nd == {0: 1, 1: 1, 2: 0, 3: 0}
        nd = nx.average_neighbor_degree(D, "out", "out", weight="weight")
        assert nd == {0: 1, 1: 1, 2: 0, 3: 0}
        nd = nx.average_neighbor_degree(D, "in", "in", weight="weight")
        assert nd == {0: 0, 1: 0, 2: 1, 3: 1}
        nd = nx.average_neighbor_degree(D, "in", "out", weight="weight")
        assert nd == {0: 0, 1: 1, 2: 1, 3: 1}
        nd = nx.average_neighbor_degree(D, "out", "in", weight="weight")
        assert nd == {0: 1, 1: 1, 2: 1, 3: 0}
        nd = nx.average_neighbor_degree(D, source="in+out", weight="weight")
        assert nd == {0: 1.0, 1: 1.0, 2: 0.8, 3: 1.0}
        nd = nx.average_neighbor_degree(D, target="in+out", weight="weight")
        assert nd == {0: 2.0, 1: 2.0, 2: 1.0, 3: 0.0}

        D = G.to_directed()
        nd = nx.average_neighbor_degree(D, weight="weight")
        assert nd == answer
        nd = nx.average_neighbor_degree(D, source="out", target="out", weight="weight")
        assert nd == answer

        D = G.to_directed()
        nd = nx.average_neighbor_degree(D, source="in", target="in", weight="weight")
        assert nd == answer

    def test_degree_k4(self):
        # Complete graph: every neighbor has degree 3.
        G = nx.complete_graph(4)
        answer = {0: 3, 1: 3, 2: 3, 3: 3}
        nd = nx.average_neighbor_degree(G)
        assert nd == answer

        D = G.to_directed()
        nd = nx.average_neighbor_degree(D)
        assert nd == answer

        D = G.to_directed()
        nd = nx.average_neighbor_degree(D)
        assert nd == answer

        D = G.to_directed()
        nd = nx.average_neighbor_degree(D, source="in", target="in")
        assert nd == answer

    def test_degree_k4_nodes(self):
        # The `nodes` kwarg restricts the returned dict to those nodes.
        G = nx.complete_graph(4)
        answer = {1: 3.0, 2: 3.0}
        nd = nx.average_neighbor_degree(G, nodes=[1, 2])
        assert nd == answer

    def test_degree_barrat(self):
        # Weighted average from Barrat et al.: the heavy (0, 5) edge pulls
        # node 5's weighted neighbor-degree average up.
        G = nx.star_graph(5)
        G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)])
        G[0][5]["weight"] = 5
        nd = nx.average_neighbor_degree(G)[5]
        assert nd == 1.8
        nd = nx.average_neighbor_degree(G, weight="weight")[5]
        assert nd == pytest.approx(3.222222, abs=1e-5)

    def test_error_invalid_source_target(self):
        # Invalid source/target strings raise NetworkXError on both
        # undirected and directed graphs.
        G = nx.path_graph(4)
        with pytest.raises(nx.NetworkXError):
            nx.average_neighbor_degree(G, "error")
        with pytest.raises(nx.NetworkXError):
            nx.average_neighbor_degree(G, "in", "error")
        G = G.to_directed()
        with pytest.raises(nx.NetworkXError):
            nx.average_neighbor_degree(G, "error")
        with pytest.raises(nx.NetworkXError):
            nx.average_neighbor_degree(G, "in", "error")
|
.venv/lib/python3.11/site-packages/networkx/algorithms/assortativity/tests/test_pairs.py
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import networkx as nx
|
| 2 |
+
|
| 3 |
+
from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class TestAttributeMixingXY(BaseTestAttributeMixing):
    """Tests for ``nx.node_attribute_xy``.

    The generator yields one (x_attr, y_attr) pair per edge endpoint
    ordering; results are sorted before comparison because generator
    order is not part of the contract.
    """

    def test_node_attribute_xy_undirected(self):
        # Undirected: each edge contributes both (x, y) and (y, x).
        attrxy = sorted(nx.node_attribute_xy(self.G, "fish"))
        attrxy_result = sorted(
            [
                ("one", "one"),
                ("one", "one"),
                ("two", "two"),
                ("two", "two"),
                ("one", "red"),
                ("red", "one"),
                ("blue", "two"),
                ("two", "blue"),
            ]
        )
        assert attrxy == attrxy_result

    def test_node_attribute_xy_undirected_nodes(self):
        # Node restriction that excludes all edges yields nothing.
        attrxy = sorted(nx.node_attribute_xy(self.G, "fish", nodes=["one", "yellow"]))
        attrxy_result = sorted([])
        assert attrxy == attrxy_result

    def test_node_attribute_xy_directed(self):
        # Directed: each edge contributes a single (source, target) pair.
        attrxy = sorted(nx.node_attribute_xy(self.D, "fish"))
        attrxy_result = sorted(
            [("one", "one"), ("two", "two"), ("one", "red"), ("two", "blue")]
        )
        assert attrxy == attrxy_result

    def test_node_attribute_xy_multigraph(self):
        # Parallel edges contribute one pair each.
        attrxy = sorted(nx.node_attribute_xy(self.M, "fish"))
        attrxy_result = [
            ("one", "one"),
            ("one", "one"),
            ("one", "one"),
            ("one", "one"),
            ("two", "two"),
            ("two", "two"),
        ]
        assert attrxy == attrxy_result

    def test_node_attribute_xy_selfloop(self):
        # A self-loop contributes a single pair, not two.
        attrxy = sorted(nx.node_attribute_xy(self.S, "fish"))
        attrxy_result = [("one", "one"), ("two", "two")]
        assert attrxy == attrxy_result
|
| 52 |
+
|
| 53 |
+
class TestDegreeMixingXY(BaseTestDegreeMixing):
    """Tests for ``nx.node_degree_xy``.

    Yields (x_degree, y_degree) pairs per edge; pairs are sorted before
    comparison since generator order is unspecified.
    """

    def test_node_degree_xy_undirected(self):
        xy = sorted(nx.node_degree_xy(self.P4))
        xy_result = sorted([(1, 2), (2, 1), (2, 2), (2, 2), (1, 2), (2, 1)])
        assert xy == xy_result

    def test_node_degree_xy_undirected_nodes(self):
        # `nodes` may include values absent from the graph (-1 here);
        # they are simply ignored.
        xy = sorted(nx.node_degree_xy(self.P4, nodes=[0, 1, -1]))
        xy_result = sorted([(1, 2), (2, 1)])
        assert xy == xy_result

    def test_node_degree_xy_directed(self):
        xy = sorted(nx.node_degree_xy(self.D))
        xy_result = sorted([(2, 1), (2, 3), (1, 3), (1, 3)])
        assert xy == xy_result

    def test_node_degree_xy_multigraph(self):
        xy = sorted(nx.node_degree_xy(self.M))
        xy_result = sorted(
            [(2, 3), (2, 3), (3, 2), (3, 2), (2, 3), (3, 2), (1, 2), (2, 1)]
        )
        assert xy == xy_result

    def test_node_degree_xy_selfloop(self):
        xy = sorted(nx.node_degree_xy(self.S))
        xy_result = sorted([(2, 2), (2, 2)])
        assert xy == xy_result

    def test_node_degree_xy_weighted(self):
        # With weight="weight" the "degrees" are weighted degrees
        # (node 2 has weighted degree 7 + 10 = 17).
        G = nx.Graph()
        G.add_edge(1, 2, weight=7)
        G.add_edge(2, 3, weight=10)
        xy = sorted(nx.node_degree_xy(G, weight="weight"))
        xy_result = sorted([(7, 17), (17, 10), (17, 7), (10, 17)])
        assert xy == xy_result
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/betweenness.py
ADDED
|
@@ -0,0 +1,436 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Betweenness centrality measures."""
|
| 2 |
+
|
| 3 |
+
from collections import deque
|
| 4 |
+
from heapq import heappop, heappush
|
| 5 |
+
from itertools import count
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
from networkx.algorithms.shortest_paths.weighted import _weight_function
|
| 9 |
+
from networkx.utils import py_random_state
|
| 10 |
+
from networkx.utils.decorators import not_implemented_for
|
| 11 |
+
|
| 12 |
+
__all__ = ["betweenness_centrality", "edge_betweenness_centrality"]
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
@py_random_state(5)
@nx._dispatchable(edge_attrs="weight")
def betweenness_centrality(
    G, k=None, normalized=True, weight=None, endpoints=False, seed=None
):
    r"""Compute the shortest-path betweenness centrality for nodes.

    Betweenness centrality of a node $v$ is the sum of the
    fraction of all-pairs shortest paths that pass through $v$

    .. math::

       c_B(v) =\sum_{s,t \in V} \frac{\sigma(s, t|v)}{\sigma(s, t)}

    where $V$ is the set of nodes, $\sigma(s, t)$ is the number of
    shortest $(s, t)$-paths, and $\sigma(s, t|v)$ is the number of
    those paths passing through some node $v$ other than $s, t$.
    If $s = t$, $\sigma(s, t) = 1$, and if $v \in {s, t}$,
    $\sigma(s, t|v) = 0$ [2]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph.

    k : int, optional (default=None)
      If k is not None use k node samples to estimate betweenness.
      The value of k <= n where n is the number of nodes in the graph.
      Higher values give better approximation.

    normalized : bool, optional
      If True the betweenness values are normalized by `2/((n-1)(n-2))`
      for graphs, and `1/((n-1)(n-2))` for directed graphs where `n`
      is the number of nodes in G.

    weight : None or string, optional (default=None)
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.
      Weights are used to calculate weighted shortest paths, so they are
      interpreted as distances.

    endpoints : bool, optional
      If True include the endpoints in the shortest path counts.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
        Note that this is only used if k is not None.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    edge_betweenness_centrality
    load_centrality

    Notes
    -----
    The algorithm is from Ulrik Brandes [1]_.
    See [4]_ for the original first published version and [2]_ for details on
    algorithms for variations and related metrics.

    For approximate betweenness calculations set k=#samples to use
    k nodes ("pivots") to estimate the betweenness values. For an estimate
    of the number of pivots needed see [3]_.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length
    paths between pairs of nodes.

    The total number of paths between source and target is counted
    differently for directed and undirected graphs. Directed paths
    are easy to count. Undirected paths are tricky: should a path
    from "u" to "v" count as 1 undirected path or as 2 directed paths?

    For betweenness_centrality we report the number of undirected
    paths when G is undirected.

    For betweenness_centrality_subset the reporting is different.
    If the source and target subsets are the same, then we want
    to count undirected paths. But if the source and target subsets
    differ -- for example, if sources is {0} and targets is {1},
    then we are only counting the paths in one direction. They are
    undirected paths but we are counting them in a directed way.
    To count them as undirected paths, each should count as half a path.

    This algorithm is not guaranteed to be correct if edge weights
    are floating point numbers. As a workaround you can use integer
    numbers by multiplying the relevant edge attributes by a convenient
    constant factor (eg 100) and converting to integers.

    References
    ----------
    .. [1] Ulrik Brandes:
       A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       https://doi.org/10.1080/0022250X.2001.9990249
    .. [2] Ulrik Brandes:
       On Variants of Shortest-Path Betweenness
       Centrality and their Generic Computation.
       Social Networks 30(2):136-145, 2008.
       https://doi.org/10.1016/j.socnet.2007.11.001
    .. [3] Ulrik Brandes and Christian Pich:
       Centrality Estimation in Large Networks.
       International Journal of Bifurcation and Chaos 17(7):2303-2318, 2007.
       https://dx.doi.org/10.1142/S0218127407018403
    .. [4] Linton C. Freeman:
       A set of measures of centrality based on betweenness.
       Sociometry 40: 35–41, 1977
       https://doi.org/10.2307/3033543
    """
    betweenness = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G
    if k is None:
        nodes = G
    else:
        # Approximation: sample k "pivot" source nodes (see [3]_ in the
        # docstring); `seed` is a random_state provided by @py_random_state.
        nodes = seed.sample(list(G.nodes()), k)
    for s in nodes:
        # single source shortest paths
        if weight is None:  # use BFS
            S, P, sigma, _ = _single_source_shortest_path_basic(G, s)
        else:  # use Dijkstra's algorithm
            S, P, sigma, _ = _single_source_dijkstra_path_basic(G, s, weight)
        # accumulation
        # S: nodes in order of non-decreasing distance from s,
        # P: predecessor lists, sigma: shortest-path counts.
        if endpoints:
            betweenness, _ = _accumulate_endpoints(betweenness, S, P, sigma, s)
        else:
            betweenness, _ = _accumulate_basic(betweenness, S, P, sigma, s)
    # rescaling
    # Applies the normalization described in the docstring and corrects
    # for sampling when k is not None.
    betweenness = _rescale(
        betweenness,
        len(G),
        normalized=normalized,
        directed=G.is_directed(),
        k=k,
        endpoints=endpoints,
    )
    return betweenness
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
@py_random_state(4)
@nx._dispatchable(edge_attrs="weight")
def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, seed=None):
    r"""Compute betweenness centrality for edges.

    Betweenness centrality of an edge $e$ is the sum of the
    fraction of all-pairs shortest paths that pass through $e$

    .. math::

       c_B(e) =\sum_{s,t \in V} \frac{\sigma(s, t|e)}{\sigma(s, t)}

    where $V$ is the set of nodes, $\sigma(s, t)$ is the number of
    shortest $(s, t)$-paths, and $\sigma(s, t|e)$ is the number of
    those paths passing through edge $e$ [2]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph.

    k : int, optional (default=None)
      If k is not None use k node samples to estimate betweenness.
      The value of k <= n where n is the number of nodes in the graph.
      Higher values give better approximation.

    normalized : bool, optional
      If True the betweenness values are normalized by $2/(n(n-1))$
      for graphs, and $1/(n(n-1))$ for directed graphs where $n$
      is the number of nodes in G.

    weight : None or string, optional (default=None)
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.
      Weights are used to calculate weighted shortest paths, so they are
      interpreted as distances.

    seed : integer, random_state, or None (default)
      Indicator of random number generation state.
      See :ref:`Randomness<randomness>`.
      Note that this is only used if k is not None.

    Returns
    -------
    edges : dictionary
       Dictionary of edges with betweenness centrality as the value.

    See Also
    --------
    betweenness_centrality
    edge_load

    Notes
    -----
    The algorithm is from Ulrik Brandes [1]_.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length
    paths between pairs of nodes.

    References
    ----------
    .. [1] A Faster Algorithm for Betweenness Centrality. Ulrik Brandes,
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       https://doi.org/10.1080/0022250X.2001.9990249
    .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
       Centrality and their Generic Computation.
       Social Networks 30(2):136-145, 2008.
       https://doi.org/10.1016/j.socnet.2007.11.001
    """
    betweenness = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G
    # b[e]=0 for e in G.edges(); node and edge keys share one dict during
    # accumulation, the node entries are deleted before rescaling below.
    betweenness.update(dict.fromkeys(G.edges(), 0.0))
    if k is None:
        nodes = G
    else:
        # Approximation: accumulate contributions from k sampled sources only.
        nodes = seed.sample(list(G.nodes()), k)
    for s in nodes:
        # single source shortest paths
        if weight is None:  # use BFS
            S, P, sigma, _ = _single_source_shortest_path_basic(G, s)
        else:  # use Dijkstra's algorithm
            S, P, sigma, _ = _single_source_dijkstra_path_basic(G, s, weight)
        # accumulation
        betweenness = _accumulate_edges(betweenness, S, P, sigma, s)
    # rescaling
    for n in G:  # remove nodes to only return edges
        del betweenness[n]
    betweenness = _rescale_e(
        betweenness, len(G), normalized=normalized, directed=G.is_directed()
    )
    if G.is_multigraph():
        # Split each (u, v) value among the keyed parallel edges of equal weight.
        betweenness = _add_edge_keys(G, betweenness, weight=weight)
    return betweenness
|
| 251 |
+
|
| 252 |
+
|
| 253 |
+
# helpers for betweenness centrality
|
| 254 |
+
|
| 255 |
+
|
| 256 |
+
def _single_source_shortest_path_basic(G, s):
|
| 257 |
+
S = []
|
| 258 |
+
P = {}
|
| 259 |
+
for v in G:
|
| 260 |
+
P[v] = []
|
| 261 |
+
sigma = dict.fromkeys(G, 0.0) # sigma[v]=0 for v in G
|
| 262 |
+
D = {}
|
| 263 |
+
sigma[s] = 1.0
|
| 264 |
+
D[s] = 0
|
| 265 |
+
Q = deque([s])
|
| 266 |
+
while Q: # use BFS to find shortest paths
|
| 267 |
+
v = Q.popleft()
|
| 268 |
+
S.append(v)
|
| 269 |
+
Dv = D[v]
|
| 270 |
+
sigmav = sigma[v]
|
| 271 |
+
for w in G[v]:
|
| 272 |
+
if w not in D:
|
| 273 |
+
Q.append(w)
|
| 274 |
+
D[w] = Dv + 1
|
| 275 |
+
if D[w] == Dv + 1: # this is a shortest path, count paths
|
| 276 |
+
sigma[w] += sigmav
|
| 277 |
+
P[w].append(v) # predecessors
|
| 278 |
+
return S, P, sigma, D
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
def _single_source_dijkstra_path_basic(G, s, weight):
    # Dijkstra single-source shortest paths that also counts shortest paths.
    # Returns (S, P, sigma, D): S = nodes in non-decreasing distance order,
    # P[v] = shortest-path predecessors of v, sigma[v] = number of shortest
    # s-v paths, D[v] = weighted distance from s.
    weight = _weight_function(G, weight)
    # modified from Eppstein
    S = []
    P = {}
    for v in G:
        P[v] = []
    sigma = dict.fromkeys(G, 0.0)  # sigma[v]=0 for v in G
    D = {}  # final (settled) distances; absence from D means "not settled yet"
    sigma[s] = 1.0
    push = heappush
    pop = heappop
    seen = {s: 0}  # best tentative distance found so far for each node
    c = count()  # tie-breaker so heap never compares node objects directly
    Q = []  # use Q as heap with (distance,node id) tuples
    push(Q, (0, next(c), s, s))
    while Q:
        (dist, _, pred, v) = pop(Q)
        if v in D:
            continue  # already searched this node.
        # v is settled now; fold in the path count carried by the popped entry.
        sigma[v] += sigma[pred]  # count paths
        S.append(v)
        D[v] = dist
        for w, edgedata in G[v].items():
            vw_dist = dist + weight(v, w, edgedata)
            if w not in D and (w not in seen or vw_dist < seen[w]):
                # strictly better path: reset w's path count and predecessors
                seen[w] = vw_dist
                push(Q, (vw_dist, next(c), v, w))
                sigma[w] = 0.0
                P[w] = [v]
            elif vw_dist == seen[w]:  # handle equal paths
                sigma[w] += sigma[v]
                P[w].append(v)
    return S, P, sigma, D
|
| 315 |
+
|
| 316 |
+
|
| 317 |
+
def _accumulate_basic(betweenness, S, P, sigma, s):
|
| 318 |
+
delta = dict.fromkeys(S, 0)
|
| 319 |
+
while S:
|
| 320 |
+
w = S.pop()
|
| 321 |
+
coeff = (1 + delta[w]) / sigma[w]
|
| 322 |
+
for v in P[w]:
|
| 323 |
+
delta[v] += sigma[v] * coeff
|
| 324 |
+
if w != s:
|
| 325 |
+
betweenness[w] += delta[w]
|
| 326 |
+
return betweenness, delta
|
| 327 |
+
|
| 328 |
+
|
| 329 |
+
def _accumulate_endpoints(betweenness, S, P, sigma, s):
|
| 330 |
+
betweenness[s] += len(S) - 1
|
| 331 |
+
delta = dict.fromkeys(S, 0)
|
| 332 |
+
while S:
|
| 333 |
+
w = S.pop()
|
| 334 |
+
coeff = (1 + delta[w]) / sigma[w]
|
| 335 |
+
for v in P[w]:
|
| 336 |
+
delta[v] += sigma[v] * coeff
|
| 337 |
+
if w != s:
|
| 338 |
+
betweenness[w] += delta[w] + 1
|
| 339 |
+
return betweenness, delta
|
| 340 |
+
|
| 341 |
+
|
| 342 |
+
def _accumulate_edges(betweenness, S, P, sigma, s):
|
| 343 |
+
delta = dict.fromkeys(S, 0)
|
| 344 |
+
while S:
|
| 345 |
+
w = S.pop()
|
| 346 |
+
coeff = (1 + delta[w]) / sigma[w]
|
| 347 |
+
for v in P[w]:
|
| 348 |
+
c = sigma[v] * coeff
|
| 349 |
+
if (v, w) not in betweenness:
|
| 350 |
+
betweenness[(w, v)] += c
|
| 351 |
+
else:
|
| 352 |
+
betweenness[(v, w)] += c
|
| 353 |
+
delta[v] += c
|
| 354 |
+
if w != s:
|
| 355 |
+
betweenness[w] += delta[w]
|
| 356 |
+
return betweenness
|
| 357 |
+
|
| 358 |
+
|
| 359 |
+
def _rescale(betweenness, n, normalized, directed=False, k=None, endpoints=False):
|
| 360 |
+
if normalized:
|
| 361 |
+
if endpoints:
|
| 362 |
+
if n < 2:
|
| 363 |
+
scale = None # no normalization
|
| 364 |
+
else:
|
| 365 |
+
# Scale factor should include endpoint nodes
|
| 366 |
+
scale = 1 / (n * (n - 1))
|
| 367 |
+
elif n <= 2:
|
| 368 |
+
scale = None # no normalization b=0 for all nodes
|
| 369 |
+
else:
|
| 370 |
+
scale = 1 / ((n - 1) * (n - 2))
|
| 371 |
+
else: # rescale by 2 for undirected graphs
|
| 372 |
+
if not directed:
|
| 373 |
+
scale = 0.5
|
| 374 |
+
else:
|
| 375 |
+
scale = None
|
| 376 |
+
if scale is not None:
|
| 377 |
+
if k is not None:
|
| 378 |
+
scale = scale * n / k
|
| 379 |
+
for v in betweenness:
|
| 380 |
+
betweenness[v] *= scale
|
| 381 |
+
return betweenness
|
| 382 |
+
|
| 383 |
+
|
| 384 |
+
def _rescale_e(betweenness, n, normalized, directed=False, k=None):
|
| 385 |
+
if normalized:
|
| 386 |
+
if n <= 1:
|
| 387 |
+
scale = None # no normalization b=0 for all nodes
|
| 388 |
+
else:
|
| 389 |
+
scale = 1 / (n * (n - 1))
|
| 390 |
+
else: # rescale by 2 for undirected graphs
|
| 391 |
+
if not directed:
|
| 392 |
+
scale = 0.5
|
| 393 |
+
else:
|
| 394 |
+
scale = None
|
| 395 |
+
if scale is not None:
|
| 396 |
+
if k is not None:
|
| 397 |
+
scale = scale * n / k
|
| 398 |
+
for v in betweenness:
|
| 399 |
+
betweenness[v] *= scale
|
| 400 |
+
return betweenness
|
| 401 |
+
|
| 402 |
+
|
| 403 |
+
@not_implemented_for("graph")
|
| 404 |
+
def _add_edge_keys(G, betweenness, weight=None):
|
| 405 |
+
r"""Adds the corrected betweenness centrality (BC) values for multigraphs.
|
| 406 |
+
|
| 407 |
+
Parameters
|
| 408 |
+
----------
|
| 409 |
+
G : NetworkX graph.
|
| 410 |
+
|
| 411 |
+
betweenness : dictionary
|
| 412 |
+
Dictionary mapping adjacent node tuples to betweenness centrality values.
|
| 413 |
+
|
| 414 |
+
weight : string or function
|
| 415 |
+
See `_weight_function` for details. Defaults to `None`.
|
| 416 |
+
|
| 417 |
+
Returns
|
| 418 |
+
-------
|
| 419 |
+
edges : dictionary
|
| 420 |
+
The parameter `betweenness` including edges with keys and their
|
| 421 |
+
betweenness centrality values.
|
| 422 |
+
|
| 423 |
+
The BC value is divided among edges of equal weight.
|
| 424 |
+
"""
|
| 425 |
+
_weight = _weight_function(G, weight)
|
| 426 |
+
|
| 427 |
+
edge_bc = dict.fromkeys(G.edges, 0.0)
|
| 428 |
+
for u, v in betweenness:
|
| 429 |
+
d = G[u][v]
|
| 430 |
+
wt = _weight(u, v, d)
|
| 431 |
+
keys = [k for k in d if _weight(u, v, {k: d[k]}) == wt]
|
| 432 |
+
bc = betweenness[(u, v)] / len(keys)
|
| 433 |
+
for k in keys:
|
| 434 |
+
edge_bc[(u, v, k)] = bc
|
| 435 |
+
|
| 436 |
+
return edge_bc
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/closeness.py
ADDED
|
@@ -0,0 +1,282 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Closeness centrality measures.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import functools
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
from networkx.exception import NetworkXError
|
| 9 |
+
from networkx.utils.decorators import not_implemented_for
|
| 10 |
+
|
| 11 |
+
__all__ = ["closeness_centrality", "incremental_closeness_centrality"]
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@nx._dispatchable(edge_attrs="distance")
|
| 15 |
+
def closeness_centrality(G, u=None, distance=None, wf_improved=True):
|
| 16 |
+
r"""Compute closeness centrality for nodes.
|
| 17 |
+
|
| 18 |
+
Closeness centrality [1]_ of a node `u` is the reciprocal of the
|
| 19 |
+
average shortest path distance to `u` over all `n-1` reachable nodes.
|
| 20 |
+
|
| 21 |
+
.. math::
|
| 22 |
+
|
| 23 |
+
C(u) = \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},
|
| 24 |
+
|
| 25 |
+
where `d(v, u)` is the shortest-path distance between `v` and `u`,
|
| 26 |
+
and `n-1` is the number of nodes reachable from `u`. Notice that the
|
| 27 |
+
closeness distance function computes the incoming distance to `u`
|
| 28 |
+
for directed graphs. To use outward distance, act on `G.reverse()`.
|
| 29 |
+
|
| 30 |
+
Notice that higher values of closeness indicate higher centrality.
|
| 31 |
+
|
| 32 |
+
Wasserman and Faust propose an improved formula for graphs with
|
| 33 |
+
more than one connected component. The result is "a ratio of the
|
| 34 |
+
fraction of actors in the group who are reachable, to the average
|
| 35 |
+
distance" from the reachable actors [2]_. You might think this
|
| 36 |
+
scale factor is inverted but it is not. As is, nodes from small
|
| 37 |
+
components receive a smaller closeness value. Letting `N` denote
|
| 38 |
+
the number of nodes in the graph,
|
| 39 |
+
|
| 40 |
+
.. math::
|
| 41 |
+
|
| 42 |
+
C_{WF}(u) = \frac{n-1}{N-1} \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},
|
| 43 |
+
|
| 44 |
+
Parameters
|
| 45 |
+
----------
|
| 46 |
+
G : graph
|
| 47 |
+
A NetworkX graph
|
| 48 |
+
|
| 49 |
+
u : node, optional
|
| 50 |
+
Return only the value for node u
|
| 51 |
+
|
| 52 |
+
distance : edge attribute key, optional (default=None)
|
| 53 |
+
Use the specified edge attribute as the edge distance in shortest
|
| 54 |
+
path calculations. If `None` (the default) all edges have a distance of 1.
|
| 55 |
+
Absent edge attributes are assigned a distance of 1. Note that no check
|
| 56 |
+
is performed to ensure that edges have the provided attribute.
|
| 57 |
+
|
| 58 |
+
wf_improved : bool, optional (default=True)
|
| 59 |
+
If True, scale by the fraction of nodes reachable. This gives the
|
| 60 |
+
Wasserman and Faust improved formula. For single component graphs
|
| 61 |
+
it is the same as the original formula.
|
| 62 |
+
|
| 63 |
+
Returns
|
| 64 |
+
-------
|
| 65 |
+
nodes : dictionary
|
| 66 |
+
Dictionary of nodes with closeness centrality as the value.
|
| 67 |
+
|
| 68 |
+
Examples
|
| 69 |
+
--------
|
| 70 |
+
>>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
|
| 71 |
+
>>> nx.closeness_centrality(G)
|
| 72 |
+
{0: 1.0, 1: 1.0, 2: 0.75, 3: 0.75}
|
| 73 |
+
|
| 74 |
+
See Also
|
| 75 |
+
--------
|
| 76 |
+
betweenness_centrality, load_centrality, eigenvector_centrality,
|
| 77 |
+
degree_centrality, incremental_closeness_centrality
|
| 78 |
+
|
| 79 |
+
Notes
|
| 80 |
+
-----
|
| 81 |
+
The closeness centrality is normalized to `(n-1)/(|G|-1)` where
|
| 82 |
+
`n` is the number of nodes in the connected part of graph
|
| 83 |
+
containing the node. If the graph is not completely connected,
|
| 84 |
+
this algorithm computes the closeness centrality for each
|
| 85 |
+
connected part separately scaled by that parts size.
|
| 86 |
+
|
| 87 |
+
If the 'distance' keyword is set to an edge attribute key then the
|
| 88 |
+
shortest-path length will be computed using Dijkstra's algorithm with
|
| 89 |
+
that edge attribute as the edge weight.
|
| 90 |
+
|
| 91 |
+
The closeness centrality uses *inward* distance to a node, not outward.
|
| 92 |
+
If you want to use outword distances apply the function to `G.reverse()`
|
| 93 |
+
|
| 94 |
+
In NetworkX 2.2 and earlier a bug caused Dijkstra's algorithm to use the
|
| 95 |
+
outward distance rather than the inward distance. If you use a 'distance'
|
| 96 |
+
keyword and a DiGraph, your results will change between v2.2 and v2.3.
|
| 97 |
+
|
| 98 |
+
References
|
| 99 |
+
----------
|
| 100 |
+
.. [1] Linton C. Freeman: Centrality in networks: I.
|
| 101 |
+
Conceptual clarification. Social Networks 1:215-239, 1979.
|
| 102 |
+
https://doi.org/10.1016/0378-8733(78)90021-7
|
| 103 |
+
.. [2] pg. 201 of Wasserman, S. and Faust, K.,
|
| 104 |
+
Social Network Analysis: Methods and Applications, 1994,
|
| 105 |
+
Cambridge University Press.
|
| 106 |
+
"""
|
| 107 |
+
if G.is_directed():
|
| 108 |
+
G = G.reverse() # create a reversed graph view
|
| 109 |
+
|
| 110 |
+
if distance is not None:
|
| 111 |
+
# use Dijkstra's algorithm with specified attribute as edge weight
|
| 112 |
+
path_length = functools.partial(
|
| 113 |
+
nx.single_source_dijkstra_path_length, weight=distance
|
| 114 |
+
)
|
| 115 |
+
else:
|
| 116 |
+
path_length = nx.single_source_shortest_path_length
|
| 117 |
+
|
| 118 |
+
if u is None:
|
| 119 |
+
nodes = G.nodes
|
| 120 |
+
else:
|
| 121 |
+
nodes = [u]
|
| 122 |
+
closeness_dict = {}
|
| 123 |
+
for n in nodes:
|
| 124 |
+
sp = path_length(G, n)
|
| 125 |
+
totsp = sum(sp.values())
|
| 126 |
+
len_G = len(G)
|
| 127 |
+
_closeness_centrality = 0.0
|
| 128 |
+
if totsp > 0.0 and len_G > 1:
|
| 129 |
+
_closeness_centrality = (len(sp) - 1.0) / totsp
|
| 130 |
+
# normalize to number of nodes-1 in connected part
|
| 131 |
+
if wf_improved:
|
| 132 |
+
s = (len(sp) - 1.0) / (len_G - 1)
|
| 133 |
+
_closeness_centrality *= s
|
| 134 |
+
closeness_dict[n] = _closeness_centrality
|
| 135 |
+
if u is not None:
|
| 136 |
+
return closeness_dict[u]
|
| 137 |
+
return closeness_dict
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
@not_implemented_for("directed")
|
| 141 |
+
@nx._dispatchable(mutates_input=True)
|
| 142 |
+
def incremental_closeness_centrality(
|
| 143 |
+
G, edge, prev_cc=None, insertion=True, wf_improved=True
|
| 144 |
+
):
|
| 145 |
+
r"""Incremental closeness centrality for nodes.
|
| 146 |
+
|
| 147 |
+
Compute closeness centrality for nodes using level-based work filtering
|
| 148 |
+
as described in Incremental Algorithms for Closeness Centrality by Sariyuce et al.
|
| 149 |
+
|
| 150 |
+
Level-based work filtering detects unnecessary updates to the closeness
|
| 151 |
+
centrality and filters them out.
|
| 152 |
+
|
| 153 |
+
---
|
| 154 |
+
From "Incremental Algorithms for Closeness Centrality":
|
| 155 |
+
|
| 156 |
+
Theorem 1: Let :math:`G = (V, E)` be a graph and u and v be two vertices in V
|
| 157 |
+
such that there is no edge (u, v) in E. Let :math:`G' = (V, E \cup uv)`
|
| 158 |
+
Then :math:`cc[s] = cc'[s]` if and only if :math:`\left|dG(s, u) - dG(s, v)\right| \leq 1`.
|
| 159 |
+
|
| 160 |
+
Where :math:`dG(u, v)` denotes the length of the shortest path between
|
| 161 |
+
two vertices u, v in a graph G, cc[s] is the closeness centrality for a
|
| 162 |
+
vertex s in V, and cc'[s] is the closeness centrality for a
|
| 163 |
+
vertex s in V, with the (u, v) edge added.
|
| 164 |
+
---
|
| 165 |
+
|
| 166 |
+
We use Theorem 1 to filter out updates when adding or removing an edge.
|
| 167 |
+
When adding an edge (u, v), we compute the shortest path lengths from all
|
| 168 |
+
other nodes to u and to v before the node is added. When removing an edge,
|
| 169 |
+
we compute the shortest path lengths after the edge is removed. Then we
|
| 170 |
+
apply Theorem 1 to use previously computed closeness centrality for nodes
|
| 171 |
+
where :math:`\left|dG(s, u) - dG(s, v)\right| \leq 1`. This works only for
|
| 172 |
+
undirected, unweighted graphs; the distance argument is not supported.
|
| 173 |
+
|
| 174 |
+
Closeness centrality [1]_ of a node `u` is the reciprocal of the
|
| 175 |
+
sum of the shortest path distances from `u` to all `n-1` other nodes.
|
| 176 |
+
Since the sum of distances depends on the number of nodes in the
|
| 177 |
+
graph, closeness is normalized by the sum of minimum possible
|
| 178 |
+
distances `n-1`.
|
| 179 |
+
|
| 180 |
+
.. math::
|
| 181 |
+
|
| 182 |
+
C(u) = \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},
|
| 183 |
+
|
| 184 |
+
where `d(v, u)` is the shortest-path distance between `v` and `u`,
|
| 185 |
+
and `n` is the number of nodes in the graph.
|
| 186 |
+
|
| 187 |
+
Notice that higher values of closeness indicate higher centrality.
|
| 188 |
+
|
| 189 |
+
Parameters
|
| 190 |
+
----------
|
| 191 |
+
G : graph
|
| 192 |
+
A NetworkX graph
|
| 193 |
+
|
| 194 |
+
edge : tuple
|
| 195 |
+
The modified edge (u, v) in the graph.
|
| 196 |
+
|
| 197 |
+
prev_cc : dictionary
|
| 198 |
+
The previous closeness centrality for all nodes in the graph.
|
| 199 |
+
|
| 200 |
+
insertion : bool, optional
|
| 201 |
+
If True (default) the edge was inserted, otherwise it was deleted from the graph.
|
| 202 |
+
|
| 203 |
+
wf_improved : bool, optional (default=True)
|
| 204 |
+
If True, scale by the fraction of nodes reachable. This gives the
|
| 205 |
+
Wasserman and Faust improved formula. For single component graphs
|
| 206 |
+
it is the same as the original formula.
|
| 207 |
+
|
| 208 |
+
Returns
|
| 209 |
+
-------
|
| 210 |
+
nodes : dictionary
|
| 211 |
+
Dictionary of nodes with closeness centrality as the value.
|
| 212 |
+
|
| 213 |
+
See Also
|
| 214 |
+
--------
|
| 215 |
+
betweenness_centrality, load_centrality, eigenvector_centrality,
|
| 216 |
+
degree_centrality, closeness_centrality
|
| 217 |
+
|
| 218 |
+
Notes
|
| 219 |
+
-----
|
| 220 |
+
The closeness centrality is normalized to `(n-1)/(|G|-1)` where
|
| 221 |
+
`n` is the number of nodes in the connected part of graph
|
| 222 |
+
containing the node. If the graph is not completely connected,
|
| 223 |
+
this algorithm computes the closeness centrality for each
|
| 224 |
+
connected part separately.
|
| 225 |
+
|
| 226 |
+
References
|
| 227 |
+
----------
|
| 228 |
+
.. [1] Freeman, L.C., 1979. Centrality in networks: I.
|
| 229 |
+
Conceptual clarification. Social Networks 1, 215--239.
|
| 230 |
+
https://doi.org/10.1016/0378-8733(78)90021-7
|
| 231 |
+
.. [2] Sariyuce, A.E. ; Kaya, K. ; Saule, E. ; Catalyiirek, U.V. Incremental
|
| 232 |
+
Algorithms for Closeness Centrality. 2013 IEEE International Conference on Big Data
|
| 233 |
+
http://sariyuce.com/papers/bigdata13.pdf
|
| 234 |
+
"""
|
| 235 |
+
if prev_cc is not None and set(prev_cc.keys()) != set(G.nodes()):
|
| 236 |
+
raise NetworkXError("prev_cc and G do not have the same nodes")
|
| 237 |
+
|
| 238 |
+
# Unpack edge
|
| 239 |
+
(u, v) = edge
|
| 240 |
+
path_length = nx.single_source_shortest_path_length
|
| 241 |
+
|
| 242 |
+
if insertion:
|
| 243 |
+
# For edge insertion, we want shortest paths before the edge is inserted
|
| 244 |
+
du = path_length(G, u)
|
| 245 |
+
dv = path_length(G, v)
|
| 246 |
+
|
| 247 |
+
G.add_edge(u, v)
|
| 248 |
+
else:
|
| 249 |
+
G.remove_edge(u, v)
|
| 250 |
+
|
| 251 |
+
# For edge removal, we want shortest paths after the edge is removed
|
| 252 |
+
du = path_length(G, u)
|
| 253 |
+
dv = path_length(G, v)
|
| 254 |
+
|
| 255 |
+
if prev_cc is None:
|
| 256 |
+
return nx.closeness_centrality(G)
|
| 257 |
+
|
| 258 |
+
nodes = G.nodes()
|
| 259 |
+
closeness_dict = {}
|
| 260 |
+
for n in nodes:
|
| 261 |
+
if n in du and n in dv and abs(du[n] - dv[n]) <= 1:
|
| 262 |
+
closeness_dict[n] = prev_cc[n]
|
| 263 |
+
else:
|
| 264 |
+
sp = path_length(G, n)
|
| 265 |
+
totsp = sum(sp.values())
|
| 266 |
+
len_G = len(G)
|
| 267 |
+
_closeness_centrality = 0.0
|
| 268 |
+
if totsp > 0.0 and len_G > 1:
|
| 269 |
+
_closeness_centrality = (len(sp) - 1.0) / totsp
|
| 270 |
+
# normalize to number of nodes-1 in connected part
|
| 271 |
+
if wf_improved:
|
| 272 |
+
s = (len(sp) - 1.0) / (len_G - 1)
|
| 273 |
+
_closeness_centrality *= s
|
| 274 |
+
closeness_dict[n] = _closeness_centrality
|
| 275 |
+
|
| 276 |
+
# Leave the graph as we found it
|
| 277 |
+
if insertion:
|
| 278 |
+
G.remove_edge(u, v)
|
| 279 |
+
else:
|
| 280 |
+
G.add_edge(u, v)
|
| 281 |
+
|
| 282 |
+
return closeness_dict
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/current_flow_betweenness_subset.py
ADDED
|
@@ -0,0 +1,227 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Current-flow betweenness centrality measures for subsets of nodes."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.algorithms.centrality.flow_matrix import flow_matrix_row
|
| 5 |
+
from networkx.utils import not_implemented_for, reverse_cuthill_mckee_ordering
|
| 6 |
+
|
| 7 |
+
__all__ = [
|
| 8 |
+
"current_flow_betweenness_centrality_subset",
|
| 9 |
+
"edge_current_flow_betweenness_centrality_subset",
|
| 10 |
+
]
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
@not_implemented_for("directed")
|
| 14 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 15 |
+
def current_flow_betweenness_centrality_subset(
|
| 16 |
+
G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu"
|
| 17 |
+
):
|
| 18 |
+
r"""Compute current-flow betweenness centrality for subsets of nodes.
|
| 19 |
+
|
| 20 |
+
Current-flow betweenness centrality uses an electrical current
|
| 21 |
+
model for information spreading in contrast to betweenness
|
| 22 |
+
centrality which uses shortest paths.
|
| 23 |
+
|
| 24 |
+
Current-flow betweenness centrality is also known as
|
| 25 |
+
random-walk betweenness centrality [2]_.
|
| 26 |
+
|
| 27 |
+
Parameters
|
| 28 |
+
----------
|
| 29 |
+
G : graph
|
| 30 |
+
A NetworkX graph
|
| 31 |
+
|
| 32 |
+
sources: list of nodes
|
| 33 |
+
Nodes to use as sources for current
|
| 34 |
+
|
| 35 |
+
targets: list of nodes
|
| 36 |
+
Nodes to use as sinks for current
|
| 37 |
+
|
| 38 |
+
normalized : bool, optional (default=True)
|
| 39 |
+
If True the betweenness values are normalized by b=b/(n-1)(n-2) where
|
| 40 |
+
n is the number of nodes in G.
|
| 41 |
+
|
| 42 |
+
weight : string or None, optional (default=None)
|
| 43 |
+
Key for edge data used as the edge weight.
|
| 44 |
+
If None, then use 1 as each edge weight.
|
| 45 |
+
The weight reflects the capacity or the strength of the
|
| 46 |
+
edge.
|
| 47 |
+
|
| 48 |
+
dtype: data type (float)
|
| 49 |
+
Default data type for internal matrices.
|
| 50 |
+
Set to np.float32 for lower memory consumption.
|
| 51 |
+
|
| 52 |
+
solver: string (default='lu')
|
| 53 |
+
Type of linear solver to use for computing the flow matrix.
|
| 54 |
+
Options are "full" (uses most memory), "lu" (recommended), and
|
| 55 |
+
"cg" (uses least memory).
|
| 56 |
+
|
| 57 |
+
Returns
|
| 58 |
+
-------
|
| 59 |
+
nodes : dictionary
|
| 60 |
+
Dictionary of nodes with betweenness centrality as the value.
|
| 61 |
+
|
| 62 |
+
See Also
|
| 63 |
+
--------
|
| 64 |
+
approximate_current_flow_betweenness_centrality
|
| 65 |
+
betweenness_centrality
|
| 66 |
+
edge_betweenness_centrality
|
| 67 |
+
edge_current_flow_betweenness_centrality
|
| 68 |
+
|
| 69 |
+
Notes
|
| 70 |
+
-----
|
| 71 |
+
Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
|
| 72 |
+
time [1]_, where $I(n-1)$ is the time needed to compute the
|
| 73 |
+
inverse Laplacian. For a full matrix this is $O(n^3)$ but using
|
| 74 |
+
sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
|
| 75 |
+
Laplacian matrix condition number.
|
| 76 |
+
|
| 77 |
+
The space required is $O(nw)$ where $w$ is the width of the sparse
|
| 78 |
+
Laplacian matrix. Worse case is $w=n$ for $O(n^2)$.
|
| 79 |
+
|
| 80 |
+
If the edges have a 'weight' attribute they will be used as
|
| 81 |
+
weights in this algorithm. Unspecified weights are set to 1.
|
| 82 |
+
|
| 83 |
+
References
|
| 84 |
+
----------
|
| 85 |
+
.. [1] Centrality Measures Based on Current Flow.
|
| 86 |
+
Ulrik Brandes and Daniel Fleischer,
|
| 87 |
+
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
|
| 88 |
+
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
|
| 89 |
+
https://doi.org/10.1007/978-3-540-31856-9_44
|
| 90 |
+
|
| 91 |
+
.. [2] A measure of betweenness centrality based on random walks,
|
| 92 |
+
M. E. J. Newman, Social Networks 27, 39-54 (2005).
|
| 93 |
+
"""
|
| 94 |
+
import numpy as np
|
| 95 |
+
|
| 96 |
+
from networkx.utils import reverse_cuthill_mckee_ordering
|
| 97 |
+
|
| 98 |
+
if not nx.is_connected(G):
|
| 99 |
+
raise nx.NetworkXError("Graph not connected.")
|
| 100 |
+
N = G.number_of_nodes()
|
| 101 |
+
ordering = list(reverse_cuthill_mckee_ordering(G))
|
| 102 |
+
# make a copy with integer labels according to rcm ordering
|
| 103 |
+
# this could be done without a copy if we really wanted to
|
| 104 |
+
mapping = dict(zip(ordering, range(N)))
|
| 105 |
+
H = nx.relabel_nodes(G, mapping)
|
| 106 |
+
betweenness = dict.fromkeys(H, 0.0) # b[n]=0 for n in H
|
| 107 |
+
for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
|
| 108 |
+
for ss in sources:
|
| 109 |
+
i = mapping[ss]
|
| 110 |
+
for tt in targets:
|
| 111 |
+
j = mapping[tt]
|
| 112 |
+
betweenness[s] += 0.5 * abs(row.item(i) - row.item(j))
|
| 113 |
+
betweenness[t] += 0.5 * abs(row.item(i) - row.item(j))
|
| 114 |
+
if normalized:
|
| 115 |
+
nb = (N - 1.0) * (N - 2.0) # normalization factor
|
| 116 |
+
else:
|
| 117 |
+
nb = 2.0
|
| 118 |
+
for node in H:
|
| 119 |
+
betweenness[node] = betweenness[node] / nb + 1.0 / (2 - N)
|
| 120 |
+
return {ordering[node]: value for node, value in betweenness.items()}
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
@not_implemented_for("directed")
|
| 124 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 125 |
+
def edge_current_flow_betweenness_centrality_subset(
|
| 126 |
+
G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu"
|
| 127 |
+
):
|
| 128 |
+
r"""Compute current-flow betweenness centrality for edges using subsets
|
| 129 |
+
of nodes.
|
| 130 |
+
|
| 131 |
+
Current-flow betweenness centrality uses an electrical current
|
| 132 |
+
model for information spreading in contrast to betweenness
|
| 133 |
+
centrality which uses shortest paths.
|
| 134 |
+
|
| 135 |
+
Current-flow betweenness centrality is also known as
|
| 136 |
+
random-walk betweenness centrality [2]_.
|
| 137 |
+
|
| 138 |
+
Parameters
|
| 139 |
+
----------
|
| 140 |
+
G : graph
|
| 141 |
+
A NetworkX graph
|
| 142 |
+
|
| 143 |
+
sources: list of nodes
|
| 144 |
+
Nodes to use as sources for current
|
| 145 |
+
|
| 146 |
+
targets: list of nodes
|
| 147 |
+
Nodes to use as sinks for current
|
| 148 |
+
|
| 149 |
+
normalized : bool, optional (default=True)
|
| 150 |
+
If True the betweenness values are normalized by b=b/(n-1)(n-2) where
|
| 151 |
+
n is the number of nodes in G.
|
| 152 |
+
|
| 153 |
+
weight : string or None, optional (default=None)
|
| 154 |
+
Key for edge data used as the edge weight.
|
| 155 |
+
If None, then use 1 as each edge weight.
|
| 156 |
+
The weight reflects the capacity or the strength of the
|
| 157 |
+
edge.
|
| 158 |
+
|
| 159 |
+
dtype: data type (float)
|
| 160 |
+
Default data type for internal matrices.
|
| 161 |
+
Set to np.float32 for lower memory consumption.
|
| 162 |
+
|
| 163 |
+
solver: string (default='lu')
|
| 164 |
+
Type of linear solver to use for computing the flow matrix.
|
| 165 |
+
Options are "full" (uses most memory), "lu" (recommended), and
|
| 166 |
+
"cg" (uses least memory).
|
| 167 |
+
|
| 168 |
+
Returns
|
| 169 |
+
-------
|
| 170 |
+
nodes : dict
|
| 171 |
+
Dictionary of edge tuples with betweenness centrality as the value.
|
| 172 |
+
|
| 173 |
+
See Also
|
| 174 |
+
--------
|
| 175 |
+
betweenness_centrality
|
| 176 |
+
edge_betweenness_centrality
|
| 177 |
+
current_flow_betweenness_centrality
|
| 178 |
+
|
| 179 |
+
Notes
|
| 180 |
+
-----
|
| 181 |
+
Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
|
| 182 |
+
time [1]_, where $I(n-1)$ is the time needed to compute the
|
| 183 |
+
inverse Laplacian. For a full matrix this is $O(n^3)$ but using
|
| 184 |
+
sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
|
| 185 |
+
Laplacian matrix condition number.
|
| 186 |
+
|
| 187 |
+
The space required is $O(nw)$ where $w$ is the width of the sparse
|
| 188 |
+
Laplacian matrix. Worse case is $w=n$ for $O(n^2)$.
|
| 189 |
+
|
| 190 |
+
If the edges have a 'weight' attribute they will be used as
|
| 191 |
+
weights in this algorithm. Unspecified weights are set to 1.
|
| 192 |
+
|
| 193 |
+
References
|
| 194 |
+
----------
|
| 195 |
+
.. [1] Centrality Measures Based on Current Flow.
|
| 196 |
+
Ulrik Brandes and Daniel Fleischer,
|
| 197 |
+
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
|
| 198 |
+
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
|
| 199 |
+
https://doi.org/10.1007/978-3-540-31856-9_44
|
| 200 |
+
|
| 201 |
+
.. [2] A measure of betweenness centrality based on random walks,
|
| 202 |
+
M. E. J. Newman, Social Networks 27, 39-54 (2005).
|
| 203 |
+
"""
|
| 204 |
+
import numpy as np
|
| 205 |
+
|
| 206 |
+
if not nx.is_connected(G):
|
| 207 |
+
raise nx.NetworkXError("Graph not connected.")
|
| 208 |
+
N = G.number_of_nodes()
|
| 209 |
+
ordering = list(reverse_cuthill_mckee_ordering(G))
|
| 210 |
+
# make a copy with integer labels according to rcm ordering
|
| 211 |
+
# this could be done without a copy if we really wanted to
|
| 212 |
+
mapping = dict(zip(ordering, range(N)))
|
| 213 |
+
H = nx.relabel_nodes(G, mapping)
|
| 214 |
+
edges = (tuple(sorted((u, v))) for u, v in H.edges())
|
| 215 |
+
betweenness = dict.fromkeys(edges, 0.0)
|
| 216 |
+
if normalized:
|
| 217 |
+
nb = (N - 1.0) * (N - 2.0) # normalization factor
|
| 218 |
+
else:
|
| 219 |
+
nb = 2.0
|
| 220 |
+
for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
|
| 221 |
+
for ss in sources:
|
| 222 |
+
i = mapping[ss]
|
| 223 |
+
for tt in targets:
|
| 224 |
+
j = mapping[tt]
|
| 225 |
+
betweenness[e] += 0.5 * abs(row.item(i) - row.item(j))
|
| 226 |
+
betweenness[e] /= nb
|
| 227 |
+
return {(ordering[s], ordering[t]): value for (s, t), value in betweenness.items()}
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/current_flow_closeness.py
ADDED
|
@@ -0,0 +1,96 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Current-flow closeness centrality measures."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.algorithms.centrality.flow_matrix import (
|
| 5 |
+
CGInverseLaplacian,
|
| 6 |
+
FullInverseLaplacian,
|
| 7 |
+
SuperLUInverseLaplacian,
|
| 8 |
+
)
|
| 9 |
+
from networkx.utils import not_implemented_for, reverse_cuthill_mckee_ordering
|
| 10 |
+
|
| 11 |
+
__all__ = ["current_flow_closeness_centrality", "information_centrality"]
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@not_implemented_for("directed")
|
| 15 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 16 |
+
def current_flow_closeness_centrality(G, weight=None, dtype=float, solver="lu"):
|
| 17 |
+
"""Compute current-flow closeness centrality for nodes.
|
| 18 |
+
|
| 19 |
+
Current-flow closeness centrality is variant of closeness
|
| 20 |
+
centrality based on effective resistance between nodes in
|
| 21 |
+
a network. This metric is also known as information centrality.
|
| 22 |
+
|
| 23 |
+
Parameters
|
| 24 |
+
----------
|
| 25 |
+
G : graph
|
| 26 |
+
A NetworkX graph.
|
| 27 |
+
|
| 28 |
+
weight : None or string, optional (default=None)
|
| 29 |
+
If None, all edge weights are considered equal.
|
| 30 |
+
Otherwise holds the name of the edge attribute used as weight.
|
| 31 |
+
The weight reflects the capacity or the strength of the
|
| 32 |
+
edge.
|
| 33 |
+
|
| 34 |
+
dtype: data type (default=float)
|
| 35 |
+
Default data type for internal matrices.
|
| 36 |
+
Set to np.float32 for lower memory consumption.
|
| 37 |
+
|
| 38 |
+
solver: string (default='lu')
|
| 39 |
+
Type of linear solver to use for computing the flow matrix.
|
| 40 |
+
Options are "full" (uses most memory), "lu" (recommended), and
|
| 41 |
+
"cg" (uses least memory).
|
| 42 |
+
|
| 43 |
+
Returns
|
| 44 |
+
-------
|
| 45 |
+
nodes : dictionary
|
| 46 |
+
Dictionary of nodes with current flow closeness centrality as the value.
|
| 47 |
+
|
| 48 |
+
See Also
|
| 49 |
+
--------
|
| 50 |
+
closeness_centrality
|
| 51 |
+
|
| 52 |
+
Notes
|
| 53 |
+
-----
|
| 54 |
+
The algorithm is from Brandes [1]_.
|
| 55 |
+
|
| 56 |
+
See also [2]_ for the original definition of information centrality.
|
| 57 |
+
|
| 58 |
+
References
|
| 59 |
+
----------
|
| 60 |
+
.. [1] Ulrik Brandes and Daniel Fleischer,
|
| 61 |
+
Centrality Measures Based on Current Flow.
|
| 62 |
+
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
|
| 63 |
+
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
|
| 64 |
+
https://doi.org/10.1007/978-3-540-31856-9_44
|
| 65 |
+
|
| 66 |
+
.. [2] Karen Stephenson and Marvin Zelen:
|
| 67 |
+
Rethinking centrality: Methods and examples.
|
| 68 |
+
Social Networks 11(1):1-37, 1989.
|
| 69 |
+
https://doi.org/10.1016/0378-8733(89)90016-6
|
| 70 |
+
"""
|
| 71 |
+
if not nx.is_connected(G):
|
| 72 |
+
raise nx.NetworkXError("Graph not connected.")
|
| 73 |
+
solvername = {
|
| 74 |
+
"full": FullInverseLaplacian,
|
| 75 |
+
"lu": SuperLUInverseLaplacian,
|
| 76 |
+
"cg": CGInverseLaplacian,
|
| 77 |
+
}
|
| 78 |
+
N = G.number_of_nodes()
|
| 79 |
+
ordering = list(reverse_cuthill_mckee_ordering(G))
|
| 80 |
+
# make a copy with integer labels according to rcm ordering
|
| 81 |
+
# this could be done without a copy if we really wanted to
|
| 82 |
+
H = nx.relabel_nodes(G, dict(zip(ordering, range(N))))
|
| 83 |
+
betweenness = dict.fromkeys(H, 0.0) # b[n]=0 for n in H
|
| 84 |
+
N = H.number_of_nodes()
|
| 85 |
+
L = nx.laplacian_matrix(H, nodelist=range(N), weight=weight).asformat("csc")
|
| 86 |
+
L = L.astype(dtype)
|
| 87 |
+
C2 = solvername[solver](L, width=1, dtype=dtype) # initialize solver
|
| 88 |
+
for v in H:
|
| 89 |
+
col = C2.get_row(v)
|
| 90 |
+
for w in H:
|
| 91 |
+
betweenness[v] += col.item(v) - 2 * col.item(w)
|
| 92 |
+
betweenness[w] += col.item(v)
|
| 93 |
+
return {ordering[node]: 1 / value for node, value in betweenness.items()}
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
information_centrality = current_flow_closeness_centrality
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/eigenvector.py
ADDED
|
@@ -0,0 +1,357 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for computing eigenvector centrality."""
|
| 2 |
+
|
| 3 |
+
import math
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.utils import not_implemented_for
|
| 7 |
+
|
| 8 |
+
__all__ = ["eigenvector_centrality", "eigenvector_centrality_numpy"]
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@not_implemented_for("multigraph")
|
| 12 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 13 |
+
def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None, weight=None):
|
| 14 |
+
r"""Compute the eigenvector centrality for the graph G.
|
| 15 |
+
|
| 16 |
+
Eigenvector centrality computes the centrality for a node by adding
|
| 17 |
+
the centrality of its predecessors. The centrality for node $i$ is the
|
| 18 |
+
$i$-th element of a left eigenvector associated with the eigenvalue $\lambda$
|
| 19 |
+
of maximum modulus that is positive. Such an eigenvector $x$ is
|
| 20 |
+
defined up to a multiplicative constant by the equation
|
| 21 |
+
|
| 22 |
+
.. math::
|
| 23 |
+
|
| 24 |
+
\lambda x^T = x^T A,
|
| 25 |
+
|
| 26 |
+
where $A$ is the adjacency matrix of the graph G. By definition of
|
| 27 |
+
row-column product, the equation above is equivalent to
|
| 28 |
+
|
| 29 |
+
.. math::
|
| 30 |
+
|
| 31 |
+
\lambda x_i = \sum_{j\to i}x_j.
|
| 32 |
+
|
| 33 |
+
That is, adding the eigenvector centralities of the predecessors of
|
| 34 |
+
$i$ one obtains the eigenvector centrality of $i$ multiplied by
|
| 35 |
+
$\lambda$. In the case of undirected graphs, $x$ also solves the familiar
|
| 36 |
+
right-eigenvector equation $Ax = \lambda x$.
|
| 37 |
+
|
| 38 |
+
By virtue of the Perron–Frobenius theorem [1]_, if G is strongly
|
| 39 |
+
connected there is a unique eigenvector $x$, and all its entries
|
| 40 |
+
are strictly positive.
|
| 41 |
+
|
| 42 |
+
If G is not strongly connected there might be several left
|
| 43 |
+
eigenvectors associated with $\lambda$, and some of their elements
|
| 44 |
+
might be zero.
|
| 45 |
+
|
| 46 |
+
Parameters
|
| 47 |
+
----------
|
| 48 |
+
G : graph
|
| 49 |
+
A networkx graph.
|
| 50 |
+
|
| 51 |
+
max_iter : integer, optional (default=100)
|
| 52 |
+
Maximum number of power iterations.
|
| 53 |
+
|
| 54 |
+
tol : float, optional (default=1.0e-6)
|
| 55 |
+
Error tolerance (in Euclidean norm) used to check convergence in
|
| 56 |
+
power iteration.
|
| 57 |
+
|
| 58 |
+
nstart : dictionary, optional (default=None)
|
| 59 |
+
Starting value of power iteration for each node. Must have a nonzero
|
| 60 |
+
projection on the desired eigenvector for the power method to converge.
|
| 61 |
+
If None, this implementation uses an all-ones vector, which is a safe
|
| 62 |
+
choice.
|
| 63 |
+
|
| 64 |
+
weight : None or string, optional (default=None)
|
| 65 |
+
If None, all edge weights are considered equal. Otherwise holds the
|
| 66 |
+
name of the edge attribute used as weight. In this measure the
|
| 67 |
+
weight is interpreted as the connection strength.
|
| 68 |
+
|
| 69 |
+
Returns
|
| 70 |
+
-------
|
| 71 |
+
nodes : dictionary
|
| 72 |
+
Dictionary of nodes with eigenvector centrality as the value. The
|
| 73 |
+
associated vector has unit Euclidean norm and the values are
|
| 74 |
+
nonegative.
|
| 75 |
+
|
| 76 |
+
Examples
|
| 77 |
+
--------
|
| 78 |
+
>>> G = nx.path_graph(4)
|
| 79 |
+
>>> centrality = nx.eigenvector_centrality(G)
|
| 80 |
+
>>> sorted((v, f"{c:0.2f}") for v, c in centrality.items())
|
| 81 |
+
[(0, '0.37'), (1, '0.60'), (2, '0.60'), (3, '0.37')]
|
| 82 |
+
|
| 83 |
+
Raises
|
| 84 |
+
------
|
| 85 |
+
NetworkXPointlessConcept
|
| 86 |
+
If the graph G is the null graph.
|
| 87 |
+
|
| 88 |
+
NetworkXError
|
| 89 |
+
If each value in `nstart` is zero.
|
| 90 |
+
|
| 91 |
+
PowerIterationFailedConvergence
|
| 92 |
+
If the algorithm fails to converge to the specified tolerance
|
| 93 |
+
within the specified number of iterations of the power iteration
|
| 94 |
+
method.
|
| 95 |
+
|
| 96 |
+
See Also
|
| 97 |
+
--------
|
| 98 |
+
eigenvector_centrality_numpy
|
| 99 |
+
:func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
|
| 100 |
+
:func:`~networkx.algorithms.link_analysis.hits_alg.hits`
|
| 101 |
+
|
| 102 |
+
Notes
|
| 103 |
+
-----
|
| 104 |
+
Eigenvector centrality was introduced by Landau [2]_ for chess
|
| 105 |
+
tournaments. It was later rediscovered by Wei [3]_ and then
|
| 106 |
+
popularized by Kendall [4]_ in the context of sport ranking. Berge
|
| 107 |
+
introduced a general definition for graphs based on social connections
|
| 108 |
+
[5]_. Bonacich [6]_ reintroduced again eigenvector centrality and made
|
| 109 |
+
it popular in link analysis.
|
| 110 |
+
|
| 111 |
+
This function computes the left dominant eigenvector, which corresponds
|
| 112 |
+
to adding the centrality of predecessors: this is the usual approach.
|
| 113 |
+
To add the centrality of successors first reverse the graph with
|
| 114 |
+
``G.reverse()``.
|
| 115 |
+
|
| 116 |
+
The implementation uses power iteration [7]_ to compute a dominant
|
| 117 |
+
eigenvector starting from the provided vector `nstart`. Convergence is
|
| 118 |
+
guaranteed as long as `nstart` has a nonzero projection on a dominant
|
| 119 |
+
eigenvector, which certainly happens using the default value.
|
| 120 |
+
|
| 121 |
+
The method stops when the change in the computed vector between two
|
| 122 |
+
iterations is smaller than an error tolerance of ``G.number_of_nodes()
|
| 123 |
+
* tol`` or after ``max_iter`` iterations, but in the second case it
|
| 124 |
+
raises an exception.
|
| 125 |
+
|
| 126 |
+
This implementation uses $(A + I)$ rather than the adjacency matrix
|
| 127 |
+
$A$ because the change preserves eigenvectors, but it shifts the
|
| 128 |
+
spectrum, thus guaranteeing convergence even for networks with
|
| 129 |
+
negative eigenvalues of maximum modulus.
|
| 130 |
+
|
| 131 |
+
References
|
| 132 |
+
----------
|
| 133 |
+
.. [1] Abraham Berman and Robert J. Plemmons.
|
| 134 |
+
"Nonnegative Matrices in the Mathematical Sciences."
|
| 135 |
+
Classics in Applied Mathematics. SIAM, 1994.
|
| 136 |
+
|
| 137 |
+
.. [2] Edmund Landau.
|
| 138 |
+
"Zur relativen Wertbemessung der Turnierresultate."
|
| 139 |
+
Deutsches Wochenschach, 11:366–369, 1895.
|
| 140 |
+
|
| 141 |
+
.. [3] Teh-Hsing Wei.
|
| 142 |
+
"The Algebraic Foundations of Ranking Theory."
|
| 143 |
+
PhD thesis, University of Cambridge, 1952.
|
| 144 |
+
|
| 145 |
+
.. [4] Maurice G. Kendall.
|
| 146 |
+
"Further contributions to the theory of paired comparisons."
|
| 147 |
+
Biometrics, 11(1):43–62, 1955.
|
| 148 |
+
https://www.jstor.org/stable/3001479
|
| 149 |
+
|
| 150 |
+
.. [5] Claude Berge
|
| 151 |
+
"Théorie des graphes et ses applications."
|
| 152 |
+
Dunod, Paris, France, 1958.
|
| 153 |
+
|
| 154 |
+
.. [6] Phillip Bonacich.
|
| 155 |
+
"Technique for analyzing overlapping memberships."
|
| 156 |
+
Sociological Methodology, 4:176–185, 1972.
|
| 157 |
+
https://www.jstor.org/stable/270732
|
| 158 |
+
|
| 159 |
+
.. [7] Power iteration:: https://en.wikipedia.org/wiki/Power_iteration
|
| 160 |
+
|
| 161 |
+
"""
|
| 162 |
+
if len(G) == 0:
|
| 163 |
+
raise nx.NetworkXPointlessConcept(
|
| 164 |
+
"cannot compute centrality for the null graph"
|
| 165 |
+
)
|
| 166 |
+
# If no initial vector is provided, start with the all-ones vector.
|
| 167 |
+
if nstart is None:
|
| 168 |
+
nstart = {v: 1 for v in G}
|
| 169 |
+
if all(v == 0 for v in nstart.values()):
|
| 170 |
+
raise nx.NetworkXError("initial vector cannot have all zero values")
|
| 171 |
+
# Normalize the initial vector so that each entry is in [0, 1]. This is
|
| 172 |
+
# guaranteed to never have a divide-by-zero error by the previous line.
|
| 173 |
+
nstart_sum = sum(nstart.values())
|
| 174 |
+
x = {k: v / nstart_sum for k, v in nstart.items()}
|
| 175 |
+
nnodes = G.number_of_nodes()
|
| 176 |
+
# make up to max_iter iterations
|
| 177 |
+
for _ in range(max_iter):
|
| 178 |
+
xlast = x
|
| 179 |
+
x = xlast.copy() # Start with xlast times I to iterate with (A+I)
|
| 180 |
+
# do the multiplication y^T = x^T A (left eigenvector)
|
| 181 |
+
for n in x:
|
| 182 |
+
for nbr in G[n]:
|
| 183 |
+
w = G[n][nbr].get(weight, 1) if weight else 1
|
| 184 |
+
x[nbr] += xlast[n] * w
|
| 185 |
+
# Normalize the vector. The normalization denominator `norm`
|
| 186 |
+
# should never be zero by the Perron--Frobenius
|
| 187 |
+
# theorem. However, in case it is due to numerical error, we
|
| 188 |
+
# assume the norm to be one instead.
|
| 189 |
+
norm = math.hypot(*x.values()) or 1
|
| 190 |
+
x = {k: v / norm for k, v in x.items()}
|
| 191 |
+
# Check for convergence (in the L_1 norm).
|
| 192 |
+
if sum(abs(x[n] - xlast[n]) for n in x) < nnodes * tol:
|
| 193 |
+
return x
|
| 194 |
+
raise nx.PowerIterationFailedConvergence(max_iter)
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 198 |
+
def eigenvector_centrality_numpy(G, weight=None, max_iter=50, tol=0):
|
| 199 |
+
r"""Compute the eigenvector centrality for the graph `G`.
|
| 200 |
+
|
| 201 |
+
Eigenvector centrality computes the centrality for a node by adding
|
| 202 |
+
the centrality of its predecessors. The centrality for node $i$ is the
|
| 203 |
+
$i$-th element of a left eigenvector associated with the eigenvalue $\lambda$
|
| 204 |
+
of maximum modulus that is positive. Such an eigenvector $x$ is
|
| 205 |
+
defined up to a multiplicative constant by the equation
|
| 206 |
+
|
| 207 |
+
.. math::
|
| 208 |
+
|
| 209 |
+
\lambda x^T = x^T A,
|
| 210 |
+
|
| 211 |
+
where $A$ is the adjacency matrix of the graph `G`. By definition of
|
| 212 |
+
row-column product, the equation above is equivalent to
|
| 213 |
+
|
| 214 |
+
.. math::
|
| 215 |
+
|
| 216 |
+
\lambda x_i = \sum_{j\to i}x_j.
|
| 217 |
+
|
| 218 |
+
That is, adding the eigenvector centralities of the predecessors of
|
| 219 |
+
$i$ one obtains the eigenvector centrality of $i$ multiplied by
|
| 220 |
+
$\lambda$. In the case of undirected graphs, $x$ also solves the familiar
|
| 221 |
+
right-eigenvector equation $Ax = \lambda x$.
|
| 222 |
+
|
| 223 |
+
By virtue of the Perron--Frobenius theorem [1]_, if `G` is (strongly)
|
| 224 |
+
connected, there is a unique eigenvector $x$, and all its entries
|
| 225 |
+
are strictly positive.
|
| 226 |
+
|
| 227 |
+
However, if `G` is not (strongly) connected, there might be several left
|
| 228 |
+
eigenvectors associated with $\lambda$, and some of their elements
|
| 229 |
+
might be zero.
|
| 230 |
+
Depending on the method used to choose eigenvectors, round-off error can affect
|
| 231 |
+
which of the infinitely many eigenvectors is reported.
|
| 232 |
+
This can lead to inconsistent results for the same graph,
|
| 233 |
+
which the underlying implementation is not robust to.
|
| 234 |
+
For this reason, only (strongly) connected graphs are accepted.
|
| 235 |
+
|
| 236 |
+
Parameters
|
| 237 |
+
----------
|
| 238 |
+
G : graph
|
| 239 |
+
A connected NetworkX graph.
|
| 240 |
+
|
| 241 |
+
weight : None or string, optional (default=None)
|
| 242 |
+
If ``None``, all edge weights are considered equal. Otherwise holds the
|
| 243 |
+
name of the edge attribute used as weight. In this measure the
|
| 244 |
+
weight is interpreted as the connection strength.
|
| 245 |
+
|
| 246 |
+
max_iter : integer, optional (default=50)
|
| 247 |
+
Maximum number of Arnoldi update iterations allowed.
|
| 248 |
+
|
| 249 |
+
tol : float, optional (default=0)
|
| 250 |
+
Relative accuracy for eigenvalues (stopping criterion).
|
| 251 |
+
The default value of 0 implies machine precision.
|
| 252 |
+
|
| 253 |
+
Returns
|
| 254 |
+
-------
|
| 255 |
+
nodes : dict of nodes
|
| 256 |
+
Dictionary of nodes with eigenvector centrality as the value. The
|
| 257 |
+
associated vector has unit Euclidean norm and the values are
|
| 258 |
+
nonnegative.
|
| 259 |
+
|
| 260 |
+
Examples
|
| 261 |
+
--------
|
| 262 |
+
>>> G = nx.path_graph(4)
|
| 263 |
+
>>> centrality = nx.eigenvector_centrality_numpy(G)
|
| 264 |
+
>>> print([f"{node} {centrality[node]:0.2f}" for node in centrality])
|
| 265 |
+
['0 0.37', '1 0.60', '2 0.60', '3 0.37']
|
| 266 |
+
|
| 267 |
+
Raises
|
| 268 |
+
------
|
| 269 |
+
NetworkXPointlessConcept
|
| 270 |
+
If the graph `G` is the null graph.
|
| 271 |
+
|
| 272 |
+
ArpackNoConvergence
|
| 273 |
+
When the requested convergence is not obtained. The currently
|
| 274 |
+
converged eigenvalues and eigenvectors can be found as
|
| 275 |
+
eigenvalues and eigenvectors attributes of the exception object.
|
| 276 |
+
|
| 277 |
+
AmbiguousSolution
|
| 278 |
+
If `G` is not connected.
|
| 279 |
+
|
| 280 |
+
See Also
|
| 281 |
+
--------
|
| 282 |
+
:func:`scipy.sparse.linalg.eigs`
|
| 283 |
+
eigenvector_centrality
|
| 284 |
+
:func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
|
| 285 |
+
:func:`~networkx.algorithms.link_analysis.hits_alg.hits`
|
| 286 |
+
|
| 287 |
+
Notes
|
| 288 |
+
-----
|
| 289 |
+
Eigenvector centrality was introduced by Landau [2]_ for chess
|
| 290 |
+
tournaments. It was later rediscovered by Wei [3]_ and then
|
| 291 |
+
popularized by Kendall [4]_ in the context of sport ranking. Berge
|
| 292 |
+
introduced a general definition for graphs based on social connections
|
| 293 |
+
[5]_. Bonacich [6]_ reintroduced again eigenvector centrality and made
|
| 294 |
+
it popular in link analysis.
|
| 295 |
+
|
| 296 |
+
This function computes the left dominant eigenvector, which corresponds
|
| 297 |
+
to adding the centrality of predecessors: this is the usual approach.
|
| 298 |
+
To add the centrality of successors first reverse the graph with
|
| 299 |
+
``G.reverse()``.
|
| 300 |
+
|
| 301 |
+
This implementation uses the
|
| 302 |
+
:func:`SciPy sparse eigenvalue solver<scipy.sparse.linalg.eigs>` (ARPACK)
|
| 303 |
+
to find the largest eigenvalue/eigenvector pair using Arnoldi iterations
|
| 304 |
+
[7]_.
|
| 305 |
+
|
| 306 |
+
References
|
| 307 |
+
----------
|
| 308 |
+
.. [1] Abraham Berman and Robert J. Plemmons.
|
| 309 |
+
"Nonnegative Matrices in the Mathematical Sciences".
|
| 310 |
+
Classics in Applied Mathematics. SIAM, 1994.
|
| 311 |
+
|
| 312 |
+
.. [2] Edmund Landau.
|
| 313 |
+
"Zur relativen Wertbemessung der Turnierresultate".
|
| 314 |
+
Deutsches Wochenschach, 11:366--369, 1895.
|
| 315 |
+
|
| 316 |
+
.. [3] Teh-Hsing Wei.
|
| 317 |
+
"The Algebraic Foundations of Ranking Theory".
|
| 318 |
+
PhD thesis, University of Cambridge, 1952.
|
| 319 |
+
|
| 320 |
+
.. [4] Maurice G. Kendall.
|
| 321 |
+
"Further contributions to the theory of paired comparisons".
|
| 322 |
+
Biometrics, 11(1):43--62, 1955.
|
| 323 |
+
https://www.jstor.org/stable/3001479
|
| 324 |
+
|
| 325 |
+
.. [5] Claude Berge.
|
| 326 |
+
"Théorie des graphes et ses applications".
|
| 327 |
+
Dunod, Paris, France, 1958.
|
| 328 |
+
|
| 329 |
+
.. [6] Phillip Bonacich.
|
| 330 |
+
"Technique for analyzing overlapping memberships".
|
| 331 |
+
Sociological Methodology, 4:176--185, 1972.
|
| 332 |
+
https://www.jstor.org/stable/270732
|
| 333 |
+
|
| 334 |
+
.. [7] Arnoldi, W. E. (1951).
|
| 335 |
+
"The principle of minimized iterations in the solution of the matrix eigenvalue problem".
|
| 336 |
+
Quarterly of Applied Mathematics. 9 (1): 17--29.
|
| 337 |
+
https://doi.org/10.1090/qam/42792
|
| 338 |
+
"""
|
| 339 |
+
import numpy as np
|
| 340 |
+
import scipy as sp
|
| 341 |
+
|
| 342 |
+
if len(G) == 0:
|
| 343 |
+
raise nx.NetworkXPointlessConcept(
|
| 344 |
+
"cannot compute centrality for the null graph"
|
| 345 |
+
)
|
| 346 |
+
connected = nx.is_strongly_connected(G) if G.is_directed() else nx.is_connected(G)
|
| 347 |
+
if not connected: # See gh-6888.
|
| 348 |
+
raise nx.AmbiguousSolution(
|
| 349 |
+
"`eigenvector_centrality_numpy` does not give consistent results for disconnected graphs"
|
| 350 |
+
)
|
| 351 |
+
M = nx.to_scipy_sparse_array(G, nodelist=list(G), weight=weight, dtype=float)
|
| 352 |
+
_, eigenvector = sp.sparse.linalg.eigs(
|
| 353 |
+
M.T, k=1, which="LR", maxiter=max_iter, tol=tol
|
| 354 |
+
)
|
| 355 |
+
largest = eigenvector.flatten().real
|
| 356 |
+
norm = np.sign(largest.sum()) * sp.linalg.norm(largest)
|
| 357 |
+
return dict(zip(G, (largest / norm).tolist()))
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/flow_matrix.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Helpers for current-flow betweenness and current-flow closeness
|
| 2 |
+
# Lazy computations for inverse Laplacian and flow-matrix rows.
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 7 |
+
def flow_matrix_row(G, weight=None, dtype=float, solver="lu"):
|
| 8 |
+
# Generate a row of the current-flow matrix
|
| 9 |
+
import numpy as np
|
| 10 |
+
|
| 11 |
+
solvername = {
|
| 12 |
+
"full": FullInverseLaplacian,
|
| 13 |
+
"lu": SuperLUInverseLaplacian,
|
| 14 |
+
"cg": CGInverseLaplacian,
|
| 15 |
+
}
|
| 16 |
+
n = G.number_of_nodes()
|
| 17 |
+
L = nx.laplacian_matrix(G, nodelist=range(n), weight=weight).asformat("csc")
|
| 18 |
+
L = L.astype(dtype)
|
| 19 |
+
C = solvername[solver](L, dtype=dtype) # initialize solver
|
| 20 |
+
w = C.w # w is the Laplacian matrix width
|
| 21 |
+
# row-by-row flow matrix
|
| 22 |
+
for u, v in sorted(sorted((u, v)) for u, v in G.edges()):
|
| 23 |
+
B = np.zeros(w, dtype=dtype)
|
| 24 |
+
c = G[u][v].get(weight, 1.0)
|
| 25 |
+
B[u % w] = c
|
| 26 |
+
B[v % w] = -c
|
| 27 |
+
# get only the rows needed in the inverse laplacian
|
| 28 |
+
# and multiply to get the flow matrix row
|
| 29 |
+
row = B @ C.get_rows(u, v)
|
| 30 |
+
yield row, (u, v)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
# Class to compute the inverse laplacian only for specified rows
|
| 34 |
+
# Allows computation of the current-flow matrix without storing entire
|
| 35 |
+
# inverse laplacian matrix
|
| 36 |
+
class InverseLaplacian:
|
| 37 |
+
def __init__(self, L, width=None, dtype=None):
|
| 38 |
+
global np
|
| 39 |
+
import numpy as np
|
| 40 |
+
|
| 41 |
+
(n, n) = L.shape
|
| 42 |
+
self.dtype = dtype
|
| 43 |
+
self.n = n
|
| 44 |
+
if width is None:
|
| 45 |
+
self.w = self.width(L)
|
| 46 |
+
else:
|
| 47 |
+
self.w = width
|
| 48 |
+
self.C = np.zeros((self.w, n), dtype=dtype)
|
| 49 |
+
self.L1 = L[1:, 1:]
|
| 50 |
+
self.init_solver(L)
|
| 51 |
+
|
| 52 |
+
def init_solver(self, L):
|
| 53 |
+
pass
|
| 54 |
+
|
| 55 |
+
def solve(self, r):
|
| 56 |
+
raise nx.NetworkXError("Implement solver")
|
| 57 |
+
|
| 58 |
+
def solve_inverse(self, r):
|
| 59 |
+
raise nx.NetworkXError("Implement solver")
|
| 60 |
+
|
| 61 |
+
def get_rows(self, r1, r2):
|
| 62 |
+
for r in range(r1, r2 + 1):
|
| 63 |
+
self.C[r % self.w, 1:] = self.solve_inverse(r)
|
| 64 |
+
return self.C
|
| 65 |
+
|
| 66 |
+
def get_row(self, r):
|
| 67 |
+
self.C[r % self.w, 1:] = self.solve_inverse(r)
|
| 68 |
+
return self.C[r % self.w]
|
| 69 |
+
|
| 70 |
+
def width(self, L):
|
| 71 |
+
m = 0
|
| 72 |
+
for i, row in enumerate(L):
|
| 73 |
+
w = 0
|
| 74 |
+
y = np.nonzero(row)[-1]
|
| 75 |
+
if len(y) > 0:
|
| 76 |
+
v = y - i
|
| 77 |
+
w = v.max() - v.min() + 1
|
| 78 |
+
m = max(w, m)
|
| 79 |
+
return m
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
class FullInverseLaplacian(InverseLaplacian):
|
| 83 |
+
def init_solver(self, L):
|
| 84 |
+
self.IL = np.zeros(L.shape, dtype=self.dtype)
|
| 85 |
+
self.IL[1:, 1:] = np.linalg.inv(self.L1.todense())
|
| 86 |
+
|
| 87 |
+
def solve(self, rhs):
|
| 88 |
+
s = np.zeros(rhs.shape, dtype=self.dtype)
|
| 89 |
+
s = self.IL @ rhs
|
| 90 |
+
return s
|
| 91 |
+
|
| 92 |
+
def solve_inverse(self, r):
|
| 93 |
+
return self.IL[r, 1:]
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
class SuperLUInverseLaplacian(InverseLaplacian):
    """Inverse-Laplacian solver backed by a sparse LU factorization.

    ``init_solver`` factors the reduced Laplacian once with SuperLU; each
    subsequent solve is a pair of cheap triangular solves.
    """

    def init_solver(self, L):
        import scipy as sp

        # Pre-factor the grounded Laplacian (CSC layout required by SuperLU);
        # `lusolve` then solves L1 @ x = b for arbitrary right-hand sides.
        self.lusolve = sp.sparse.linalg.factorized(self.L1.tocsc())

    def solve_inverse(self, r):
        # Row r of the inverse equals the solve against the r-th unit vector.
        unit = np.zeros(self.n, dtype=self.dtype)
        unit[r] = 1
        return self.lusolve(unit[1:])

    def solve(self, rhs):
        # Entry 0 of the solution stays zero (grounded node).
        out = np.zeros(rhs.shape, dtype=self.dtype)
        out[1:] = self.lusolve(rhs[1:])
        return out
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
class CGInverseLaplacian(InverseLaplacian):
    """Inverse-Laplacian solver using preconditioned conjugate gradient.

    Builds an incomplete-LU preconditioner once, then solves each system
    iteratively; memory-friendly alternative to the dense/LU variants.
    """

    def init_solver(self, L):
        # Deferred import published as a module global because `solve` and
        # `solve_inverse` also reference `sp`.
        global sp
        import scipy as sp

        # Incomplete LU of the reduced Laplacian acts as the CG preconditioner.
        ilu = sp.sparse.linalg.spilu(self.L1.tocsc())
        n = self.n - 1  # size of the grounded system
        self.M = sp.sparse.linalg.LinearOperator(shape=(n, n), matvec=ilu.solve)

    def solve(self, rhs):
        # Entry 0 of the solution stays zero (grounded node); atol=0 makes
        # convergence depend only on the relative tolerance.
        s = np.zeros(rhs.shape, dtype=self.dtype)
        s[1:] = sp.sparse.linalg.cg(self.L1, rhs[1:], M=self.M, atol=0)[0]
        return s

    def solve_inverse(self, r):
        # Row r of the inverse = CG solve against the r-th unit vector.
        rhs = np.zeros(self.n, self.dtype)
        rhs[r] = 1
        return sp.sparse.linalg.cg(self.L1, rhs[1:], M=self.M, atol=0)[0]
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/group.py
ADDED
|
@@ -0,0 +1,787 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Group centrality measures."""
|
| 2 |
+
|
| 3 |
+
from copy import deepcopy
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.algorithms.centrality.betweenness import (
|
| 7 |
+
_accumulate_endpoints,
|
| 8 |
+
_single_source_dijkstra_path_basic,
|
| 9 |
+
_single_source_shortest_path_basic,
|
| 10 |
+
)
|
| 11 |
+
from networkx.utils.decorators import not_implemented_for
|
| 12 |
+
|
| 13 |
+
__all__ = [
|
| 14 |
+
"group_betweenness_centrality",
|
| 15 |
+
"group_closeness_centrality",
|
| 16 |
+
"group_degree_centrality",
|
| 17 |
+
"group_in_degree_centrality",
|
| 18 |
+
"group_out_degree_centrality",
|
| 19 |
+
"prominent_group",
|
| 20 |
+
]
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
@nx._dispatchable(edge_attrs="weight")
def group_betweenness_centrality(G, C, normalized=True, weight=None, endpoints=False):
    r"""Compute the group betweenness centrality for a group of nodes.

    Group betweenness centrality of a group of nodes $C$ is the sum of the
    fraction of all-pairs shortest paths that pass through any vertex in $C$

    .. math::

       c_B(v) =\sum_{s,t \in V} \frac{\sigma(s, t|v)}{\sigma(s, t)}

    where $V$ is the set of nodes, $\sigma(s, t)$ is the number of
    shortest $(s, t)$-paths, and $\sigma(s, t|C)$ is the number of
    those paths passing through some node in group $C$. Note that
    $(s, t)$ are not members of the group ($V-C$ is the set of nodes
    in $V$ that are not in $C$).

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    C : list or set or list of lists or list of sets
        A group or a list of groups containing nodes which belong to G, for
        which group betweenness centrality is to be calculated.

    normalized : bool, optional (default=True)
        If True, group betweenness is normalized by `1/((|V|-|C|)(|V|-|C|-1))`
        where `|V|` is the number of nodes in G and `|C|` is the number of
        nodes in C.

    weight : None or string, optional (default=None)
        If None, all edge weights are considered equal.
        Otherwise holds the name of the edge attribute used as weight.
        The weight of an edge is treated as the length or distance between
        the two sides.

    endpoints : bool, optional (default=False)
        If True include the endpoints in the shortest path counts.

    Raises
    ------
    NodeNotFound
        If node(s) in C are not present in G.

    Returns
    -------
    betweenness : list of floats or float
        If C is a single group then return a float. If C is a list with
        several groups then return a list of group betweenness centralities.

    See Also
    --------
    betweenness_centrality

    Notes
    -----
    Group betweenness centrality is described in [1]_ and its importance
    discussed in [3]_. The initial implementation of the algorithm is
    mentioned in [2]_. This function uses an improved algorithm presented
    in [4]_.

    The number of nodes in the group must be a maximum of n - 2 where `n`
    is the total number of nodes in the graph.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length
    paths between pairs of nodes.

    The total number of paths between source and target is counted
    differently for directed and undirected graphs. Directed paths
    between "u" and "v" are counted as two possible paths (one each
    direction) while undirected paths between "u" and "v" are counted
    as one path. Said another way, the sum in the expression above is
    over all ``s != t`` for directed graphs and for ``s < t`` for
    undirected graphs.

    References
    ----------
    .. [1] M G Everett and S P Borgatti:
       The Centrality of Groups and Classes.
       Journal of Mathematical Sociology. 23(3): 181-201. 1999.
       http://www.analytictech.com/borgatti/group_centrality.htm
    .. [2] Ulrik Brandes:
       On Variants of Shortest-Path Betweenness
       Centrality and their Generic Computation.
       Social Networks 30(2):136-145, 2008.
       http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.72.9610&rep=rep1&type=pdf
    .. [3] Sourav Medya et. al.:
       Group Centrality Maximization via Network Design.
       SIAM International Conference on Data Mining, SDM 2018, 126–134.
       https://sites.cs.ucsb.edu/~arlei/pubs/sdm18.pdf
    .. [4] Rami Puzis, Yuval Elovici, and Shlomi Dolev.
       "Fast algorithm for successive computation of group betweenness
       centrality."
       https://journals.aps.org/pre/pdf/10.1103/PhysRevE.76.056709
    """
    GBC = []  # initialize betweenness
    list_of_groups = True
    # check whether C contains one or many groups: if any top-level element
    # of C is itself a node of G, C is treated as a single group.
    if any(el in G for el in C):
        C = [C]
        list_of_groups = False
    set_v = {node for group in C for node in group}
    if set_v - G.nodes:  # element(s) of C not in G
        raise nx.NodeNotFound(f"The node(s) {set_v - G.nodes} are in C but not in G.")

    # pre-processing: path-betweenness matrix PB, path counts sigma and
    # distances D for all nodes that appear in any group.
    PB, sigma, D = _group_preprocessing(G, set_v, weight)

    # the algorithm for each group
    for group in C:
        group = set(group)  # set of nodes in group
        # initialize the matrices of the sigma and the PB
        GBC_group = 0
        sigma_m = deepcopy(sigma)
        PB_m = deepcopy(PB)
        # second buffers used for double-buffered updates; swapped with the
        # primaries after each group node is absorbed.
        sigma_m_v = deepcopy(sigma_m)
        PB_m_v = deepcopy(PB_m)
        for v in group:
            GBC_group += PB_m[v][v]
            for x in group:
                for y in group:
                    # dxyv / dxvy / dvxy: fractions of (x,y)/(x,v)/(v,y)
                    # shortest paths that pass through the third node.
                    dxvy = 0
                    dxyv = 0
                    dvxy = 0
                    if not (
                        sigma_m[x][y] == 0 or sigma_m[x][v] == 0 or sigma_m[v][y] == 0
                    ):
                        if D[x][v] == D[x][y] + D[y][v]:
                            dxyv = sigma_m[x][y] * sigma_m[y][v] / sigma_m[x][v]
                        if D[x][y] == D[x][v] + D[v][y]:
                            dxvy = sigma_m[x][v] * sigma_m[v][y] / sigma_m[x][y]
                        if D[v][y] == D[v][x] + D[x][y]:
                            # NOTE(review): this branch reads the original
                            # `sigma`, not `sigma_m`, unlike the two above —
                            # matches the upstream implementation; confirm
                            # intentional.
                            dvxy = sigma_m[v][x] * sigma[x][y] / sigma[v][y]
                    sigma_m_v[x][y] = sigma_m[x][y] * (1 - dxvy)
                    PB_m_v[x][y] = PB_m[x][y] - PB_m[x][y] * dxvy
                    if y != v:
                        PB_m_v[x][y] -= PB_m[x][v] * dxyv
                    if x != v:
                        PB_m_v[x][y] -= PB_m[v][y] * dvxy
            # swap buffers: the "-v" versions become current for the next v.
            sigma_m, sigma_m_v = sigma_m_v, sigma_m
            PB_m, PB_m_v = PB_m_v, PB_m

        # endpoints
        v, c = len(G), len(group)
        if not endpoints:
            scale = 0
            # if the graph is connected then subtract the endpoints from
            # the count for all the nodes in the graph. else count how many
            # nodes are connected to the group's nodes and subtract that.
            if nx.is_directed(G):
                if nx.is_strongly_connected(G):
                    scale = c * (2 * v - c - 1)
            elif nx.is_connected(G):
                scale = c * (2 * v - c - 1)
            if scale == 0:
                for group_node1 in group:
                    for node in D[group_node1]:
                        if node != group_node1:
                            if node in group:
                                scale += 1
                            else:
                                scale += 2
            GBC_group -= scale

        # normalized
        if normalized:
            scale = 1 / ((v - c) * (v - c - 1))
            GBC_group *= scale

        # If undirected then count only the undirected edges
        elif not G.is_directed():
            GBC_group /= 2

        GBC.append(GBC_group)
    if list_of_groups:
        return GBC
    return GBC[0]
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
def _group_preprocessing(G, set_v, weight):
    """Run single-source shortest-path accumulation for every node of ``G``.

    Parameters
    ----------
    G : graph
    set_v : iterable
        Nodes appearing in any candidate group; the PB matrix is only
        built for these nodes.
    weight : string or None
        Edge attribute used as distance; None selects BFS.

    Returns
    -------
    PB : dict of dict
        Path-betweenness matrix restricted to ``set_v``.
    sigma : dict of dict
        Shortest-path counts sigma[s][t].
    D : dict of dict
        Shortest-path distances D[s][t].
    """
    sigma = {}
    delta = {}
    D = {}
    betweenness = dict.fromkeys(G, 0)
    for s in G:
        if weight is None:  # use BFS
            S, P, sigma[s], D[s] = _single_source_shortest_path_basic(G, s)
        else:  # use Dijkstra's algorithm
            S, P, sigma[s], D[s] = _single_source_dijkstra_path_basic(G, s, weight)
        betweenness, delta[s] = _accumulate_endpoints(betweenness, S, P, sigma[s], s)
        for i in delta[s]:  # add the paths from s to i and rescale sigma
            if s != i:
                delta[s][i] += 1
                if weight is not None:
                    sigma[s][i] = sigma[s][i] / 2
    # building the path betweenness matrix only for nodes that appear in the group
    PB = dict.fromkeys(G)
    for group_node1 in set_v:
        PB[group_node1] = dict.fromkeys(G, 0.0)
        for group_node2 in set_v:
            if group_node2 not in D[group_node1]:
                continue
            for node in G:
                # only accumulate when node reaches both group nodes
                if group_node2 in D[node] and group_node1 in D[node]:
                    # group_node1 lies on a shortest node->group_node2 path
                    if (
                        D[node][group_node2]
                        == D[node][group_node1] + D[group_node1][group_node2]
                    ):
                        PB[group_node1][group_node2] += (
                            delta[node][group_node2]
                            * sigma[node][group_node1]
                            * sigma[group_node1][group_node2]
                            / sigma[node][group_node2]
                        )
    return PB, sigma, D
|
| 238 |
+
|
| 239 |
+
|
| 240 |
+
@nx._dispatchable(edge_attrs="weight")
def prominent_group(
    G, k, weight=None, C=None, endpoints=False, normalized=True, greedy=False
):
    r"""Find the prominent group of size $k$ in graph $G$. The prominence of the
    group is evaluated by the group betweenness centrality.

    Group betweenness centrality of a group of nodes $C$ is the sum of the
    fraction of all-pairs shortest paths that pass through any vertex in $C$

    .. math::

       c_B(v) =\sum_{s,t \in V} \frac{\sigma(s, t|v)}{\sigma(s, t)}

    where $V$ is the set of nodes, $\sigma(s, t)$ is the number of
    shortest $(s, t)$-paths, and $\sigma(s, t|C)$ is the number of
    those paths passing through some node in group $C$. Note that
    $(s, t)$ are not members of the group ($V-C$ is the set of nodes
    in $V$ that are not in $C$).

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    k : int
        The number of nodes in the group.

    normalized : bool, optional (default=True)
        If True, group betweenness is normalized by
        ``1/((|V|-|C|)(|V|-|C|-1))`` where ``|V|`` is the number of nodes in
        G and ``|C|`` is the number of nodes in C.

    weight : None or string, optional (default=None)
        If None, all edge weights are considered equal.
        Otherwise holds the name of the edge attribute used as weight.
        The weight of an edge is treated as the length or distance between
        the two sides.

    endpoints : bool, optional (default=False)
        If True include the endpoints in the shortest path counts.

    C : list or set, optional (default=None)
        list of nodes which won't be candidates of the prominent group.

    greedy : bool, optional (default=False)
        Using a naive greedy algorithm in order to find non-optimal prominent
        group. For scale free networks the results are negligibly below the
        optimal results.

    Raises
    ------
    NodeNotFound
        If node(s) in C are not present in G.

    Returns
    -------
    max_GBC : float
        The group betweenness centrality of the prominent group.

    max_group : list
        The list of nodes in the prominent group.

    See Also
    --------
    betweenness_centrality, group_betweenness_centrality

    Notes
    -----
    Group betweenness centrality is described in [1]_ and its importance
    discussed in [3]_. The algorithm is described in [2]_ and is based on
    techniques mentioned in [4]_.

    The number of nodes in the group must be a maximum of ``n - 2`` where
    ``n`` is the total number of nodes in the graph.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length
    paths between pairs of nodes.

    The total number of paths between source and target is counted
    differently for directed and undirected graphs. Directed paths
    between "u" and "v" are counted as two possible paths (one each
    direction) while undirected paths between "u" and "v" are counted
    as one path. Said another way, the sum in the expression above is
    over all ``s != t`` for directed graphs and for ``s < t`` for
    undirected graphs.

    References
    ----------
    .. [1] M G Everett and S P Borgatti:
       The Centrality of Groups and Classes.
       Journal of Mathematical Sociology. 23(3): 181-201. 1999.
       http://www.analytictech.com/borgatti/group_centrality.htm
    .. [2] Rami Puzis, Yuval Elovici, and Shlomi Dolev:
       "Finding the Most Prominent Group in Complex Networks"
       AI communications 20(4): 287-296, 2007.
       https://www.researchgate.net/profile/Rami_Puzis2/publication/220308855
    .. [3] Sourav Medya et. al.:
       Group Centrality Maximization via Network Design.
       SIAM International Conference on Data Mining, SDM 2018, 126–134.
       https://sites.cs.ucsb.edu/~arlei/pubs/sdm18.pdf
    .. [4] Rami Puzis, Yuval Elovici, and Shlomi Dolev.
       "Fast algorithm for successive computation of group betweenness
       centrality."
       https://journals.aps.org/pre/pdf/10.1103/PhysRevE.76.056709
    """
    import numpy as np
    import pandas as pd

    if C is not None:
        C = set(C)
        if C - G.nodes:  # element(s) of C not in G
            raise nx.NodeNotFound(f"The node(s) {C - G.nodes} are in C but not in G.")
        nodes = list(G.nodes - C)
    else:
        nodes = list(G.nodes)
    # DF_tree is the branch-and-bound search tree; each tree node stores its
    # candidate list (CL), current group members (GM), GBC so far,
    # path-count matrix (sigma), per-node contributions (cont) and heuristic.
    DF_tree = nx.Graph()
    DF_tree.__networkx_cache__ = None  # Disable caching
    PB, sigma, D = _group_preprocessing(G, nodes, weight)
    betweenness = pd.DataFrame.from_dict(PB)
    if C is not None:
        for node in C:
            # remove from the betweenness all the nodes not part of the group
            betweenness.drop(index=node, inplace=True)
            betweenness.drop(columns=node, inplace=True)
    # candidate list sorted by decreasing diagonal contribution
    CL = [node for _, node in sorted(zip(np.diag(betweenness), nodes), reverse=True)]
    max_GBC = 0
    max_group = []
    DF_tree.add_node(
        1,
        CL=CL,
        betweenness=betweenness,
        GBC=0,
        GM=[],
        sigma=sigma,
        cont=dict(zip(nodes, np.diag(betweenness))),
    )

    # the algorithm: seed the root heuristic with the k best contributions,
    # then run depth-first branch and bound.
    DF_tree.nodes[1]["heu"] = 0
    for i in range(k):
        DF_tree.nodes[1]["heu"] += DF_tree.nodes[1]["cont"][DF_tree.nodes[1]["CL"][i]]
    max_GBC, DF_tree, max_group = _dfbnb(
        G, k, DF_tree, max_GBC, 1, D, max_group, nodes, greedy
    )

    v = len(G)
    if not endpoints:
        scale = 0
        # if the graph is connected then subtract the endpoints from
        # the count for all the nodes in the graph. else count how many
        # nodes are connected to the group's nodes and subtract that.
        if nx.is_directed(G):
            if nx.is_strongly_connected(G):
                scale = k * (2 * v - k - 1)
        elif nx.is_connected(G):
            scale = k * (2 * v - k - 1)
        if scale == 0:
            for group_node1 in max_group:
                for node in D[group_node1]:
                    if node != group_node1:
                        if node in max_group:
                            scale += 1
                        else:
                            scale += 2
        max_GBC -= scale

    # normalized
    if normalized:
        scale = 1 / ((v - k) * (v - k - 1))
        max_GBC *= scale

    # If undirected then count only the undirected edges
    elif not G.is_directed():
        max_GBC /= 2
    # round the reported centrality to two decimal places
    max_GBC = float(f"{max_GBC:.2f}")
    return max_GBC, max_group
|
| 414 |
+
|
| 415 |
+
|
| 416 |
+
def _dfbnb(G, k, DF_tree, max_GBC, root, D, max_group, nodes, greedy):
    """Depth-first branch-and-bound search over the DF_tree for `prominent_group`.

    Expands the tree node ``root`` into a "plus" child (first candidate added
    to the group) and a "minus" child (first candidate discarded), recursing
    into the more promising child first.  Returns the best
    ``(max_GBC, DF_tree, max_group)`` found so far.
    """
    # stopping condition - if we found a group of size k and with higher GBC then prune
    if len(DF_tree.nodes[root]["GM"]) == k and DF_tree.nodes[root]["GBC"] > max_GBC:
        return DF_tree.nodes[root]["GBC"], DF_tree, DF_tree.nodes[root]["GM"]
    # stopping condition - if the size of group members equal to k or there are less than
    # k - |GM| in the candidate list or the heuristic function plus the GBC is below the
    # maximal GBC found then prune
    if (
        len(DF_tree.nodes[root]["GM"]) == k
        or len(DF_tree.nodes[root]["CL"]) <= k - len(DF_tree.nodes[root]["GM"])
        or DF_tree.nodes[root]["GBC"] + DF_tree.nodes[root]["heu"] <= max_GBC
    ):
        return max_GBC, DF_tree, max_group

    # finding the heuristic of both children
    node_p, node_m, DF_tree = _heuristic(k, root, DF_tree, D, nodes, greedy)

    # finding the child with the bigger heuristic + GBC and expand
    # that node first; if greedy then only expand the plus node
    if greedy:
        max_GBC, DF_tree, max_group = _dfbnb(
            G, k, DF_tree, max_GBC, node_p, D, max_group, nodes, greedy
        )

    elif (
        DF_tree.nodes[node_p]["GBC"] + DF_tree.nodes[node_p]["heu"]
        > DF_tree.nodes[node_m]["GBC"] + DF_tree.nodes[node_m]["heu"]
    ):
        max_GBC, DF_tree, max_group = _dfbnb(
            G, k, DF_tree, max_GBC, node_p, D, max_group, nodes, greedy
        )
        max_GBC, DF_tree, max_group = _dfbnb(
            G, k, DF_tree, max_GBC, node_m, D, max_group, nodes, greedy
        )
    else:
        max_GBC, DF_tree, max_group = _dfbnb(
            G, k, DF_tree, max_GBC, node_m, D, max_group, nodes, greedy
        )
        max_GBC, DF_tree, max_group = _dfbnb(
            G, k, DF_tree, max_GBC, node_p, D, max_group, nodes, greedy
        )
    return max_GBC, DF_tree, max_group
|
| 458 |
+
|
| 459 |
+
|
| 460 |
+
def _heuristic(k, root, DF_tree, D, nodes, greedy):
    """Create the two children of ``root`` in the DF_tree and score them.

    The "plus" child absorbs the first candidate (``CL[0]``) into the group
    and rescales its sigma / betweenness matrices accordingly; the "minus"
    child simply discards that candidate (skipped when ``greedy``, in which
    case ``node_m`` is returned as None).  Each child gets an updated
    candidate list (CL), per-node contribution dict (cont) and heuristic
    (heu = sum of the best remaining k-|GM| contributions).
    """
    import numpy as np

    # This helper function adds two nodes to DF_tree - one left son and the
    # other right son - and finds their heuristic, CL, GBC, and GM
    node_p = DF_tree.number_of_nodes() + 1
    node_m = DF_tree.number_of_nodes() + 2
    added_node = DF_tree.nodes[root]["CL"][0]

    # adding the plus node
    DF_tree.add_nodes_from([(node_p, deepcopy(DF_tree.nodes[root]))])
    DF_tree.nodes[node_p]["GM"].append(added_node)
    DF_tree.nodes[node_p]["GBC"] += DF_tree.nodes[node_p]["cont"][added_node]
    root_node = DF_tree.nodes[root]
    for x in nodes:
        for y in nodes:
            # dxyv / dxvy / dvxy: fractions of shortest paths through the
            # newly added node (same rescaling as group_betweenness_centrality)
            dxvy = 0
            dxyv = 0
            dvxy = 0
            if not (
                root_node["sigma"][x][y] == 0
                or root_node["sigma"][x][added_node] == 0
                or root_node["sigma"][added_node][y] == 0
            ):
                if D[x][added_node] == D[x][y] + D[y][added_node]:
                    dxyv = (
                        root_node["sigma"][x][y]
                        * root_node["sigma"][y][added_node]
                        / root_node["sigma"][x][added_node]
                    )
                if D[x][y] == D[x][added_node] + D[added_node][y]:
                    dxvy = (
                        root_node["sigma"][x][added_node]
                        * root_node["sigma"][added_node][y]
                        / root_node["sigma"][x][y]
                    )
                if D[added_node][y] == D[added_node][x] + D[x][y]:
                    dvxy = (
                        root_node["sigma"][added_node][x]
                        * root_node["sigma"][x][y]
                        / root_node["sigma"][added_node][y]
                    )
            DF_tree.nodes[node_p]["sigma"][x][y] = root_node["sigma"][x][y] * (1 - dxvy)
            # note: DataFrame column/row access root_node["betweenness"][x][y]
            # reads column x row y; .loc[y, x] writes the same cell
            DF_tree.nodes[node_p]["betweenness"].loc[y, x] = (
                root_node["betweenness"][x][y] - root_node["betweenness"][x][y] * dxvy
            )
            if y != added_node:
                DF_tree.nodes[node_p]["betweenness"].loc[y, x] -= (
                    root_node["betweenness"][x][added_node] * dxyv
                )
            if x != added_node:
                DF_tree.nodes[node_p]["betweenness"].loc[y, x] -= (
                    root_node["betweenness"][added_node][y] * dvxy
                )

    # candidate list: nodes not already in the group, sorted by decreasing
    # diagonal contribution
    DF_tree.nodes[node_p]["CL"] = [
        node
        for _, node in sorted(
            zip(np.diag(DF_tree.nodes[node_p]["betweenness"]), nodes), reverse=True
        )
        if node not in DF_tree.nodes[node_p]["GM"]
    ]
    DF_tree.nodes[node_p]["cont"] = dict(
        zip(nodes, np.diag(DF_tree.nodes[node_p]["betweenness"]))
    )
    DF_tree.nodes[node_p]["heu"] = 0
    for i in range(k - len(DF_tree.nodes[node_p]["GM"])):
        DF_tree.nodes[node_p]["heu"] += DF_tree.nodes[node_p]["cont"][
            DF_tree.nodes[node_p]["CL"][i]
        ]

    # adding the minus node - don't insert the first node in the CL to GM.
    # Insert minus node only if this isn't the greedy type algorithm
    if not greedy:
        DF_tree.add_nodes_from([(node_m, deepcopy(DF_tree.nodes[root]))])
        DF_tree.nodes[node_m]["CL"].pop(0)
        DF_tree.nodes[node_m]["cont"].pop(added_node)
        DF_tree.nodes[node_m]["heu"] = 0
        for i in range(k - len(DF_tree.nodes[node_m]["GM"])):
            DF_tree.nodes[node_m]["heu"] += DF_tree.nodes[node_m]["cont"][
                DF_tree.nodes[node_m]["CL"][i]
            ]
    else:
        node_m = None

    return node_p, node_m, DF_tree
|
| 546 |
+
|
| 547 |
+
|
| 548 |
+
@nx._dispatchable(edge_attrs="weight")
def group_closeness_centrality(G, S, weight=None):
    r"""Compute the group closeness centrality for a group of nodes.

    Group closeness centrality of a group of nodes $S$ measures how close
    the group is to the other nodes in the graph:

    .. math::

       c_{close}(S) = \frac{|V-S|}{\sum_{v \in V-S} d_{S, v}}

       d_{S, v} = min_{u \in S} (d_{u, v})

    where $V$ is the set of nodes and $d_{S, v}$ is the distance of the
    group $S$ from $v$ as defined above ($V-S$ is the set of nodes in $V$
    that are not in $S$).

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    S : list or set
        S is a group of nodes which belong to G, for which group closeness
        centrality is to be calculated.

    weight : None or string, optional (default=None)
        If None, all edge weights are considered equal.
        Otherwise holds the name of the edge attribute used as weight.
        The weight of an edge is treated as the length or distance between
        the two sides.

    Raises
    ------
    NodeNotFound
        If node(s) in S are not present in G.

    Returns
    -------
    closeness : float
        Group closeness centrality of the group S.

    See Also
    --------
    closeness_centrality

    Notes
    -----
    The measure was introduced in [1]_; the formula implemented here is
    described in [2]_.

    Higher values of closeness indicate greater centrality.

    It is assumed that 1 / 0 is 0 (required in the case of directed graphs,
    or when a shortest path length is 0).

    The number of nodes in the group must be a maximum of n - 1 where `n`
    is the total number of nodes in the graph.

    For directed graphs, the incoming distance is utilized here. To use the
    outward distance, act on `G.reverse()`.

    For weighted graphs the edge weights must be greater than zero.
    Zero edge weights can produce an infinite number of equal length
    paths between pairs of nodes.

    References
    ----------
    .. [1] M G Everett and S P Borgatti:
       The Centrality of Groups and Classes.
       Journal of Mathematical Sociology. 23(3): 181-201. 1999.
       http://www.analytictech.com/borgatti/group_centrality.htm
    .. [2] J. Zhao et. al.:
       Measuring and Maximizing Group Closeness Centrality over
       Disk Resident Graphs.
       WWWConference Proceedings, 2014. 689-694.
       https://doi.org/10.1145/2567948.2579356
    """
    if G.is_directed():
        # incoming distance: compute on the reverse view
        G = G.reverse()
    group = set(S)
    outside = set(G) - group  # nodes of G that are not in the group
    # distance from the group to every reachable node
    dist_from_group = nx.multi_source_dijkstra_path_length(G, group, weight=weight)
    # accumulation: unreachable nodes contribute nothing to the total
    total_distance = sum(dist_from_group[u] for u in outside if u in dist_from_group)
    if total_distance == 0:
        return 0  # 1 / 0 assumed as 0
    return len(outside) / total_distance
|
| 643 |
+
|
| 644 |
+
|
| 645 |
+
@nx._dispatchable
|
| 646 |
+
def group_degree_centrality(G, S):
|
| 647 |
+
"""Compute the group degree centrality for a group of nodes.
|
| 648 |
+
|
| 649 |
+
Group degree centrality of a group of nodes $S$ is the fraction
|
| 650 |
+
of non-group members connected to group members.
|
| 651 |
+
|
| 652 |
+
Parameters
|
| 653 |
+
----------
|
| 654 |
+
G : graph
|
| 655 |
+
A NetworkX graph.
|
| 656 |
+
|
| 657 |
+
S : list or set
|
| 658 |
+
S is a group of nodes which belong to G, for which group degree
|
| 659 |
+
centrality is to be calculated.
|
| 660 |
+
|
| 661 |
+
Raises
|
| 662 |
+
------
|
| 663 |
+
NetworkXError
|
| 664 |
+
If node(s) in S are not in G.
|
| 665 |
+
|
| 666 |
+
Returns
|
| 667 |
+
-------
|
| 668 |
+
centrality : float
|
| 669 |
+
Group degree centrality of the group S.
|
| 670 |
+
|
| 671 |
+
See Also
|
| 672 |
+
--------
|
| 673 |
+
degree_centrality
|
| 674 |
+
group_in_degree_centrality
|
| 675 |
+
group_out_degree_centrality
|
| 676 |
+
|
| 677 |
+
Notes
|
| 678 |
+
-----
|
| 679 |
+
The measure was introduced in [1]_.
|
| 680 |
+
|
| 681 |
+
The number of nodes in the group must be a maximum of n - 1 where `n`
|
| 682 |
+
is the total number of nodes in the graph.
|
| 683 |
+
|
| 684 |
+
References
|
| 685 |
+
----------
|
| 686 |
+
.. [1] M G Everett and S P Borgatti:
|
| 687 |
+
The Centrality of Groups and Classes.
|
| 688 |
+
Journal of Mathematical Sociology. 23(3): 181-201. 1999.
|
| 689 |
+
http://www.analytictech.com/borgatti/group_centrality.htm
|
| 690 |
+
"""
|
| 691 |
+
centrality = len(set().union(*[set(G.neighbors(i)) for i in S]) - set(S))
|
| 692 |
+
centrality /= len(G.nodes()) - len(S)
|
| 693 |
+
return centrality
|
| 694 |
+
|
| 695 |
+
|
| 696 |
+
@not_implemented_for("undirected")
|
| 697 |
+
@nx._dispatchable
|
| 698 |
+
def group_in_degree_centrality(G, S):
|
| 699 |
+
"""Compute the group in-degree centrality for a group of nodes.
|
| 700 |
+
|
| 701 |
+
Group in-degree centrality of a group of nodes $S$ is the fraction
|
| 702 |
+
of non-group members connected to group members by incoming edges.
|
| 703 |
+
|
| 704 |
+
Parameters
|
| 705 |
+
----------
|
| 706 |
+
G : graph
|
| 707 |
+
A NetworkX graph.
|
| 708 |
+
|
| 709 |
+
S : list or set
|
| 710 |
+
S is a group of nodes which belong to G, for which group in-degree
|
| 711 |
+
centrality is to be calculated.
|
| 712 |
+
|
| 713 |
+
Returns
|
| 714 |
+
-------
|
| 715 |
+
centrality : float
|
| 716 |
+
Group in-degree centrality of the group S.
|
| 717 |
+
|
| 718 |
+
Raises
|
| 719 |
+
------
|
| 720 |
+
NetworkXNotImplemented
|
| 721 |
+
If G is undirected.
|
| 722 |
+
|
| 723 |
+
NodeNotFound
|
| 724 |
+
If node(s) in S are not in G.
|
| 725 |
+
|
| 726 |
+
See Also
|
| 727 |
+
--------
|
| 728 |
+
degree_centrality
|
| 729 |
+
group_degree_centrality
|
| 730 |
+
group_out_degree_centrality
|
| 731 |
+
|
| 732 |
+
Notes
|
| 733 |
+
-----
|
| 734 |
+
The number of nodes in the group must be a maximum of n - 1 where `n`
|
| 735 |
+
is the total number of nodes in the graph.
|
| 736 |
+
|
| 737 |
+
`G.neighbors(i)` gives nodes with an outward edge from i, in a DiGraph,
|
| 738 |
+
so for group in-degree centrality, the reverse graph is used.
|
| 739 |
+
"""
|
| 740 |
+
return group_degree_centrality(G.reverse(), S)
|
| 741 |
+
|
| 742 |
+
|
| 743 |
+
@not_implemented_for("undirected")
|
| 744 |
+
@nx._dispatchable
|
| 745 |
+
def group_out_degree_centrality(G, S):
|
| 746 |
+
"""Compute the group out-degree centrality for a group of nodes.
|
| 747 |
+
|
| 748 |
+
Group out-degree centrality of a group of nodes $S$ is the fraction
|
| 749 |
+
of non-group members connected to group members by outgoing edges.
|
| 750 |
+
|
| 751 |
+
Parameters
|
| 752 |
+
----------
|
| 753 |
+
G : graph
|
| 754 |
+
A NetworkX graph.
|
| 755 |
+
|
| 756 |
+
S : list or set
|
| 757 |
+
S is a group of nodes which belong to G, for which group in-degree
|
| 758 |
+
centrality is to be calculated.
|
| 759 |
+
|
| 760 |
+
Returns
|
| 761 |
+
-------
|
| 762 |
+
centrality : float
|
| 763 |
+
Group out-degree centrality of the group S.
|
| 764 |
+
|
| 765 |
+
Raises
|
| 766 |
+
------
|
| 767 |
+
NetworkXNotImplemented
|
| 768 |
+
If G is undirected.
|
| 769 |
+
|
| 770 |
+
NodeNotFound
|
| 771 |
+
If node(s) in S are not in G.
|
| 772 |
+
|
| 773 |
+
See Also
|
| 774 |
+
--------
|
| 775 |
+
degree_centrality
|
| 776 |
+
group_degree_centrality
|
| 777 |
+
group_in_degree_centrality
|
| 778 |
+
|
| 779 |
+
Notes
|
| 780 |
+
-----
|
| 781 |
+
The number of nodes in the group must be a maximum of n - 1 where `n`
|
| 782 |
+
is the total number of nodes in the graph.
|
| 783 |
+
|
| 784 |
+
`G.neighbors(i)` gives nodes with an outward edge from i, in a DiGraph,
|
| 785 |
+
so for group out-degree centrality, the graph itself is used.
|
| 786 |
+
"""
|
| 787 |
+
return group_degree_centrality(G, S)
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/katz.py
ADDED
|
@@ -0,0 +1,331 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Katz centrality."""
|
| 2 |
+
|
| 3 |
+
import math
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.utils import not_implemented_for
|
| 7 |
+
|
| 8 |
+
__all__ = ["katz_centrality", "katz_centrality_numpy"]
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@not_implemented_for("multigraph")
|
| 12 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 13 |
+
def katz_centrality(
|
| 14 |
+
G,
|
| 15 |
+
alpha=0.1,
|
| 16 |
+
beta=1.0,
|
| 17 |
+
max_iter=1000,
|
| 18 |
+
tol=1.0e-6,
|
| 19 |
+
nstart=None,
|
| 20 |
+
normalized=True,
|
| 21 |
+
weight=None,
|
| 22 |
+
):
|
| 23 |
+
r"""Compute the Katz centrality for the nodes of the graph G.
|
| 24 |
+
|
| 25 |
+
Katz centrality computes the centrality for a node based on the centrality
|
| 26 |
+
of its neighbors. It is a generalization of the eigenvector centrality. The
|
| 27 |
+
Katz centrality for node $i$ is
|
| 28 |
+
|
| 29 |
+
.. math::
|
| 30 |
+
|
| 31 |
+
x_i = \alpha \sum_{j} A_{ij} x_j + \beta,
|
| 32 |
+
|
| 33 |
+
where $A$ is the adjacency matrix of graph G with eigenvalues $\lambda$.
|
| 34 |
+
|
| 35 |
+
The parameter $\beta$ controls the initial centrality and
|
| 36 |
+
|
| 37 |
+
.. math::
|
| 38 |
+
|
| 39 |
+
\alpha < \frac{1}{\lambda_{\max}}.
|
| 40 |
+
|
| 41 |
+
Katz centrality computes the relative influence of a node within a
|
| 42 |
+
network by measuring the number of the immediate neighbors (first
|
| 43 |
+
degree nodes) and also all other nodes in the network that connect
|
| 44 |
+
to the node under consideration through these immediate neighbors.
|
| 45 |
+
|
| 46 |
+
Extra weight can be provided to immediate neighbors through the
|
| 47 |
+
parameter $\beta$. Connections made with distant neighbors
|
| 48 |
+
are, however, penalized by an attenuation factor $\alpha$ which
|
| 49 |
+
should be strictly less than the inverse largest eigenvalue of the
|
| 50 |
+
adjacency matrix in order for the Katz centrality to be computed
|
| 51 |
+
correctly. More information is provided in [1]_.
|
| 52 |
+
|
| 53 |
+
Parameters
|
| 54 |
+
----------
|
| 55 |
+
G : graph
|
| 56 |
+
A NetworkX graph.
|
| 57 |
+
|
| 58 |
+
alpha : float, optional (default=0.1)
|
| 59 |
+
Attenuation factor
|
| 60 |
+
|
| 61 |
+
beta : scalar or dictionary, optional (default=1.0)
|
| 62 |
+
Weight attributed to the immediate neighborhood. If not a scalar, the
|
| 63 |
+
dictionary must have a value for every node.
|
| 64 |
+
|
| 65 |
+
max_iter : integer, optional (default=1000)
|
| 66 |
+
Maximum number of iterations in power method.
|
| 67 |
+
|
| 68 |
+
tol : float, optional (default=1.0e-6)
|
| 69 |
+
Error tolerance used to check convergence in power method iteration.
|
| 70 |
+
|
| 71 |
+
nstart : dictionary, optional
|
| 72 |
+
Starting value of Katz iteration for each node.
|
| 73 |
+
|
| 74 |
+
normalized : bool, optional (default=True)
|
| 75 |
+
If True normalize the resulting values.
|
| 76 |
+
|
| 77 |
+
weight : None or string, optional (default=None)
|
| 78 |
+
If None, all edge weights are considered equal.
|
| 79 |
+
Otherwise holds the name of the edge attribute used as weight.
|
| 80 |
+
In this measure the weight is interpreted as the connection strength.
|
| 81 |
+
|
| 82 |
+
Returns
|
| 83 |
+
-------
|
| 84 |
+
nodes : dictionary
|
| 85 |
+
Dictionary of nodes with Katz centrality as the value.
|
| 86 |
+
|
| 87 |
+
Raises
|
| 88 |
+
------
|
| 89 |
+
NetworkXError
|
| 90 |
+
If the parameter `beta` is not a scalar but lacks a value for at least
|
| 91 |
+
one node
|
| 92 |
+
|
| 93 |
+
PowerIterationFailedConvergence
|
| 94 |
+
If the algorithm fails to converge to the specified tolerance
|
| 95 |
+
within the specified number of iterations of the power iteration
|
| 96 |
+
method.
|
| 97 |
+
|
| 98 |
+
Examples
|
| 99 |
+
--------
|
| 100 |
+
>>> import math
|
| 101 |
+
>>> G = nx.path_graph(4)
|
| 102 |
+
>>> phi = (1 + math.sqrt(5)) / 2.0 # largest eigenvalue of adj matrix
|
| 103 |
+
>>> centrality = nx.katz_centrality(G, 1 / phi - 0.01)
|
| 104 |
+
>>> for n, c in sorted(centrality.items()):
|
| 105 |
+
... print(f"{n} {c:.2f}")
|
| 106 |
+
0 0.37
|
| 107 |
+
1 0.60
|
| 108 |
+
2 0.60
|
| 109 |
+
3 0.37
|
| 110 |
+
|
| 111 |
+
See Also
|
| 112 |
+
--------
|
| 113 |
+
katz_centrality_numpy
|
| 114 |
+
eigenvector_centrality
|
| 115 |
+
eigenvector_centrality_numpy
|
| 116 |
+
:func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
|
| 117 |
+
:func:`~networkx.algorithms.link_analysis.hits_alg.hits`
|
| 118 |
+
|
| 119 |
+
Notes
|
| 120 |
+
-----
|
| 121 |
+
Katz centrality was introduced by [2]_.
|
| 122 |
+
|
| 123 |
+
This algorithm it uses the power method to find the eigenvector
|
| 124 |
+
corresponding to the largest eigenvalue of the adjacency matrix of ``G``.
|
| 125 |
+
The parameter ``alpha`` should be strictly less than the inverse of largest
|
| 126 |
+
eigenvalue of the adjacency matrix for the algorithm to converge.
|
| 127 |
+
You can use ``max(nx.adjacency_spectrum(G))`` to get $\lambda_{\max}$ the largest
|
| 128 |
+
eigenvalue of the adjacency matrix.
|
| 129 |
+
The iteration will stop after ``max_iter`` iterations or an error tolerance of
|
| 130 |
+
``number_of_nodes(G) * tol`` has been reached.
|
| 131 |
+
|
| 132 |
+
For strongly connected graphs, as $\alpha \to 1/\lambda_{\max}$, and $\beta > 0$,
|
| 133 |
+
Katz centrality approaches the results for eigenvector centrality.
|
| 134 |
+
|
| 135 |
+
For directed graphs this finds "left" eigenvectors which corresponds
|
| 136 |
+
to the in-edges in the graph. For out-edges Katz centrality,
|
| 137 |
+
first reverse the graph with ``G.reverse()``.
|
| 138 |
+
|
| 139 |
+
References
|
| 140 |
+
----------
|
| 141 |
+
.. [1] Mark E. J. Newman:
|
| 142 |
+
Networks: An Introduction.
|
| 143 |
+
Oxford University Press, USA, 2010, p. 720.
|
| 144 |
+
.. [2] Leo Katz:
|
| 145 |
+
A New Status Index Derived from Sociometric Index.
|
| 146 |
+
Psychometrika 18(1):39–43, 1953
|
| 147 |
+
https://link.springer.com/content/pdf/10.1007/BF02289026.pdf
|
| 148 |
+
"""
|
| 149 |
+
if len(G) == 0:
|
| 150 |
+
return {}
|
| 151 |
+
|
| 152 |
+
nnodes = G.number_of_nodes()
|
| 153 |
+
|
| 154 |
+
if nstart is None:
|
| 155 |
+
# choose starting vector with entries of 0
|
| 156 |
+
x = {n: 0 for n in G}
|
| 157 |
+
else:
|
| 158 |
+
x = nstart
|
| 159 |
+
|
| 160 |
+
try:
|
| 161 |
+
b = dict.fromkeys(G, float(beta))
|
| 162 |
+
except (TypeError, ValueError, AttributeError) as err:
|
| 163 |
+
b = beta
|
| 164 |
+
if set(beta) != set(G):
|
| 165 |
+
raise nx.NetworkXError(
|
| 166 |
+
"beta dictionary must have a value for every node"
|
| 167 |
+
) from err
|
| 168 |
+
|
| 169 |
+
# make up to max_iter iterations
|
| 170 |
+
for _ in range(max_iter):
|
| 171 |
+
xlast = x
|
| 172 |
+
x = dict.fromkeys(xlast, 0)
|
| 173 |
+
# do the multiplication y^T = Alpha * x^T A + Beta
|
| 174 |
+
for n in x:
|
| 175 |
+
for nbr in G[n]:
|
| 176 |
+
x[nbr] += xlast[n] * G[n][nbr].get(weight, 1)
|
| 177 |
+
for n in x:
|
| 178 |
+
x[n] = alpha * x[n] + b[n]
|
| 179 |
+
|
| 180 |
+
# check convergence
|
| 181 |
+
error = sum(abs(x[n] - xlast[n]) for n in x)
|
| 182 |
+
if error < nnodes * tol:
|
| 183 |
+
if normalized:
|
| 184 |
+
# normalize vector
|
| 185 |
+
try:
|
| 186 |
+
s = 1.0 / math.hypot(*x.values())
|
| 187 |
+
except ZeroDivisionError:
|
| 188 |
+
s = 1.0
|
| 189 |
+
else:
|
| 190 |
+
s = 1
|
| 191 |
+
for n in x:
|
| 192 |
+
x[n] *= s
|
| 193 |
+
return x
|
| 194 |
+
raise nx.PowerIterationFailedConvergence(max_iter)
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
@not_implemented_for("multigraph")
|
| 198 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 199 |
+
def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, weight=None):
|
| 200 |
+
r"""Compute the Katz centrality for the graph G.
|
| 201 |
+
|
| 202 |
+
Katz centrality computes the centrality for a node based on the centrality
|
| 203 |
+
of its neighbors. It is a generalization of the eigenvector centrality. The
|
| 204 |
+
Katz centrality for node $i$ is
|
| 205 |
+
|
| 206 |
+
.. math::
|
| 207 |
+
|
| 208 |
+
x_i = \alpha \sum_{j} A_{ij} x_j + \beta,
|
| 209 |
+
|
| 210 |
+
where $A$ is the adjacency matrix of graph G with eigenvalues $\lambda$.
|
| 211 |
+
|
| 212 |
+
The parameter $\beta$ controls the initial centrality and
|
| 213 |
+
|
| 214 |
+
.. math::
|
| 215 |
+
|
| 216 |
+
\alpha < \frac{1}{\lambda_{\max}}.
|
| 217 |
+
|
| 218 |
+
Katz centrality computes the relative influence of a node within a
|
| 219 |
+
network by measuring the number of the immediate neighbors (first
|
| 220 |
+
degree nodes) and also all other nodes in the network that connect
|
| 221 |
+
to the node under consideration through these immediate neighbors.
|
| 222 |
+
|
| 223 |
+
Extra weight can be provided to immediate neighbors through the
|
| 224 |
+
parameter $\beta$. Connections made with distant neighbors
|
| 225 |
+
are, however, penalized by an attenuation factor $\alpha$ which
|
| 226 |
+
should be strictly less than the inverse largest eigenvalue of the
|
| 227 |
+
adjacency matrix in order for the Katz centrality to be computed
|
| 228 |
+
correctly. More information is provided in [1]_.
|
| 229 |
+
|
| 230 |
+
Parameters
|
| 231 |
+
----------
|
| 232 |
+
G : graph
|
| 233 |
+
A NetworkX graph
|
| 234 |
+
|
| 235 |
+
alpha : float
|
| 236 |
+
Attenuation factor
|
| 237 |
+
|
| 238 |
+
beta : scalar or dictionary, optional (default=1.0)
|
| 239 |
+
Weight attributed to the immediate neighborhood. If not a scalar the
|
| 240 |
+
dictionary must have an value for every node.
|
| 241 |
+
|
| 242 |
+
normalized : bool
|
| 243 |
+
If True normalize the resulting values.
|
| 244 |
+
|
| 245 |
+
weight : None or string, optional
|
| 246 |
+
If None, all edge weights are considered equal.
|
| 247 |
+
Otherwise holds the name of the edge attribute used as weight.
|
| 248 |
+
In this measure the weight is interpreted as the connection strength.
|
| 249 |
+
|
| 250 |
+
Returns
|
| 251 |
+
-------
|
| 252 |
+
nodes : dictionary
|
| 253 |
+
Dictionary of nodes with Katz centrality as the value.
|
| 254 |
+
|
| 255 |
+
Raises
|
| 256 |
+
------
|
| 257 |
+
NetworkXError
|
| 258 |
+
If the parameter `beta` is not a scalar but lacks a value for at least
|
| 259 |
+
one node
|
| 260 |
+
|
| 261 |
+
Examples
|
| 262 |
+
--------
|
| 263 |
+
>>> import math
|
| 264 |
+
>>> G = nx.path_graph(4)
|
| 265 |
+
>>> phi = (1 + math.sqrt(5)) / 2.0 # largest eigenvalue of adj matrix
|
| 266 |
+
>>> centrality = nx.katz_centrality_numpy(G, 1 / phi)
|
| 267 |
+
>>> for n, c in sorted(centrality.items()):
|
| 268 |
+
... print(f"{n} {c:.2f}")
|
| 269 |
+
0 0.37
|
| 270 |
+
1 0.60
|
| 271 |
+
2 0.60
|
| 272 |
+
3 0.37
|
| 273 |
+
|
| 274 |
+
See Also
|
| 275 |
+
--------
|
| 276 |
+
katz_centrality
|
| 277 |
+
eigenvector_centrality_numpy
|
| 278 |
+
eigenvector_centrality
|
| 279 |
+
:func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
|
| 280 |
+
:func:`~networkx.algorithms.link_analysis.hits_alg.hits`
|
| 281 |
+
|
| 282 |
+
Notes
|
| 283 |
+
-----
|
| 284 |
+
Katz centrality was introduced by [2]_.
|
| 285 |
+
|
| 286 |
+
This algorithm uses a direct linear solver to solve the above equation.
|
| 287 |
+
The parameter ``alpha`` should be strictly less than the inverse of largest
|
| 288 |
+
eigenvalue of the adjacency matrix for there to be a solution.
|
| 289 |
+
You can use ``max(nx.adjacency_spectrum(G))`` to get $\lambda_{\max}$ the largest
|
| 290 |
+
eigenvalue of the adjacency matrix.
|
| 291 |
+
|
| 292 |
+
For strongly connected graphs, as $\alpha \to 1/\lambda_{\max}$, and $\beta > 0$,
|
| 293 |
+
Katz centrality approaches the results for eigenvector centrality.
|
| 294 |
+
|
| 295 |
+
For directed graphs this finds "left" eigenvectors which corresponds
|
| 296 |
+
to the in-edges in the graph. For out-edges Katz centrality,
|
| 297 |
+
first reverse the graph with ``G.reverse()``.
|
| 298 |
+
|
| 299 |
+
References
|
| 300 |
+
----------
|
| 301 |
+
.. [1] Mark E. J. Newman:
|
| 302 |
+
Networks: An Introduction.
|
| 303 |
+
Oxford University Press, USA, 2010, p. 173.
|
| 304 |
+
.. [2] Leo Katz:
|
| 305 |
+
A New Status Index Derived from Sociometric Index.
|
| 306 |
+
Psychometrika 18(1):39–43, 1953
|
| 307 |
+
https://link.springer.com/content/pdf/10.1007/BF02289026.pdf
|
| 308 |
+
"""
|
| 309 |
+
import numpy as np
|
| 310 |
+
|
| 311 |
+
if len(G) == 0:
|
| 312 |
+
return {}
|
| 313 |
+
try:
|
| 314 |
+
nodelist = beta.keys()
|
| 315 |
+
if set(nodelist) != set(G):
|
| 316 |
+
raise nx.NetworkXError("beta dictionary must have a value for every node")
|
| 317 |
+
b = np.array(list(beta.values()), dtype=float)
|
| 318 |
+
except AttributeError:
|
| 319 |
+
nodelist = list(G)
|
| 320 |
+
try:
|
| 321 |
+
b = np.ones((len(nodelist), 1)) * beta
|
| 322 |
+
except (TypeError, ValueError, AttributeError) as err:
|
| 323 |
+
raise nx.NetworkXError("beta must be a number") from err
|
| 324 |
+
|
| 325 |
+
A = nx.adjacency_matrix(G, nodelist=nodelist, weight=weight).todense().T
|
| 326 |
+
n = A.shape[0]
|
| 327 |
+
centrality = np.linalg.solve(np.eye(n, n) - (alpha * A), b).squeeze()
|
| 328 |
+
|
| 329 |
+
# Normalize: rely on truediv to cast to float, then tolist to make Python numbers
|
| 330 |
+
norm = np.sign(sum(centrality)) * np.linalg.norm(centrality) if normalized else 1
|
| 331 |
+
return dict(zip(nodelist, (centrality / norm).tolist()))
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/percolation.py
ADDED
|
@@ -0,0 +1,128 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Percolation centrality measures."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.algorithms.centrality.betweenness import (
|
| 5 |
+
_single_source_dijkstra_path_basic as dijkstra,
|
| 6 |
+
)
|
| 7 |
+
from networkx.algorithms.centrality.betweenness import (
|
| 8 |
+
_single_source_shortest_path_basic as shortest_path,
|
| 9 |
+
)
|
| 10 |
+
|
| 11 |
+
__all__ = ["percolation_centrality"]
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@nx._dispatchable(node_attrs="attribute", edge_attrs="weight")
|
| 15 |
+
def percolation_centrality(G, attribute="percolation", states=None, weight=None):
|
| 16 |
+
r"""Compute the percolation centrality for nodes.
|
| 17 |
+
|
| 18 |
+
Percolation centrality of a node $v$, at a given time, is defined
|
| 19 |
+
as the proportion of ‘percolated paths’ that go through that node.
|
| 20 |
+
|
| 21 |
+
This measure quantifies relative impact of nodes based on their
|
| 22 |
+
topological connectivity, as well as their percolation states.
|
| 23 |
+
|
| 24 |
+
Percolation states of nodes are used to depict network percolation
|
| 25 |
+
scenarios (such as during infection transmission in a social network
|
| 26 |
+
of individuals, spreading of computer viruses on computer networks, or
|
| 27 |
+
transmission of disease over a network of towns) over time. In this
|
| 28 |
+
measure usually the percolation state is expressed as a decimal
|
| 29 |
+
between 0.0 and 1.0.
|
| 30 |
+
|
| 31 |
+
When all nodes are in the same percolated state this measure is
|
| 32 |
+
equivalent to betweenness centrality.
|
| 33 |
+
|
| 34 |
+
Parameters
|
| 35 |
+
----------
|
| 36 |
+
G : graph
|
| 37 |
+
A NetworkX graph.
|
| 38 |
+
|
| 39 |
+
attribute : None or string, optional (default='percolation')
|
| 40 |
+
Name of the node attribute to use for percolation state, used
|
| 41 |
+
if `states` is None. If a node does not set the attribute the
|
| 42 |
+
state of that node will be set to the default value of 1.
|
| 43 |
+
If all nodes do not have the attribute all nodes will be set to
|
| 44 |
+
1 and the centrality measure will be equivalent to betweenness centrality.
|
| 45 |
+
|
| 46 |
+
states : None or dict, optional (default=None)
|
| 47 |
+
Specify percolation states for the nodes, nodes as keys states
|
| 48 |
+
as values.
|
| 49 |
+
|
| 50 |
+
weight : None or string, optional (default=None)
|
| 51 |
+
If None, all edge weights are considered equal.
|
| 52 |
+
Otherwise holds the name of the edge attribute used as weight.
|
| 53 |
+
The weight of an edge is treated as the length or distance between the two sides.
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
Returns
|
| 57 |
+
-------
|
| 58 |
+
nodes : dictionary
|
| 59 |
+
Dictionary of nodes with percolation centrality as the value.
|
| 60 |
+
|
| 61 |
+
See Also
|
| 62 |
+
--------
|
| 63 |
+
betweenness_centrality
|
| 64 |
+
|
| 65 |
+
Notes
|
| 66 |
+
-----
|
| 67 |
+
The algorithm is from Mahendra Piraveenan, Mikhail Prokopenko, and
|
| 68 |
+
Liaquat Hossain [1]_
|
| 69 |
+
Pair dependencies are calculated and accumulated using [2]_
|
| 70 |
+
|
| 71 |
+
For weighted graphs the edge weights must be greater than zero.
|
| 72 |
+
Zero edge weights can produce an infinite number of equal length
|
| 73 |
+
paths between pairs of nodes.
|
| 74 |
+
|
| 75 |
+
References
|
| 76 |
+
----------
|
| 77 |
+
.. [1] Mahendra Piraveenan, Mikhail Prokopenko, Liaquat Hossain
|
| 78 |
+
Percolation Centrality: Quantifying Graph-Theoretic Impact of Nodes
|
| 79 |
+
during Percolation in Networks
|
| 80 |
+
http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0053095
|
| 81 |
+
.. [2] Ulrik Brandes:
|
| 82 |
+
A Faster Algorithm for Betweenness Centrality.
|
| 83 |
+
Journal of Mathematical Sociology 25(2):163-177, 2001.
|
| 84 |
+
https://doi.org/10.1080/0022250X.2001.9990249
|
| 85 |
+
"""
|
| 86 |
+
percolation = dict.fromkeys(G, 0.0) # b[v]=0 for v in G
|
| 87 |
+
|
| 88 |
+
nodes = G
|
| 89 |
+
|
| 90 |
+
if states is None:
|
| 91 |
+
states = nx.get_node_attributes(nodes, attribute, default=1)
|
| 92 |
+
|
| 93 |
+
# sum of all percolation states
|
| 94 |
+
p_sigma_x_t = 0.0
|
| 95 |
+
for v in states.values():
|
| 96 |
+
p_sigma_x_t += v
|
| 97 |
+
|
| 98 |
+
for s in nodes:
|
| 99 |
+
# single source shortest paths
|
| 100 |
+
if weight is None: # use BFS
|
| 101 |
+
S, P, sigma, _ = shortest_path(G, s)
|
| 102 |
+
else: # use Dijkstra's algorithm
|
| 103 |
+
S, P, sigma, _ = dijkstra(G, s, weight)
|
| 104 |
+
# accumulation
|
| 105 |
+
percolation = _accumulate_percolation(
|
| 106 |
+
percolation, S, P, sigma, s, states, p_sigma_x_t
|
| 107 |
+
)
|
| 108 |
+
|
| 109 |
+
n = len(G)
|
| 110 |
+
|
| 111 |
+
for v in percolation:
|
| 112 |
+
percolation[v] *= 1 / (n - 2)
|
| 113 |
+
|
| 114 |
+
return percolation
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
def _accumulate_percolation(percolation, S, P, sigma, s, states, p_sigma_x_t):
|
| 118 |
+
delta = dict.fromkeys(S, 0)
|
| 119 |
+
while S:
|
| 120 |
+
w = S.pop()
|
| 121 |
+
coeff = (1 + delta[w]) / sigma[w]
|
| 122 |
+
for v in P[w]:
|
| 123 |
+
delta[v] += sigma[v] * coeff
|
| 124 |
+
if w != s:
|
| 125 |
+
# percolation weight
|
| 126 |
+
pw_s_w = states[s] / (p_sigma_x_t - states[w])
|
| 127 |
+
percolation[w] += delta[w] * pw_s_w
|
| 128 |
+
return percolation
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/subgraph_alg.py
ADDED
|
@@ -0,0 +1,340 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Subraph centrality and communicability betweenness.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.utils import not_implemented_for
|
| 7 |
+
|
| 8 |
+
__all__ = [
|
| 9 |
+
"subgraph_centrality_exp",
|
| 10 |
+
"subgraph_centrality",
|
| 11 |
+
"communicability_betweenness_centrality",
|
| 12 |
+
"estrada_index",
|
| 13 |
+
]
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@not_implemented_for("directed")
|
| 17 |
+
@not_implemented_for("multigraph")
|
| 18 |
+
@nx._dispatchable
|
| 19 |
+
def subgraph_centrality_exp(G):
|
| 20 |
+
r"""Returns the subgraph centrality for each node of G.
|
| 21 |
+
|
| 22 |
+
Subgraph centrality of a node `n` is the sum of weighted closed
|
| 23 |
+
walks of all lengths starting and ending at node `n`. The weights
|
| 24 |
+
decrease with path length. Each closed walk is associated with a
|
| 25 |
+
connected subgraph ([1]_).
|
| 26 |
+
|
| 27 |
+
Parameters
|
| 28 |
+
----------
|
| 29 |
+
G: graph
|
| 30 |
+
|
| 31 |
+
Returns
|
| 32 |
+
-------
|
| 33 |
+
nodes:dictionary
|
| 34 |
+
Dictionary of nodes with subgraph centrality as the value.
|
| 35 |
+
|
| 36 |
+
Raises
|
| 37 |
+
------
|
| 38 |
+
NetworkXError
|
| 39 |
+
If the graph is not undirected and simple.
|
| 40 |
+
|
| 41 |
+
See Also
|
| 42 |
+
--------
|
| 43 |
+
subgraph_centrality:
|
| 44 |
+
Alternative algorithm of the subgraph centrality for each node of G.
|
| 45 |
+
|
| 46 |
+
Notes
|
| 47 |
+
-----
|
| 48 |
+
This version of the algorithm exponentiates the adjacency matrix.
|
| 49 |
+
|
| 50 |
+
The subgraph centrality of a node `u` in G can be found using
|
| 51 |
+
the matrix exponential of the adjacency matrix of G [1]_,
|
| 52 |
+
|
| 53 |
+
.. math::
|
| 54 |
+
|
| 55 |
+
SC(u)=(e^A)_{uu} .
|
| 56 |
+
|
| 57 |
+
References
|
| 58 |
+
----------
|
| 59 |
+
.. [1] Ernesto Estrada, Juan A. Rodriguez-Velazquez,
|
| 60 |
+
"Subgraph centrality in complex networks",
|
| 61 |
+
Physical Review E 71, 056103 (2005).
|
| 62 |
+
https://arxiv.org/abs/cond-mat/0504730
|
| 63 |
+
|
| 64 |
+
Examples
|
| 65 |
+
--------
|
| 66 |
+
(Example from [1]_)
|
| 67 |
+
>>> G = nx.Graph(
|
| 68 |
+
... [
|
| 69 |
+
... (1, 2),
|
| 70 |
+
... (1, 5),
|
| 71 |
+
... (1, 8),
|
| 72 |
+
... (2, 3),
|
| 73 |
+
... (2, 8),
|
| 74 |
+
... (3, 4),
|
| 75 |
+
... (3, 6),
|
| 76 |
+
... (4, 5),
|
| 77 |
+
... (4, 7),
|
| 78 |
+
... (5, 6),
|
| 79 |
+
... (6, 7),
|
| 80 |
+
... (7, 8),
|
| 81 |
+
... ]
|
| 82 |
+
... )
|
| 83 |
+
>>> sc = nx.subgraph_centrality_exp(G)
|
| 84 |
+
>>> print([f"{node} {sc[node]:0.2f}" for node in sorted(sc)])
|
| 85 |
+
['1 3.90', '2 3.90', '3 3.64', '4 3.71', '5 3.64', '6 3.71', '7 3.64', '8 3.90']
|
| 86 |
+
"""
|
| 87 |
+
# alternative implementation that calculates the matrix exponential
|
| 88 |
+
import scipy as sp
|
| 89 |
+
|
| 90 |
+
nodelist = list(G) # ordering of nodes in matrix
|
| 91 |
+
A = nx.to_numpy_array(G, nodelist)
|
| 92 |
+
# convert to 0-1 matrix
|
| 93 |
+
A[A != 0.0] = 1
|
| 94 |
+
expA = sp.linalg.expm(A)
|
| 95 |
+
# convert diagonal to dictionary keyed by node
|
| 96 |
+
sc = dict(zip(nodelist, map(float, expA.diagonal())))
|
| 97 |
+
return sc
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def subgraph_centrality(G):
    r"""Returns subgraph centrality for each node in G.

    Subgraph centrality of a node `n` is the sum of weighted closed
    walks of all lengths starting and ending at node `n`. The weights
    decrease with path length. Each closed walk is associated with a
    connected subgraph ([1]_).

    Parameters
    ----------
    G: graph

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with subgraph centrality as the value.

    Raises
    ------
    NetworkXError
       If the graph is not undirected and simple.

    See Also
    --------
    subgraph_centrality_exp:
        Alternative algorithm of the subgraph centrality for each node of G.

    Notes
    -----
    This version of the algorithm computes eigenvalues and eigenvectors
    of the adjacency matrix.

    Subgraph centrality of a node `u` in G can be found using
    a spectral decomposition of the adjacency matrix [1]_,

    .. math::

       SC(u)=\sum_{j=1}^{N}(v_{j}^{u})^2 e^{\lambda_{j}},

    where `v_j` is an eigenvector of the adjacency matrix `A` of G
    corresponding to the eigenvalue `\lambda_j`.

    Examples
    --------
    (Example from [1]_)
    >>> G = nx.Graph(
    ...     [
    ...         (1, 2),
    ...         (1, 5),
    ...         (1, 8),
    ...         (2, 3),
    ...         (2, 8),
    ...         (3, 4),
    ...         (3, 6),
    ...         (4, 5),
    ...         (4, 7),
    ...         (5, 6),
    ...         (6, 7),
    ...         (7, 8),
    ...     ]
    ... )
    >>> sc = nx.subgraph_centrality(G)
    >>> print([f"{node} {sc[node]:0.2f}" for node in sorted(sc)])
    ['1 3.90', '2 3.90', '3 3.64', '4 3.71', '5 3.64', '6 3.71', '7 3.64', '8 3.90']

    References
    ----------
    .. [1] Ernesto Estrada, Juan A. Rodriguez-Velazquez,
       "Subgraph centrality in complex networks",
       Physical Review E 71, 056103 (2005).
       https://arxiv.org/abs/cond-mat/0504730
    """
    import numpy as np

    # Fix a node ordering so rows/columns of the matrix map back to nodes.
    node_order = list(G)
    adj = nx.to_numpy_array(G, node_order)
    # The measure is defined on the unweighted graph: binarize the matrix.
    adj[np.nonzero(adj)] = 1
    # The adjacency matrix is symmetric, so the Hermitian solver applies.
    eigenvalues, eigenvectors = np.linalg.eigh(adj)
    # SC(u) = sum_j (v_j[u])^2 * exp(lambda_j), computed for every u at once
    # as a matrix-vector product of squared eigenvector entries with exp(w).
    squared_vectors = np.array(eigenvectors) ** 2
    exp_eigenvalues = np.exp(eigenvalues)
    diag_of_expA = squared_vectors @ exp_eigenvalues
    return {node: float(value) for node, value in zip(node_order, diag_of_expA)}
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def communicability_betweenness_centrality(G):
    r"""Returns subgraph communicability for all pairs of nodes in G.

    Communicability betweenness measure makes use of the number of walks
    connecting every pair of nodes as the basis of a betweenness centrality
    measure.

    Parameters
    ----------
    G: graph

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with communicability betweenness as the value.

    Raises
    ------
    NetworkXError
        If the graph is not undirected and simple.

    Notes
    -----
    Let `G=(V,E)` be a simple undirected graph with `n` nodes and `m` edges,
    and `A` denote the adjacency matrix of `G`.

    Let `G(r)=(V,E(r))` be the graph resulting from
    removing all edges connected to node `r` but not the node itself.

    The adjacency matrix for `G(r)` is `A+E(r)`, where `E(r)` has nonzeros
    only in row and column `r`.

    The subgraph betweenness of a node `r` is [1]_

    .. math::

        \omega_{r} = \frac{1}{C}\sum_{p}\sum_{q}\frac{G_{prq}}{G_{pq}},
        p\neq q, q\neq r,

    where
    `G_{prq}=(e^{A}_{pq} - (e^{A+E(r)})_{pq}` is the number of walks
    involving node r,
    `G_{pq}=(e^{A})_{pq}` is the number of closed walks starting
    at node `p` and ending at node `q`,
    and `C=(n-1)^{2}-(n-1)` is a normalization factor equal to the
    number of terms in the sum.

    The resulting `\omega_{r}` takes values between zero and one.
    The lower bound cannot be attained for a connected
    graph, and the upper bound is attained in the star graph.

    References
    ----------
    .. [1] Ernesto Estrada, Desmond J. Higham, Naomichi Hatano,
       "Communicability Betweenness in Complex Networks"
       Physica A 388 (2009) 764-774.
       https://arxiv.org/abs/0905.4102

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
    >>> cbc = nx.communicability_betweenness_centrality(G)
    >>> print([f"{node} {cbc[node]:0.2f}" for node in sorted(cbc)])
    ['0 0.03', '1 0.45', '2 0.51', '3 0.45', '4 0.40', '5 0.19', '6 0.03']
    """
    import numpy as np
    import scipy as sp

    nodelist = list(G)  # ordering of nodes in matrix
    n = len(nodelist)
    A = nx.to_numpy_array(G, nodelist)
    # convert to 0-1 matrix (the measure ignores edge weights)
    A[np.nonzero(A)] = 1
    # e^A counts weighted walks between every pair of nodes.
    expA = sp.linalg.expm(A)
    # Map each node to its row/column index in A.
    mapping = dict(zip(nodelist, range(n)))
    cbc = {}
    for v in G:
        # remove row and col of node v, i.e. build the adjacency of G(v)
        # in place; the original entries are saved and restored below.
        i = mapping[v]
        row = A[i, :].copy()
        col = A[:, i].copy()
        A[i, :] = 0
        A[:, i] = 0
        # B[p, q] = G_{pvq} / G_{pq}: the fraction of p->q walks through v.
        B = (expA - sp.linalg.expm(A)) / expA
        # sum with row/col of node v and diag set to zero
        # (excludes the p == v, q == v, and p == q terms from the sum).
        B[i, :] = 0
        B[:, i] = 0
        B -= np.diag(np.diag(B))
        cbc[v] = float(B.sum())
        # put row and col back so A is intact for the next iteration
        A[i, :] = row
        A[:, i] = col
    # rescale when more than two nodes by C = (n-1)^2 - (n-1),
    # the number of (p, q) terms in the sum
    order = len(cbc)
    if order > 2:
        scale = 1.0 / ((order - 1.0) ** 2 - (order - 1.0))
        cbc = {node: value * scale for node, value in cbc.items()}
    return cbc
|
| 292 |
+
|
| 293 |
+
|
| 294 |
+
@nx._dispatchable
def estrada_index(G):
    r"""Returns the Estrada index of a the graph G.

    The Estrada Index is a topological index of folding or 3D "compactness" ([1]_).

    Parameters
    ----------
    G: graph

    Returns
    -------
    estrada index: float

    Raises
    ------
    NetworkXError
        If the graph is not undirected and simple.

    Notes
    -----
    Let `G=(V,E)` be a simple undirected graph with `n` nodes and let
    `\lambda_{1}\leq\lambda_{2}\leq\cdots\lambda_{n}`
    be a non-increasing ordering of the eigenvalues of its adjacency
    matrix `A`. The Estrada index is ([1]_, [2]_)

    .. math::
        EE(G)=\sum_{j=1}^n e^{\lambda _j}.

    References
    ----------
    .. [1] E. Estrada, "Characterization of 3D molecular structure",
       Chem. Phys. Lett. 319, 713 (2000).
       https://doi.org/10.1016/S0009-2614(00)00158-5
    .. [2] José Antonio de la Peñaa, Ivan Gutman, Juan Rada,
       "Estimating the Estrada index",
       Linear Algebra and its Applications. 427, 1 (2007).
       https://doi.org/10.1016/j.laa.2007.06.020

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
    >>> ei = nx.estrada_index(G)
    >>> print(f"{ei:0.5}")
    20.55
    """
    # EE(G) = trace(e^A), i.e. the total of all subgraph centralities.
    centrality = subgraph_centrality(G)
    return sum(centrality.values())
|
.venv/lib/python3.11/site-packages/networkx/algorithms/coloring/__init__.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from networkx.algorithms.coloring.greedy_coloring import *
|
| 2 |
+
from networkx.algorithms.coloring.equitable_coloring import equitable_color
|
| 3 |
+
|
| 4 |
+
__all__ = ["greedy_color", "equitable_color"]
|
.venv/lib/python3.11/site-packages/networkx/algorithms/coloring/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (418 Bytes). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/coloring/__pycache__/equitable_coloring.cpython-311.pyc
ADDED
|
Binary file (19 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/coloring/__pycache__/greedy_coloring.cpython-311.pyc
ADDED
|
Binary file (24.7 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/coloring/equitable_coloring.py
ADDED
|
@@ -0,0 +1,505 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Equitable coloring of graphs with bounded degree.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from collections import defaultdict
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
|
| 9 |
+
__all__ = ["equitable_color"]
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
@nx._dispatchable
def is_coloring(G, coloring):
    """Determine if the coloring is a valid coloring for the graph G."""
    # A proper coloring gives the two endpoints of every edge distinct colors.
    for endpoint_a, endpoint_b in G.edges:
        if coloring[endpoint_a] == coloring[endpoint_b]:
            return False
    return True
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
@nx._dispatchable
def is_equitable(G, coloring, num_colors=None):
    """Determines if the coloring is valid and equitable for the graph G."""
    if not is_coloring(G, coloring):
        return False

    # Count how many vertices carry each color.
    class_sizes = defaultdict(int)
    for color in coloring.values():
        class_sizes[color] += 1

    if num_colors is not None:
        # Colors with no vertices attached still count as (empty) classes.
        for color in range(num_colors):
            class_sizes.setdefault(color, 0)

    # Equitable means all class sizes differ by at most one, so at most
    # two distinct sizes may occur (and those must be adjacent integers).
    distinct_sizes = set(class_sizes.values())
    if not distinct_sizes:
        # Empty graph: equitable only when no color count was demanded.
        return num_colors is None
    if len(distinct_sizes) == 1:
        return True
    if len(distinct_sizes) == 2:
        smaller, larger = sorted(distinct_sizes)
        return larger - smaller <= 1
    # More than two distinct class sizes can never be equitable.
    return False
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def make_C_from_F(F):
    """Invert a node -> color assignment `F` into a color -> [nodes] mapping."""
    color_classes = defaultdict(list)
    for node in F:
        color_classes[F[node]].append(node)
    return color_classes
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def make_N_from_L_C(L, C):
    """Build N[(node, color)]: how many neighbors of `node` carry `color`.

    `L` maps each node to its adjacency list and `C` maps each color to
    the list of nodes in that color class.
    """
    counts = {}
    for node, neighbors in L.items():
        for color, members in C.items():
            counts[(node, color)] = sum(1 for v in neighbors if v in members)
    return counts
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def make_H_from_C_N(C, N):
    """Build H[(c1, c2)]: the number of nodes of color c1 with no neighbor
    of color c2 (the "witnesses" of a potential c1 -> c2 move)."""
    witness_counts = {}
    for c1 in C:
        for c2 in C:
            witness_counts[(c1, c2)] = sum(
                1 for node in C[c1] if N[(node, c2)] == 0
            )
    return witness_counts
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
def change_color(u, X, Y, N, H, F, C, L):
    """Change the color of 'u' from X to Y and update N, H, F, C.

    F maps node -> color, C maps color -> list of nodes, L maps node ->
    adjacency list, N[(node, color)] counts neighbors of `node` with that
    color, and H[(c1, c2)] counts nodes of c1 with no neighbor in c2.
    All four structures are updated in place to stay mutually consistent.
    """
    assert F[u] == X and X != Y

    # Change the class of 'u' from X to Y
    F[u] = Y

    for k in C:
        # 'u' witnesses an edge from k -> Y instead of from k -> X now.
        if N[u, k] == 0:
            H[(X, k)] -= 1
            H[(Y, k)] += 1

    for v in L[u]:
        # 'v' has lost a neighbor in X and gained one in Y
        N[(v, X)] -= 1
        N[(v, Y)] += 1

        if N[(v, X)] == 0:
            # 'v' witnesses F[v] -> X
            H[(F[v], X)] += 1

        if N[(v, Y)] == 1:
            # 'v' no longer witnesses F[v] -> Y
            H[(F[v], Y)] -= 1

    C[X].remove(u)
    C[Y].append(u)
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def move_witnesses(src_color, dst_color, N, H, F, C, T_cal, L):
    """Move witness along a path from src_color to dst_color."""
    current = src_color
    while current != dst_color:
        successor = T_cal[current]
        # Move _any_ witness from the current color to its successor on the
        # path recorded in T_cal; such a witness must exist by construction.
        witness = next(node for node in C[current] if N[(node, successor)] == 0)
        change_color(witness, current, successor, N=N, H=H, F=F, C=C, L=L)
        current = successor
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
@nx._dispatchable(mutates_input=True)
def pad_graph(G, num_colors):
    """Add a disconnected complete clique K_p such that the number of nodes in
    the graph becomes a multiple of `num_colors`.

    Assumes that the graph's nodes are labelled using integers.

    Returns the number of nodes with each color.
    """

    n_ = len(G)
    r = num_colors - 1

    # Ensure that the number of nodes in G is a multiple of (r + 1)
    s = n_ // (r + 1)
    if n_ != s * (r + 1):
        # p is the number of padding nodes needed to reach the next multiple.
        p = (r + 1) - n_ % (r + 1)
        s += 1

        # Complete graph K_p between (imaginary) nodes [n_, ... , n_ + p]
        # relabelled so the new node ids do not collide with existing ones.
        K = nx.relabel_nodes(nx.complete_graph(p), {idx: idx + n_ for idx in range(p)})
        G.add_edges_from(K.edges)

    return s
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None):
    """Procedure P as described in the paper.

    Rebalances the coloring after color class V_minus lost a node and
    V_plus gained one, mutating N, H, F, C (and possibly excluded_colors)
    in place.  See Kierstead et al. (2010) for the terminology (A_cal,
    T_cal, witnesses, solo neighbors).
    """

    if excluded_colors is None:
        excluded_colors = set()

    A_cal = set()
    T_cal = {}
    R_cal = []

    # BFS to determine A_cal, i.e. colors reachable from V-
    reachable = [V_minus]
    marked = set(reachable)
    idx = 0

    while idx < len(reachable):
        pop = reachable[idx]
        idx += 1

        A_cal.add(pop)
        R_cal.append(pop)

        # TODO: Checking whether a color has been visited can be made faster by
        # using a look-up table instead of testing for membership in a set by a
        # logarithmic factor.
        next_layer = []
        for k in C:
            if (
                H[(k, pop)] > 0
                and k not in A_cal
                and k not in excluded_colors
                and k not in marked
            ):
                next_layer.append(k)

        for dst in next_layer:
            # Record that `dst` can reach `pop`
            T_cal[dst] = pop

        marked.update(next_layer)
        reachable.extend(next_layer)

    # Variables for the algorithm
    # b = number of color classes not reachable from V- (outside A_cal).
    b = len(C) - len(A_cal)

    if V_plus in A_cal:
        # Easy case: V+ is in A_cal
        # Move one node from V+ to V- using T_cal to find the parents.
        move_witnesses(V_plus, V_minus, N=N, H=H, F=F, C=C, T_cal=T_cal, L=L)
    else:
        # If there is a solo edge, we can resolve the situation by
        # moving witnesses from B to A, making G[A] equitable and then
        # recursively balancing G[B - w] with a different V_minus and
        # but the same V_plus.

        A_0 = set()
        A_cal_0 = set()
        num_terminal_sets_found = 0
        made_equitable = False

        for W_1 in R_cal[::-1]:
            for v in C[W_1]:
                X = None

                for U in C:
                    if N[(v, U)] == 0 and U in A_cal and U != W_1:
                        X = U

                # v does not witness an edge in H[A_cal]
                if X is None:
                    continue

                for U in C:
                    # Note: Departing from the paper here.
                    if N[(v, U)] >= 1 and U not in A_cal:
                        X_prime = U
                        w = v

                        try:
                            # Finding the solo neighbor of w in X_prime
                            y = next(
                                node
                                for node in L[w]
                                if F[node] == X_prime and N[(node, W_1)] == 1
                            )
                        except StopIteration:
                            pass
                        else:
                            W = W_1

                            # Move w from W to X, now X has one extra node.
                            change_color(w, W, X, N=N, H=H, F=F, C=C, L=L)

                            # Move witness from X to V_minus, making the coloring
                            # equitable.
                            move_witnesses(
                                src_color=X,
                                dst_color=V_minus,
                                N=N,
                                H=H,
                                F=F,
                                C=C,
                                T_cal=T_cal,
                                L=L,
                            )

                            # Move y from X_prime to W, making W the correct size.
                            change_color(y, X_prime, W, N=N, H=H, F=F, C=C, L=L)

                            # Then call the procedure on G[B - y]
                            procedure_P(
                                V_minus=X_prime,
                                V_plus=V_plus,
                                N=N,
                                H=H,
                                C=C,
                                F=F,
                                L=L,
                                excluded_colors=excluded_colors.union(A_cal),
                            )
                            made_equitable = True
                            break

                if made_equitable:
                    break
            else:
                # No node in W_1 was found such that
                # it had a solo-neighbor.
                A_cal_0.add(W_1)
                A_0.update(C[W_1])
                num_terminal_sets_found += 1

            if num_terminal_sets_found == b:
                # Otherwise, construct the maximal independent set and find
                # a pair of z_1, z_2 as in Case II.

                # BFS to determine B_cal': the set of colors reachable from V+
                B_cal_prime = set()
                T_cal_prime = {}

                reachable = [V_plus]
                marked = set(reachable)
                idx = 0
                while idx < len(reachable):
                    pop = reachable[idx]
                    idx += 1

                    B_cal_prime.add(pop)

                    # No need to check for excluded_colors here because
                    # they only exclude colors from A_cal
                    next_layer = [
                        k
                        for k in C
                        if H[(pop, k)] > 0 and k not in B_cal_prime and k not in marked
                    ]

                    for dst in next_layer:
                        T_cal_prime[pop] = dst

                    marked.update(next_layer)
                    reachable.extend(next_layer)

                # Construct the independent set of G[B']
                I_set = set()
                I_covered = set()
                W_covering = {}

                B_prime = [node for k in B_cal_prime for node in C[k]]

                # Add the nodes in V_plus to I first.
                for z in C[V_plus] + B_prime:
                    if z in I_covered or F[z] not in B_cal_prime:
                        continue

                    I_set.add(z)
                    I_covered.add(z)
                    I_covered.update(list(L[z]))

                    for w in L[z]:
                        if F[w] in A_cal_0 and N[(z, F[w])] == 1:
                            if w not in W_covering:
                                W_covering[w] = z
                            else:
                                # Found z1, z2 which have the same solo
                                # neighbor in some W
                                z_1 = W_covering[w]
                                # z_2 = z

                                Z = F[z_1]
                                W = F[w]

                                # shift nodes along W, V-
                                move_witnesses(
                                    W, V_minus, N=N, H=H, F=F, C=C, T_cal=T_cal, L=L
                                )

                                # shift nodes along V+ to Z
                                move_witnesses(
                                    V_plus,
                                    Z,
                                    N=N,
                                    H=H,
                                    F=F,
                                    C=C,
                                    T_cal=T_cal_prime,
                                    L=L,
                                )

                                # change color of z_1 to W
                                change_color(z_1, Z, W, N=N, H=H, F=F, C=C, L=L)

                                # change color of w to some color in B_cal
                                W_plus = next(
                                    k for k in C if N[(w, k)] == 0 and k not in A_cal
                                )
                                change_color(w, W, W_plus, N=N, H=H, F=F, C=C, L=L)

                                # recurse with G[B \cup W*]
                                excluded_colors.update(
                                    [k for k in C if k != W and k not in B_cal_prime]
                                )
                                procedure_P(
                                    V_minus=W,
                                    V_plus=W_plus,
                                    N=N,
                                    H=H,
                                    C=C,
                                    F=F,
                                    L=L,
                                    excluded_colors=excluded_colors,
                                )

                                made_equitable = True
                                break

                    if made_equitable:
                        break
                else:
                    assert False, (
                        "Must find a w which is the solo neighbor "
                        "of two vertices in B_cal_prime."
                    )

            if made_equitable:
                break
|
| 387 |
+
|
| 388 |
+
|
| 389 |
+
@nx._dispatchable
def equitable_color(G, num_colors):
    """Provides an equitable coloring for nodes of `G`.

    Attempts to color a graph using `num_colors` colors, where no neighbors of
    a node can have same color as the node itself and the number of nodes with
    each color differ by at most 1. `num_colors` must be greater than the
    maximum degree of `G`. The algorithm is described in [1]_ and has
    complexity O(num_colors * n**2).

    Parameters
    ----------
    G : networkX graph
       The nodes of this graph will be colored.

    num_colors : number of colors to use
       This number must be at least one more than the maximum degree of nodes
       in the graph.

    Returns
    -------
    A dictionary with keys representing nodes and values representing
    corresponding coloring.

    Examples
    --------
    >>> G = nx.cycle_graph(4)
    >>> nx.coloring.equitable_color(G, num_colors=3)  # doctest: +SKIP
    {0: 2, 1: 1, 2: 2, 3: 0}

    Raises
    ------
    NetworkXAlgorithmError
        If `num_colors` is not at least the maximum degree of the graph `G`

    References
    ----------
    .. [1] Kierstead, H. A., Kostochka, A. V., Mydlarz, M., & Szemerédi, E.
        (2010). A fast algorithm for equitable coloring. Combinatorica, 30(2),
        217-224.
    """

    # Map nodes to integers for simplicity later.
    nodes_to_int = {}
    int_to_nodes = {}

    for idx, node in enumerate(G.nodes):
        nodes_to_int[node] = idx
        int_to_nodes[idx] = node

    # Work on a relabelled copy so the caller's graph is untouched.
    G = nx.relabel_nodes(G, nodes_to_int, copy=True)

    # Basic graph statistics and sanity check.
    if len(G.nodes) > 0:
        r_ = max(G.degree(node) for node in G.nodes)
    else:
        r_ = 0

    if r_ >= num_colors:
        raise nx.NetworkXAlgorithmError(
            f"Graph has maximum degree {r_}, needs "
            f"{r_ + 1} (> {num_colors}) colors for guaranteed coloring."
        )

    # Ensure that the number of nodes in G is a multiple of (r + 1)
    pad_graph(G, num_colors)

    # Starting the algorithm.
    # L = {node: list(G.neighbors(node)) for node in G.nodes}
    # Edges are added incrementally below, so adjacency lists start empty.
    L_ = {node: [] for node in G.nodes}

    # Arbitrary equitable allocation of colors to nodes.
    F = {node: idx % num_colors for idx, node in enumerate(G.nodes)}

    C = make_C_from_F(F)

    # The neighborhood is empty initially.
    N = make_N_from_L_C(L_, C)

    # Currently all nodes witness all edges.
    H = make_H_from_C_N(C, N)

    # Start of algorithm: introduce one edge at a time, repairing the
    # coloring with procedure_P whenever an edge violates it.
    edges_seen = set()

    for u in sorted(G.nodes):
        for v in sorted(G.neighbors(u)):
            # Do not double count edges if (v, u) has already been seen.
            if (v, u) in edges_seen:
                continue

            edges_seen.add((u, v))

            L_[u].append(v)
            L_[v].append(u)

            N[(u, F[v])] += 1
            N[(v, F[u])] += 1

            if F[u] != F[v]:
                # Were 'u' and 'v' witnesses for F[u] -> F[v] or F[v] -> F[u]?
                if N[(u, F[v])] == 1:
                    H[F[u], F[v]] -= 1  # u cannot witness an edge between F[u], F[v]

                if N[(v, F[u])] == 1:
                    H[F[v], F[u]] -= 1  # v cannot witness an edge between F[v], F[u]

        if N[(u, F[u])] != 0:
            # 'u' now conflicts with a neighbor of its own color.
            # Find the first color where 'u' does not have any neighbors.
            Y = next(k for k in C if N[(u, k)] == 0)
            X = F[u]
            change_color(u, X, Y, N=N, H=H, F=F, C=C, L=L_)

            # Procedure P rebalances the class sizes after the move.
            procedure_P(V_minus=X, V_plus=Y, N=N, H=H, F=F, C=C, L=L_)

    # Translate back to the caller's original node labels (padding nodes
    # are absent from int_to_nodes and therefore dropped here).
    return {int_to_nodes[x]: F[x] for x in int_to_nodes}
|
.venv/lib/python3.11/site-packages/networkx/algorithms/coloring/greedy_coloring.py
ADDED
|
@@ -0,0 +1,565 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Greedy graph coloring using various strategies.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import itertools
|
| 6 |
+
from collections import defaultdict, deque
|
| 7 |
+
|
| 8 |
+
import networkx as nx
|
| 9 |
+
from networkx.utils import arbitrary_element, py_random_state
|
| 10 |
+
|
| 11 |
+
# Public API of this module: the greedy-coloring entry point plus the
# individual node-ordering strategies (exported so callers can pass them
# to ``greedy_color`` by reference instead of by name).
__all__ = [
    "greedy_color",
    "strategy_connected_sequential",
    "strategy_connected_sequential_bfs",
    "strategy_connected_sequential_dfs",
    "strategy_independent_set",
    "strategy_largest_first",
    "strategy_random_sequential",
    "strategy_saturation_largest_first",
    "strategy_smallest_last",
]
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def strategy_largest_first(G, colors):
    """Return the nodes of ``G`` as a list, highest degree first.

    ``G`` is a NetworkX graph.  The ``colors`` argument is accepted for
    interface uniformity with the other strategies and is ignored.
    """
    # A stable ascending sort on the negated degree is equivalent to a
    # stable descending sort on the degree itself (ties keep the
    # graph's original node order either way).
    return sorted(G, key=lambda node: -G.degree(node))
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
@py_random_state(2)
def strategy_random_sequential(G, colors, seed=None):
    """Return the nodes of ``G`` as a list in uniformly random order.

    ``G`` is a NetworkX graph.  ``colors`` is ignored.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    """
    ordering = list(G)
    # ``py_random_state`` has already normalized ``seed`` into an
    # object exposing the ``random.Random`` interface.
    seed.shuffle(ordering)
    return ordering
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def strategy_smallest_last(G, colors):
    """Return a deque of the nodes of ``G`` with the "smallest" node last.

    The node of minimum degree is repeatedly removed from a working
    copy of the graph, its neighbors' degrees are decremented, and the
    removed node is prepended to the ordering.  Degrees are tracked in
    a bucket queue, so the whole procedure runs in $O(n + m)$ time
    (ignoring polylogarithmic factors), where $n$ is the number of
    nodes and $m$ the number of edges.

    ``G`` is a NetworkX graph.  ``colors`` is ignored.

    This strategy is related to :func:`strategy_independent_set`: each
    removed node can be viewed as an independent set of size one,
    chosen in place of a maximal independent set.
    """
    working = G.copy()
    ordering = deque()

    # Bucket queue: degree -> set of nodes currently having that degree.
    # Sets give O(1) removal of an arbitrary member.
    buckets = defaultdict(set)
    floor = float("inf")  # running lower bound on the minimum degree
    for node, deg in working.degree():
        buckets[deg].add(node)
        if deg < floor:
            floor = deg

    def first_nonempty_bucket():
        # Scan upward starting at the lower bound rather than at zero.
        return next(d for d in itertools.count(floor) if d in buckets)

    for _ in G:
        # Remove one minimum-degree node and prepend it to the result.
        low = first_nonempty_bucket()
        node = buckets[low].pop()
        if not buckets[low]:
            del buckets[low]  # keep the bucket table tidy
        ordering.appendleft(node)

        # Each neighbor loses one incident edge; move it down a bucket.
        # Note ``node`` is still present, so its edges still count here.
        for nbr in working[node]:
            d = working.degree(nbr)
            buckets[d].remove(nbr)
            if not buckets[d]:
                del buckets[d]
            buckets[d - 1].add(nbr)

        working.remove_node(node)
        # A tied neighbor may now sit one below the old minimum.
        floor = low - 1

    return ordering
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
def _maximal_independent_set(G):
|
| 107 |
+
"""Returns a maximal independent set of nodes in ``G`` by repeatedly
|
| 108 |
+
choosing an independent node of minimum degree (with respect to the
|
| 109 |
+
subgraph of unchosen nodes).
|
| 110 |
+
|
| 111 |
+
"""
|
| 112 |
+
result = set()
|
| 113 |
+
remaining = set(G)
|
| 114 |
+
while remaining:
|
| 115 |
+
G = G.subgraph(remaining)
|
| 116 |
+
v = min(remaining, key=G.degree)
|
| 117 |
+
result.add(v)
|
| 118 |
+
remaining -= set(G[v]) | {v}
|
| 119 |
+
return result
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def strategy_independent_set(G, colors):
    """Yield the nodes of ``G`` grouped by greedy maximal independent sets.

    This generator repeatedly finds a maximal independent set among the
    not-yet-yielded nodes, removes it from consideration, and yields its
    members.

    ``G`` is a NetworkX graph.  ``colors`` is ignored: despite the
    signature shared with the other strategies, this function never
    reads or writes the partial coloring.

    .. note::
       The previous docstring claimed this function updates ``colors``
       in-place and returns ``None``; that was incorrect — it is a
       generator over nodes, like the other strategy functions.

    This strategy is related to :func:`strategy_smallest_last`: in that
    strategy, an independent set of size one is chosen at each step
    instead of a maximal independent set.
    """
    remaining_nodes = set(G)
    # Loop until every node has been handed out.
    while remaining_nodes:
        nodes = _maximal_independent_set(G.subgraph(remaining_nodes))
        remaining_nodes -= nodes
        yield from nodes
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def strategy_connected_sequential_bfs(G, colors):
    """Yield the nodes of ``G`` in breadth-first traversal order.

    Within each connected component, every yielded node except the
    first has at least one neighbor that appeared earlier in the
    sequence.

    ``G`` is a NetworkX graph.  ``colors`` is ignored.
    """
    return strategy_connected_sequential(G, colors, traversal="bfs")
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
def strategy_connected_sequential_dfs(G, colors):
    """Yield the nodes of ``G`` in depth-first traversal order.

    Within each connected component, every yielded node except the
    first has at least one neighbor that appeared earlier in the
    sequence.

    ``G`` is a NetworkX graph.  ``colors`` is ignored.
    """
    return strategy_connected_sequential(G, colors, traversal="dfs")
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
def strategy_connected_sequential(G, colors, traversal="bfs"):
    """Yield the nodes of ``G`` in a connected traversal order.

    ``traversal`` selects the traversal used inside each connected
    component and must be the string ``'bfs'`` (breadth-first, the
    default) or ``'dfs'`` (depth-first); any other value raises
    :exc:`networkx.NetworkXError`.

    Within each component, every yielded node except the first has at
    least one neighbor that appeared earlier in the sequence.

    ``G`` is a NetworkX graph.  ``colors`` is ignored.
    """
    traversals = {"bfs": nx.bfs_edges, "dfs": nx.dfs_edges}
    if traversal not in traversals:
        raise nx.NetworkXError(
            "Please specify one of the strings 'bfs' or"
            " 'dfs' for connected sequential ordering"
        )
    traverse = traversals[traversal]
    for component in nx.connected_components(G):
        # The component's start node has no previously-seen neighbor;
        # every node the traversal reaches afterwards does.
        source = arbitrary_element(component)
        yield source
        yield from (end for _, end in traverse(G.subgraph(component), source))
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
def strategy_saturation_largest_first(G, colors):
    """Iterates over all the nodes of ``G`` in "saturation order" (also
    known as "DSATUR").

    A node's *saturation* is the number of distinct colors already
    assigned to its neighbors; the uncolored node of largest saturation
    is yielded next, with ties broken by degree.

    ``G`` is a NetworkX graph. ``colors`` is a dictionary mapping nodes of
    ``G`` to colors, for those nodes that have already been colored.

    .. note::
       This generator cooperates with its caller: :func:`greedy_color`
       assigns a color to each yielded node and records it in
       ``colors`` before resuming the generator, so ``colors`` grows
       between yields and the ``while`` loop below terminates.

    """
    # distinct_colors[v]: set of colors seen so far on v's neighbors.
    distinct_colors = {v: set() for v in G}

    # Add the node color assignments given in colors to the
    # distinct colors set for each neighbor of that node
    for node, color in colors.items():
        for neighbor in G[node]:
            distinct_colors[neighbor].add(color)

    # Check that the color assignments in colors are valid
    # i.e. no neighboring nodes have the same color
    if len(colors) >= 2:
        for node, color in colors.items():
            if color in distinct_colors[node]:
                raise nx.NetworkXError("Neighboring nodes must have different colors")

    # If 0 nodes have been colored, simply choose the node of highest degree.
    if not colors:
        node = max(G, key=G.degree)
        yield node
        # Add the color 0 to the distinct colors set for each
        # neighbor of that node.  (The greedy caller necessarily gives
        # this first node color 0, since no colors are in use yet.)
        for v in G[node]:
            distinct_colors[v].add(0)

    while len(G) != len(colors):
        # Update the distinct color sets for the neighbors.
        # This rescans every colored node even though only the most
        # recently colored one can contribute new information; the full
        # rescan keeps the bookkeeping simple.
        for node, color in colors.items():
            for neighbor in G[node]:
                distinct_colors[neighbor].add(color)

        # Compute the maximum saturation and the set of nodes that
        # achieve that saturation.
        saturation = {v: len(c) for v, c in distinct_colors.items() if v not in colors}
        # Yield the node with the highest saturation, and break ties by
        # degree.
        node = max(saturation, key=lambda v: (saturation[v], G.degree(v)))
        yield node
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
#: Dictionary mapping name of a strategy as a string to the strategy function.
#: ``"connected_sequential"`` maps to :func:`strategy_connected_sequential`,
#: whose default traversal is BFS, and ``"DSATUR"`` is an alias for
#: ``"saturation_largest_first"``.
STRATEGIES = {
    "largest_first": strategy_largest_first,
    "random_sequential": strategy_random_sequential,
    "smallest_last": strategy_smallest_last,
    "independent_set": strategy_independent_set,
    "connected_sequential_bfs": strategy_connected_sequential_bfs,
    "connected_sequential_dfs": strategy_connected_sequential_dfs,
    "connected_sequential": strategy_connected_sequential,
    "saturation_largest_first": strategy_saturation_largest_first,
    "DSATUR": strategy_saturation_largest_first,
}
|
| 263 |
+
|
| 264 |
+
|
| 265 |
+
@nx._dispatchable
def greedy_color(G, strategy="largest_first", interchange=False):
    """Color a graph greedily, attempting to use few colors.

    Nodes are visited in the order produced by ``strategy`` and each
    node receives the smallest color not already used by one of its
    neighbors, so adjacent nodes never share a color.  The strategies
    are described in [1]_, and smallest-last is based on [2]_.

    Parameters
    ----------
    G : NetworkX graph

    strategy : string or function(G, colors)
        A function (or a string naming a built-in function) that
        provides the coloring strategy by returning an iterable over
        all the nodes of ``G`` in the order they should be colored.
        ``G`` is the graph, and ``colors`` is a dictionary of the
        currently assigned colors, keyed by nodes.

        If the strategy function is an iterator generator (that is, a
        function with ``yield`` statements), keep in mind that the
        ``colors`` dictionary will be updated after each ``yield``,
        since this function chooses colors greedily.

        If ``strategy`` is a string, it must be one of the following,
        each of which represents one of the built-in strategy functions.

        * ``'largest_first'``
        * ``'random_sequential'``
        * ``'smallest_last'``
        * ``'independent_set'``
        * ``'connected_sequential_bfs'``
        * ``'connected_sequential_dfs'``
        * ``'connected_sequential'`` (alias for the previous strategy)
        * ``'saturation_largest_first'``
        * ``'DSATUR'`` (alias for the previous strategy)

    interchange : bool
        Will use the color interchange algorithm described by [3]_ if
        set to ``True``.

        Note that ``saturation_largest_first`` and ``independent_set``
        do not work with interchange. Furthermore, if you use
        interchange with your own strategy function, you cannot rely
        on the values in the ``colors`` argument.

    Returns
    -------
    A dictionary with keys representing nodes and values representing
    corresponding coloring.

    Examples
    --------
    >>> G = nx.cycle_graph(4)
    >>> d = nx.coloring.greedy_color(G, strategy="largest_first")
    >>> d in [{0: 0, 1: 1, 2: 0, 3: 1}, {0: 1, 1: 0, 2: 1, 3: 0}]
    True

    Raises
    ------
    NetworkXPointlessConcept
        If ``strategy`` is ``saturation_largest_first`` or
        ``independent_set`` and ``interchange`` is ``True``.

    References
    ----------
    .. [1] Adrian Kosowski, and Krzysztof Manuszewski,
       Classical Coloring of Graphs, Graph Colorings, 2-19, 2004.
       ISBN 0-8218-3458-4.
    .. [2] David W. Matula, and Leland L. Beck, "Smallest-last
       ordering and clustering and graph coloring algorithms." *J. ACM* 30,
       3 (July 1983), 417–427. <https://doi.org/10.1145/2402.322385>
    .. [3] Maciej M. Sysło, Narsingh Deo, Janusz S. Kowalik,
       Discrete Optimization Algorithms with Pascal Programs, 415-424, 1983.
       ISBN 0-486-45353-7.
    """
    if len(G) == 0:
        return {}
    # A string selects one of the built-in strategies; a callable is
    # used as-is.
    strategy = STRATEGIES.get(strategy, strategy)
    if not callable(strategy):
        raise nx.NetworkXError(
            f"strategy must be callable or a valid string. {strategy} not valid."
        )
    # Validate the argument combination before executing any strategy
    # function.
    if interchange:
        if strategy is strategy_independent_set:
            raise nx.NetworkXPointlessConcept(
                "interchange cannot be used with independent_set"
            )
        if strategy is strategy_saturation_largest_first:
            raise nx.NetworkXPointlessConcept(
                "interchange cannot be used with saturation_largest_first"
            )
    colors = {}
    nodes = strategy(G, colors)
    if interchange:
        return _greedy_coloring_with_interchange(G, nodes)
    for u in nodes:
        # Colors already taken by ``u``'s colored neighbors.
        taken = {colors[v] for v in G[u] if v in colors}
        # Assign the smallest nonnegative integer not in ``taken``.
        colors[u] = next(c for c in itertools.count() if c not in taken)
    return colors
|
| 376 |
+
|
| 377 |
+
|
| 378 |
+
# Tools for coloring with interchanges
|
| 379 |
+
class _Node:
    """Per-node record for the interchange coloring data structure.

    Attributes
    ----------
    node_id : hashable
        The node of the original graph this record represents.
    color : int
        The node's current color, or ``-1`` while still uncolored.
    adj_list : _AdjEntry or None
        Head of a singly linked list (linked via ``next``) of adjacency
        entries, one per incident edge.
    adj_color : list
        ``adj_color[c]`` is the head of a doubly linked list (linked via
        ``col_next``/``col_prev``) of the adjacency entries whose
        far endpoint currently has color ``c``.
    """

    __slots__ = ["node_id", "color", "adj_list", "adj_color"]

    def __init__(self, node_id, n):
        # ``n`` is the number of nodes in the graph: at most ``n``
        # colors can ever be used, so one color bucket per possible color.
        self.node_id = node_id
        self.color = -1
        self.adj_list = None
        self.adj_color = [None for _ in range(n)]

    def __repr__(self):
        return (
            f"Node_id: {self.node_id}, Color: {self.color}, "
            f"Adj_list: ({self.adj_list}), adj_color: ({self.adj_color})"
        )

    def assign_color(self, adj_entry, color):
        # Push ``adj_entry`` onto the front of the ``color`` bucket's
        # doubly linked list.
        adj_entry.col_prev = None
        adj_entry.col_next = self.adj_color[color]
        self.adj_color[color] = adj_entry
        if adj_entry.col_next is not None:
            adj_entry.col_next.col_prev = adj_entry

    def clear_color(self, adj_entry, color):
        # Unlink ``adj_entry`` from the ``color`` bucket's doubly linked
        # list in O(1), patching the head or predecessor as appropriate.
        if adj_entry.col_prev is None:
            self.adj_color[color] = adj_entry.col_next
        else:
            adj_entry.col_prev.col_next = adj_entry.col_next
        if adj_entry.col_next is not None:
            adj_entry.col_next.col_prev = adj_entry.col_prev

    def iter_neighbors(self):
        # Walk the singly linked adjacency list, yielding the entries
        # themselves (not the neighbor ids).
        adj_node = self.adj_list
        while adj_node is not None:
            yield adj_node
            adj_node = adj_node.next

    def iter_neighbors_color(self, color):
        # Yield the ids of the neighbors whose current color is ``color``.
        adj_color_node = self.adj_color[color]
        while adj_color_node is not None:
            yield adj_color_node.node_id
            adj_color_node = adj_color_node.col_next
|
| 420 |
+
|
| 421 |
+
|
| 422 |
+
class _AdjEntry:
|
| 423 |
+
__slots__ = ["node_id", "next", "mate", "col_next", "col_prev"]
|
| 424 |
+
|
| 425 |
+
def __init__(self, node_id):
|
| 426 |
+
self.node_id = node_id
|
| 427 |
+
self.next = None
|
| 428 |
+
self.mate = None
|
| 429 |
+
self.col_next = None
|
| 430 |
+
self.col_prev = None
|
| 431 |
+
|
| 432 |
+
def __repr__(self):
|
| 433 |
+
col_next = None if self.col_next is None else self.col_next.node_id
|
| 434 |
+
col_prev = None if self.col_prev is None else self.col_prev.node_id
|
| 435 |
+
return (
|
| 436 |
+
f"Node_id: {self.node_id}, Next: ({self.next}), "
|
| 437 |
+
f"Mate: ({self.mate.node_id}), "
|
| 438 |
+
f"col_next: ({col_next}), col_prev: ({col_prev})"
|
| 439 |
+
)
|
| 440 |
+
|
| 441 |
+
|
| 442 |
+
def _greedy_coloring_with_interchange(G, nodes):
    """Return a coloring for ``G`` using the interchange approach.

    This procedure is an adaption of the algorithm described by [1]_,
    and is an implementation of coloring with interchange. Please be
    advised, that the datastructures used are rather complex because
    they are optimized to minimize the time spent identifying
    subcomponents of the graph, which are possible candidates for color
    interchange.

    Parameters
    ----------
    G : NetworkX graph
        The graph to be colored

    nodes : list
        nodes ordered using the strategy of choice

    Returns
    -------
    dict :
        A dictionary keyed by node to a color value

    References
    ----------
    .. [1] Maciej M. Syslo, Narsingh Deo, Janusz S. Kowalik,
       Discrete Optimization Algorithms with Pascal Programs, 415-424, 1983.
       ISBN 0-486-45353-7.
    """
    n = len(G)

    # One _Node record per graph node, each able to bucket its
    # adjacency entries by up to ``n`` colors.
    graph = {node: _Node(node, n) for node in G}

    # Build the adjacency structure: each undirected edge becomes a
    # pair of _AdjEntry half-edges pointing at each other via ``mate``,
    # each pushed onto its endpoint's singly linked ``adj_list``.
    for node1, node2 in G.edges():
        adj_entry1 = _AdjEntry(node2)
        adj_entry2 = _AdjEntry(node1)
        adj_entry1.mate = adj_entry2
        adj_entry2.mate = adj_entry1
        node1_head = graph[node1].adj_list
        adj_entry1.next = node1_head
        graph[node1].adj_list = adj_entry1
        node2_head = graph[node2].adj_list
        adj_entry2.next = node2_head
        graph[node2].adj_list = adj_entry2

    # ``k`` is the largest color index used so far.
    k = 0
    for node in nodes:
        # Find the smallest possible, unused color
        neighbors = graph[node].iter_neighbors()
        col_used = {graph[adj_node.node_id].color for adj_node in neighbors}
        col_used.discard(-1)  # -1 marks a still-uncolored neighbor
        k1 = next(itertools.dropwhile(lambda x: x in col_used, itertools.count()))

        # k1 is now the lowest available color
        if k1 > k:
            # A brand-new color would be needed.  Try instead to free an
            # existing color by swapping two colors (col1, col2) within
            # a connected two-colored subcomponent around ``node``.
            connected = True
            visited = set()
            col1 = -1
            col2 = -1
            while connected and col1 < k:
                col1 += 1
                neighbor_cols = graph[node].iter_neighbors_color(col1)
                col1_adj = list(neighbor_cols)

                col2 = col1
                while connected and col2 < k:
                    col2 += 1
                    # BFS over nodes colored col1/col2, starting from
                    # the col1-colored neighbors of ``node``.
                    visited = set(col1_adj)
                    frontier = list(col1_adj)
                    i = 0
                    while i < len(frontier):
                        search_node = frontier[i]
                        i += 1
                        col_opp = col2 if graph[search_node].color == col1 else col1
                        neighbor_cols = graph[search_node].iter_neighbors_color(col_opp)

                        for neighbor in neighbor_cols:
                            if neighbor not in visited:
                                visited.add(neighbor)
                                frontier.append(neighbor)

                    # Search if node is not adj to any col2 vertex
                    connected = (
                        len(
                            visited.intersection(graph[node].iter_neighbors_color(col2))
                        )
                        > 0
                    )

            # If connected is false then we can swap !!!
            if not connected:
                # Update all the nodes in the component
                for search_node in visited:
                    graph[search_node].color = (
                        col2 if graph[search_node].color == col1 else col1
                    )
                    # Swap the two color buckets wholesale.
                    col2_adj = graph[search_node].adj_color[col2]
                    graph[search_node].adj_color[col2] = graph[search_node].adj_color[
                        col1
                    ]
                    graph[search_node].adj_color[col1] = col2_adj

                # Update all the neighboring nodes
                for search_node in visited:
                    col = graph[search_node].color
                    col_opp = col1 if col == col2 else col2
                    for adj_node in graph[search_node].iter_neighbors():
                        if graph[adj_node.node_id].color != col_opp:
                            # Direct reference to entry
                            adj_mate = adj_node.mate
                            graph[adj_node.node_id].clear_color(adj_mate, col_opp)
                            graph[adj_node.node_id].assign_color(adj_mate, col)
                # After the swap, ``node`` has no col1-colored neighbor
                # (all of its col1 neighbors were in ``visited``), so
                # col1 can be reused instead of opening a new color.
                k1 = col1

        # We can color this node color k1
        graph[node].color = k1
        k = max(k1, k)

        # Update the neighbors of this node
        for adj_node in graph[node].iter_neighbors():
            adj_mate = adj_node.mate
            graph[adj_node.node_id].assign_color(adj_mate, k1)

    return {node.node_id: node.color for node in graph.values()}
|
.venv/lib/python3.11/site-packages/networkx/algorithms/coloring/tests/__init__.py
ADDED
|
File without changes
|
.venv/lib/python3.11/site-packages/networkx/algorithms/coloring/tests/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (207 Bytes). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/coloring/tests/__pycache__/test_coloring.cpython-311.pyc
ADDED
|
Binary file (31.4 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/coloring/tests/test_coloring.py
ADDED
|
@@ -0,0 +1,863 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Greedy coloring test suite."""
|
| 2 |
+
|
| 3 |
+
import itertools
|
| 4 |
+
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
|
| 9 |
+
# Validity checkers shared with the equitable-coloring implementation,
# aliased for brevity throughout this test module.
is_coloring = nx.algorithms.coloring.equitable_coloring.is_coloring
is_equitable = nx.algorithms.coloring.equitable_coloring.is_equitable


# Every greedy-coloring strategy name accepted by ``greedy_color``,
# including the ``"connected_sequential"`` and ``"DSATUR"`` aliases.
ALL_STRATEGIES = [
    "largest_first",
    "random_sequential",
    "smallest_last",
    "independent_set",
    "connected_sequential_bfs",
    "connected_sequential_dfs",
    "connected_sequential",
    "saturation_largest_first",
    "DSATUR",
]

# List of strategies where interchange=True results in an error
INTERCHANGE_INVALID = ["independent_set", "saturation_largest_first", "DSATUR"]
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class TestColoring:
|
| 30 |
+
def test_basic_cases(self):
|
| 31 |
+
def check_basic_case(graph_func, n_nodes, strategy, interchange):
|
| 32 |
+
graph = graph_func()
|
| 33 |
+
coloring = nx.coloring.greedy_color(
|
| 34 |
+
graph, strategy=strategy, interchange=interchange
|
| 35 |
+
)
|
| 36 |
+
assert verify_length(coloring, n_nodes)
|
| 37 |
+
assert verify_coloring(graph, coloring)
|
| 38 |
+
|
| 39 |
+
for graph_func, n_nodes in BASIC_TEST_CASES.items():
|
| 40 |
+
for interchange in [True, False]:
|
| 41 |
+
for strategy in ALL_STRATEGIES:
|
| 42 |
+
check_basic_case(graph_func, n_nodes, strategy, False)
|
| 43 |
+
if strategy not in INTERCHANGE_INVALID:
|
| 44 |
+
check_basic_case(graph_func, n_nodes, strategy, True)
|
| 45 |
+
|
| 46 |
+
def test_special_cases(self):
|
| 47 |
+
def check_special_case(strategy, graph_func, interchange, colors):
|
| 48 |
+
graph = graph_func()
|
| 49 |
+
coloring = nx.coloring.greedy_color(
|
| 50 |
+
graph, strategy=strategy, interchange=interchange
|
| 51 |
+
)
|
| 52 |
+
if not hasattr(colors, "__len__"):
|
| 53 |
+
colors = [colors]
|
| 54 |
+
assert any(verify_length(coloring, n_colors) for n_colors in colors)
|
| 55 |
+
assert verify_coloring(graph, coloring)
|
| 56 |
+
|
| 57 |
+
for strategy, arglist in SPECIAL_TEST_CASES.items():
|
| 58 |
+
for args in arglist:
|
| 59 |
+
check_special_case(strategy, args[0], args[1], args[2])
|
| 60 |
+
|
| 61 |
+
def test_interchange_invalid(self):
|
| 62 |
+
graph = one_node_graph()
|
| 63 |
+
for strategy in INTERCHANGE_INVALID:
|
| 64 |
+
pytest.raises(
|
| 65 |
+
nx.NetworkXPointlessConcept,
|
| 66 |
+
nx.coloring.greedy_color,
|
| 67 |
+
graph,
|
| 68 |
+
strategy=strategy,
|
| 69 |
+
interchange=True,
|
| 70 |
+
)
|
| 71 |
+
|
| 72 |
+
def test_bad_inputs(self):
|
| 73 |
+
graph = one_node_graph()
|
| 74 |
+
pytest.raises(
|
| 75 |
+
nx.NetworkXError,
|
| 76 |
+
nx.coloring.greedy_color,
|
| 77 |
+
graph,
|
| 78 |
+
strategy="invalid strategy",
|
| 79 |
+
)
|
| 80 |
+
|
| 81 |
+
def test_strategy_as_function(self):
|
| 82 |
+
graph = lf_shc()
|
| 83 |
+
colors_1 = nx.coloring.greedy_color(graph, "largest_first")
|
| 84 |
+
colors_2 = nx.coloring.greedy_color(graph, nx.coloring.strategy_largest_first)
|
| 85 |
+
assert colors_1 == colors_2
|
| 86 |
+
|
| 87 |
+
def test_seed_argument(self):
|
| 88 |
+
graph = lf_shc()
|
| 89 |
+
rs = nx.coloring.strategy_random_sequential
|
| 90 |
+
c1 = nx.coloring.greedy_color(graph, lambda g, c: rs(g, c, seed=1))
|
| 91 |
+
for u, v in graph.edges:
|
| 92 |
+
assert c1[u] != c1[v]
|
| 93 |
+
|
| 94 |
+
def test_is_coloring(self):
|
| 95 |
+
G = nx.Graph()
|
| 96 |
+
G.add_edges_from([(0, 1), (1, 2)])
|
| 97 |
+
coloring = {0: 0, 1: 1, 2: 0}
|
| 98 |
+
assert is_coloring(G, coloring)
|
| 99 |
+
|
| 100 |
+
coloring[0] = 1
|
| 101 |
+
assert not is_coloring(G, coloring)
|
| 102 |
+
assert not is_equitable(G, coloring)
|
| 103 |
+
|
| 104 |
+
def test_is_equitable(self):
|
| 105 |
+
G = nx.Graph()
|
| 106 |
+
G.add_edges_from([(0, 1), (1, 2)])
|
| 107 |
+
coloring = {0: 0, 1: 1, 2: 0}
|
| 108 |
+
assert is_equitable(G, coloring)
|
| 109 |
+
|
| 110 |
+
G.add_edges_from([(2, 3), (2, 4), (2, 5)])
|
| 111 |
+
coloring[3] = 1
|
| 112 |
+
coloring[4] = 1
|
| 113 |
+
coloring[5] = 1
|
| 114 |
+
assert is_coloring(G, coloring)
|
| 115 |
+
assert not is_equitable(G, coloring)
|
| 116 |
+
|
| 117 |
+
def test_num_colors(self):
|
| 118 |
+
G = nx.Graph()
|
| 119 |
+
G.add_edges_from([(0, 1), (0, 2), (0, 3)])
|
| 120 |
+
pytest.raises(nx.NetworkXAlgorithmError, nx.coloring.equitable_color, G, 2)
|
| 121 |
+
|
| 122 |
+
def test_equitable_color(self):
|
| 123 |
+
G = nx.fast_gnp_random_graph(n=10, p=0.2, seed=42)
|
| 124 |
+
coloring = nx.coloring.equitable_color(G, max_degree(G) + 1)
|
| 125 |
+
assert is_equitable(G, coloring)
|
| 126 |
+
|
| 127 |
+
def test_equitable_color_empty(self):
|
| 128 |
+
G = nx.empty_graph()
|
| 129 |
+
coloring = nx.coloring.equitable_color(G, max_degree(G) + 1)
|
| 130 |
+
assert is_equitable(G, coloring)
|
| 131 |
+
|
| 132 |
+
def test_equitable_color_large(self):
|
| 133 |
+
G = nx.fast_gnp_random_graph(100, 0.1, seed=42)
|
| 134 |
+
coloring = nx.coloring.equitable_color(G, max_degree(G) + 1)
|
| 135 |
+
assert is_equitable(G, coloring, num_colors=max_degree(G) + 1)
|
| 136 |
+
|
| 137 |
+
def test_case_V_plus_not_in_A_cal(self):
|
| 138 |
+
# Hand crafted case to avoid the easy case.
|
| 139 |
+
L = {
|
| 140 |
+
0: [2, 5],
|
| 141 |
+
1: [3, 4],
|
| 142 |
+
2: [0, 8],
|
| 143 |
+
3: [1, 7],
|
| 144 |
+
4: [1, 6],
|
| 145 |
+
5: [0, 6],
|
| 146 |
+
6: [4, 5],
|
| 147 |
+
7: [3],
|
| 148 |
+
8: [2],
|
| 149 |
+
}
|
| 150 |
+
|
| 151 |
+
F = {
|
| 152 |
+
# Color 0
|
| 153 |
+
0: 0,
|
| 154 |
+
1: 0,
|
| 155 |
+
# Color 1
|
| 156 |
+
2: 1,
|
| 157 |
+
3: 1,
|
| 158 |
+
4: 1,
|
| 159 |
+
5: 1,
|
| 160 |
+
# Color 2
|
| 161 |
+
6: 2,
|
| 162 |
+
7: 2,
|
| 163 |
+
8: 2,
|
| 164 |
+
}
|
| 165 |
+
|
| 166 |
+
C = nx.algorithms.coloring.equitable_coloring.make_C_from_F(F)
|
| 167 |
+
N = nx.algorithms.coloring.equitable_coloring.make_N_from_L_C(L, C)
|
| 168 |
+
H = nx.algorithms.coloring.equitable_coloring.make_H_from_C_N(C, N)
|
| 169 |
+
|
| 170 |
+
nx.algorithms.coloring.equitable_coloring.procedure_P(
|
| 171 |
+
V_minus=0, V_plus=1, N=N, H=H, F=F, C=C, L=L
|
| 172 |
+
)
|
| 173 |
+
check_state(L=L, N=N, H=H, F=F, C=C)
|
| 174 |
+
|
| 175 |
+
def test_cast_no_solo(self):
|
| 176 |
+
L = {
|
| 177 |
+
0: [8, 9],
|
| 178 |
+
1: [10, 11],
|
| 179 |
+
2: [8],
|
| 180 |
+
3: [9],
|
| 181 |
+
4: [10, 11],
|
| 182 |
+
5: [8],
|
| 183 |
+
6: [9],
|
| 184 |
+
7: [10, 11],
|
| 185 |
+
8: [0, 2, 5],
|
| 186 |
+
9: [0, 3, 6],
|
| 187 |
+
10: [1, 4, 7],
|
| 188 |
+
11: [1, 4, 7],
|
| 189 |
+
}
|
| 190 |
+
|
| 191 |
+
F = {0: 0, 1: 0, 2: 2, 3: 2, 4: 2, 5: 3, 6: 3, 7: 3, 8: 1, 9: 1, 10: 1, 11: 1}
|
| 192 |
+
|
| 193 |
+
C = nx.algorithms.coloring.equitable_coloring.make_C_from_F(F)
|
| 194 |
+
N = nx.algorithms.coloring.equitable_coloring.make_N_from_L_C(L, C)
|
| 195 |
+
H = nx.algorithms.coloring.equitable_coloring.make_H_from_C_N(C, N)
|
| 196 |
+
|
| 197 |
+
nx.algorithms.coloring.equitable_coloring.procedure_P(
|
| 198 |
+
V_minus=0, V_plus=1, N=N, H=H, F=F, C=C, L=L
|
| 199 |
+
)
|
| 200 |
+
check_state(L=L, N=N, H=H, F=F, C=C)
|
| 201 |
+
|
| 202 |
+
def test_hard_prob(self):
|
| 203 |
+
# Tests for two levels of recursion.
|
| 204 |
+
num_colors, s = 5, 5
|
| 205 |
+
|
| 206 |
+
G = nx.Graph()
|
| 207 |
+
G.add_edges_from(
|
| 208 |
+
[
|
| 209 |
+
(0, 10),
|
| 210 |
+
(0, 11),
|
| 211 |
+
(0, 12),
|
| 212 |
+
(0, 23),
|
| 213 |
+
(10, 4),
|
| 214 |
+
(10, 9),
|
| 215 |
+
(10, 20),
|
| 216 |
+
(11, 4),
|
| 217 |
+
(11, 8),
|
| 218 |
+
(11, 16),
|
| 219 |
+
(12, 9),
|
| 220 |
+
(12, 22),
|
| 221 |
+
(12, 23),
|
| 222 |
+
(23, 7),
|
| 223 |
+
(1, 17),
|
| 224 |
+
(1, 18),
|
| 225 |
+
(1, 19),
|
| 226 |
+
(1, 24),
|
| 227 |
+
(17, 5),
|
| 228 |
+
(17, 13),
|
| 229 |
+
(17, 22),
|
| 230 |
+
(18, 5),
|
| 231 |
+
(19, 5),
|
| 232 |
+
(19, 6),
|
| 233 |
+
(19, 8),
|
| 234 |
+
(24, 7),
|
| 235 |
+
(24, 16),
|
| 236 |
+
(2, 4),
|
| 237 |
+
(2, 13),
|
| 238 |
+
(2, 14),
|
| 239 |
+
(2, 15),
|
| 240 |
+
(4, 6),
|
| 241 |
+
(13, 5),
|
| 242 |
+
(13, 21),
|
| 243 |
+
(14, 6),
|
| 244 |
+
(14, 15),
|
| 245 |
+
(15, 6),
|
| 246 |
+
(15, 21),
|
| 247 |
+
(3, 16),
|
| 248 |
+
(3, 20),
|
| 249 |
+
(3, 21),
|
| 250 |
+
(3, 22),
|
| 251 |
+
(16, 8),
|
| 252 |
+
(20, 8),
|
| 253 |
+
(21, 9),
|
| 254 |
+
(22, 7),
|
| 255 |
+
]
|
| 256 |
+
)
|
| 257 |
+
F = {node: node // s for node in range(num_colors * s)}
|
| 258 |
+
F[s - 1] = num_colors - 1
|
| 259 |
+
|
| 260 |
+
params = make_params_from_graph(G=G, F=F)
|
| 261 |
+
|
| 262 |
+
nx.algorithms.coloring.equitable_coloring.procedure_P(
|
| 263 |
+
V_minus=0, V_plus=num_colors - 1, **params
|
| 264 |
+
)
|
| 265 |
+
check_state(**params)
|
| 266 |
+
|
| 267 |
+
def test_hardest_prob(self):
|
| 268 |
+
# Tests for two levels of recursion.
|
| 269 |
+
num_colors, s = 10, 4
|
| 270 |
+
|
| 271 |
+
G = nx.Graph()
|
| 272 |
+
G.add_edges_from(
|
| 273 |
+
[
|
| 274 |
+
(0, 19),
|
| 275 |
+
(0, 24),
|
| 276 |
+
(0, 29),
|
| 277 |
+
(0, 30),
|
| 278 |
+
(0, 35),
|
| 279 |
+
(19, 3),
|
| 280 |
+
(19, 7),
|
| 281 |
+
(19, 9),
|
| 282 |
+
(19, 15),
|
| 283 |
+
(19, 21),
|
| 284 |
+
(19, 24),
|
| 285 |
+
(19, 30),
|
| 286 |
+
(19, 38),
|
| 287 |
+
(24, 5),
|
| 288 |
+
(24, 11),
|
| 289 |
+
(24, 13),
|
| 290 |
+
(24, 20),
|
| 291 |
+
(24, 30),
|
| 292 |
+
(24, 37),
|
| 293 |
+
(24, 38),
|
| 294 |
+
(29, 6),
|
| 295 |
+
(29, 10),
|
| 296 |
+
(29, 13),
|
| 297 |
+
(29, 15),
|
| 298 |
+
(29, 16),
|
| 299 |
+
(29, 17),
|
| 300 |
+
(29, 20),
|
| 301 |
+
(29, 26),
|
| 302 |
+
(30, 6),
|
| 303 |
+
(30, 10),
|
| 304 |
+
(30, 15),
|
| 305 |
+
(30, 22),
|
| 306 |
+
(30, 23),
|
| 307 |
+
(30, 39),
|
| 308 |
+
(35, 6),
|
| 309 |
+
(35, 9),
|
| 310 |
+
(35, 14),
|
| 311 |
+
(35, 18),
|
| 312 |
+
(35, 22),
|
| 313 |
+
(35, 23),
|
| 314 |
+
(35, 25),
|
| 315 |
+
(35, 27),
|
| 316 |
+
(1, 20),
|
| 317 |
+
(1, 26),
|
| 318 |
+
(1, 31),
|
| 319 |
+
(1, 34),
|
| 320 |
+
(1, 38),
|
| 321 |
+
(20, 4),
|
| 322 |
+
(20, 8),
|
| 323 |
+
(20, 14),
|
| 324 |
+
(20, 18),
|
| 325 |
+
(20, 28),
|
| 326 |
+
(20, 33),
|
| 327 |
+
(26, 7),
|
| 328 |
+
(26, 10),
|
| 329 |
+
(26, 14),
|
| 330 |
+
(26, 18),
|
| 331 |
+
(26, 21),
|
| 332 |
+
(26, 32),
|
| 333 |
+
(26, 39),
|
| 334 |
+
(31, 5),
|
| 335 |
+
(31, 8),
|
| 336 |
+
(31, 13),
|
| 337 |
+
(31, 16),
|
| 338 |
+
(31, 17),
|
| 339 |
+
(31, 21),
|
| 340 |
+
(31, 25),
|
| 341 |
+
(31, 27),
|
| 342 |
+
(34, 7),
|
| 343 |
+
(34, 8),
|
| 344 |
+
(34, 13),
|
| 345 |
+
(34, 18),
|
| 346 |
+
(34, 22),
|
| 347 |
+
(34, 23),
|
| 348 |
+
(34, 25),
|
| 349 |
+
(34, 27),
|
| 350 |
+
(38, 4),
|
| 351 |
+
(38, 9),
|
| 352 |
+
(38, 12),
|
| 353 |
+
(38, 14),
|
| 354 |
+
(38, 21),
|
| 355 |
+
(38, 27),
|
| 356 |
+
(2, 3),
|
| 357 |
+
(2, 18),
|
| 358 |
+
(2, 21),
|
| 359 |
+
(2, 28),
|
| 360 |
+
(2, 32),
|
| 361 |
+
(2, 33),
|
| 362 |
+
(2, 36),
|
| 363 |
+
(2, 37),
|
| 364 |
+
(2, 39),
|
| 365 |
+
(3, 5),
|
| 366 |
+
(3, 9),
|
| 367 |
+
(3, 13),
|
| 368 |
+
(3, 22),
|
| 369 |
+
(3, 23),
|
| 370 |
+
(3, 25),
|
| 371 |
+
(3, 27),
|
| 372 |
+
(18, 6),
|
| 373 |
+
(18, 11),
|
| 374 |
+
(18, 15),
|
| 375 |
+
(18, 39),
|
| 376 |
+
(21, 4),
|
| 377 |
+
(21, 10),
|
| 378 |
+
(21, 14),
|
| 379 |
+
(21, 36),
|
| 380 |
+
(28, 6),
|
| 381 |
+
(28, 10),
|
| 382 |
+
(28, 14),
|
| 383 |
+
(28, 16),
|
| 384 |
+
(28, 17),
|
| 385 |
+
(28, 25),
|
| 386 |
+
(28, 27),
|
| 387 |
+
(32, 5),
|
| 388 |
+
(32, 10),
|
| 389 |
+
(32, 12),
|
| 390 |
+
(32, 16),
|
| 391 |
+
(32, 17),
|
| 392 |
+
(32, 22),
|
| 393 |
+
(32, 23),
|
| 394 |
+
(33, 7),
|
| 395 |
+
(33, 10),
|
| 396 |
+
(33, 12),
|
| 397 |
+
(33, 16),
|
| 398 |
+
(33, 17),
|
| 399 |
+
(33, 25),
|
| 400 |
+
(33, 27),
|
| 401 |
+
(36, 5),
|
| 402 |
+
(36, 8),
|
| 403 |
+
(36, 15),
|
| 404 |
+
(36, 16),
|
| 405 |
+
(36, 17),
|
| 406 |
+
(36, 25),
|
| 407 |
+
(36, 27),
|
| 408 |
+
(37, 5),
|
| 409 |
+
(37, 11),
|
| 410 |
+
(37, 15),
|
| 411 |
+
(37, 16),
|
| 412 |
+
(37, 17),
|
| 413 |
+
(37, 22),
|
| 414 |
+
(37, 23),
|
| 415 |
+
(39, 7),
|
| 416 |
+
(39, 8),
|
| 417 |
+
(39, 15),
|
| 418 |
+
(39, 22),
|
| 419 |
+
(39, 23),
|
| 420 |
+
]
|
| 421 |
+
)
|
| 422 |
+
F = {node: node // s for node in range(num_colors * s)}
|
| 423 |
+
F[s - 1] = num_colors - 1 # V- = 0, V+ = num_colors - 1
|
| 424 |
+
|
| 425 |
+
params = make_params_from_graph(G=G, F=F)
|
| 426 |
+
|
| 427 |
+
nx.algorithms.coloring.equitable_coloring.procedure_P(
|
| 428 |
+
V_minus=0, V_plus=num_colors - 1, **params
|
| 429 |
+
)
|
| 430 |
+
check_state(**params)
|
| 431 |
+
|
| 432 |
+
def test_strategy_saturation_largest_first(self):
|
| 433 |
+
def color_remaining_nodes(
|
| 434 |
+
G,
|
| 435 |
+
colored_nodes,
|
| 436 |
+
full_color_assignment=None,
|
| 437 |
+
nodes_to_add_between_calls=1,
|
| 438 |
+
):
|
| 439 |
+
color_assignments = []
|
| 440 |
+
aux_colored_nodes = colored_nodes.copy()
|
| 441 |
+
|
| 442 |
+
node_iterator = nx.algorithms.coloring.greedy_coloring.strategy_saturation_largest_first(
|
| 443 |
+
G, aux_colored_nodes
|
| 444 |
+
)
|
| 445 |
+
|
| 446 |
+
for u in node_iterator:
|
| 447 |
+
# Set to keep track of colors of neighbors
|
| 448 |
+
nbr_colors = {
|
| 449 |
+
aux_colored_nodes[v] for v in G[u] if v in aux_colored_nodes
|
| 450 |
+
}
|
| 451 |
+
# Find the first unused color.
|
| 452 |
+
for color in itertools.count():
|
| 453 |
+
if color not in nbr_colors:
|
| 454 |
+
break
|
| 455 |
+
aux_colored_nodes[u] = color
|
| 456 |
+
color_assignments.append((u, color))
|
| 457 |
+
|
| 458 |
+
# Color nodes between iterations
|
| 459 |
+
for i in range(nodes_to_add_between_calls - 1):
|
| 460 |
+
if not len(color_assignments) + len(colored_nodes) >= len(
|
| 461 |
+
full_color_assignment
|
| 462 |
+
):
|
| 463 |
+
full_color_assignment_node, color = full_color_assignment[
|
| 464 |
+
len(color_assignments) + len(colored_nodes)
|
| 465 |
+
]
|
| 466 |
+
|
| 467 |
+
# Assign the new color to the current node.
|
| 468 |
+
aux_colored_nodes[full_color_assignment_node] = color
|
| 469 |
+
color_assignments.append((full_color_assignment_node, color))
|
| 470 |
+
|
| 471 |
+
return color_assignments, aux_colored_nodes
|
| 472 |
+
|
| 473 |
+
for G, _, _ in SPECIAL_TEST_CASES["saturation_largest_first"]:
|
| 474 |
+
G = G()
|
| 475 |
+
|
| 476 |
+
# Check that function still works when nodes are colored between iterations
|
| 477 |
+
for nodes_to_add_between_calls in range(1, 5):
|
| 478 |
+
# Get a full color assignment, (including the order in which nodes were colored)
|
| 479 |
+
colored_nodes = {}
|
| 480 |
+
full_color_assignment, full_colored_nodes = color_remaining_nodes(
|
| 481 |
+
G, colored_nodes
|
| 482 |
+
)
|
| 483 |
+
|
| 484 |
+
# For each node in the color assignment, add it to colored_nodes and re-run the function
|
| 485 |
+
for ind, (node, color) in enumerate(full_color_assignment):
|
| 486 |
+
colored_nodes[node] = color
|
| 487 |
+
|
| 488 |
+
(
|
| 489 |
+
partial_color_assignment,
|
| 490 |
+
partial_colored_nodes,
|
| 491 |
+
) = color_remaining_nodes(
|
| 492 |
+
G,
|
| 493 |
+
colored_nodes,
|
| 494 |
+
full_color_assignment=full_color_assignment,
|
| 495 |
+
nodes_to_add_between_calls=nodes_to_add_between_calls,
|
| 496 |
+
)
|
| 497 |
+
|
| 498 |
+
# Check that the color assignment and order of remaining nodes are the same
|
| 499 |
+
assert full_color_assignment[ind + 1 :] == partial_color_assignment
|
| 500 |
+
assert full_colored_nodes == partial_colored_nodes
|
| 501 |
+
|
| 502 |
+
|
| 503 |
+
# ############################ Utility functions ############################
|
| 504 |
+
def verify_coloring(graph, coloring):
|
| 505 |
+
for node in graph.nodes():
|
| 506 |
+
if node not in coloring:
|
| 507 |
+
return False
|
| 508 |
+
|
| 509 |
+
color = coloring[node]
|
| 510 |
+
for neighbor in graph.neighbors(node):
|
| 511 |
+
if coloring[neighbor] == color:
|
| 512 |
+
return False
|
| 513 |
+
|
| 514 |
+
return True
|
| 515 |
+
|
| 516 |
+
|
| 517 |
+
def verify_length(coloring, expected):
|
| 518 |
+
coloring = dict_to_sets(coloring)
|
| 519 |
+
return len(coloring) == expected
|
| 520 |
+
|
| 521 |
+
|
| 522 |
+
def dict_to_sets(colors):
|
| 523 |
+
if len(colors) == 0:
|
| 524 |
+
return []
|
| 525 |
+
|
| 526 |
+
k = max(colors.values()) + 1
|
| 527 |
+
sets = [set() for _ in range(k)]
|
| 528 |
+
|
| 529 |
+
for node, color in colors.items():
|
| 530 |
+
sets[color].add(node)
|
| 531 |
+
|
| 532 |
+
return sets
|
| 533 |
+
|
| 534 |
+
|
| 535 |
+
# ############################ Graph Generation ############################
|
| 536 |
+
|
| 537 |
+
|
| 538 |
+
def empty_graph():
|
| 539 |
+
return nx.Graph()
|
| 540 |
+
|
| 541 |
+
|
| 542 |
+
def one_node_graph():
|
| 543 |
+
graph = nx.Graph()
|
| 544 |
+
graph.add_nodes_from([1])
|
| 545 |
+
return graph
|
| 546 |
+
|
| 547 |
+
|
| 548 |
+
def two_node_graph():
|
| 549 |
+
graph = nx.Graph()
|
| 550 |
+
graph.add_nodes_from([1, 2])
|
| 551 |
+
graph.add_edges_from([(1, 2)])
|
| 552 |
+
return graph
|
| 553 |
+
|
| 554 |
+
|
| 555 |
+
def three_node_clique():
|
| 556 |
+
graph = nx.Graph()
|
| 557 |
+
graph.add_nodes_from([1, 2, 3])
|
| 558 |
+
graph.add_edges_from([(1, 2), (1, 3), (2, 3)])
|
| 559 |
+
return graph
|
| 560 |
+
|
| 561 |
+
|
| 562 |
+
def disconnected():
|
| 563 |
+
graph = nx.Graph()
|
| 564 |
+
graph.add_edges_from([(1, 2), (2, 3), (4, 5), (5, 6)])
|
| 565 |
+
return graph
|
| 566 |
+
|
| 567 |
+
|
| 568 |
+
def rs_shc():
|
| 569 |
+
graph = nx.Graph()
|
| 570 |
+
graph.add_nodes_from([1, 2, 3, 4])
|
| 571 |
+
graph.add_edges_from([(1, 2), (2, 3), (3, 4)])
|
| 572 |
+
return graph
|
| 573 |
+
|
| 574 |
+
|
| 575 |
+
def slf_shc():
|
| 576 |
+
graph = nx.Graph()
|
| 577 |
+
graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7])
|
| 578 |
+
graph.add_edges_from(
|
| 579 |
+
[(1, 2), (1, 5), (1, 6), (2, 3), (2, 7), (3, 4), (3, 7), (4, 5), (4, 6), (5, 6)]
|
| 580 |
+
)
|
| 581 |
+
return graph
|
| 582 |
+
|
| 583 |
+
|
| 584 |
+
def slf_hc():
|
| 585 |
+
graph = nx.Graph()
|
| 586 |
+
graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8])
|
| 587 |
+
graph.add_edges_from(
|
| 588 |
+
[
|
| 589 |
+
(1, 2),
|
| 590 |
+
(1, 3),
|
| 591 |
+
(1, 4),
|
| 592 |
+
(1, 5),
|
| 593 |
+
(2, 3),
|
| 594 |
+
(2, 4),
|
| 595 |
+
(2, 6),
|
| 596 |
+
(5, 7),
|
| 597 |
+
(5, 8),
|
| 598 |
+
(6, 7),
|
| 599 |
+
(6, 8),
|
| 600 |
+
(7, 8),
|
| 601 |
+
]
|
| 602 |
+
)
|
| 603 |
+
return graph
|
| 604 |
+
|
| 605 |
+
|
| 606 |
+
def lf_shc():
|
| 607 |
+
graph = nx.Graph()
|
| 608 |
+
graph.add_nodes_from([1, 2, 3, 4, 5, 6])
|
| 609 |
+
graph.add_edges_from([(6, 1), (1, 4), (4, 3), (3, 2), (2, 5)])
|
| 610 |
+
return graph
|
| 611 |
+
|
| 612 |
+
|
| 613 |
+
def lf_hc():
|
| 614 |
+
graph = nx.Graph()
|
| 615 |
+
graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7])
|
| 616 |
+
graph.add_edges_from(
|
| 617 |
+
[
|
| 618 |
+
(1, 7),
|
| 619 |
+
(1, 6),
|
| 620 |
+
(1, 3),
|
| 621 |
+
(1, 4),
|
| 622 |
+
(7, 2),
|
| 623 |
+
(2, 6),
|
| 624 |
+
(2, 3),
|
| 625 |
+
(2, 5),
|
| 626 |
+
(5, 3),
|
| 627 |
+
(5, 4),
|
| 628 |
+
(4, 3),
|
| 629 |
+
]
|
| 630 |
+
)
|
| 631 |
+
return graph
|
| 632 |
+
|
| 633 |
+
|
| 634 |
+
def sl_shc():
|
| 635 |
+
graph = nx.Graph()
|
| 636 |
+
graph.add_nodes_from([1, 2, 3, 4, 5, 6])
|
| 637 |
+
graph.add_edges_from(
|
| 638 |
+
[(1, 2), (1, 3), (2, 3), (1, 4), (2, 5), (3, 6), (4, 5), (4, 6), (5, 6)]
|
| 639 |
+
)
|
| 640 |
+
return graph
|
| 641 |
+
|
| 642 |
+
|
| 643 |
+
def sl_hc():
|
| 644 |
+
graph = nx.Graph()
|
| 645 |
+
graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8])
|
| 646 |
+
graph.add_edges_from(
|
| 647 |
+
[
|
| 648 |
+
(1, 2),
|
| 649 |
+
(1, 3),
|
| 650 |
+
(1, 5),
|
| 651 |
+
(1, 7),
|
| 652 |
+
(2, 3),
|
| 653 |
+
(2, 4),
|
| 654 |
+
(2, 8),
|
| 655 |
+
(8, 4),
|
| 656 |
+
(8, 6),
|
| 657 |
+
(8, 7),
|
| 658 |
+
(7, 5),
|
| 659 |
+
(7, 6),
|
| 660 |
+
(3, 4),
|
| 661 |
+
(4, 6),
|
| 662 |
+
(6, 5),
|
| 663 |
+
(5, 3),
|
| 664 |
+
]
|
| 665 |
+
)
|
| 666 |
+
return graph
|
| 667 |
+
|
| 668 |
+
|
| 669 |
+
def gis_shc():
|
| 670 |
+
graph = nx.Graph()
|
| 671 |
+
graph.add_nodes_from([1, 2, 3, 4])
|
| 672 |
+
graph.add_edges_from([(1, 2), (2, 3), (3, 4)])
|
| 673 |
+
return graph
|
| 674 |
+
|
| 675 |
+
|
| 676 |
+
def gis_hc():
|
| 677 |
+
graph = nx.Graph()
|
| 678 |
+
graph.add_nodes_from([1, 2, 3, 4, 5, 6])
|
| 679 |
+
graph.add_edges_from([(1, 5), (2, 5), (3, 6), (4, 6), (5, 6)])
|
| 680 |
+
return graph
|
| 681 |
+
|
| 682 |
+
|
| 683 |
+
def cs_shc():
|
| 684 |
+
graph = nx.Graph()
|
| 685 |
+
graph.add_nodes_from([1, 2, 3, 4, 5])
|
| 686 |
+
graph.add_edges_from([(1, 2), (1, 5), (2, 3), (2, 4), (2, 5), (3, 4), (4, 5)])
|
| 687 |
+
return graph
|
| 688 |
+
|
| 689 |
+
|
| 690 |
+
def rsi_shc():
|
| 691 |
+
graph = nx.Graph()
|
| 692 |
+
graph.add_nodes_from([1, 2, 3, 4, 5, 6])
|
| 693 |
+
graph.add_edges_from(
|
| 694 |
+
[(1, 2), (1, 5), (1, 6), (2, 3), (3, 4), (4, 5), (4, 6), (5, 6)]
|
| 695 |
+
)
|
| 696 |
+
return graph
|
| 697 |
+
|
| 698 |
+
|
| 699 |
+
def lfi_shc():
|
| 700 |
+
graph = nx.Graph()
|
| 701 |
+
graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7])
|
| 702 |
+
graph.add_edges_from(
|
| 703 |
+
[(1, 2), (1, 5), (1, 6), (2, 3), (2, 7), (3, 4), (3, 7), (4, 5), (4, 6), (5, 6)]
|
| 704 |
+
)
|
| 705 |
+
return graph
|
| 706 |
+
|
| 707 |
+
|
| 708 |
+
def lfi_hc():
|
| 709 |
+
graph = nx.Graph()
|
| 710 |
+
graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9])
|
| 711 |
+
graph.add_edges_from(
|
| 712 |
+
[
|
| 713 |
+
(1, 2),
|
| 714 |
+
(1, 5),
|
| 715 |
+
(1, 6),
|
| 716 |
+
(1, 7),
|
| 717 |
+
(2, 3),
|
| 718 |
+
(2, 8),
|
| 719 |
+
(2, 9),
|
| 720 |
+
(3, 4),
|
| 721 |
+
(3, 8),
|
| 722 |
+
(3, 9),
|
| 723 |
+
(4, 5),
|
| 724 |
+
(4, 6),
|
| 725 |
+
(4, 7),
|
| 726 |
+
(5, 6),
|
| 727 |
+
]
|
| 728 |
+
)
|
| 729 |
+
return graph
|
| 730 |
+
|
| 731 |
+
|
| 732 |
+
def sli_shc():
|
| 733 |
+
graph = nx.Graph()
|
| 734 |
+
graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7])
|
| 735 |
+
graph.add_edges_from(
|
| 736 |
+
[
|
| 737 |
+
(1, 2),
|
| 738 |
+
(1, 3),
|
| 739 |
+
(1, 5),
|
| 740 |
+
(1, 7),
|
| 741 |
+
(2, 3),
|
| 742 |
+
(2, 6),
|
| 743 |
+
(3, 4),
|
| 744 |
+
(4, 5),
|
| 745 |
+
(4, 6),
|
| 746 |
+
(5, 7),
|
| 747 |
+
(6, 7),
|
| 748 |
+
]
|
| 749 |
+
)
|
| 750 |
+
return graph
|
| 751 |
+
|
| 752 |
+
|
| 753 |
+
def sli_hc():
|
| 754 |
+
graph = nx.Graph()
|
| 755 |
+
graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9])
|
| 756 |
+
graph.add_edges_from(
|
| 757 |
+
[
|
| 758 |
+
(1, 2),
|
| 759 |
+
(1, 3),
|
| 760 |
+
(1, 4),
|
| 761 |
+
(1, 5),
|
| 762 |
+
(2, 3),
|
| 763 |
+
(2, 7),
|
| 764 |
+
(2, 8),
|
| 765 |
+
(2, 9),
|
| 766 |
+
(3, 6),
|
| 767 |
+
(3, 7),
|
| 768 |
+
(3, 9),
|
| 769 |
+
(4, 5),
|
| 770 |
+
(4, 6),
|
| 771 |
+
(4, 8),
|
| 772 |
+
(4, 9),
|
| 773 |
+
(5, 6),
|
| 774 |
+
(5, 7),
|
| 775 |
+
(5, 8),
|
| 776 |
+
(6, 7),
|
| 777 |
+
(6, 9),
|
| 778 |
+
(7, 8),
|
| 779 |
+
(8, 9),
|
| 780 |
+
]
|
| 781 |
+
)
|
| 782 |
+
return graph
|
| 783 |
+
|
| 784 |
+
|
| 785 |
+
# --------------------------------------------------------------------------
|
| 786 |
+
# Basic tests for all strategies
|
| 787 |
+
# For each basic graph function, specify the number of expected colors.
|
| 788 |
+
BASIC_TEST_CASES = {
|
| 789 |
+
empty_graph: 0,
|
| 790 |
+
one_node_graph: 1,
|
| 791 |
+
two_node_graph: 2,
|
| 792 |
+
disconnected: 2,
|
| 793 |
+
three_node_clique: 3,
|
| 794 |
+
}
|
| 795 |
+
|
| 796 |
+
|
| 797 |
+
# --------------------------------------------------------------------------
|
| 798 |
+
# Special test cases. Each strategy has a list of tuples of the form
|
| 799 |
+
# (graph function, interchange, valid # of colors)
|
| 800 |
+
SPECIAL_TEST_CASES = {
|
| 801 |
+
"random_sequential": [
|
| 802 |
+
(rs_shc, False, (2, 3)),
|
| 803 |
+
(rs_shc, True, 2),
|
| 804 |
+
(rsi_shc, True, (3, 4)),
|
| 805 |
+
],
|
| 806 |
+
"saturation_largest_first": [(slf_shc, False, (3, 4)), (slf_hc, False, 4)],
|
| 807 |
+
"largest_first": [
|
| 808 |
+
(lf_shc, False, (2, 3)),
|
| 809 |
+
(lf_hc, False, 4),
|
| 810 |
+
(lf_shc, True, 2),
|
| 811 |
+
(lf_hc, True, 3),
|
| 812 |
+
(lfi_shc, True, (3, 4)),
|
| 813 |
+
(lfi_hc, True, 4),
|
| 814 |
+
],
|
| 815 |
+
"smallest_last": [
|
| 816 |
+
(sl_shc, False, (3, 4)),
|
| 817 |
+
(sl_hc, False, 5),
|
| 818 |
+
(sl_shc, True, 3),
|
| 819 |
+
(sl_hc, True, 4),
|
| 820 |
+
(sli_shc, True, (3, 4)),
|
| 821 |
+
(sli_hc, True, 5),
|
| 822 |
+
],
|
| 823 |
+
"independent_set": [(gis_shc, False, (2, 3)), (gis_hc, False, 3)],
|
| 824 |
+
"connected_sequential": [(cs_shc, False, (3, 4)), (cs_shc, True, 3)],
|
| 825 |
+
"connected_sequential_dfs": [(cs_shc, False, (3, 4))],
|
| 826 |
+
}
|
| 827 |
+
|
| 828 |
+
|
| 829 |
+
# --------------------------------------------------------------------------
|
| 830 |
+
# Helper functions to test
|
| 831 |
+
# (graph function, interchange, valid # of colors)
|
| 832 |
+
|
| 833 |
+
|
| 834 |
+
def check_state(L, N, H, F, C):
|
| 835 |
+
s = len(C[0])
|
| 836 |
+
num_colors = len(C.keys())
|
| 837 |
+
|
| 838 |
+
assert all(u in L[v] for u in L for v in L[u])
|
| 839 |
+
assert all(F[u] != F[v] for u in L for v in L[u])
|
| 840 |
+
assert all(len(L[u]) < num_colors for u in L)
|
| 841 |
+
assert all(len(C[x]) == s for x in C)
|
| 842 |
+
assert all(H[(c1, c2)] >= 0 for c1 in C for c2 in C)
|
| 843 |
+
assert all(N[(u, F[u])] == 0 for u in F)
|
| 844 |
+
|
| 845 |
+
|
| 846 |
+
def max_degree(G):
|
| 847 |
+
"""Get the maximum degree of any node in G."""
|
| 848 |
+
return max(G.degree(node) for node in G.nodes) if len(G.nodes) > 0 else 0
|
| 849 |
+
|
| 850 |
+
|
| 851 |
+
def make_params_from_graph(G, F):
|
| 852 |
+
"""Returns {N, L, H, C} from the given graph."""
|
| 853 |
+
num_nodes = len(G)
|
| 854 |
+
L = {u: [] for u in range(num_nodes)}
|
| 855 |
+
for u, v in G.edges:
|
| 856 |
+
L[u].append(v)
|
| 857 |
+
L[v].append(u)
|
| 858 |
+
|
| 859 |
+
C = nx.algorithms.coloring.equitable_coloring.make_C_from_F(F)
|
| 860 |
+
N = nx.algorithms.coloring.equitable_coloring.make_N_from_L_C(L, C)
|
| 861 |
+
H = nx.algorithms.coloring.equitable_coloring.make_H_from_C_N(C, N)
|
| 862 |
+
|
| 863 |
+
return {"N": N, "F": F, "C": C, "H": H, "L": L}
|
.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/tests/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (211 Bytes). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_kcomponents.cpython-311.pyc
ADDED
|
Binary file (13.3 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_stoer_wagner.cpython-311.pyc
ADDED
|
Binary file (7.08 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/shortest_paths/__init__.py
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from networkx.algorithms.shortest_paths.generic import *
|
| 2 |
+
from networkx.algorithms.shortest_paths.unweighted import *
|
| 3 |
+
from networkx.algorithms.shortest_paths.weighted import *
|
| 4 |
+
from networkx.algorithms.shortest_paths.astar import *
|
| 5 |
+
from networkx.algorithms.shortest_paths.dense import *
|