Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__init__.py +20 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/betweenness.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/betweenness_subset.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/closeness.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness_subset.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_closeness.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/degree_alg.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/dispersion.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/eigenvector.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/flow_matrix.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/group.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/harmonic.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/katz.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/laplacian.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/load.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/percolation.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/reaching.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/second_order.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/subgraph_alg.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/trophic.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/voterank_alg.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/betweenness_subset.py +275 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/current_flow_betweenness.py +342 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/degree_alg.py +150 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/dispersion.py +107 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/harmonic.py +89 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/laplacian.py +150 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/load.py +200 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/reaching.py +209 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/second_order.py +141 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__init__.py +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_betweenness_centrality_subset.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_closeness_centrality.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_degree_centrality.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_dispersion.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_eigenvector_centrality.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_harmonic_centrality.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_katz_centrality.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_laplacian_centrality.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_percolation_centrality.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_reaching.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_second_order_centrality.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_subgraph.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_trophic.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality.py +780 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py +340 -0
- .venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_closeness_centrality.py +307 -0
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__init__.py
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .betweenness import *
|
| 2 |
+
from .betweenness_subset import *
|
| 3 |
+
from .closeness import *
|
| 4 |
+
from .current_flow_betweenness import *
|
| 5 |
+
from .current_flow_betweenness_subset import *
|
| 6 |
+
from .current_flow_closeness import *
|
| 7 |
+
from .degree_alg import *
|
| 8 |
+
from .dispersion import *
|
| 9 |
+
from .eigenvector import *
|
| 10 |
+
from .group import *
|
| 11 |
+
from .harmonic import *
|
| 12 |
+
from .katz import *
|
| 13 |
+
from .load import *
|
| 14 |
+
from .percolation import *
|
| 15 |
+
from .reaching import *
|
| 16 |
+
from .second_order import *
|
| 17 |
+
from .subgraph_alg import *
|
| 18 |
+
from .trophic import *
|
| 19 |
+
from .voterank_alg import *
|
| 20 |
+
from .laplacian import *
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (884 Bytes). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/betweenness.cpython-311.pyc
ADDED
|
Binary file (17.4 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/betweenness_subset.cpython-311.pyc
ADDED
|
Binary file (10.7 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/closeness.cpython-311.pyc
ADDED
|
Binary file (11.1 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness.cpython-311.pyc
ADDED
|
Binary file (16 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness_subset.cpython-311.pyc
ADDED
|
Binary file (10.6 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_closeness.cpython-311.pyc
ADDED
|
Binary file (4.79 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/degree_alg.cpython-311.pyc
ADDED
|
Binary file (5.95 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/dispersion.cpython-311.pyc
ADDED
|
Binary file (4.38 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/eigenvector.cpython-311.pyc
ADDED
|
Binary file (16.1 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/flow_matrix.cpython-311.pyc
ADDED
|
Binary file (9.15 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/group.cpython-311.pyc
ADDED
|
Binary file (32.4 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/harmonic.cpython-311.pyc
ADDED
|
Binary file (3.78 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/katz.cpython-311.pyc
ADDED
|
Binary file (13.7 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/laplacian.cpython-311.pyc
ADDED
|
Binary file (7.29 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/load.cpython-311.pyc
ADDED
|
Binary file (8.19 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/percolation.cpython-311.pyc
ADDED
|
Binary file (5.17 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/reaching.cpython-311.pyc
ADDED
|
Binary file (9.15 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/second_order.cpython-311.pyc
ADDED
|
Binary file (7.3 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/subgraph_alg.cpython-311.pyc
ADDED
|
Binary file (11.7 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/trophic.cpython-311.pyc
ADDED
|
Binary file (6.26 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/__pycache__/voterank_alg.cpython-311.pyc
ADDED
|
Binary file (4.49 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/betweenness_subset.py
ADDED
|
@@ -0,0 +1,275 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Betweenness centrality measures for subsets of nodes."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.algorithms.centrality.betweenness import (
|
| 5 |
+
_add_edge_keys,
|
| 6 |
+
)
|
| 7 |
+
from networkx.algorithms.centrality.betweenness import (
|
| 8 |
+
_single_source_dijkstra_path_basic as dijkstra,
|
| 9 |
+
)
|
| 10 |
+
from networkx.algorithms.centrality.betweenness import (
|
| 11 |
+
_single_source_shortest_path_basic as shortest_path,
|
| 12 |
+
)
|
| 13 |
+
|
| 14 |
+
__all__ = [
|
| 15 |
+
"betweenness_centrality_subset",
|
| 16 |
+
"edge_betweenness_centrality_subset",
|
| 17 |
+
]
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
@nx._dispatchable(edge_attrs="weight")
def betweenness_centrality_subset(G, sources, targets, normalized=False, weight=None):
    r"""Compute betweenness centrality for a subset of nodes.

    .. math::

       c_B(v) =\sum_{s\in S, t \in T} \frac{\sigma(s, t|v)}{\sigma(s, t)}

    where $S$ is the set of sources, $T$ is the set of targets,
    $\sigma(s, t)$ is the number of shortest $(s, t)$-paths, and
    $\sigma(s, t|v)$ is the number of those paths passing through some
    node $v$ other than $s, t$.  If $s = t$, $\sigma(s, t) = 1$, and if
    $v \in {s, t}$, $\sigma(s, t|v) = 0$ [2]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph.

    sources: list of nodes
      Nodes to use as sources for shortest paths in betweenness

    targets: list of nodes
      Nodes to use as targets for shortest paths in betweenness

    normalized : bool, optional
      If True the betweenness values are normalized by $2/((n-1)(n-2))$
      for graphs, and $1/((n-1)(n-2))$ for directed graphs where $n$
      is the number of nodes in G.

    weight : None or string, optional (default=None)
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.
      Weights are used to calculate weighted shortest paths, so they are
      interpreted as distances.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    edge_betweenness_centrality
    load_centrality

    Notes
    -----
    Uses Brandes' accumulation [1]_ restricted to the given source and
    target sets; see [2]_ for the subset variant.

    Edge weights must be greater than zero: zero-weight edges allow an
    infinite number of equal-length paths between node pairs.

    The normalization is chosen so that
    ``betweenness_centrality_subset(G, sources=G.nodes(), targets=G.nodes())``
    agrees with ``betweenness_centrality(G)``.  On an undirected graph,
    when the source and target sets differ, each shortest path is
    traversed in a single direction only and therefore counts as half
    an undirected path.

    References
    ----------
    .. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       https://doi.org/10.1080/0022250X.2001.9990249
    .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
       Centrality and their Generic Computation.
       Social Networks 30(2):136-145, 2008.
       https://doi.org/10.1016/j.socnet.2007.11.001
    """
    # One Brandes pass per source node, accumulating into a shared score dict.
    scores = dict.fromkeys(G, 0.0)
    for source in sources:
        if weight is None:
            # Unweighted graph: plain BFS yields the shortest-path DAG.
            S, P, sigma, _ = shortest_path(G, source)
        else:
            # Weighted graph: Dijkstra with `weight` interpreted as distance.
            S, P, sigma, _ = dijkstra(G, source, weight)
        scores = _accumulate_subset(scores, S, P, sigma, source, targets)
    return _rescale(scores, len(G), normalized=normalized, directed=G.is_directed())
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
@nx._dispatchable(edge_attrs="weight")
def edge_betweenness_centrality_subset(
    G, sources, targets, normalized=False, weight=None
):
    r"""Compute betweenness centrality for edges for a subset of nodes.

    .. math::

       c_B(v) =\sum_{s\in S,t \in T} \frac{\sigma(s, t|e)}{\sigma(s, t)}

    where $S$ is the set of sources, $T$ is the set of targets,
    $\sigma(s, t)$ is the number of shortest $(s, t)$-paths, and
    $\sigma(s, t|e)$ is the number of those paths passing through
    edge $e$ [2]_.

    Parameters
    ----------
    G : graph
      A networkx graph.

    sources: list of nodes
      Nodes to use as sources for shortest paths in betweenness

    targets: list of nodes
      Nodes to use as targets for shortest paths in betweenness

    normalized : bool, optional
      If True the betweenness values are normalized by `2/(n(n-1))`
      for graphs, and `1/(n(n-1))` for directed graphs where `n`
      is the number of nodes in G.

    weight : None or string, optional (default=None)
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.
      Weights are used to calculate weighted shortest paths, so they are
      interpreted as distances.

    Returns
    -------
    edges : dictionary
       Dictionary of edges with Betweenness centrality as the value.

    See Also
    --------
    betweenness_centrality
    edge_load

    Notes
    -----
    Uses Brandes' accumulation [1]_ restricted to the given source and
    target sets; see [2]_ for the subset variant.

    Edge weights must be greater than zero: zero-weight edges allow an
    infinite number of equal-length paths between node pairs.

    The normalization matches edge_betweenness_centrality() so that
    ``edge_betweenness_centrality_subset(G, sources=G.nodes(), targets=G.nodes())``
    agrees with ``edge_betweenness_centrality(G)``.

    References
    ----------
    .. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality.
       Journal of Mathematical Sociology 25(2):163-177, 2001.
       https://doi.org/10.1080/0022250X.2001.9990249
    .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
       Centrality and their Generic Computation.
       Social Networks 30(2):136-145, 2008.
       https://doi.org/10.1016/j.socnet.2007.11.001
    """
    # Seed both node and edge keys with zero; the node entries are scratch
    # space for the accumulation and are removed before returning.
    scores = dict.fromkeys(G, 0.0)
    scores.update(dict.fromkeys(G.edges(), 0.0))
    for source in sources:
        if weight is None:
            # Unweighted graph: plain BFS yields the shortest-path DAG.
            S, P, sigma, _ = shortest_path(G, source)
        else:
            # Weighted graph: Dijkstra with `weight` interpreted as distance.
            S, P, sigma, _ = dijkstra(G, source, weight)
        scores = _accumulate_edges_subset(scores, S, P, sigma, source, targets)
    for node in G:
        # Keep only edge keys in the result.
        del scores[node]
    scores = _rescale_e(scores, len(G), normalized=normalized, directed=G.is_directed())
    if G.is_multigraph():
        scores = _add_edge_keys(G, scores, weight=weight)
    return scores
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
def _accumulate_subset(betweenness, S, P, sigma, s, targets):
|
| 206 |
+
delta = dict.fromkeys(S, 0.0)
|
| 207 |
+
target_set = set(targets) - {s}
|
| 208 |
+
while S:
|
| 209 |
+
w = S.pop()
|
| 210 |
+
if w in target_set:
|
| 211 |
+
coeff = (delta[w] + 1.0) / sigma[w]
|
| 212 |
+
else:
|
| 213 |
+
coeff = delta[w] / sigma[w]
|
| 214 |
+
for v in P[w]:
|
| 215 |
+
delta[v] += sigma[v] * coeff
|
| 216 |
+
if w != s:
|
| 217 |
+
betweenness[w] += delta[w]
|
| 218 |
+
return betweenness
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
def _accumulate_edges_subset(betweenness, S, P, sigma, s, targets):
|
| 222 |
+
"""edge_betweenness_centrality_subset helper."""
|
| 223 |
+
delta = dict.fromkeys(S, 0)
|
| 224 |
+
target_set = set(targets)
|
| 225 |
+
while S:
|
| 226 |
+
w = S.pop()
|
| 227 |
+
for v in P[w]:
|
| 228 |
+
if w in target_set:
|
| 229 |
+
c = (sigma[v] / sigma[w]) * (1.0 + delta[w])
|
| 230 |
+
else:
|
| 231 |
+
c = delta[w] / len(P[w])
|
| 232 |
+
if (v, w) not in betweenness:
|
| 233 |
+
betweenness[(w, v)] += c
|
| 234 |
+
else:
|
| 235 |
+
betweenness[(v, w)] += c
|
| 236 |
+
delta[v] += c
|
| 237 |
+
if w != s:
|
| 238 |
+
betweenness[w] += delta[w]
|
| 239 |
+
return betweenness
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
def _rescale(betweenness, n, normalized, directed=False):
|
| 243 |
+
"""betweenness_centrality_subset helper."""
|
| 244 |
+
if normalized:
|
| 245 |
+
if n <= 2:
|
| 246 |
+
scale = None # no normalization b=0 for all nodes
|
| 247 |
+
else:
|
| 248 |
+
scale = 1.0 / ((n - 1) * (n - 2))
|
| 249 |
+
else: # rescale by 2 for undirected graphs
|
| 250 |
+
if not directed:
|
| 251 |
+
scale = 0.5
|
| 252 |
+
else:
|
| 253 |
+
scale = None
|
| 254 |
+
if scale is not None:
|
| 255 |
+
for v in betweenness:
|
| 256 |
+
betweenness[v] *= scale
|
| 257 |
+
return betweenness
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
def _rescale_e(betweenness, n, normalized, directed=False):
|
| 261 |
+
"""edge_betweenness_centrality_subset helper."""
|
| 262 |
+
if normalized:
|
| 263 |
+
if n <= 1:
|
| 264 |
+
scale = None # no normalization b=0 for all nodes
|
| 265 |
+
else:
|
| 266 |
+
scale = 1.0 / (n * (n - 1))
|
| 267 |
+
else: # rescale by 2 for undirected graphs
|
| 268 |
+
if not directed:
|
| 269 |
+
scale = 0.5
|
| 270 |
+
else:
|
| 271 |
+
scale = None
|
| 272 |
+
if scale is not None:
|
| 273 |
+
for v in betweenness:
|
| 274 |
+
betweenness[v] *= scale
|
| 275 |
+
return betweenness
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/current_flow_betweenness.py
ADDED
|
@@ -0,0 +1,342 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Current-flow betweenness centrality measures."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.algorithms.centrality.flow_matrix import (
|
| 5 |
+
CGInverseLaplacian,
|
| 6 |
+
FullInverseLaplacian,
|
| 7 |
+
SuperLUInverseLaplacian,
|
| 8 |
+
flow_matrix_row,
|
| 9 |
+
)
|
| 10 |
+
from networkx.utils import (
|
| 11 |
+
not_implemented_for,
|
| 12 |
+
py_random_state,
|
| 13 |
+
reverse_cuthill_mckee_ordering,
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
__all__ = [
|
| 17 |
+
"current_flow_betweenness_centrality",
|
| 18 |
+
"approximate_current_flow_betweenness_centrality",
|
| 19 |
+
"edge_current_flow_betweenness_centrality",
|
| 20 |
+
]
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
@not_implemented_for("directed")
@py_random_state(7)
@nx._dispatchable(edge_attrs="weight")
def approximate_current_flow_betweenness_centrality(
    G,
    normalized=True,
    weight=None,
    dtype=float,
    solver="full",
    epsilon=0.5,
    kmax=10000,
    seed=None,
):
    r"""Compute the approximate current-flow betweenness centrality for nodes.

    Approximates the current-flow betweenness centrality within absolute
    error of epsilon with high probability [1]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
      n is the number of nodes in G.

    weight : string or None, optional (default=None)
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.
      The weight reflects the capacity or the strength of the
      edge.

    dtype : data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver : string (default='full')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    epsilon: float
        Absolute error tolerance.

    kmax: int
       Maximum number of sample node pairs to use for approximation.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    current_flow_betweenness_centrality

    Notes
    -----
    The running time is $O((1/\epsilon^2)m{\sqrt k} \log n)$
    and the space required is $O(m)$ for $n$ nodes and $m$ edges.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm.  Unspecified weights are set to 1.

    References
    ----------
    .. [1] Ulrik Brandes and Daniel Fleischer:
       Centrality Measures Based on Current Flow.
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       https://doi.org/10.1007/978-3-540-31856-9_44
    """
    import numpy as np

    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    solvers = {
        "full": FullInverseLaplacian,
        "lu": SuperLUInverseLaplacian,
        "cg": CGInverseLaplacian,
    }
    n = G.number_of_nodes()
    # Relabel nodes to 0..n-1 in reverse Cuthill-McKee order, which keeps
    # the Laplacian's bandwidth small for the linear solvers below.
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # A copy with integer labels; could be done without copying if needed.
    H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
    L = nx.laplacian_matrix(H, nodelist=range(n), weight=weight).asformat("csc")
    L = L.astype(dtype)
    C = solvers[solver](L, dtype=dtype)  # prepared inverse-Laplacian solver
    betweenness = dict.fromkeys(H, 0.0)
    nb = (n - 1.0) * (n - 2.0)  # normalization denominator
    cstar = n * (n - 1) / nb
    oversample = 1  # adjustable oversampling parameter from the approximation
    num_pairs = oversample * int(np.ceil((cstar / epsilon) ** 2 * np.log(n)))
    if num_pairs > kmax:
        msg = f"Number random pairs k>kmax ({num_pairs}>{kmax}) "
        raise nx.NetworkXError(msg, "Increase kmax or epsilon")
    cstar2k = cstar / (2 * num_pairs)
    for _ in range(num_pairs):
        # Inject one unit of current at s and extract it at t, then read
        # off the potentials; current through each edge scores its nodes.
        s, t = pair = seed.sample(range(n), 2)
        supply = np.zeros(n, dtype=dtype)
        supply[s] = 1
        supply[t] = -1
        potentials = C.solve(supply)
        for v in H:
            if v in pair:
                continue
            for nbr in H[v]:
                w = H[v][nbr].get(weight, 1.0)
                betweenness[v] += float(
                    w * np.abs(potentials[v] - potentials[nbr]) * cstar2k
                )
    # Undo the built-in normalization if the caller asked for raw values.
    factor = 1.0 if normalized else nb / 2.0
    # Map integer labels back to the original node names.
    return {ordering[i]: score * factor for i, score in betweenness.items()}
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def current_flow_betweenness_centrality(
    G, normalized=True, weight=None, dtype=float, solver="full"
):
    r"""Compute current-flow betweenness centrality for nodes.

    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.

    Current-flow betweenness centrality is also known as
    random-walk betweenness centrality [2]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
      n is the number of nodes in G.

    weight : string or None, optional (default=None)
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.
      The weight reflects the capacity or the strength of the
      edge.

    dtype : data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver : string (default='full')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    approximate_current_flow_betweenness_centrality
    betweenness_centrality
    edge_betweenness_centrality
    edge_current_flow_betweenness_centrality

    Notes
    -----
    Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
    time [1]_, where $I(n-1)$ is the time needed to compute the
    inverse Laplacian. For a full matrix this is $O(n^3)$ but using
    sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
    Laplacian matrix condition number.

    The space required is $O(nw)$ where $w$ is the width of the sparse
    Laplacian matrix. Worse case is $w=n$ for $O(n^2)$.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm. Unspecified weights are set to 1.

    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       https://doi.org/10.1007/978-3-540-31856-9_44

    .. [2] A measure of betweenness centrality based on random walks,
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    N = G.number_of_nodes()
    # Reverse Cuthill-McKee ordering is used to reduce the bandwidth of the
    # Laplacian, which helps the sparse solvers behind flow_matrix_row.
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    H = nx.relabel_nodes(G, dict(zip(ordering, range(N))))
    betweenness = dict.fromkeys(H, 0.0)  # b[n]=0 for n in H
    # Each row holds the currents for one (s, t) source/target pair.
    for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
        # pos[i]: rank of node i when nodes are ordered by decreasing row
        # value (the rank-based accumulation of Brandes & Fleischer [1]_).
        pos = dict(zip(row.argsort()[::-1], range(N)))
        for i in range(N):
            betweenness[s] += (i - pos[i]) * row.item(i)
            betweenness[t] += (N - i - 1 - pos[i]) * row.item(i)
    if normalized:
        nb = (N - 1.0) * (N - 2.0)  # normalization factor
    else:
        nb = 2.0
    # Remap integer labels back to the original node names and rescale.
    return {ordering[n]: (b - n) * 2.0 / nb for n, b in betweenness.items()}
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def edge_current_flow_betweenness_centrality(
    G, normalized=True, weight=None, dtype=float, solver="full"
):
    r"""Compute current-flow betweenness centrality for edges.

    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.

    Current-flow betweenness centrality is also known as
    random-walk betweenness centrality [2]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
      n is the number of nodes in G.

    weight : string or None, optional (default=None)
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.
      The weight reflects the capacity or the strength of the
      edge.

    dtype : data type (default=float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver : string (default='full')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
       Dictionary of edge tuples with betweenness centrality as the value.

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraphs.
        If the input graph is an instance of DiGraph class, NetworkXError
        is raised.

    See Also
    --------
    betweenness_centrality
    edge_betweenness_centrality
    current_flow_betweenness_centrality

    Notes
    -----
    Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
    time [1]_, where $I(n-1)$ is the time needed to compute the
    inverse Laplacian. For a full matrix this is $O(n^3)$ but using
    sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
    Laplacian matrix condition number.

    The space required is $O(nw)$ where $w$ is the width of the sparse
    Laplacian matrix. Worse case is $w=n$ for $O(n^2)$.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm. Unspecified weights are set to 1.

    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       https://doi.org/10.1007/978-3-540-31856-9_44

    .. [2] A measure of betweenness centrality based on random walks,
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    N = G.number_of_nodes()
    # RCM ordering reduces Laplacian bandwidth for the sparse solvers.
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    H = nx.relabel_nodes(G, dict(zip(ordering, range(N))))
    # Canonicalize each edge as a sorted tuple so (u, v) and (v, u) share a key.
    edges = (tuple(sorted((u, v))) for u, v in H.edges())
    betweenness = dict.fromkeys(edges, 0.0)
    if normalized:
        nb = (N - 1.0) * (N - 2.0)  # normalization factor
    else:
        nb = 2.0
    # Each row holds the currents through edge e for all source/target pairs.
    for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
        # pos[i]: 1-based rank of node i by decreasing row value
        # (rank-based accumulation of Brandes & Fleischer [1]_).
        pos = dict(zip(row.argsort()[::-1], range(1, N + 1)))
        for i in range(N):
            betweenness[e] += (i + 1 - pos[i]) * row.item(i)
            betweenness[e] += (N - i - pos[i]) * row.item(i)
        betweenness[e] /= nb
    # Remap integer labels back to the original node names.
    return {(ordering[s], ordering[t]): b for (s, t), b in betweenness.items()}
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/degree_alg.py
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Degree centrality measures."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.utils.decorators import not_implemented_for
|
| 5 |
+
|
| 6 |
+
__all__ = ["degree_centrality", "in_degree_centrality", "out_degree_centrality"]
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@nx._dispatchable
def degree_centrality(G):
    """Compute the degree centrality for nodes.

    The degree centrality of a node is the fraction of the other nodes
    it is connected to, i.e. its degree divided by ``n - 1``.

    Parameters
    ----------
    G : graph
        A networkx graph

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with degree centrality as the value.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.degree_centrality(G)
    {0: 1.0, 1: 1.0, 2: 0.6666666666666666, 3: 0.6666666666666666}

    See Also
    --------
    betweenness_centrality, load_centrality, eigenvector_centrality

    Notes
    -----
    The centrality values are normalized by the maximum possible degree
    in a simple graph, ``n - 1`` where ``n`` is the number of nodes in `G`.

    For multigraphs or graphs with self loops the maximum degree might
    be higher than ``n - 1`` and centrality values greater than 1 are
    possible.
    """
    n_nodes = len(G)
    # With zero or one node there is nothing to normalize by;
    # by convention every node scores 1.
    if n_nodes <= 1:
        return {node: 1 for node in G}

    scale = 1.0 / (n_nodes - 1.0)
    return {node: deg * scale for node, deg in G.degree()}
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
@not_implemented_for("undirected")
@nx._dispatchable
def in_degree_centrality(G):
    """Compute the in-degree centrality for nodes.

    The in-degree centrality of a node is the fraction of the other
    nodes its incoming edges come from, i.e. its in-degree divided by
    ``n - 1``.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with in-degree centrality as values.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.in_degree_centrality(G)
    {0: 0.0, 1: 0.3333333333333333, 2: 0.6666666666666666, 3: 0.6666666666666666}

    See Also
    --------
    degree_centrality, out_degree_centrality

    Notes
    -----
    The centrality values are normalized by the maximum possible degree
    in a simple graph, ``n - 1`` where ``n`` is the number of nodes in `G`.

    For multigraphs or graphs with self loops the maximum degree might
    be higher than ``n - 1`` and centrality values greater than 1 are
    possible.
    """
    n_nodes = len(G)
    # Trivial graphs (0 or 1 node) score 1 by convention.
    if n_nodes <= 1:
        return {node: 1 for node in G}

    scale = 1.0 / (n_nodes - 1.0)
    return {node: deg * scale for node, deg in G.in_degree()}
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
@not_implemented_for("undirected")
@nx._dispatchable
def out_degree_centrality(G):
    """Compute the out-degree centrality for nodes.

    The out-degree centrality of a node is the fraction of the other
    nodes its outgoing edges point to, i.e. its out-degree divided by
    ``n - 1``.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with out-degree centrality as values.

    Raises
    ------
    NetworkXNotImplemented
        If G is undirected.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.out_degree_centrality(G)
    {0: 1.0, 1: 0.6666666666666666, 2: 0.0, 3: 0.0}

    See Also
    --------
    degree_centrality, in_degree_centrality

    Notes
    -----
    The centrality values are normalized by the maximum possible degree
    in a simple graph, ``n - 1`` where ``n`` is the number of nodes in `G`.

    For multigraphs or graphs with self loops the maximum degree might
    be higher than ``n - 1`` and centrality values greater than 1 are
    possible.
    """
    n_nodes = len(G)
    # Trivial graphs (0 or 1 node) score 1 by convention.
    if n_nodes <= 1:
        return {node: 1 for node in G}

    scale = 1.0 / (n_nodes - 1.0)
    return {node: deg * scale for node, deg in G.out_degree()}
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/dispersion.py
ADDED
|
@@ -0,0 +1,107 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from itertools import combinations
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
__all__ = ["dispersion"]
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
@nx._dispatchable
def dispersion(G, u=None, v=None, normalized=True, alpha=1.0, b=0.0, c=0.0):
    r"""Calculate dispersion between `u` and `v` in `G`.

    A link between two actors (`u` and `v`) has a high dispersion when their
    mutual ties (`s` and `t`) are not well connected with each other.

    Parameters
    ----------
    G : graph
        A NetworkX graph.
    u : node, optional
        The source for the dispersion score (e.g. ego node of the network).
    v : node, optional
        The target of the dispersion score if specified.
    normalized : bool
        If True (default) normalize by the embeddedness of the nodes (u and v).
    alpha, b, c : float
        Parameters for the normalization procedure. When `normalized` is True,
        the dispersion value is normalized by::

            result = ((dispersion + b) ** alpha) / (embeddedness + c)

        as long as the denominator is nonzero.

    Returns
    -------
    nodes : dictionary
        If u (v) is specified, returns a dictionary of nodes with dispersion
        score for all "target" ("source") nodes. If neither u nor v is
        specified, returns a dictionary of dictionaries for all nodes 'u' in the
        graph with a dispersion score for each node 'v'.

    Notes
    -----
    This implementation follows Lars Backstrom and Jon Kleinberg [1]_. Typical
    usage would be to run dispersion on the ego network $G_u$ if $u$ were
    specified. Running :func:`dispersion` with neither $u$ nor $v$ specified
    can take some time to complete.

    References
    ----------
    .. [1] Romantic Partnerships and the Dispersion of Social Ties:
        A Network Analysis of Relationship Status on Facebook.
        Lars Backstrom, Jon Kleinberg.
        https://arxiv.org/pdf/1310.6753v1.pdf

    """

    def _dispersion(G_u, u, v):
        """dispersion for all nodes 'v' in a ego network G_u of node 'u'"""
        u_nbrs = set(G_u[u])
        # ST: the mutual neighbors of u and v.
        ST = {n for n in G_u[v] if n in u_nbrs}
        set_uv = {u, v}
        # all possible ties of connections that u and v share
        possib = combinations(ST, 2)
        total = 0
        for s, t in possib:
            # neighbors of s that are in G_u, not including u and v
            nbrs_s = u_nbrs.intersection(G_u[s]) - set_uv
            # s and t are not directly connected
            if t not in nbrs_s:
                # s and t do not share a connection
                if nbrs_s.isdisjoint(G_u[t]):
                    # tick for disp(u, v)
                    total += 1
        # embeddedness: number of neighbors that u and v share
        embeddedness = len(ST)

        dispersion_val = total
        if normalized:
            dispersion_val = (total + b) ** alpha
            # Guard against a zero denominator (e.g. no mutual ties, c == 0).
            if embeddedness + c != 0:
                dispersion_val /= embeddedness + c

        return dispersion_val

    if u is None:
        if v is None:
            # Neither endpoint given: score every node against each neighbor.
            results = {n: {} for n in G}
            for u in G:
                for v in G[u]:
                    results[u][v] = _dispersion(G, u, v)
        else:
            # Only v given: score from v toward each of its neighbors.
            # NOTE: the previous code seeded this with dict.fromkeys(G[v], {}),
            # which binds ONE shared dict as every initial value (a latent
            # shared-mutable trap); build the result directly instead.
            results = {n: _dispersion(G, v, n) for n in G[v]}
    else:
        if v is None:
            # Only u given: score from u toward each of its neighbors.
            results = {n: _dispersion(G, u, n) for n in G[u]}
        else:
            # Both endpoints given: a single dispersion score.
            results = _dispersion(G, u, v)

    return results
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/harmonic.py
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for computing the harmonic centrality of a graph."""
|
| 2 |
+
|
| 3 |
+
from functools import partial
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
__all__ = ["harmonic_centrality"]
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@nx._dispatchable(edge_attrs="distance")
def harmonic_centrality(G, nbunch=None, distance=None, sources=None):
    r"""Compute harmonic centrality for nodes.

    Harmonic centrality [1]_ of a node `u` is the sum of the reciprocal
    of the shortest path distances from all other nodes to `u`

    .. math::

        C(u) = \sum_{v \neq u} \frac{1}{d(v, u)}

    where `d(v, u)` is the shortest-path distance between `v` and `u`.

    If `sources` is given as an argument, the returned harmonic centrality
    values are calculated as the sum of the reciprocals of the shortest
    path distances from the nodes specified in `sources` to `u` instead
    of from all nodes to `u`.

    Notice that higher values indicate higher centrality.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    nbunch : container (default: all nodes in G)
        Container of nodes for which harmonic centrality values are calculated.

    sources : container (default: all nodes in G)
        Container of nodes `v` over which reciprocal distances are computed.
        Nodes not in `G` are silently ignored.

    distance : edge attribute key, optional (default=None)
        Use the specified edge attribute as the edge distance in shortest
        path calculations. If `None`, then each edge will have distance equal to 1.

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with harmonic centrality as the value.

    See Also
    --------
    betweenness_centrality, load_centrality, eigenvector_centrality,
    degree_centrality, closeness_centrality

    Notes
    -----
    If the 'distance' keyword is set to an edge attribute key then the
    shortest-path length will be computed using Dijkstra's algorithm with
    that edge attribute as the edge weight.

    References
    ----------
    .. [1] Boldi, Paolo, and Sebastiano Vigna. "Axioms for centrality."
           Internet Mathematics 10.3-4 (2014): 222-262.
    """

    targets = set(G.nbunch_iter(nbunch) if nbunch is not None else G.nodes)
    origins = set(G.nbunch_iter(sources) if sources is not None else G.nodes)

    centrality = dict.fromkeys(targets, 0)

    # Run the single-source searches from the smaller set. If we swap the
    # roles, distances must come from the reversed graph for directed G so
    # that d(v, u) keeps its original direction.
    swapped = len(targets) < len(origins)
    if swapped:
        targets, origins = origins, targets
        if nx.is_directed(G):
            G = nx.reverse(G, copy=False)

    path_length = partial(nx.shortest_path_length, G, weight=distance)
    for origin in origins:
        distances = path_length(origin)
        for node in targets.intersection(distances):
            d = distances[node]
            # Skip zero distances (node == origin, or zero-weight paths).
            if d == 0:
                continue
            centrality[origin if swapped else node] += 1 / d

    return centrality
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/laplacian.py
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Laplacian centrality measures.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
__all__ = ["laplacian_centrality"]
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@nx._dispatchable(edge_attrs="weight")
def laplacian_centrality(
    G, normalized=True, nodelist=None, weight="weight", walk_type=None, alpha=0.95
):
    r"""Compute the Laplacian centrality for nodes in the graph `G`.

    The Laplacian Centrality of a node ``i`` is measured by the drop in the
    Laplacian Energy after deleting node ``i`` from the graph. The Laplacian Energy
    is the sum of the squared eigenvalues of a graph's Laplacian matrix.

    .. math::

        C_L(u_i,G) = \frac{(\Delta E)_i}{E_L (G)} = \frac{E_L (G)-E_L (G_i)}{E_L (G)}

        E_L (G) = \sum_{i=0}^n \lambda_i^2

    Where $E_L (G)$ is the Laplacian energy of graph `G`,
    E_L (G_i) is the Laplacian energy of graph `G` after deleting node ``i``
    and $\lambda_i$ are the eigenvalues of `G`'s Laplacian matrix.
    This formula shows the normalized value. Without normalization,
    the numerator on the right side is returned.

    Parameters
    ----------
    G : graph
        A networkx graph

    normalized : bool (default = True)
        If True the centrality score is scaled so the sum over all nodes is 1.
        If False the centrality score for each node is the drop in Laplacian
        energy when that node is removed.

    nodelist : list, optional (default = None)
        The rows and columns are ordered according to the nodes in nodelist.
        If nodelist is None, then the ordering is produced by G.nodes().

    weight: string or None, optional (default=`weight`)
        Optional parameter `weight` to compute the Laplacian matrix.
        The edge data key used to compute each value in the matrix.
        If None, then each edge has weight 1.

    walk_type : string or None, optional (default=None)
        Optional parameter `walk_type` used when calling
        :func:`directed_laplacian_matrix <networkx.directed_laplacian_matrix>`.
        One of ``"random"``, ``"lazy"``, or ``"pagerank"``. If ``walk_type=None``
        (the default), then a value is selected according to the properties of `G`:
        - ``walk_type="random"`` if `G` is strongly connected and aperiodic
        - ``walk_type="lazy"`` if `G` is strongly connected but not aperiodic
        - ``walk_type="pagerank"`` for all other cases.

    alpha : real (default = 0.95)
        Optional parameter `alpha` used when calling
        :func:`directed_laplacian_matrix <networkx.directed_laplacian_matrix>`.
        (1 - alpha) is the teleportation probability used with pagerank.

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with Laplacian centrality as the value.

    Examples
    --------
    >>> G = nx.Graph()
    >>> edges = [(0, 1, 4), (0, 2, 2), (2, 1, 1), (1, 3, 2), (1, 4, 2), (4, 5, 1)]
    >>> G.add_weighted_edges_from(edges)
    >>> sorted((v, f"{c:0.2f}") for v, c in laplacian_centrality(G).items())
    [(0, '0.70'), (1, '0.90'), (2, '0.28'), (3, '0.22'), (4, '0.26'), (5, '0.04')]

    Notes
    -----
    The algorithm is implemented based on [1]_ with an extension to directed graphs
    using the ``directed_laplacian_matrix`` function.

    Raises
    ------
    NetworkXPointlessConcept
        If the graph `G` is the null graph.
    ZeroDivisionError
        If the graph `G` has no edges (is empty) and normalization is requested.

    References
    ----------
    .. [1] Qi, X., Fuller, E., Wu, Q., Wu, Y., and Zhang, C.-Q. (2012).
        Laplacian centrality: A new centrality measure for weighted networks.
        Information Sciences, 194:240-253.
        https://math.wvu.edu/~cqzhang/Publication-files/my-paper/INS-2012-Laplacian-W.pdf

    See Also
    --------
    :func:`~networkx.linalg.laplacianmatrix.directed_laplacian_matrix`
    :func:`~networkx.linalg.laplacianmatrix.laplacian_matrix`
    """
    import numpy as np
    import scipy as sp

    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("null graph has no centrality defined")
    # An edgeless graph has zero Laplacian energy: normalization would divide
    # by zero, and without normalization every node's energy drop is 0.
    if G.size(weight=weight) == 0:
        if normalized:
            raise ZeroDivisionError("graph with no edges has zero full energy")
        return {n: 0 for n in G}

    if nodelist is not None:
        # Validate nodelist, then extend it with the remaining nodes so the
        # Laplacian covers the whole graph (centrality is only reported for
        # the nodes in nodelist).
        nodeset = set(G.nbunch_iter(nodelist))
        if len(nodeset) != len(nodelist):
            raise nx.NetworkXError("nodelist has duplicate nodes or nodes not in G")
        nodes = nodelist + [n for n in G if n not in nodeset]
    else:
        nodelist = nodes = list(G)

    if G.is_directed():
        lap_matrix = nx.directed_laplacian_matrix(G, nodes, weight, walk_type, alpha)
    else:
        lap_matrix = nx.laplacian_matrix(G, nodes, weight).toarray()

    # Laplacian energy: sum of squared eigenvalues of the full Laplacian.
    full_energy = np.power(sp.linalg.eigh(lap_matrix, eigvals_only=True), 2).sum()

    # calculate laplacian centrality
    laplace_centralities_dict = {}
    for i, node in enumerate(nodelist):
        # remove row and col i from lap_matrix
        all_but_i = list(np.arange(lap_matrix.shape[0]))
        all_but_i.remove(i)
        A_2 = lap_matrix[all_but_i, :][:, all_but_i]

        # Adjust diagonal for removed row: deleting node i also removes its
        # contribution to each neighbor's degree term on the diagonal.
        new_diag = lap_matrix.diagonal() - abs(lap_matrix[:, i])
        np.fill_diagonal(A_2, new_diag[all_but_i])

        if len(all_but_i) > 0:  # catches degenerate case of single node
            new_energy = np.power(sp.linalg.eigh(A_2, eigvals_only=True), 2).sum()
        else:
            new_energy = 0.0

        # Centrality of node i is the drop in energy caused by its removal.
        lapl_cent = full_energy - new_energy
        if normalized:
            lapl_cent = lapl_cent / full_energy

        laplace_centralities_dict[node] = float(lapl_cent)

    return laplace_centralities_dict
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/load.py
ADDED
|
@@ -0,0 +1,200 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Load centrality."""
|
| 2 |
+
|
| 3 |
+
from operator import itemgetter
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
__all__ = ["load_centrality", "edge_load_centrality"]
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@nx._dispatchable(edge_attrs="weight")
def newman_betweenness_centrality(G, v=None, cutoff=None, normalized=True, weight=None):
    """Compute load centrality for nodes.

    The load centrality of a node is the fraction of all shortest
    paths that pass through that node.

    Parameters
    ----------
    G : graph
      A networkx graph.

    v : node, optional (default=None)
      If specified, compute the load centrality of this single node only
      and return it as a float instead of a dictionary.

    cutoff : int, optional (default=None)
      If specified, only consider paths of length <= cutoff.

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by b=b/(n-1)(n-2) where
      n is the number of nodes in G.

    weight : None or string, optional (default=None)
      If None, edge weights are ignored.
      Otherwise holds the name of the edge attribute used as weight.
      The weight of an edge is treated as the length or distance between the two sides.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with centrality as the value.
       If `v` is given, a single float is returned instead.

    See Also
    --------
    betweenness_centrality

    Notes
    -----
    Load centrality is slightly different than betweenness. It was originally
    introduced by [2]_. For this load algorithm see [1]_.

    References
    ----------
    .. [1] Mark E. J. Newman:
       Scientific collaboration networks. II.
       Shortest paths, weighted networks, and centrality.
       Physical Review E 64, 016132, 2001.
       http://journals.aps.org/pre/abstract/10.1103/PhysRevE.64.016132
    .. [2] Kwang-Il Goh, Byungnam Kahng and Doochul Kim
       Universal behavior of Load Distribution in Scale-Free Networks.
       Physical Review Letters 87(27):1–4, 2001.
       https://doi.org/10.1103/PhysRevLett.87.278701
    """
    if v is not None:  # only one node
        betweenness = 0.0
        # Accumulate v's single-source load over every possible source.
        for source in G:
            ubetween = _node_betweenness(G, source, cutoff, False, weight)
            betweenness += ubetween[v] if v in ubetween else 0
        if normalized:
            order = G.order()
            if order <= 2:
                return betweenness  # no normalization b=0 for all nodes
            betweenness *= 1.0 / ((order - 1) * (order - 2))
    else:
        betweenness = {}.fromkeys(G, 0.0)
        # Sum the single-source contributions from every source node.
        for source in betweenness:
            ubetween = _node_betweenness(G, source, cutoff, False, weight)
            for vk in ubetween:
                betweenness[vk] += ubetween[vk]
        if normalized:
            order = G.order()
            if order <= 2:
                return betweenness  # no normalization b=0 for all nodes
            scale = 1.0 / ((order - 1) * (order - 2))
            for v in betweenness:
                betweenness[v] *= scale
    return betweenness  # all nodes
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def _node_betweenness(G, source, cutoff=False, normalized=True, weight=None):
    """Single-source helper for load ("Newman betweenness") centrality.

    See betweenness_centrality for what you probably want.
    This actually computes "load" and not betweenness:
    for every node seen from `source`, the fraction of shortest paths
    starting at `source` that go through that node.
    See https://networkx.lanl.gov/ticket/103

    Summing this over all sources gives each node's load.
    If `weight` is not None, Dijkstra is used to find shortest paths.
    """
    # Predecessor lists and path lengths from the single source.
    if weight is None:
        pred, dist = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
    else:
        pred, dist = nx.dijkstra_predecessor_and_distance(G, source, cutoff, weight)

    # Non-source nodes ordered by (distance, node); popped farthest-first.
    stack = [node for d, node in sorted((d, node) for node, d in dist.items()) if d > 0]

    # Each seen node starts with one unit of flow (itself).
    between = dict.fromkeys(dist, 1.0)

    while stack:
        node = stack.pop()
        if node in pred:
            # Split the node's flow equally among its shortest-path parents.
            share = len(pred[node])
            for parent in pred[node]:
                if parent == source:
                    # All remaining parents also have pred == [source].
                    break
                between[parent] += between[node] / share
    # Remove each node's own unit of flow.
    for node in between:
        between[node] -= 1
    # Rescale to be between 0 and 1.
    if normalized:
        seen = len(between)
        if seen > 2:
            # Scale by 1 / the number of possible paths.
            factor = 1 / ((seen - 1) * (seen - 2))
            for node in between:
                between[node] *= factor
    return between
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
# Public alias: "load centrality" is NetworkX's historical name for the
# Newman-style measure implemented above.
load_centrality = newman_betweenness_centrality
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
@nx._dispatchable
def edge_load_centrality(G, cutoff=False):
    """Compute edge load.

    WARNING: This concept of edge load has not been analysed
    or discussed outside of NetworkX that we know of.
    It is based loosely on load_centrality in the sense that
    it counts the number of shortest paths which cross each edge.
    This function is for demonstration and testing purposes.

    Parameters
    ----------
    G : graph
        A networkx graph

    cutoff : bool, optional (default=False)
        If specified, only consider paths of length <= cutoff.

    Returns
    -------
    A dict keyed by edge 2-tuple to the number of shortest paths
    which use that edge. Where more than one path is shortest
    the count is divided equally among paths.
    """
    # Track both orientations of each edge: a shortest path may cross
    # an undirected edge in either direction.
    load = {}
    for a, b in G.edges():
        load[a, b] = 0.0
        load[b, a] = 0.0

    # Cumulative total of single-source contributions over all sources.
    for src in G:
        for edge, contribution in _edge_betweenness(G, src, cutoff=cutoff).items():
            load[edge] += contribution
    return load
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
def _edge_betweenness(G, source, nodes=None, cutoff=False):
    """Edge betweenness helper.

    Counts, for shortest paths from `source`, the flow over each edge,
    crediting both orientations (w, x) and (x, w).  Summed over all
    sources by ``edge_load_centrality``.

    ``nodes`` restricts which edges are initialized (passed to
    ``G.edges``); ``cutoff`` bounds the path length explored by
    ``nx.predecessor``.
    """
    # get the predecessor data
    (pred, length) = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
    # order the nodes by path length
    onodes = [n for n, d in sorted(length.items(), key=itemgetter(1))]
    # initialize betweenness, doesn't account for any edge weights
    between = {}
    for u, v in G.edges(nodes):
        between[(u, v)] = 1.0
        between[(v, u)] = 1.0

    while onodes:  # work through all paths, farthest nodes first
        v = onodes.pop()
        if v in pred:
            # Discount betweenness if more than one shortest path.
            # NOTE(review): this assignment is immediately shadowed by the
            # inner `num_paths = len(pred[w])` before any use — it looks
            # dead; confirm against upstream before removing.
            num_paths = len(pred[v])
            for w in pred[v]:
                if w in pred:
                    # Discount betweenness, mult path
                    num_paths = len(pred[w])
                    for x in pred[w]:
                        # Propagate v's flow backwards onto edge (w, x),
                        # split equally among w's shortest-path parents.
                        between[(w, x)] += between[(v, w)] / num_paths
                        between[(x, w)] += between[(w, v)] / num_paths
    return between
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/reaching.py
ADDED
|
@@ -0,0 +1,209 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for computing reaching centrality of a node or a graph."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.utils import pairwise
|
| 5 |
+
|
| 6 |
+
__all__ = ["global_reaching_centrality", "local_reaching_centrality"]
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def _average_weight(G, path, weight=None):
|
| 10 |
+
"""Returns the average weight of an edge in a weighted path.
|
| 11 |
+
|
| 12 |
+
Parameters
|
| 13 |
+
----------
|
| 14 |
+
G : graph
|
| 15 |
+
A networkx graph.
|
| 16 |
+
|
| 17 |
+
path: list
|
| 18 |
+
A list of vertices that define the path.
|
| 19 |
+
|
| 20 |
+
weight : None or string, optional (default=None)
|
| 21 |
+
If None, edge weights are ignored. Then the average weight of an edge
|
| 22 |
+
is assumed to be the multiplicative inverse of the length of the path.
|
| 23 |
+
Otherwise holds the name of the edge attribute used as weight.
|
| 24 |
+
"""
|
| 25 |
+
path_length = len(path) - 1
|
| 26 |
+
if path_length <= 0:
|
| 27 |
+
return 0
|
| 28 |
+
if weight is None:
|
| 29 |
+
return 1 / path_length
|
| 30 |
+
total_weight = sum(G.edges[i, j][weight] for i, j in pairwise(path))
|
| 31 |
+
return total_weight / path_length
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
@nx._dispatchable(edge_attrs="weight")
def global_reaching_centrality(G, weight=None, normalized=True):
    """Returns the global reaching centrality of a directed graph.

    The *global reaching centrality* of a weighted directed graph is the
    average, over all nodes, of the difference between the greatest
    local reaching centrality in the graph and each node's own local
    reaching centrality [1]_.  Informally, the local reaching centrality
    is the proportion of the graph reachable from a node; see
    :func:`local_reaching_centrality` for details.

    Parameters
    ----------
    G : DiGraph
        A networkx DiGraph.

    weight : None or string, optional (default=None)
        Attribute to use for edge weights. If ``None``, each edge weight
        is assumed to be one. A higher weight implies a stronger
        connection between nodes and a *shorter* path length.

    normalized : bool, optional (default=True)
        Whether to normalize the edge weights by the total sum of edge
        weights.

    Returns
    -------
    h : float
        The global reaching centrality of the graph.

    Raises
    ------
    NetworkXError
        If the graph has negative edge weights or zero total size.

    Examples
    --------
    >>> G = nx.DiGraph()
    >>> G.add_edge(1, 2)
    >>> G.add_edge(1, 3)
    >>> nx.global_reaching_centrality(G)
    1.0
    >>> G.add_edge(3, 2)
    >>> nx.global_reaching_centrality(G)
    0.75

    See also
    --------
    local_reaching_centrality

    References
    ----------
    .. [1] Mones, Enys, Lilla Vicsek, and Tamás Vicsek.
       "Hierarchy Measure for Complex Networks."
       *PLoS ONE* 7.3 (2012): e33799.
       https://doi.org/10.1371/journal.pone.0033799
    """
    if nx.is_negatively_weighted(G, weight=weight):
        raise nx.NetworkXError("edge weights must be positive")
    size = G.size(weight=weight)
    if size <= 0:
        raise nx.NetworkXError("Size of G must be positive")

    # Provided weights express connection *strength* (higher = closer),
    # while the shortest-path routines expect *distances* (higher =
    # farther), so invert the weights.  Leave weight=None untouched so
    # the faster unweighted algorithm is used.
    if weight is None:
        all_paths = nx.shortest_path(G)
    else:

        def as_distance(u, v, d):
            return size / d.get(weight, 1)

        all_paths = nx.shortest_path(G, weight=as_distance)

    # TODO This can be trivially parallelized.
    reaching = [
        local_reaching_centrality(
            G, node, paths=node_paths, weight=weight, normalized=normalized
        )
        for node, node_paths in all_paths.items()
    ]

    best = max(reaching)
    return sum(best - value for value in reaching) / (len(G) - 1)
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 122 |
+
def local_reaching_centrality(G, v, paths=None, weight=None, normalized=True):
|
| 123 |
+
"""Returns the local reaching centrality of a node in a directed
|
| 124 |
+
graph.
|
| 125 |
+
|
| 126 |
+
The *local reaching centrality* of a node in a directed graph is the
|
| 127 |
+
proportion of other nodes reachable from that node [1]_.
|
| 128 |
+
|
| 129 |
+
Parameters
|
| 130 |
+
----------
|
| 131 |
+
G : DiGraph
|
| 132 |
+
A NetworkX DiGraph.
|
| 133 |
+
|
| 134 |
+
v : node
|
| 135 |
+
A node in the directed graph `G`.
|
| 136 |
+
|
| 137 |
+
paths : dictionary (default=None)
|
| 138 |
+
If this is not `None` it must be a dictionary representation
|
| 139 |
+
of single-source shortest paths, as computed by, for example,
|
| 140 |
+
:func:`networkx.shortest_path` with source node `v`. Use this
|
| 141 |
+
keyword argument if you intend to invoke this function many
|
| 142 |
+
times but don't want the paths to be recomputed each time.
|
| 143 |
+
|
| 144 |
+
weight : None or string, optional (default=None)
|
| 145 |
+
Attribute to use for edge weights. If `None`, each edge weight
|
| 146 |
+
is assumed to be one. A higher weight implies a stronger
|
| 147 |
+
connection between nodes and a *shorter* path length.
|
| 148 |
+
|
| 149 |
+
normalized : bool, optional (default=True)
|
| 150 |
+
Whether to normalize the edge weights by the total sum of edge
|
| 151 |
+
weights.
|
| 152 |
+
|
| 153 |
+
Returns
|
| 154 |
+
-------
|
| 155 |
+
h : float
|
| 156 |
+
The local reaching centrality of the node ``v`` in the graph
|
| 157 |
+
``G``.
|
| 158 |
+
|
| 159 |
+
Examples
|
| 160 |
+
--------
|
| 161 |
+
>>> G = nx.DiGraph()
|
| 162 |
+
>>> G.add_edges_from([(1, 2), (1, 3)])
|
| 163 |
+
>>> nx.local_reaching_centrality(G, 3)
|
| 164 |
+
0.0
|
| 165 |
+
>>> G.add_edge(3, 2)
|
| 166 |
+
>>> nx.local_reaching_centrality(G, 3)
|
| 167 |
+
0.5
|
| 168 |
+
|
| 169 |
+
See also
|
| 170 |
+
--------
|
| 171 |
+
global_reaching_centrality
|
| 172 |
+
|
| 173 |
+
References
|
| 174 |
+
----------
|
| 175 |
+
.. [1] Mones, Enys, Lilla Vicsek, and Tamás Vicsek.
|
| 176 |
+
"Hierarchy Measure for Complex Networks."
|
| 177 |
+
*PLoS ONE* 7.3 (2012): e33799.
|
| 178 |
+
https://doi.org/10.1371/journal.pone.0033799
|
| 179 |
+
"""
|
| 180 |
+
# Corner case: graph with single node containing a self-loop
|
| 181 |
+
if (total_weight := G.size(weight=weight)) > 0 and len(G) == 1:
|
| 182 |
+
raise nx.NetworkXError(
|
| 183 |
+
"local_reaching_centrality of a single node with self-loop not well-defined"
|
| 184 |
+
)
|
| 185 |
+
if paths is None:
|
| 186 |
+
if nx.is_negatively_weighted(G, weight=weight):
|
| 187 |
+
raise nx.NetworkXError("edge weights must be positive")
|
| 188 |
+
if total_weight <= 0:
|
| 189 |
+
raise nx.NetworkXError("Size of G must be positive")
|
| 190 |
+
if weight is not None:
|
| 191 |
+
# Interpret weights as lengths.
|
| 192 |
+
def as_distance(u, v, d):
|
| 193 |
+
return total_weight / d.get(weight, 1)
|
| 194 |
+
|
| 195 |
+
paths = nx.shortest_path(G, source=v, weight=as_distance)
|
| 196 |
+
else:
|
| 197 |
+
paths = nx.shortest_path(G, source=v)
|
| 198 |
+
# If the graph is unweighted, simply return the proportion of nodes
|
| 199 |
+
# reachable from the source node ``v``.
|
| 200 |
+
if weight is None and G.is_directed():
|
| 201 |
+
return (len(paths) - 1) / (len(G) - 1)
|
| 202 |
+
if normalized and weight is not None:
|
| 203 |
+
norm = G.size(weight=weight) / G.size()
|
| 204 |
+
else:
|
| 205 |
+
norm = 1
|
| 206 |
+
# TODO This can be trivially parallelized.
|
| 207 |
+
avgw = (_average_weight(G, path, weight=weight) for path in paths.values())
|
| 208 |
+
sum_avg_weight = sum(avgw) / norm
|
| 209 |
+
return sum_avg_weight / (len(G) - 1)
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/second_order.py
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Copyright (c) 2015 – Thomson Licensing, SAS
|
| 2 |
+
|
| 3 |
+
Redistribution and use in source and binary forms, with or without
|
| 4 |
+
modification, are permitted (subject to the limitations in the
|
| 5 |
+
disclaimer below) provided that the following conditions are met:
|
| 6 |
+
|
| 7 |
+
* Redistributions of source code must retain the above copyright
|
| 8 |
+
notice, this list of conditions and the following disclaimer.
|
| 9 |
+
|
| 10 |
+
* Redistributions in binary form must reproduce the above copyright
|
| 11 |
+
notice, this list of conditions and the following disclaimer in the
|
| 12 |
+
documentation and/or other materials provided with the distribution.
|
| 13 |
+
|
| 14 |
+
* Neither the name of Thomson Licensing, or Technicolor, nor the names
|
| 15 |
+
of its contributors may be used to endorse or promote products derived
|
| 16 |
+
from this software without specific prior written permission.
|
| 17 |
+
|
| 18 |
+
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE
|
| 19 |
+
GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT
|
| 20 |
+
HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
|
| 21 |
+
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
| 22 |
+
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
| 23 |
+
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
|
| 24 |
+
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
| 25 |
+
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
| 26 |
+
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
|
| 27 |
+
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
| 28 |
+
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
|
| 29 |
+
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
|
| 30 |
+
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 31 |
+
"""
|
| 32 |
+
|
| 33 |
+
import networkx as nx
|
| 34 |
+
from networkx.utils import not_implemented_for
|
| 35 |
+
|
| 36 |
+
# Authors: Erwan Le Merrer (erwan.lemerrer@technicolor.com)
|
| 37 |
+
|
| 38 |
+
__all__ = ["second_order_centrality"]
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def second_order_centrality(G, weight="weight"):
    """Compute the second order centrality for nodes of G.

    The second order centrality of a given node is the standard
    deviation of the return times to that node of a perpetual random
    walk on G.  Lower values indicate higher centrality.

    Parameters
    ----------
    G : graph
        A NetworkX connected and undirected graph.

    weight : string or None, optional (default="weight")
        The name of an edge attribute that holds the numerical value
        used as a weight. If None then each edge has weight 1.

    Returns
    -------
    nodes : dictionary
        Dictionary keyed by node with second order centrality as the value.

    Examples
    --------
    >>> G = nx.star_graph(10)
    >>> soc = nx.second_order_centrality(G)
    >>> print(sorted(soc.items(), key=lambda x: x[1])[0][0])  # pick first id
    0

    Raises
    ------
    NetworkXException
        If the graph G is empty, non connected or has negative weights.

    See Also
    --------
    betweenness_centrality

    Notes
    -----
    The algorithm is from Kermarrec, Le Merrer, Sericola and Trédan [1]_.

    This code implements the analytical version of the algorithm, i.e.,
    no random walk process is simulated.  The random walk here is
    unbiased (eq 6 of the paper [1]_): the input graph is first
    transformed so that every node has equal in-degree, by adding
    weighted self-loops.

    Complexity of this implementation, made to run locally on a single
    machine, is O(n^3), with n the size of G, which makes it viable only
    for small graphs.

    References
    ----------
    .. [1] Anne-Marie Kermarrec, Erwan Le Merrer, Bruno Sericola, Gilles Trédan
       "Second order centrality: Distributed assessment of nodes criticity in
       complex networks", Elsevier Computer Communications 34(5):619-628, 2011.
    """
    import numpy as np

    order = len(G)

    if order == 0:
        raise nx.NetworkXException("Empty graph.")
    if not nx.is_connected(G):
        raise nx.NetworkXException("Non connected graph.")
    if any(data.get(weight, 0) < 0 for _, _, data in G.edges(data=True)):
        raise nx.NetworkXException("Graph has negative edge weights.")

    # Balance G for Metropolis-Hastings random walks: give every node
    # the same (weighted) in-degree by adding a self-loop wherever the
    # in-degree falls short of the maximum.
    G = nx.DiGraph(G)
    in_degrees = dict(G.in_degree(weight=weight))
    top = max(in_degrees.values())
    for node, degree in in_degrees.items():
        if degree < top:
            G.add_edge(node, node, weight=top - degree)

    # Row-normalize the adjacency matrix into transition probabilities.
    P = nx.to_numpy_array(G)
    P /= P.sum(axis=1)[:, np.newaxis]

    eye = np.identity(order)
    ones = np.ones(order)
    M = np.empty([order, order])

    for j in range(order):
        # Q_j: transition matrix with all moves *into* node j removed.
        Q = P.copy()
        Q[:, j] = 0
        M[:, j] = np.linalg.solve(eye - Q, ones)  # eq 3

    return dict(
        zip(
            G.nodes,
            (
                float(np.sqrt(2 * np.sum(M[:, j]) - order * (order + 1)))
                for j in range(order)
            ),
        )
    )  # eq 6
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__init__.py
ADDED
|
File without changes
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (209 Bytes). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_betweenness_centrality_subset.cpython-311.pyc
ADDED
|
Binary file (21.9 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_closeness_centrality.cpython-311.pyc
ADDED
|
Binary file (16.7 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_degree_centrality.cpython-311.pyc
ADDED
|
Binary file (8.14 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_dispersion.cpython-311.pyc
ADDED
|
Binary file (3.47 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_eigenvector_centrality.cpython-311.pyc
ADDED
|
Binary file (12.1 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_harmonic_centrality.cpython-311.pyc
ADDED
|
Binary file (9.01 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_katz_centrality.cpython-311.pyc
ADDED
|
Binary file (20 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_laplacian_centrality.cpython-311.pyc
ADDED
|
Binary file (10.7 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_percolation_centrality.cpython-311.pyc
ADDED
|
Binary file (4.85 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_reaching.cpython-311.pyc
ADDED
|
Binary file (12.5 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_second_order_centrality.cpython-311.pyc
ADDED
|
Binary file (5.25 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_subgraph.cpython-311.pyc
ADDED
|
Binary file (5.63 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_trophic.cpython-311.pyc
ADDED
|
Binary file (15.6 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality.py
ADDED
|
@@ -0,0 +1,780 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def weighted_G():
    """Build the small weighted fixture graph shared by the weighted tests.

    Returns an undirected 6-node graph whose edges carry integer
    ``weight`` attributes.
    """
    edge_list = [
        (0, 1, 3),
        (0, 2, 2),
        (0, 3, 6),
        (0, 4, 4),
        (1, 3, 5),
        (1, 5, 5),
        (2, 4, 1),
        (3, 4, 2),
        (3, 5, 1),
        (4, 5, 4),
    ]
    G = nx.Graph()
    G.add_weighted_edges_from(edge_list)
    return G
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class TestBetweennessCentrality:
    """Node betweenness centrality on unweighted (hop-count) shortest paths."""

    @staticmethod
    def _assert_matches(result, expected, tol=1e-7):
        """Compare a computed centrality dict with expected values node by node."""
        for node in sorted(expected):
            assert result[node] == pytest.approx(expected[node], abs=tol)

    def test_K5(self):
        """Betweenness centrality: K5"""
        G = nx.complete_graph(5)
        result = nx.betweenness_centrality(G, weight=None, normalized=False)
        # In a complete graph every pair is adjacent, so no node is in between.
        self._assert_matches(result, {n: 0.0 for n in range(5)})

    def test_K5_endpoints(self):
        """Betweenness centrality: K5 with endpoints counted"""
        G = nx.complete_graph(5)
        result = nx.betweenness_centrality(
            G, weight=None, normalized=False, endpoints=True
        )
        self._assert_matches(result, {n: 4.0 for n in range(5)})
        # normalized = True case
        result = nx.betweenness_centrality(
            G, weight=None, normalized=True, endpoints=True
        )
        self._assert_matches(result, {n: 0.4 for n in range(5)})

    def test_P3_normalized(self):
        """Betweenness centrality: P3 normalized"""
        G = nx.path_graph(3)
        result = nx.betweenness_centrality(G, weight=None, normalized=True)
        self._assert_matches(result, {0: 0.0, 1: 1.0, 2: 0.0})

    def test_P3(self):
        """Betweenness centrality: P3"""
        G = nx.path_graph(3)
        result = nx.betweenness_centrality(G, weight=None, normalized=False)
        self._assert_matches(result, {0: 0.0, 1: 1.0, 2: 0.0})

    def test_sample_from_P3(self):
        """Betweenness centrality: P3 with sampled pivots"""
        G = nx.path_graph(3)
        # k equal to the node count samples every pivot: exact result.
        result = nx.betweenness_centrality(
            G, k=3, weight=None, normalized=False, seed=1
        )
        self._assert_matches(result, {0: 0.0, 1: 1.0, 2: 0.0})
        result = nx.betweenness_centrality(
            G, k=2, weight=None, normalized=False, seed=1
        )
        # python versions give different results with same seed
        option_a = {0: 0.0, 1: 1.5, 2: 0.0}
        option_b = {0: 0.0, 1: 0.75, 2: 0.0}
        for node in sorted(G):
            assert result[node] in (option_a[node], option_b[node])

    def test_P3_endpoints(self):
        """Betweenness centrality: P3 with endpoints counted"""
        G = nx.path_graph(3)
        result = nx.betweenness_centrality(
            G, weight=None, normalized=False, endpoints=True
        )
        self._assert_matches(result, {0: 2.0, 1: 3.0, 2: 2.0})
        # normalized = True case
        result = nx.betweenness_centrality(
            G, weight=None, normalized=True, endpoints=True
        )
        self._assert_matches(result, {0: 2 / 3, 1: 1.0, 2: 2 / 3})

    def test_krackhardt_kite_graph(self):
        """Betweenness centrality: Krackhardt kite graph"""
        G = nx.krackhardt_kite_graph()
        raw = {
            0: 1.667,
            1: 1.667,
            2: 0.000,
            3: 7.333,
            4: 0.000,
            5: 16.667,
            6: 16.667,
            7: 28.000,
            8: 16.000,
            9: 0.000,
        }
        # Published values count each pair twice; halve for undirected output.
        expected = {node: value / 2 for node, value in raw.items()}
        result = nx.betweenness_centrality(G, weight=None, normalized=False)
        self._assert_matches(result, expected, tol=1e-3)

    def test_krackhardt_kite_graph_normalized(self):
        """Betweenness centrality: Krackhardt kite graph normalized"""
        G = nx.krackhardt_kite_graph()
        expected = {
            0: 0.023,
            1: 0.023,
            2: 0.000,
            3: 0.102,
            4: 0.000,
            5: 0.231,
            6: 0.231,
            7: 0.389,
            8: 0.222,
            9: 0.000,
        }
        result = nx.betweenness_centrality(G, weight=None, normalized=True)
        self._assert_matches(result, expected, tol=1e-3)

    def test_florentine_families_graph(self):
        """Betweenness centrality: Florentine families graph"""
        G = nx.florentine_families_graph()
        expected = {
            "Acciaiuoli": 0.000,
            "Albizzi": 0.212,
            "Barbadori": 0.093,
            "Bischeri": 0.104,
            "Castellani": 0.055,
            "Ginori": 0.000,
            "Guadagni": 0.255,
            "Lamberteschi": 0.000,
            "Medici": 0.522,
            "Pazzi": 0.000,
            "Peruzzi": 0.022,
            "Ridolfi": 0.114,
            "Salviati": 0.143,
            "Strozzi": 0.103,
            "Tornabuoni": 0.092,
        }
        result = nx.betweenness_centrality(G, weight=None, normalized=True)
        self._assert_matches(result, expected, tol=1e-3)

    def test_les_miserables_graph(self):
        """Betweenness centrality: Les Miserables graph"""
        G = nx.les_miserables_graph()
        expected = {
            "Napoleon": 0.000,
            "Myriel": 0.177,
            "MlleBaptistine": 0.000,
            "MmeMagloire": 0.000,
            "CountessDeLo": 0.000,
            "Geborand": 0.000,
            "Champtercier": 0.000,
            "Cravatte": 0.000,
            "Count": 0.000,
            "OldMan": 0.000,
            "Valjean": 0.570,
            "Labarre": 0.000,
            "Marguerite": 0.000,
            "MmeDeR": 0.000,
            "Isabeau": 0.000,
            "Gervais": 0.000,
            "Listolier": 0.000,
            "Tholomyes": 0.041,
            "Fameuil": 0.000,
            "Blacheville": 0.000,
            "Favourite": 0.000,
            "Dahlia": 0.000,
            "Zephine": 0.000,
            "Fantine": 0.130,
            "MmeThenardier": 0.029,
            "Thenardier": 0.075,
            "Cosette": 0.024,
            "Javert": 0.054,
            "Fauchelevent": 0.026,
            "Bamatabois": 0.008,
            "Perpetue": 0.000,
            "Simplice": 0.009,
            "Scaufflaire": 0.000,
            "Woman1": 0.000,
            "Judge": 0.000,
            "Champmathieu": 0.000,
            "Brevet": 0.000,
            "Chenildieu": 0.000,
            "Cochepaille": 0.000,
            "Pontmercy": 0.007,
            "Boulatruelle": 0.000,
            "Eponine": 0.011,
            "Anzelma": 0.000,
            "Woman2": 0.000,
            "MotherInnocent": 0.000,
            "Gribier": 0.000,
            "MmeBurgon": 0.026,
            "Jondrette": 0.000,
            "Gavroche": 0.165,
            "Gillenormand": 0.020,
            "Magnon": 0.000,
            "MlleGillenormand": 0.048,
            "MmePontmercy": 0.000,
            "MlleVaubois": 0.000,
            "LtGillenormand": 0.000,
            "Marius": 0.132,
            "BaronessT": 0.000,
            "Mabeuf": 0.028,
            "Enjolras": 0.043,
            "Combeferre": 0.001,
            "Prouvaire": 0.000,
            "Feuilly": 0.001,
            "Courfeyrac": 0.005,
            "Bahorel": 0.002,
            "Bossuet": 0.031,
            "Joly": 0.002,
            "Grantaire": 0.000,
            "MotherPlutarch": 0.000,
            "Gueulemer": 0.005,
            "Babet": 0.005,
            "Claquesous": 0.005,
            "Montparnasse": 0.004,
            "Toussaint": 0.000,
            "Child1": 0.000,
            "Child2": 0.000,
            "Brujon": 0.000,
            "MmeHucheloup": 0.000,
        }
        result = nx.betweenness_centrality(G, weight=None, normalized=True)
        self._assert_matches(result, expected, tol=1e-3)

    def test_ladder_graph(self):
        """Betweenness centrality: Ladder graph"""
        G = nx.Graph()  # ladder_graph(3)
        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)])
        raw = {0: 1.667, 1: 1.667, 2: 6.667, 3: 6.667, 4: 1.667, 5: 1.667}
        # Published values count each pair twice; halve for undirected output.
        expected = {node: value / 2 for node, value in raw.items()}
        result = nx.betweenness_centrality(G, weight=None, normalized=False)
        self._assert_matches(result, expected, tol=1e-3)

    def test_disconnected_path(self):
        """Betweenness centrality: disconnected path"""
        G = nx.Graph()
        nx.add_path(G, [0, 1, 2])
        nx.add_path(G, [3, 4, 5, 6])
        result = nx.betweenness_centrality(G, weight=None, normalized=False)
        self._assert_matches(result, {0: 0, 1: 1, 2: 0, 3: 0, 4: 2, 5: 2, 6: 0})

    def test_disconnected_path_endpoints(self):
        """Betweenness centrality: disconnected path with endpoints"""
        G = nx.Graph()
        nx.add_path(G, [0, 1, 2])
        nx.add_path(G, [3, 4, 5, 6])
        expected = {0: 2, 1: 3, 2: 2, 3: 3, 4: 5, 5: 5, 6: 3}
        result = nx.betweenness_centrality(
            G, weight=None, normalized=False, endpoints=True
        )
        self._assert_matches(result, expected)
        # normalized = True case: scale the raw counts by n*(n-1)/2 = 21.
        result = nx.betweenness_centrality(
            G, weight=None, normalized=True, endpoints=True
        )
        self._assert_matches(
            result, {node: value / 21 for node, value in expected.items()}
        )

    def test_directed_path(self):
        """Betweenness centrality: directed path"""
        G = nx.DiGraph()
        nx.add_path(G, [0, 1, 2])
        result = nx.betweenness_centrality(G, weight=None, normalized=False)
        self._assert_matches(result, {0: 0.0, 1: 1.0, 2: 0.0})

    def test_directed_path_normalized(self):
        """Betweenness centrality: directed path normalized"""
        G = nx.DiGraph()
        nx.add_path(G, [0, 1, 2])
        result = nx.betweenness_centrality(G, weight=None, normalized=True)
        self._assert_matches(result, {0: 0.0, 1: 0.5, 2: 0.0})
|
| 290 |
+
|
| 291 |
+
|
| 292 |
+
class TestWeightedBetweennessCentrality:
    """Node betweenness centrality using edge weights as shortest-path costs."""

    @staticmethod
    def _assert_matches(result, expected, tol=1e-7):
        """Compare a computed centrality dict with expected values node by node."""
        for node in sorted(expected):
            assert result[node] == pytest.approx(expected[node], abs=tol)

    def test_K5(self):
        """Weighted betweenness centrality: K5"""
        G = nx.complete_graph(5)
        result = nx.betweenness_centrality(G, weight="weight", normalized=False)
        self._assert_matches(result, {n: 0.0 for n in range(5)})

    def test_P3_normalized(self):
        """Weighted betweenness centrality: P3 normalized"""
        G = nx.path_graph(3)
        result = nx.betweenness_centrality(G, weight="weight", normalized=True)
        self._assert_matches(result, {0: 0.0, 1: 1.0, 2: 0.0})

    def test_P3(self):
        """Weighted betweenness centrality: P3"""
        G = nx.path_graph(3)
        result = nx.betweenness_centrality(G, weight="weight", normalized=False)
        self._assert_matches(result, {0: 0.0, 1: 1.0, 2: 0.0})

    def test_krackhardt_kite_graph(self):
        """Weighted betweenness centrality: Krackhardt kite graph"""
        G = nx.krackhardt_kite_graph()
        raw = {
            0: 1.667,
            1: 1.667,
            2: 0.000,
            3: 7.333,
            4: 0.000,
            5: 16.667,
            6: 16.667,
            7: 28.000,
            8: 16.000,
            9: 0.000,
        }
        # Published values count each pair twice; halve for undirected output.
        expected = {node: value / 2 for node, value in raw.items()}
        result = nx.betweenness_centrality(G, weight="weight", normalized=False)
        self._assert_matches(result, expected, tol=1e-3)

    def test_krackhardt_kite_graph_normalized(self):
        """Weighted betweenness centrality:
        Krackhardt kite graph normalized
        """
        G = nx.krackhardt_kite_graph()
        expected = {
            0: 0.023,
            1: 0.023,
            2: 0.000,
            3: 0.102,
            4: 0.000,
            5: 0.231,
            6: 0.231,
            7: 0.389,
            8: 0.222,
            9: 0.000,
        }
        result = nx.betweenness_centrality(G, weight="weight", normalized=True)
        self._assert_matches(result, expected, tol=1e-3)

    def test_florentine_families_graph(self):
        """Weighted betweenness centrality:
        Florentine families graph"""
        G = nx.florentine_families_graph()
        expected = {
            "Acciaiuoli": 0.000,
            "Albizzi": 0.212,
            "Barbadori": 0.093,
            "Bischeri": 0.104,
            "Castellani": 0.055,
            "Ginori": 0.000,
            "Guadagni": 0.255,
            "Lamberteschi": 0.000,
            "Medici": 0.522,
            "Pazzi": 0.000,
            "Peruzzi": 0.022,
            "Ridolfi": 0.114,
            "Salviati": 0.143,
            "Strozzi": 0.103,
            "Tornabuoni": 0.092,
        }
        result = nx.betweenness_centrality(G, weight="weight", normalized=True)
        self._assert_matches(result, expected, tol=1e-3)

    def test_les_miserables_graph(self):
        """Weighted betweenness centrality: Les Miserables graph"""
        G = nx.les_miserables_graph()
        expected = {
            "Napoleon": 0.000,
            "Myriel": 0.177,
            "MlleBaptistine": 0.000,
            "MmeMagloire": 0.000,
            "CountessDeLo": 0.000,
            "Geborand": 0.000,
            "Champtercier": 0.000,
            "Cravatte": 0.000,
            "Count": 0.000,
            "OldMan": 0.000,
            "Valjean": 0.454,
            "Labarre": 0.000,
            "Marguerite": 0.009,
            "MmeDeR": 0.000,
            "Isabeau": 0.000,
            "Gervais": 0.000,
            "Listolier": 0.000,
            "Tholomyes": 0.066,
            "Fameuil": 0.000,
            "Blacheville": 0.000,
            "Favourite": 0.000,
            "Dahlia": 0.000,
            "Zephine": 0.000,
            "Fantine": 0.114,
            "MmeThenardier": 0.046,
            "Thenardier": 0.129,
            "Cosette": 0.075,
            "Javert": 0.193,
            "Fauchelevent": 0.026,
            "Bamatabois": 0.080,
            "Perpetue": 0.000,
            "Simplice": 0.001,
            "Scaufflaire": 0.000,
            "Woman1": 0.000,
            "Judge": 0.000,
            "Champmathieu": 0.000,
            "Brevet": 0.000,
            "Chenildieu": 0.000,
            "Cochepaille": 0.000,
            "Pontmercy": 0.023,
            "Boulatruelle": 0.000,
            "Eponine": 0.023,
            "Anzelma": 0.000,
            "Woman2": 0.000,
            "MotherInnocent": 0.000,
            "Gribier": 0.000,
            "MmeBurgon": 0.026,
            "Jondrette": 0.000,
            "Gavroche": 0.285,
            "Gillenormand": 0.024,
            "Magnon": 0.005,
            "MlleGillenormand": 0.036,
            "MmePontmercy": 0.005,
            "MlleVaubois": 0.000,
            "LtGillenormand": 0.015,
            "Marius": 0.072,
            "BaronessT": 0.004,
            "Mabeuf": 0.089,
            "Enjolras": 0.003,
            "Combeferre": 0.000,
            "Prouvaire": 0.000,
            "Feuilly": 0.004,
            "Courfeyrac": 0.001,
            "Bahorel": 0.007,
            "Bossuet": 0.028,
            "Joly": 0.000,
            "Grantaire": 0.036,
            "MotherPlutarch": 0.000,
            "Gueulemer": 0.025,
            "Babet": 0.015,
            "Claquesous": 0.042,
            "Montparnasse": 0.050,
            "Toussaint": 0.011,
            "Child1": 0.000,
            "Child2": 0.000,
            "Brujon": 0.002,
            "MmeHucheloup": 0.034,
        }
        result = nx.betweenness_centrality(G, weight="weight", normalized=True)
        self._assert_matches(result, expected, tol=1e-3)

    def test_ladder_graph(self):
        """Weighted betweenness centrality: Ladder graph"""
        G = nx.Graph()  # ladder_graph(3)
        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)])
        raw = {0: 1.667, 1: 1.667, 2: 6.667, 3: 6.667, 4: 1.667, 5: 1.667}
        # Published values count each pair twice; halve for undirected output.
        expected = {node: value / 2 for node, value in raw.items()}
        result = nx.betweenness_centrality(G, weight="weight", normalized=False)
        self._assert_matches(result, expected, tol=1e-3)

    def test_G(self):
        """Weighted betweenness centrality: G"""
        G = weighted_G()
        result = nx.betweenness_centrality(G, weight="weight", normalized=False)
        self._assert_matches(result, {0: 2.0, 1: 0.0, 2: 4.0, 3: 3.0, 4: 4.0, 5: 0.0})

    def test_G2(self):
        """Weighted betweenness centrality: G2"""
        G = nx.DiGraph()
        G.add_weighted_edges_from(
            [
                ("s", "u", 10),
                ("s", "x", 5),
                ("u", "v", 1),
                ("u", "x", 2),
                ("v", "y", 1),
                ("x", "u", 3),
                ("x", "v", 5),
                ("x", "y", 2),
                ("y", "s", 7),
                ("y", "v", 6),
            ]
        )
        result = nx.betweenness_centrality(G, weight="weight", normalized=False)
        self._assert_matches(result, {"y": 5.0, "x": 5.0, "s": 4.0, "u": 2.0, "v": 2.0})

    def test_G3(self):
        """Weighted betweenness centrality: G3"""
        G = nx.MultiGraph(weighted_G())
        # Duplicate every other edge; parallel copies must not change the result.
        duplicates = list(G.edges(data=True))[::2]
        G.add_edges_from(duplicates)
        result = nx.betweenness_centrality(G, weight="weight", normalized=False)
        self._assert_matches(result, {0: 2.0, 1: 0.0, 2: 4.0, 3: 3.0, 4: 4.0, 5: 0.0})

    def test_G4(self):
        """Weighted betweenness centrality: G4"""
        G = nx.MultiDiGraph()
        G.add_weighted_edges_from(
            [
                ("s", "u", 10),
                ("s", "x", 5),
                ("s", "x", 6),
                ("u", "v", 1),
                ("u", "x", 2),
                ("v", "y", 1),
                ("v", "y", 1),
                ("x", "u", 3),
                ("x", "v", 5),
                ("x", "y", 2),
                ("x", "y", 3),
                ("y", "s", 7),
                ("y", "v", 6),
                ("y", "v", 6),
            ]
        )
        result = nx.betweenness_centrality(G, weight="weight", normalized=False)
        self._assert_matches(result, {"y": 5.0, "x": 5.0, "s": 4.0, "u": 2.0, "v": 2.0})
|
| 555 |
+
|
| 556 |
+
|
| 557 |
+
class TestEdgeBetweennessCentrality:
    """Edge betweenness centrality on unweighted (hop-count) shortest paths."""

    @staticmethod
    def _assert_matches(result, expected, tol=1e-7):
        """Compare a computed edge-centrality dict edge by edge."""
        for edge in sorted(expected):
            assert result[edge] == pytest.approx(expected[edge], abs=tol)

    def test_K5(self):
        """Edge betweenness centrality: K5"""
        G = nx.complete_graph(5)
        result = nx.edge_betweenness_centrality(G, weight=None, normalized=False)
        # Each edge carries exactly the shortest path between its endpoints.
        self._assert_matches(result, dict.fromkeys(G.edges(), 1))

    def test_normalized_K5(self):
        """Edge betweenness centrality: K5"""
        G = nx.complete_graph(5)
        result = nx.edge_betweenness_centrality(G, weight=None, normalized=True)
        self._assert_matches(result, dict.fromkeys(G.edges(), 1 / 10))

    def test_C4(self):
        """Edge betweenness centrality: C4"""
        G = nx.cycle_graph(4)
        result = nx.edge_betweenness_centrality(G, weight=None, normalized=True)
        raw = {(0, 1): 2, (0, 3): 2, (1, 2): 2, (2, 3): 2}
        # Normalization divides by n*(n-1)/2 = 6.
        self._assert_matches(result, {edge: value / 6 for edge, value in raw.items()})

    def test_P4(self):
        """Edge betweenness centrality: P4"""
        G = nx.path_graph(4)
        result = nx.edge_betweenness_centrality(G, weight=None, normalized=False)
        self._assert_matches(result, {(0, 1): 3, (1, 2): 4, (2, 3): 3})

    def test_normalized_P4(self):
        """Edge betweenness centrality: P4"""
        G = nx.path_graph(4)
        result = nx.edge_betweenness_centrality(G, weight=None, normalized=True)
        raw = {(0, 1): 3, (1, 2): 4, (2, 3): 3}
        # Normalization divides by n*(n-1)/2 = 6.
        self._assert_matches(result, {edge: value / 6 for edge, value in raw.items()})

    def test_balanced_tree(self):
        """Edge betweenness centrality: balanced tree"""
        G = nx.balanced_tree(r=2, h=2)
        result = nx.edge_betweenness_centrality(G, weight=None, normalized=False)
        expected = {(0, 1): 12, (0, 2): 12, (1, 3): 6, (1, 4): 6, (2, 5): 6, (2, 6): 6}
        self._assert_matches(result, expected)
|
| 605 |
+
|
| 606 |
+
|
| 607 |
+
class TestWeightedEdgeBetweennessCentrality:
    """Edge betweenness centrality using edge weights as shortest-path costs."""

    @staticmethod
    def _assert_matches(result, expected, tol=1e-7):
        """Compare a computed edge-centrality dict edge by edge.

        Works for plain edge tuples and for (u, v, key) multigraph triples.
        """
        for edge in sorted(expected):
            assert result[edge] == pytest.approx(expected[edge], abs=tol)

    def test_K5(self):
        """Edge betweenness centrality: K5"""
        G = nx.complete_graph(5)
        result = nx.edge_betweenness_centrality(G, weight="weight", normalized=False)
        self._assert_matches(result, dict.fromkeys(G.edges(), 1))

    def test_C4(self):
        """Edge betweenness centrality: C4"""
        G = nx.cycle_graph(4)
        result = nx.edge_betweenness_centrality(G, weight="weight", normalized=False)
        self._assert_matches(result, {(0, 1): 2, (0, 3): 2, (1, 2): 2, (2, 3): 2})

    def test_P4(self):
        """Edge betweenness centrality: P4"""
        G = nx.path_graph(4)
        result = nx.edge_betweenness_centrality(G, weight="weight", normalized=False)
        self._assert_matches(result, {(0, 1): 3, (1, 2): 4, (2, 3): 3})

    def test_balanced_tree(self):
        """Edge betweenness centrality: balanced tree"""
        G = nx.balanced_tree(r=2, h=2)
        result = nx.edge_betweenness_centrality(G, weight="weight", normalized=False)
        expected = {(0, 1): 12, (0, 2): 12, (1, 3): 6, (1, 4): 6, (2, 5): 6, (2, 6): 6}
        self._assert_matches(result, expected)

    def test_weighted_graph(self):
        """Edge betweenness centrality: weighted"""
        edge_list = [
            (0, 1, 5),
            (0, 2, 4),
            (0, 3, 3),
            (0, 4, 2),
            (1, 2, 4),
            (1, 3, 1),
            (1, 4, 3),
            (2, 4, 5),
            (3, 4, 4),
        ]
        G = nx.Graph()
        G.add_weighted_edges_from(edge_list)
        result = nx.edge_betweenness_centrality(G, weight="weight", normalized=False)
        expected = {
            (0, 1): 0.0,
            (0, 2): 1.0,
            (0, 3): 2.0,
            (0, 4): 1.0,
            (1, 2): 2.0,
            (1, 3): 3.5,
            (1, 4): 1.5,
            (2, 4): 1.0,
            (3, 4): 0.5,
        }
        self._assert_matches(result, expected)

    def test_normalized_weighted_graph(self):
        """Edge betweenness centrality: normalized weighted"""
        edge_list = [
            (0, 1, 5),
            (0, 2, 4),
            (0, 3, 3),
            (0, 4, 2),
            (1, 2, 4),
            (1, 3, 1),
            (1, 4, 3),
            (2, 4, 5),
            (3, 4, 4),
        ]
        G = nx.Graph()
        G.add_weighted_edges_from(edge_list)
        result = nx.edge_betweenness_centrality(G, weight="weight", normalized=True)
        raw = {
            (0, 1): 0.0,
            (0, 2): 1.0,
            (0, 3): 2.0,
            (0, 4): 1.0,
            (1, 2): 2.0,
            (1, 3): 3.5,
            (1, 4): 1.5,
            (2, 4): 1.0,
            (3, 4): 0.5,
        }
        # Normalization divides by the number of node pairs.
        norm = len(G) * (len(G) - 1) / 2
        self._assert_matches(result, {edge: value / norm for edge, value in raw.items()})

    def test_weighted_multigraph(self):
        """Edge betweenness centrality: weighted multigraph"""
        edge_list = [
            (0, 1, 5),
            (0, 1, 4),
            (0, 2, 4),
            (0, 3, 3),
            (0, 3, 3),
            (0, 4, 2),
            (1, 2, 4),
            (1, 3, 1),
            (1, 3, 2),
            (1, 4, 3),
            (1, 4, 4),
            (2, 4, 5),
            (3, 4, 4),
            (3, 4, 4),
        ]
        G = nx.MultiGraph()
        G.add_weighted_edges_from(edge_list)
        result = nx.edge_betweenness_centrality(G, weight="weight", normalized=False)
        # Keys are (u, v, key) triples; equal-weight parallel edges share credit.
        expected = {
            (0, 1, 0): 0.0,
            (0, 1, 1): 0.5,
            (0, 2, 0): 1.0,
            (0, 3, 0): 0.75,
            (0, 3, 1): 0.75,
            (0, 4, 0): 1.0,
            (1, 2, 0): 2.0,
            (1, 3, 0): 3.0,
            (1, 3, 1): 0.0,
            (1, 4, 0): 1.5,
            (1, 4, 1): 0.0,
            (2, 4, 0): 1.0,
            (3, 4, 0): 0.25,
            (3, 4, 1): 0.25,
        }
        self._assert_matches(result, expected)

    def test_normalized_weighted_multigraph(self):
        """Edge betweenness centrality: normalized weighted multigraph"""
        edge_list = [
            (0, 1, 5),
            (0, 1, 4),
            (0, 2, 4),
            (0, 3, 3),
            (0, 3, 3),
            (0, 4, 2),
            (1, 2, 4),
            (1, 3, 1),
            (1, 3, 2),
            (1, 4, 3),
            (1, 4, 4),
            (2, 4, 5),
            (3, 4, 4),
            (3, 4, 4),
        ]
        G = nx.MultiGraph()
        G.add_weighted_edges_from(edge_list)
        result = nx.edge_betweenness_centrality(G, weight="weight", normalized=True)
        raw = {
            (0, 1, 0): 0.0,
            (0, 1, 1): 0.5,
            (0, 2, 0): 1.0,
            (0, 3, 0): 0.75,
            (0, 3, 1): 0.75,
            (0, 4, 0): 1.0,
            (1, 2, 0): 2.0,
            (1, 3, 0): 3.0,
            (1, 3, 1): 0.0,
            (1, 4, 0): 1.5,
            (1, 4, 1): 0.0,
            (2, 4, 0): 1.0,
            (3, 4, 0): 0.25,
            (3, 4, 1): 0.25,
        }
        # Normalization divides by the number of node pairs.
        norm = len(G) * (len(G) - 1) / 2
        self._assert_matches(result, {edge: value / norm for edge, value in raw.items()})
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py
ADDED
|
@@ -0,0 +1,340 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class TestSubsetBetweennessCentrality:
    """Tests for node betweenness centrality restricted to a
    sources/targets subset (``nx.betweenness_centrality_subset``).

    Each test builds a small graph, computes subset betweenness for a
    fixed ``sources``/``targets`` pair, and compares the result against
    hand-computed expected values node by node.
    """

    def test_K5(self):
        """Betweenness Centrality Subset: K5"""
        G = nx.complete_graph(5)
        b = nx.betweenness_centrality_subset(
            G, sources=[0], targets=[1, 3], weight=None
        )
        # Every pair in K5 is adjacent, so no node is an intermediary.
        b_answer = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0}
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_P5_directed(self):
        """Betweenness Centrality Subset: P5 directed"""
        G = nx.DiGraph()
        nx.add_path(G, range(5))
        # Nodes 1 and 2 lie on the unique directed 0 -> 3 path.
        # (Fixed: dropped a stray ``5: 0`` entry for a node that is not
        # in the graph; G has nodes 0..4 only.)
        b_answer = {0: 0, 1: 1, 2: 1, 3: 0, 4: 0}
        b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None)
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_P5(self):
        """Betweenness Centrality Subset: P5"""
        G = nx.Graph()
        nx.add_path(G, range(5))
        # Undirected credit is half the directed value.
        # (Fixed: dropped a stray ``5: 0`` entry for a nonexistent node.)
        b_answer = {0: 0, 1: 0.5, 2: 0.5, 3: 0, 4: 0}
        b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None)
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_P5_multiple_target(self):
        """Betweenness Centrality Subset: P5 multiple target"""
        G = nx.Graph()
        nx.add_path(G, range(5))
        # (Fixed: dropped a stray ``5: 0`` entry for a nonexistent node.)
        b_answer = {0: 0, 1: 1, 2: 1, 3: 0.5, 4: 0}
        b = nx.betweenness_centrality_subset(
            G, sources=[0], targets=[3, 4], weight=None
        )
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_box(self):
        """Betweenness Centrality Subset: box"""
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
        # Two equal-length 0 -> 3 paths split the credit between 1 and 2.
        b_answer = {0: 0, 1: 0.25, 2: 0.25, 3: 0}
        b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None)
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_box_and_path(self):
        """Betweenness Centrality Subset: box and path"""
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (3, 4), (4, 5)])
        b_answer = {0: 0, 1: 0.5, 2: 0.5, 3: 0.5, 4: 0, 5: 0}
        b = nx.betweenness_centrality_subset(
            G, sources=[0], targets=[3, 4], weight=None
        )
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_box_and_path2(self):
        """Betweenness Centrality Subset: box and path multiple target"""
        G = nx.Graph()
        G.add_edges_from([(0, 1), (1, 2), (2, 3), (1, 20), (20, 3), (3, 4)])
        b_answer = {0: 0, 1: 1.0, 2: 0.5, 20: 0.5, 3: 0.5, 4: 0}
        b = nx.betweenness_centrality_subset(
            G, sources=[0], targets=[3, 4], weight=None
        )
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_diamond_multi_path(self):
        """Betweenness Centrality Subset: Diamond Multi Path"""
        G = nx.Graph()
        G.add_edges_from(
            [
                (1, 2),
                (1, 3),
                (1, 4),
                (1, 5),
                (1, 10),
                (10, 11),
                (11, 12),
                (12, 9),
                (2, 6),
                (3, 6),
                (4, 6),
                (5, 7),
                (7, 8),
                (6, 8),
                (8, 9),
            ]
        )
        b = nx.betweenness_centrality_subset(G, sources=[1], targets=[9], weight=None)

        # There are five shortest 1 -> 9 paths of length 4: three via
        # 2/3/4 -> 6 -> 8, one via 5 -> 7 -> 8, one via 10 -> 11 -> 12.
        # Each fraction is (paths through node / 5) halved for the
        # undirected case, hence the common denominator 10.
        expected_b = {
            1: 0,
            2: 1.0 / 10,
            3: 1.0 / 10,
            4: 1.0 / 10,
            5: 1.0 / 10,
            6: 3.0 / 10,
            7: 1.0 / 10,
            8: 4.0 / 10,
            9: 0,
            10: 1.0 / 10,
            11: 1.0 / 10,
            12: 1.0 / 10,
        }

        for n in sorted(G):
            assert b[n] == pytest.approx(expected_b[n], abs=1e-7)

    def test_normalized_p2(self):
        """
        Betweenness Centrality Subset: Normalized P2
        if n <= 2: no normalization, betweenness centrality should be 0 for all nodes.
        """
        G = nx.Graph()
        nx.add_path(G, range(2))
        b_answer = {0: 0, 1: 0.0}
        b = nx.betweenness_centrality_subset(
            G, sources=[0], targets=[1], normalized=True, weight=None
        )
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_normalized_P5_directed(self):
        """Betweenness Centrality Subset: Normalized Directed P5"""
        G = nx.DiGraph()
        nx.add_path(G, range(5))
        # (Fixed: dropped a stray ``5: 0`` entry for a nonexistent node.)
        b_answer = {0: 0, 1: 1.0 / 12.0, 2: 1.0 / 12.0, 3: 0, 4: 0}
        b = nx.betweenness_centrality_subset(
            G, sources=[0], targets=[3], normalized=True, weight=None
        )
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)

    def test_weighted_graph(self):
        """Betweenness Centrality Subset: Weighted Graph"""
        G = nx.DiGraph()
        G.add_edge(0, 1, weight=3)
        G.add_edge(0, 2, weight=2)
        G.add_edge(0, 3, weight=6)
        G.add_edge(0, 4, weight=4)
        G.add_edge(1, 3, weight=5)
        G.add_edge(1, 5, weight=5)
        G.add_edge(2, 4, weight=1)
        G.add_edge(3, 4, weight=2)
        G.add_edge(3, 5, weight=1)
        G.add_edge(4, 5, weight=4)
        b_answer = {0: 0.0, 1: 0.0, 2: 0.5, 3: 0.5, 4: 0.5, 5: 0.0}
        b = nx.betweenness_centrality_subset(
            G, sources=[0], targets=[5], normalized=False, weight="weight"
        )
        for n in sorted(G):
            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
class TestEdgeSubsetBetweennessCentrality:
    """Edge betweenness centrality restricted to a sources/targets subset.

    Every case builds a small graph, runs
    ``nx.edge_betweenness_centrality_subset`` and checks each edge
    against hand-computed reference values.
    """

    def test_K5(self):
        """Edge betweenness subset centrality: K5"""
        G = nx.complete_graph(5)
        result = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[1, 3], weight=None
        )
        expected = {edge: 0 for edge in G.edges()}
        expected[(0, 3)] = expected[(0, 1)] = 0.5
        for edge in sorted(G.edges()):
            assert result[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_P5_directed(self):
        """Edge betweenness subset centrality: P5 directed"""
        G = nx.DiGraph()
        nx.add_path(G, range(5))
        expected = {edge: 0 for edge in G.edges()}
        expected[(0, 1)] = expected[(1, 2)] = expected[(2, 3)] = 1
        result = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[3], weight=None
        )
        for edge in sorted(G.edges()):
            assert result[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_P5(self):
        """Edge betweenness subset centrality: P5"""
        G = nx.Graph()
        nx.add_path(G, range(5))
        expected = {edge: 0 for edge in G.edges()}
        expected[(0, 1)] = expected[(1, 2)] = expected[(2, 3)] = 0.5
        result = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[3], weight=None
        )
        for edge in sorted(G.edges()):
            assert result[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_P5_multiple_target(self):
        """Edge betweenness subset centrality: P5 multiple target"""
        G = nx.Graph()
        nx.add_path(G, range(5))
        expected = {edge: 0 for edge in G.edges()}
        expected[(0, 1)] = expected[(1, 2)] = expected[(2, 3)] = 1
        expected[(3, 4)] = 0.5
        result = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[3, 4], weight=None
        )
        for edge in sorted(G.edges()):
            assert result[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_box(self):
        """Edge betweenness subset centrality: box"""
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
        expected = {edge: 0 for edge in G.edges()}
        expected[(0, 1)] = expected[(0, 2)] = 0.25
        expected[(1, 3)] = expected[(2, 3)] = 0.25
        result = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[3], weight=None
        )
        for edge in sorted(G.edges()):
            assert result[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_box_and_path(self):
        """Edge betweenness subset centrality: box and path"""
        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (3, 4), (4, 5)])
        expected = {edge: 0 for edge in G.edges()}
        expected[(0, 1)] = expected[(0, 2)] = 0.5
        expected[(1, 3)] = expected[(2, 3)] = 0.5
        expected[(3, 4)] = 0.5
        result = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[3, 4], weight=None
        )
        for edge in sorted(G.edges()):
            assert result[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_box_and_path2(self):
        """Edge betweenness subset centrality: box and path multiple target"""
        G = nx.Graph()
        G.add_edges_from([(0, 1), (1, 2), (2, 3), (1, 20), (20, 3), (3, 4)])
        expected = {edge: 0 for edge in G.edges()}
        expected[(0, 1)] = 1.0
        expected[(1, 20)] = expected[(3, 20)] = 0.5
        expected[(1, 2)] = expected[(2, 3)] = 0.5
        expected[(3, 4)] = 0.5
        result = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[3, 4], weight=None
        )
        for edge in sorted(G.edges()):
            assert result[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_diamond_multi_path(self):
        """Edge betweenness subset centrality: Diamond Multi Path"""
        G = nx.Graph()
        G.add_edges_from(
            [
                (1, 2),
                (1, 3),
                (1, 4),
                (1, 5),
                (1, 10),
                (10, 11),
                (11, 12),
                (12, 9),
                (2, 6),
                (3, 6),
                (4, 6),
                (5, 7),
                (7, 8),
                (6, 8),
                (8, 9),
            ]
        )
        expected = {edge: 0 for edge in G.edges()}
        expected[(8, 9)] = 0.4
        expected[(6, 8)] = expected[(7, 8)] = 0.2
        expected[(2, 6)] = expected[(3, 6)] = expected[(4, 6)] = 0.2 / 3.0
        expected[(1, 2)] = expected[(1, 3)] = expected[(1, 4)] = 0.2 / 3.0
        expected[(5, 7)] = 0.2
        expected[(1, 5)] = 0.2
        expected[(9, 12)] = 0.1
        expected[(11, 12)] = expected[(10, 11)] = expected[(1, 10)] = 0.1
        result = nx.edge_betweenness_centrality_subset(
            G, sources=[1], targets=[9], weight=None
        )
        # Normalize edge orientation before looking up the expected value.
        for edge in G.edges():
            key = tuple(sorted(edge))
            assert result[edge] == pytest.approx(expected[key], abs=1e-7)

    def test_normalized_p1(self):
        """
        Edge betweenness subset centrality: P1
        if n <= 1: no normalization b=0 for all nodes
        """
        G = nx.Graph()
        nx.add_path(G, range(1))
        expected = {edge: 0 for edge in G.edges()}
        result = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[0], normalized=True, weight=None
        )
        # The single-node graph has no edges, so the loop is vacuous;
        # the call itself just must not raise.
        for edge in G.edges():
            assert result[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_normalized_P5_directed(self):
        """Edge betweenness subset centrality: Normalized Directed P5"""
        G = nx.DiGraph()
        nx.add_path(G, range(5))
        expected = {edge: 0 for edge in G.edges()}
        expected[(0, 1)] = expected[(1, 2)] = expected[(2, 3)] = 0.05
        result = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[3], normalized=True, weight=None
        )
        for edge in G.edges():
            assert result[edge] == pytest.approx(expected[edge], abs=1e-7)

    def test_weighted_graph(self):
        """Edge betweenness subset centrality: Weighted Graph"""
        G = nx.DiGraph()
        G.add_weighted_edges_from(
            [
                (0, 1, 3),
                (0, 2, 2),
                (0, 3, 6),
                (0, 4, 4),
                (1, 3, 5),
                (1, 5, 5),
                (2, 4, 1),
                (3, 4, 2),
                (3, 5, 1),
                (4, 5, 4),
            ]
        )
        expected = {edge: 0 for edge in G.edges()}
        expected[(0, 2)] = expected[(2, 4)] = expected[(4, 5)] = 0.5
        expected[(0, 3)] = expected[(3, 5)] = 0.5
        result = nx.edge_betweenness_centrality_subset(
            G, sources=[0], targets=[5], normalized=False, weight="weight"
        )
        for edge in G.edges():
            assert result[edge] == pytest.approx(expected[edge], abs=1e-7)
|
.venv/lib/python3.11/site-packages/networkx/algorithms/centrality/tests/test_closeness_centrality.py
ADDED
|
@@ -0,0 +1,307 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for closeness centrality.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class TestClosenessCentrality:
    """Tests for ``nx.closeness_centrality`` and the incremental variant
    ``nx.incremental_closeness_centrality``.

    Fixture graphs are built once in :meth:`setup_class`; the expected
    values in each test are precomputed regression data.
    """

    @classmethod
    def setup_class(cls):
        # Small named graphs with known closeness values.
        cls.K = nx.krackhardt_kite_graph()
        cls.P3 = nx.path_graph(3)
        cls.P4 = nx.path_graph(4)
        cls.K5 = nx.complete_graph(5)

        cls.C4 = nx.cycle_graph(4)
        cls.T = nx.balanced_tree(r=2, h=2)
        cls.Gb = nx.Graph()
        cls.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)])

        # Classic social-network datasets shipped with networkx.
        F = nx.florentine_families_graph()
        cls.F = F

        cls.LM = nx.les_miserables_graph()

        # Create random undirected, unweighted graph for testing incremental version
        cls.undirected_G = nx.fast_gnp_random_graph(n=100, p=0.6, seed=123)
        cls.undirected_G_cc = nx.closeness_centrality(cls.undirected_G)

    def test_wf_improved(self):
        """Compare default closeness with ``wf_improved=False`` on a
        disconnected graph (union of two disjoint paths)."""
        G = nx.union(self.P4, nx.path_graph([4, 5, 6]))
        c = nx.closeness_centrality(G)
        cwf = nx.closeness_centrality(G, wf_improved=False)
        # res: expected values for the default call; wf_res: for wf_improved=False.
        res = {0: 0.25, 1: 0.375, 2: 0.375, 3: 0.25, 4: 0.222, 5: 0.333, 6: 0.222}
        wf_res = {0: 0.5, 1: 0.75, 2: 0.75, 3: 0.5, 4: 0.667, 5: 1.0, 6: 0.667}
        for n in G:
            assert c[n] == pytest.approx(res[n], abs=1e-3)
            assert cwf[n] == pytest.approx(wf_res[n], abs=1e-3)

    def test_digraph(self):
        """Closeness on a directed path and on its reverse."""
        G = nx.path_graph(3, create_using=nx.DiGraph())
        c = nx.closeness_centrality(G)
        cr = nx.closeness_centrality(G.reverse())
        d = {0: 0.0, 1: 0.500, 2: 0.667}
        dr = {0: 0.667, 1: 0.500, 2: 0.0}
        for n in sorted(self.P3):
            assert c[n] == pytest.approx(d[n], abs=1e-3)
            assert cr[n] == pytest.approx(dr[n], abs=1e-3)

    def test_k5_closeness(self):
        """Every node of a complete graph has closeness 1."""
        c = nx.closeness_centrality(self.K5)
        d = {0: 1.000, 1: 1.000, 2: 1.000, 3: 1.000, 4: 1.000}
        for n in sorted(self.K5):
            assert c[n] == pytest.approx(d[n], abs=1e-3)

    def test_p3_closeness(self):
        """Path graph P3: the middle node is closest to the rest."""
        c = nx.closeness_centrality(self.P3)
        d = {0: 0.667, 1: 1.000, 2: 0.667}
        for n in sorted(self.P3):
            assert c[n] == pytest.approx(d[n], abs=1e-3)

    def test_krackhardt_closeness(self):
        """Regression values for the Krackhardt kite graph."""
        c = nx.closeness_centrality(self.K)
        d = {
            0: 0.529,
            1: 0.529,
            2: 0.500,
            3: 0.600,
            4: 0.500,
            5: 0.643,
            6: 0.643,
            7: 0.600,
            8: 0.429,
            9: 0.310,
        }
        for n in sorted(self.K):
            assert c[n] == pytest.approx(d[n], abs=1e-3)

    def test_florentine_families_closeness(self):
        """Regression values for the Florentine families graph."""
        c = nx.closeness_centrality(self.F)
        d = {
            "Acciaiuoli": 0.368,
            "Albizzi": 0.483,
            "Barbadori": 0.4375,
            "Bischeri": 0.400,
            "Castellani": 0.389,
            "Ginori": 0.333,
            "Guadagni": 0.467,
            "Lamberteschi": 0.326,
            "Medici": 0.560,
            "Pazzi": 0.286,
            "Peruzzi": 0.368,
            "Ridolfi": 0.500,
            "Salviati": 0.389,
            "Strozzi": 0.4375,
            "Tornabuoni": 0.483,
        }
        for n in sorted(self.F):
            assert c[n] == pytest.approx(d[n], abs=1e-3)

    def test_les_miserables_closeness(self):
        """Regression values for the Les Miserables co-occurrence graph."""
        c = nx.closeness_centrality(self.LM)
        d = {
            "Napoleon": 0.302,
            "Myriel": 0.429,
            "MlleBaptistine": 0.413,
            "MmeMagloire": 0.413,
            "CountessDeLo": 0.302,
            "Geborand": 0.302,
            "Champtercier": 0.302,
            "Cravatte": 0.302,
            "Count": 0.302,
            "OldMan": 0.302,
            "Valjean": 0.644,
            "Labarre": 0.394,
            "Marguerite": 0.413,
            "MmeDeR": 0.394,
            "Isabeau": 0.394,
            "Gervais": 0.394,
            "Listolier": 0.341,
            "Tholomyes": 0.392,
            "Fameuil": 0.341,
            "Blacheville": 0.341,
            "Favourite": 0.341,
            "Dahlia": 0.341,
            "Zephine": 0.341,
            "Fantine": 0.461,
            "MmeThenardier": 0.461,
            "Thenardier": 0.517,
            "Cosette": 0.478,
            "Javert": 0.517,
            "Fauchelevent": 0.402,
            "Bamatabois": 0.427,
            "Perpetue": 0.318,
            "Simplice": 0.418,
            "Scaufflaire": 0.394,
            "Woman1": 0.396,
            "Judge": 0.404,
            "Champmathieu": 0.404,
            "Brevet": 0.404,
            "Chenildieu": 0.404,
            "Cochepaille": 0.404,
            "Pontmercy": 0.373,
            "Boulatruelle": 0.342,
            "Eponine": 0.396,
            "Anzelma": 0.352,
            "Woman2": 0.402,
            "MotherInnocent": 0.398,
            "Gribier": 0.288,
            "MmeBurgon": 0.344,
            "Jondrette": 0.257,
            "Gavroche": 0.514,
            "Gillenormand": 0.442,
            "Magnon": 0.335,
            "MlleGillenormand": 0.442,
            "MmePontmercy": 0.315,
            "MlleVaubois": 0.308,
            "LtGillenormand": 0.365,
            "Marius": 0.531,
            "BaronessT": 0.352,
            "Mabeuf": 0.396,
            "Enjolras": 0.481,
            "Combeferre": 0.392,
            "Prouvaire": 0.357,
            "Feuilly": 0.392,
            "Courfeyrac": 0.400,
            "Bahorel": 0.394,
            "Bossuet": 0.475,
            "Joly": 0.394,
            "Grantaire": 0.358,
            "MotherPlutarch": 0.285,
            "Gueulemer": 0.463,
            "Babet": 0.463,
            "Claquesous": 0.452,
            "Montparnasse": 0.458,
            "Toussaint": 0.402,
            "Child1": 0.342,
            "Child2": 0.342,
            "Brujon": 0.380,
            "MmeHucheloup": 0.353,
        }
        for n in sorted(self.LM):
            assert c[n] == pytest.approx(d[n], abs=1e-3)

    def test_weighted_closeness(self):
        """Closeness with edge weights used as distances
        (``distance="weight"``)."""
        edges = [
            ("s", "u", 10),
            ("s", "x", 5),
            ("u", "v", 1),
            ("u", "x", 2),
            ("v", "y", 1),
            ("x", "u", 3),
            ("x", "v", 5),
            ("x", "y", 2),
            ("y", "s", 7),
            ("y", "v", 6),
        ]
        XG = nx.Graph()
        XG.add_weighted_edges_from(edges)
        c = nx.closeness_centrality(XG, distance="weight")
        d = {"y": 0.200, "x": 0.286, "s": 0.138, "u": 0.235, "v": 0.200}
        for n in sorted(XG):
            assert c[n] == pytest.approx(d[n], abs=1e-3)

    #
    # Tests for incremental closeness centrality.
    #
    @staticmethod
    def pick_add_edge(g):
        """Return a (u, v) pair that is not currently an edge of ``g``.

        ``v`` is chosen from the nodes that are neither ``u`` itself nor
        one of its neighbors, so adding (u, v) always creates a new edge.
        """
        u = nx.utils.arbitrary_element(g)
        possible_nodes = set(g.nodes())
        neighbors = list(g.neighbors(u)) + [u]
        possible_nodes.difference_update(neighbors)
        v = nx.utils.arbitrary_element(possible_nodes)
        return (u, v)

    @staticmethod
    def pick_remove_edge(g):
        """Return an existing edge (u, v) of ``g``."""
        u = nx.utils.arbitrary_element(g)
        possible_nodes = list(g.neighbors(u))
        v = nx.utils.arbitrary_element(possible_nodes)
        return (u, v)

    def test_directed_raises(self):
        """Incremental closeness is undirected-only; a directed graph
        must raise NetworkXNotImplemented."""
        with pytest.raises(nx.NetworkXNotImplemented):
            dir_G = nx.gn_graph(n=5)
            prev_cc = None
            edge = self.pick_add_edge(dir_G)
            insert = True
            nx.incremental_closeness_centrality(dir_G, edge, prev_cc, insert)

    def test_wrong_size_prev_cc_raises(self):
        """A ``prev_cc`` dict missing a node must raise NetworkXError."""
        with pytest.raises(nx.NetworkXError):
            G = self.undirected_G.copy()
            edge = self.pick_add_edge(G)
            insert = True
            prev_cc = self.undirected_G_cc.copy()
            # Drop one entry so prev_cc no longer covers all nodes.
            prev_cc.pop(0)
            nx.incremental_closeness_centrality(G, edge, prev_cc, insert)

    def test_wrong_nodes_prev_cc_raises(self):
        """A ``prev_cc`` dict of the right size but with a key that is
        not a graph node must raise NetworkXError."""
        with pytest.raises(nx.NetworkXError):
            G = self.undirected_G.copy()
            edge = self.pick_add_edge(G)
            insert = True
            prev_cc = self.undirected_G_cc.copy()
            num_nodes = len(prev_cc)
            # Same length as before, but one key is not in the graph.
            prev_cc.pop(0)
            prev_cc[num_nodes] = 0.5
            nx.incremental_closeness_centrality(G, edge, prev_cc, insert)

    def test_zero_centrality(self):
        """Removing an edge from P3 disconnects it; the incremental
        result must match a fresh computation and contain a zero."""
        G = nx.path_graph(3)
        prev_cc = nx.closeness_centrality(G)
        edge = self.pick_remove_edge(G)
        test_cc = nx.incremental_closeness_centrality(G, edge, prev_cc, insertion=False)
        G.remove_edges_from([edge])
        real_cc = nx.closeness_centrality(G)
        shared_items = set(test_cc.items()) & set(real_cc.items())
        assert len(shared_items) == len(real_cc)
        assert 0 in test_cc.values()

    def test_incremental(self):
        """Alternate edge removals and insertions and check that the
        incremental update matches a full recomputation each time."""
        # Check that incremental and regular give same output
        G = self.undirected_G.copy()
        prev_cc = None
        for i in range(5):
            if i % 2 == 0:
                # Remove an edge
                insert = False
                edge = self.pick_remove_edge(G)
            else:
                # Add an edge
                insert = True
                edge = self.pick_add_edge(G)

            # start = timeit.default_timer()
            test_cc = nx.incremental_closeness_centrality(G, edge, prev_cc, insert)
            # inc_elapsed = (timeit.default_timer() - start)
            # print(f"incremental time: {inc_elapsed}")

            # Apply the same mutation to the graph before recomputing.
            if insert:
                G.add_edges_from([edge])
            else:
                G.remove_edges_from([edge])

            # start = timeit.default_timer()
            real_cc = nx.closeness_centrality(G)
            # reg_elapsed = (timeit.default_timer() - start)
            # print(f"regular time: {reg_elapsed}")
            # Example output:
            # incremental time: 0.208
            # regular time: 0.276
            # incremental time: 0.00683
            # regular time: 0.260
            # incremental time: 0.0224
            # regular time: 0.278
            # incremental time: 0.00804
            # regular time: 0.208
            # incremental time: 0.00947
            # regular time: 0.188

            assert set(test_cc.items()) == set(real_cc.items())

            # Feed this round's result into the next iteration.
            prev_cc = test_cc
|