ADAPT-Chase commited on
Commit
bc7f6e6
·
verified ·
1 Parent(s): 88a96d6

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -2
  2. tool_server/.venv/lib/python3.12/site-packages/networkx/__pycache__/__init__.cpython-312.pyc +0 -0
  3. tool_server/.venv/lib/python3.12/site-packages/networkx/__pycache__/conftest.cpython-312.pyc +0 -0
  4. tool_server/.venv/lib/python3.12/site-packages/networkx/__pycache__/convert.cpython-312.pyc +0 -0
  5. tool_server/.venv/lib/python3.12/site-packages/networkx/__pycache__/convert_matrix.cpython-312.pyc +0 -0
  6. tool_server/.venv/lib/python3.12/site-packages/networkx/__pycache__/exception.cpython-312.pyc +0 -0
  7. tool_server/.venv/lib/python3.12/site-packages/networkx/__pycache__/lazy_imports.cpython-312.pyc +0 -0
  8. tool_server/.venv/lib/python3.12/site-packages/networkx/__pycache__/relabel.cpython-312.pyc +0 -0
  9. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/__init__.py +133 -0
  10. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__init__.py +26 -0
  11. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clique.py +259 -0
  12. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clustering_coefficient.py +71 -0
  13. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/connectivity.py +412 -0
  14. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/density.py +396 -0
  15. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/distance_measures.py +150 -0
  16. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/dominating_set.py +149 -0
  17. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/kcomponents.py +367 -0
  18. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/matching.py +44 -0
  19. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/maxcut.py +143 -0
  20. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/ramsey.py +53 -0
  21. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/steinertree.py +248 -0
  22. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/traveling_salesman.py +1508 -0
  23. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/treewidth.py +255 -0
  24. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/vertex_cover.py +83 -0
  25. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__init__.py +5 -0
  26. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/connectivity.py +122 -0
  27. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/correlation.py +302 -0
  28. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/mixing.py +255 -0
  29. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/neighbor_degree.py +160 -0
  30. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/pairs.py +127 -0
  31. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/asteroidal.py +164 -0
  32. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__init__.py +88 -0
  33. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/basic.py +322 -0
  34. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/centrality.py +290 -0
  35. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/cluster.py +289 -0
  36. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/covering.py +57 -0
  37. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/edgelist.py +360 -0
  38. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/extendability.py +105 -0
  39. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/generators.py +603 -0
  40. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/boundary.py +168 -0
  41. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bridges.py +205 -0
  42. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/broadcasting.py +155 -0
  43. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/chains.py +172 -0
  44. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/chordal.py +443 -0
  45. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/clique.py +757 -0
  46. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/cluster.py +658 -0
  47. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/communicability_alg.py +163 -0
  48. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/core.py +588 -0
  49. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/covering.py +142 -0
  50. tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/cuts.py +398 -0
.gitattributes CHANGED
@@ -4023,5 +4023,4 @@ tool_server/.venv/lib/python3.12/site-packages/setuptools/command/__pycache__/ea
4023
  tool_server/.venv/lib/python3.12/site-packages/setuptools/config/_validate_pyproject/__pycache__/fastjsonschema_validations.cpython-312.pyc filter=lfs diff=lfs merge=lfs -text
4024
  tool_server/.venv/lib/python3.12/site-packages/regex/__pycache__/_regex_core.cpython-312.pyc filter=lfs diff=lfs merge=lfs -text
4025
  tool_server/.venv/lib/python3.12/site-packages/regex/__pycache__/test_regex.cpython-312.pyc filter=lfs diff=lfs merge=lfs -text
4026
- tool_server/.venv/lib/python3.12/site-packages/zmq/backend/cython/_zmq.abi3.so filter=lfs diff=lfs merge=lfs -text
4027
- tool_server/.venv/lib/python3.12/site-packages/pygments/lexers/__pycache__/lisp.cpython-312.pyc filter=lfs diff=lfs merge=lfs -text
 
4023
  tool_server/.venv/lib/python3.12/site-packages/setuptools/config/_validate_pyproject/__pycache__/fastjsonschema_validations.cpython-312.pyc filter=lfs diff=lfs merge=lfs -text
4024
  tool_server/.venv/lib/python3.12/site-packages/regex/__pycache__/_regex_core.cpython-312.pyc filter=lfs diff=lfs merge=lfs -text
4025
  tool_server/.venv/lib/python3.12/site-packages/regex/__pycache__/test_regex.cpython-312.pyc filter=lfs diff=lfs merge=lfs -text
4026
+ tool_server/.venv/lib/python3.12/site-packages/psutil/tests/__pycache__/test_linux.cpython-312.pyc filter=lfs diff=lfs merge=lfs -text
 
tool_server/.venv/lib/python3.12/site-packages/networkx/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (1.47 kB). View file
 
tool_server/.venv/lib/python3.12/site-packages/networkx/__pycache__/conftest.cpython-312.pyc ADDED
Binary file (8.42 kB). View file
 
tool_server/.venv/lib/python3.12/site-packages/networkx/__pycache__/convert.cpython-312.pyc ADDED
Binary file (18.9 kB). View file
 
tool_server/.venv/lib/python3.12/site-packages/networkx/__pycache__/convert_matrix.cpython-312.pyc ADDED
Binary file (50.8 kB). View file
 
tool_server/.venv/lib/python3.12/site-packages/networkx/__pycache__/exception.cpython-312.pyc ADDED
Binary file (5.39 kB). View file
 
tool_server/.venv/lib/python3.12/site-packages/networkx/__pycache__/lazy_imports.cpython-312.pyc ADDED
Binary file (7.33 kB). View file
 
tool_server/.venv/lib/python3.12/site-packages/networkx/__pycache__/relabel.cpython-312.pyc ADDED
Binary file (13.3 kB). View file
 
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/__init__.py ADDED
@@ -0,0 +1,133 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from networkx.algorithms.assortativity import *
2
+ from networkx.algorithms.asteroidal import *
3
+ from networkx.algorithms.boundary import *
4
+ from networkx.algorithms.broadcasting import *
5
+ from networkx.algorithms.bridges import *
6
+ from networkx.algorithms.chains import *
7
+ from networkx.algorithms.centrality import *
8
+ from networkx.algorithms.chordal import *
9
+ from networkx.algorithms.cluster import *
10
+ from networkx.algorithms.clique import *
11
+ from networkx.algorithms.communicability_alg import *
12
+ from networkx.algorithms.components import *
13
+ from networkx.algorithms.coloring import *
14
+ from networkx.algorithms.core import *
15
+ from networkx.algorithms.covering import *
16
+ from networkx.algorithms.cycles import *
17
+ from networkx.algorithms.cuts import *
18
+ from networkx.algorithms.d_separation import *
19
+ from networkx.algorithms.dag import *
20
+ from networkx.algorithms.distance_measures import *
21
+ from networkx.algorithms.distance_regular import *
22
+ from networkx.algorithms.dominance import *
23
+ from networkx.algorithms.dominating import *
24
+ from networkx.algorithms.efficiency_measures import *
25
+ from networkx.algorithms.euler import *
26
+ from networkx.algorithms.graphical import *
27
+ from networkx.algorithms.hierarchy import *
28
+ from networkx.algorithms.hybrid import *
29
+ from networkx.algorithms.link_analysis import *
30
+ from networkx.algorithms.link_prediction import *
31
+ from networkx.algorithms.lowest_common_ancestors import *
32
+ from networkx.algorithms.isolate import *
33
+ from networkx.algorithms.matching import *
34
+ from networkx.algorithms.minors import *
35
+ from networkx.algorithms.mis import *
36
+ from networkx.algorithms.moral import *
37
+ from networkx.algorithms.non_randomness import *
38
+ from networkx.algorithms.operators import *
39
+ from networkx.algorithms.planarity import *
40
+ from networkx.algorithms.planar_drawing import *
41
+ from networkx.algorithms.polynomials import *
42
+ from networkx.algorithms.reciprocity import *
43
+ from networkx.algorithms.regular import *
44
+ from networkx.algorithms.richclub import *
45
+ from networkx.algorithms.shortest_paths import *
46
+ from networkx.algorithms.similarity import *
47
+ from networkx.algorithms.graph_hashing import *
48
+ from networkx.algorithms.simple_paths import *
49
+ from networkx.algorithms.smallworld import *
50
+ from networkx.algorithms.smetric import *
51
+ from networkx.algorithms.structuralholes import *
52
+ from networkx.algorithms.sparsifiers import *
53
+ from networkx.algorithms.summarization import *
54
+ from networkx.algorithms.swap import *
55
+ from networkx.algorithms.time_dependent import *
56
+ from networkx.algorithms.traversal import *
57
+ from networkx.algorithms.triads import *
58
+ from networkx.algorithms.vitality import *
59
+ from networkx.algorithms.voronoi import *
60
+ from networkx.algorithms.walks import *
61
+ from networkx.algorithms.wiener import *
62
+
63
+ # Make certain subpackages available to the user as direct imports from
64
+ # the `networkx` namespace.
65
+ from networkx.algorithms import approximation
66
+ from networkx.algorithms import assortativity
67
+ from networkx.algorithms import bipartite
68
+ from networkx.algorithms import node_classification
69
+ from networkx.algorithms import centrality
70
+ from networkx.algorithms import chordal
71
+ from networkx.algorithms import cluster
72
+ from networkx.algorithms import clique
73
+ from networkx.algorithms import components
74
+ from networkx.algorithms import connectivity
75
+ from networkx.algorithms import community
76
+ from networkx.algorithms import coloring
77
+ from networkx.algorithms import flow
78
+ from networkx.algorithms import isomorphism
79
+ from networkx.algorithms import link_analysis
80
+ from networkx.algorithms import lowest_common_ancestors
81
+ from networkx.algorithms import operators
82
+ from networkx.algorithms import shortest_paths
83
+ from networkx.algorithms import tournament
84
+ from networkx.algorithms import traversal
85
+ from networkx.algorithms import tree
86
+
87
+ # Make certain functions from some of the previous subpackages available
88
+ # to the user as direct imports from the `networkx` namespace.
89
+ from networkx.algorithms.bipartite import complete_bipartite_graph
90
+ from networkx.algorithms.bipartite import is_bipartite
91
+ from networkx.algorithms.bipartite import projected_graph
92
+ from networkx.algorithms.connectivity import all_pairs_node_connectivity
93
+ from networkx.algorithms.connectivity import all_node_cuts
94
+ from networkx.algorithms.connectivity import average_node_connectivity
95
+ from networkx.algorithms.connectivity import edge_connectivity
96
+ from networkx.algorithms.connectivity import edge_disjoint_paths
97
+ from networkx.algorithms.connectivity import k_components
98
+ from networkx.algorithms.connectivity import k_edge_components
99
+ from networkx.algorithms.connectivity import k_edge_subgraphs
100
+ from networkx.algorithms.connectivity import k_edge_augmentation
101
+ from networkx.algorithms.connectivity import is_k_edge_connected
102
+ from networkx.algorithms.connectivity import minimum_edge_cut
103
+ from networkx.algorithms.connectivity import minimum_node_cut
104
+ from networkx.algorithms.connectivity import node_connectivity
105
+ from networkx.algorithms.connectivity import node_disjoint_paths
106
+ from networkx.algorithms.connectivity import stoer_wagner
107
+ from networkx.algorithms.flow import capacity_scaling
108
+ from networkx.algorithms.flow import cost_of_flow
109
+ from networkx.algorithms.flow import gomory_hu_tree
110
+ from networkx.algorithms.flow import max_flow_min_cost
111
+ from networkx.algorithms.flow import maximum_flow
112
+ from networkx.algorithms.flow import maximum_flow_value
113
+ from networkx.algorithms.flow import min_cost_flow
114
+ from networkx.algorithms.flow import min_cost_flow_cost
115
+ from networkx.algorithms.flow import minimum_cut
116
+ from networkx.algorithms.flow import minimum_cut_value
117
+ from networkx.algorithms.flow import network_simplex
118
+ from networkx.algorithms.isomorphism import could_be_isomorphic
119
+ from networkx.algorithms.isomorphism import fast_could_be_isomorphic
120
+ from networkx.algorithms.isomorphism import faster_could_be_isomorphic
121
+ from networkx.algorithms.isomorphism import is_isomorphic
122
+ from networkx.algorithms.isomorphism.vf2pp import *
123
+ from networkx.algorithms.tree.branchings import maximum_branching
124
+ from networkx.algorithms.tree.branchings import maximum_spanning_arborescence
125
+ from networkx.algorithms.tree.branchings import minimum_branching
126
+ from networkx.algorithms.tree.branchings import minimum_spanning_arborescence
127
+ from networkx.algorithms.tree.branchings import ArborescenceIterator
128
+ from networkx.algorithms.tree.coding import *
129
+ from networkx.algorithms.tree.decomposition import *
130
+ from networkx.algorithms.tree.mst import *
131
+ from networkx.algorithms.tree.operations import *
132
+ from networkx.algorithms.tree.recognition import *
133
+ from networkx.algorithms.tournament import is_tournament
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__init__.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Approximations of graph properties and Heuristic methods for optimization.
2
+
3
+ The functions in this class are not imported into the top-level ``networkx``
4
+ namespace so the easiest way to use them is with::
5
+
6
+ >>> from networkx.algorithms import approximation
7
+
8
+ Another option is to import the specific function with
9
+ ``from networkx.algorithms.approximation import function_name``.
10
+
11
+ """
12
+
13
+ from networkx.algorithms.approximation.clustering_coefficient import *
14
+ from networkx.algorithms.approximation.clique import *
15
+ from networkx.algorithms.approximation.connectivity import *
16
+ from networkx.algorithms.approximation.distance_measures import *
17
+ from networkx.algorithms.approximation.dominating_set import *
18
+ from networkx.algorithms.approximation.kcomponents import *
19
+ from networkx.algorithms.approximation.matching import *
20
+ from networkx.algorithms.approximation.ramsey import *
21
+ from networkx.algorithms.approximation.steinertree import *
22
+ from networkx.algorithms.approximation.traveling_salesman import *
23
+ from networkx.algorithms.approximation.treewidth import *
24
+ from networkx.algorithms.approximation.vertex_cover import *
25
+ from networkx.algorithms.approximation.maxcut import *
26
+ from networkx.algorithms.approximation.density import *
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clique.py ADDED
@@ -0,0 +1,259 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for computing large cliques and maximum independent sets."""
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms.approximation import ramsey
5
+ from networkx.utils import not_implemented_for
6
+
7
+ __all__ = [
8
+ "clique_removal",
9
+ "max_clique",
10
+ "large_clique_size",
11
+ "maximum_independent_set",
12
+ ]
13
+
14
+
15
+ @not_implemented_for("directed")
16
+ @not_implemented_for("multigraph")
17
+ @nx._dispatchable
18
+ def maximum_independent_set(G):
19
+ """Returns an approximate maximum independent set.
20
+
21
+ Independent set or stable set is a set of vertices in a graph, no two of
22
+ which are adjacent. That is, it is a set I of vertices such that for every
23
+ two vertices in I, there is no edge connecting the two. Equivalently, each
24
+ edge in the graph has at most one endpoint in I. The size of an independent
25
+ set is the number of vertices it contains [1]_.
26
+
27
+ A maximum independent set is a largest independent set for a given graph G
28
+ and its size is denoted $\\alpha(G)$. The problem of finding such a set is called
29
+ the maximum independent set problem and is an NP-hard optimization problem.
30
+ As such, it is unlikely that there exists an efficient algorithm for finding
31
+ a maximum independent set of a graph.
32
+
33
+ The Independent Set algorithm is based on [2]_.
34
+
35
+ Parameters
36
+ ----------
37
+ G : NetworkX graph
38
+ Undirected graph
39
+
40
+ Returns
41
+ -------
42
+ iset : Set
43
+ The apx-maximum independent set
44
+
45
+ Examples
46
+ --------
47
+ >>> G = nx.path_graph(10)
48
+ >>> nx.approximation.maximum_independent_set(G)
49
+ {0, 2, 4, 6, 9}
50
+
51
+ Raises
52
+ ------
53
+ NetworkXNotImplemented
54
+ If the graph is directed or is a multigraph.
55
+
56
+ Notes
57
+ -----
58
+ Finds the $O(|V|/(log|V|)^2)$ apx of independent set in the worst case.
59
+
60
+ References
61
+ ----------
62
+ .. [1] `Wikipedia: Independent set
63
+ <https://en.wikipedia.org/wiki/Independent_set_(graph_theory)>`_
64
+ .. [2] Boppana, R., & Halldórsson, M. M. (1992).
65
+ Approximating maximum independent sets by excluding subgraphs.
66
+ BIT Numerical Mathematics, 32(2), 180–196. Springer.
67
+ """
68
+ iset, _ = clique_removal(G)
69
+ return iset
70
+
71
+
72
+ @not_implemented_for("directed")
73
+ @not_implemented_for("multigraph")
74
+ @nx._dispatchable
75
+ def max_clique(G):
76
+ r"""Find the Maximum Clique
77
+
78
+ Finds the $O(|V|/(log|V|)^2)$ apx of maximum clique/independent set
79
+ in the worst case.
80
+
81
+ Parameters
82
+ ----------
83
+ G : NetworkX graph
84
+ Undirected graph
85
+
86
+ Returns
87
+ -------
88
+ clique : set
89
+ The apx-maximum clique of the graph
90
+
91
+ Examples
92
+ --------
93
+ >>> G = nx.path_graph(10)
94
+ >>> nx.approximation.max_clique(G)
95
+ {8, 9}
96
+
97
+ Raises
98
+ ------
99
+ NetworkXNotImplemented
100
+ If the graph is directed or is a multigraph.
101
+
102
+ Notes
103
+ -----
104
+ A clique in an undirected graph G = (V, E) is a subset of the vertex set
105
+ `C \subseteq V` such that for every two vertices in C there exists an edge
106
+ connecting the two. This is equivalent to saying that the subgraph
107
+ induced by C is complete (in some cases, the term clique may also refer
108
+ to the subgraph).
109
+
110
+ A maximum clique is a clique of the largest possible size in a given graph.
111
+ The clique number `\omega(G)` of a graph G is the number of
112
+ vertices in a maximum clique in G. The intersection number of
113
+ G is the smallest number of cliques that together cover all edges of G.
114
+
115
+ https://en.wikipedia.org/wiki/Maximum_clique
116
+
117
+ References
118
+ ----------
119
+ .. [1] Boppana, R., & Halldórsson, M. M. (1992).
120
+ Approximating maximum independent sets by excluding subgraphs.
121
+ BIT Numerical Mathematics, 32(2), 180–196. Springer.
122
+ doi:10.1007/BF01994876
123
+ """
124
+ # finding the maximum clique in a graph is equivalent to finding
125
+ # the independent set in the complementary graph
126
+ cgraph = nx.complement(G)
127
+ iset, _ = clique_removal(cgraph)
128
+ return iset
129
+
130
+
131
+ @not_implemented_for("directed")
132
+ @not_implemented_for("multigraph")
133
+ @nx._dispatchable
134
+ def clique_removal(G):
135
+ r"""Repeatedly remove cliques from the graph.
136
+
137
+ Results in a $O(|V|/(\log |V|)^2)$ approximation of maximum clique
138
+ and independent set. Returns the largest independent set found, along
139
+ with found maximal cliques.
140
+
141
+ Parameters
142
+ ----------
143
+ G : NetworkX graph
144
+ Undirected graph
145
+
146
+ Returns
147
+ -------
148
+ max_ind_cliques : (set, list) tuple
149
+ 2-tuple of Maximal Independent Set and list of maximal cliques (sets).
150
+
151
+ Examples
152
+ --------
153
+ >>> G = nx.path_graph(10)
154
+ >>> nx.approximation.clique_removal(G)
155
+ ({0, 2, 4, 6, 9}, [{0, 1}, {2, 3}, {4, 5}, {6, 7}, {8, 9}])
156
+
157
+ Raises
158
+ ------
159
+ NetworkXNotImplemented
160
+ If the graph is directed or is a multigraph.
161
+
162
+ References
163
+ ----------
164
+ .. [1] Boppana, R., & Halldórsson, M. M. (1992).
165
+ Approximating maximum independent sets by excluding subgraphs.
166
+ BIT Numerical Mathematics, 32(2), 180–196. Springer.
167
+ """
168
+ graph = G.copy()
169
+ c_i, i_i = ramsey.ramsey_R2(graph)
170
+ cliques = [c_i]
171
+ isets = [i_i]
172
+ while graph:
173
+ graph.remove_nodes_from(c_i)
174
+ c_i, i_i = ramsey.ramsey_R2(graph)
175
+ if c_i:
176
+ cliques.append(c_i)
177
+ if i_i:
178
+ isets.append(i_i)
179
+ # Determine the largest independent set as measured by cardinality.
180
+ maxiset = max(isets, key=len)
181
+ return maxiset, cliques
182
+
183
+
184
+ @not_implemented_for("directed")
185
+ @not_implemented_for("multigraph")
186
+ @nx._dispatchable
187
+ def large_clique_size(G):
188
+ """Find the size of a large clique in a graph.
189
+
190
+ A *clique* is a subset of nodes in which each pair of nodes is
191
+ adjacent. This function is a heuristic for finding the size of a
192
+ large clique in the graph.
193
+
194
+ Parameters
195
+ ----------
196
+ G : NetworkX graph
197
+
198
+ Returns
199
+ -------
200
+ k: integer
201
+ The size of a large clique in the graph.
202
+
203
+ Examples
204
+ --------
205
+ >>> G = nx.path_graph(10)
206
+ >>> nx.approximation.large_clique_size(G)
207
+ 2
208
+
209
+ Raises
210
+ ------
211
+ NetworkXNotImplemented
212
+ If the graph is directed or is a multigraph.
213
+
214
+ Notes
215
+ -----
216
+ This implementation is from [1]_. Its worst case time complexity is
217
+ :math:`O(n d^2)`, where *n* is the number of nodes in the graph and
218
+ *d* is the maximum degree.
219
+
220
+ This function is a heuristic, which means it may work well in
221
+ practice, but there is no rigorous mathematical guarantee on the
222
+ ratio between the returned number and the actual largest clique size
223
+ in the graph.
224
+
225
+ References
226
+ ----------
227
+ .. [1] Pattabiraman, Bharath, et al.
228
+ "Fast Algorithms for the Maximum Clique Problem on Massive Graphs
229
+ with Applications to Overlapping Community Detection."
230
+ *Internet Mathematics* 11.4-5 (2015): 421--448.
231
+ <https://doi.org/10.1080/15427951.2014.986778>
232
+
233
+ See also
234
+ --------
235
+
236
+ :func:`networkx.algorithms.approximation.clique.max_clique`
237
+ A function that returns an approximate maximum clique with a
238
+ guarantee on the approximation ratio.
239
+
240
+ :mod:`networkx.algorithms.clique`
241
+ Functions for finding the exact maximum clique in a graph.
242
+
243
+ """
244
+ degrees = G.degree
245
+
246
+ def _clique_heuristic(G, U, size, best_size):
247
+ if not U:
248
+ return max(best_size, size)
249
+ u = max(U, key=degrees)
250
+ U.remove(u)
251
+ N_prime = {v for v in G[u] if degrees[v] >= best_size}
252
+ return _clique_heuristic(G, U & N_prime, size + 1, best_size)
253
+
254
+ best_size = 0
255
+ nodes = (u for u in G if degrees[u] >= best_size)
256
+ for u in nodes:
257
+ neighbors = {v for v in G[u] if degrees[v] >= best_size}
258
+ best_size = _clique_heuristic(G, neighbors, 1, best_size)
259
+ return best_size
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clustering_coefficient.py ADDED
@@ -0,0 +1,71 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import networkx as nx
2
+ from networkx.utils import not_implemented_for, py_random_state
3
+
4
+ __all__ = ["average_clustering"]
5
+
6
+
7
+ @not_implemented_for("directed")
8
+ @py_random_state(2)
9
+ @nx._dispatchable(name="approximate_average_clustering")
10
+ def average_clustering(G, trials=1000, seed=None):
11
+ r"""Estimates the average clustering coefficient of G.
12
+
13
+ The local clustering of each node in `G` is the fraction of triangles
14
+ that actually exist over all possible triangles in its neighborhood.
15
+ The average clustering coefficient of a graph `G` is the mean of
16
+ local clusterings.
17
+
18
+ This function finds an approximate average clustering coefficient
19
+ for G by repeating `n` times (defined in `trials`) the following
20
+ experiment: choose a node at random, choose two of its neighbors
21
+ at random, and check if they are connected. The approximate
22
+ coefficient is the fraction of triangles found over the number
23
+ of trials [1]_.
24
+
25
+ Parameters
26
+ ----------
27
+ G : NetworkX graph
28
+
29
+ trials : integer
30
+ Number of trials to perform (default 1000).
31
+
32
+ seed : integer, random_state, or None (default)
33
+ Indicator of random number generation state.
34
+ See :ref:`Randomness<randomness>`.
35
+
36
+ Returns
37
+ -------
38
+ c : float
39
+ Approximated average clustering coefficient.
40
+
41
+ Examples
42
+ --------
43
+ >>> from networkx.algorithms import approximation
44
+ >>> G = nx.erdos_renyi_graph(10, 0.2, seed=10)
45
+ >>> approximation.average_clustering(G, trials=1000, seed=10)
46
+ 0.214
47
+
48
+ Raises
49
+ ------
50
+ NetworkXNotImplemented
51
+ If G is directed.
52
+
53
+ References
54
+ ----------
55
+ .. [1] Schank, Thomas, and Dorothea Wagner. Approximating clustering
56
+ coefficient and transitivity. Universität Karlsruhe, Fakultät für
57
+ Informatik, 2004.
58
+ https://doi.org/10.5445/IR/1000001239
59
+
60
+ """
61
+ n = len(G)
62
+ triangles = 0
63
+ nodes = list(G)
64
+ for i in [int(seed.random() * n) for i in range(trials)]:
65
+ nbrs = list(G[nodes[i]])
66
+ if len(nbrs) < 2:
67
+ continue
68
+ u, v = seed.sample(nbrs, 2)
69
+ if u in G[v]:
70
+ triangles += 1
71
+ return triangles / trials
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/connectivity.py ADDED
@@ -0,0 +1,412 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Fast approximation for node connectivity"""
2
+
3
+ import itertools
4
+ from operator import itemgetter
5
+
6
+ import networkx as nx
7
+
8
+ __all__ = [
9
+ "local_node_connectivity",
10
+ "node_connectivity",
11
+ "all_pairs_node_connectivity",
12
+ ]
13
+
14
+
15
+ @nx._dispatchable(name="approximate_local_node_connectivity")
16
+ def local_node_connectivity(G, source, target, cutoff=None):
17
+ """Compute node connectivity between source and target.
18
+
19
+ Pairwise or local node connectivity between two distinct and nonadjacent
20
+ nodes is the minimum number of nodes that must be removed (minimum
21
+ separating cutset) to disconnect them. By Menger's theorem, this is equal
22
+ to the number of node independent paths (paths that share no nodes other
23
+ than source and target). Which is what we compute in this function.
24
+
25
+ This algorithm is a fast approximation that gives an strict lower
26
+ bound on the actual number of node independent paths between two nodes [1]_.
27
+ It works for both directed and undirected graphs.
28
+
29
+ Parameters
30
+ ----------
31
+
32
+ G : NetworkX graph
33
+
34
+ source : node
35
+ Starting node for node connectivity
36
+
37
+ target : node
38
+ Ending node for node connectivity
39
+
40
+ cutoff : integer
41
+ Maximum node connectivity to consider. If None, the minimum degree
42
+ of source or target is used as a cutoff. Default value None.
43
+
44
+ Returns
45
+ -------
46
+ k: integer
47
+ pairwise node connectivity
48
+
49
+ Examples
50
+ --------
51
+ >>> # Platonic octahedral graph has node connectivity 4
52
+ >>> # for each non adjacent node pair
53
+ >>> from networkx.algorithms import approximation as approx
54
+ >>> G = nx.octahedral_graph()
55
+ >>> approx.local_node_connectivity(G, 0, 5)
56
+ 4
57
+
58
+ Notes
59
+ -----
60
+ This algorithm [1]_ finds node independents paths between two nodes by
61
+ computing their shortest path using BFS, marking the nodes of the path
62
+ found as 'used' and then searching other shortest paths excluding the
63
+ nodes marked as used until no more paths exist. It is not exact because
64
+ a shortest path could use nodes that, if the path were longer, may belong
65
+ to two different node independent paths. Thus it only guarantees an
66
+ strict lower bound on node connectivity.
67
+
68
+ Note that the authors propose a further refinement, losing accuracy and
69
+ gaining speed, which is not implemented yet.
70
+
71
+ See also
72
+ --------
73
+ all_pairs_node_connectivity
74
+ node_connectivity
75
+
76
+ References
77
+ ----------
78
+ .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
79
+ Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
80
+ http://eclectic.ss.uci.edu/~drwhite/working.pdf
81
+
82
+ """
83
+ if target == source:
84
+ raise nx.NetworkXError("source and target have to be different nodes.")
85
+
86
+ # Maximum possible node independent paths
87
+ if G.is_directed():
88
+ possible = min(G.out_degree(source), G.in_degree(target))
89
+ else:
90
+ possible = min(G.degree(source), G.degree(target))
91
+
92
+ K = 0
93
+ if not possible:
94
+ return K
95
+
96
+ if cutoff is None:
97
+ cutoff = float("inf")
98
+
99
+ exclude = set()
100
+ for i in range(min(possible, cutoff)):
101
+ try:
102
+ path = _bidirectional_shortest_path(G, source, target, exclude)
103
+ exclude.update(set(path))
104
+ K += 1
105
+ except nx.NetworkXNoPath:
106
+ break
107
+
108
+ return K
109
+
110
+
111
@nx._dispatchable(name="approximate_node_connectivity")
def node_connectivity(G, s=None, t=None):
    r"""Returns an approximation for node connectivity for a graph or digraph G.

    Node connectivity is the minimum number of nodes that must be removed
    to disconnect G or render it trivial. By Menger's theorem, this equals
    the number of node independent paths (paths that share no nodes other
    than source and target).

    If both ``s`` and ``t`` are given, the local node connectivity between
    that pair is returned instead: the minimum number of nodes that must be
    removed to break all paths from ``s`` to ``t`` in G.

    The result is a fast approximation that gives a strict lower bound on
    the actual number of node independent paths between two nodes [1]_.
    Both directed and undirected graphs are supported.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    s : node
        Source node. Optional. Default value: None.

    t : node
        Target node. Optional. Default value: None.

    Returns
    -------
    K : integer
        Node connectivity of G, or local node connectivity if source
        and target are provided.

    Examples
    --------
    >>> # Platonic octahedral graph is 4-node-connected
    >>> from networkx.algorithms import approximation as approx
    >>> G = nx.octahedral_graph()
    >>> approx.node_connectivity(G)
    4

    Notes
    -----
    The underlying routine [1]_ repeatedly finds a shortest path between the
    two nodes via BFS, marks that path's nodes as used, and searches again
    while excluding used nodes, until no path remains. It is not exact
    because a shortest path may use nodes that, on a longer route, would
    belong to two different node independent paths; hence only a strict
    lower bound is guaranteed.

    See also
    --------
    all_pairs_node_connectivity
    local_node_connectivity

    References
    ----------
    .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
        Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
        http://eclectic.ss.uci.edu/~drwhite/working.pdf

    """
    # Exactly one endpoint given is an error: the pair must come together.
    if (s is None) != (t is None):
        raise nx.NetworkXError("Both source and target must be specified.")

    # Local node connectivity for an explicit source/target pair.
    if s is not None:
        if s not in G:
            raise nx.NetworkXError(f"node {s} not in graph")
        if t not in G:
            raise nx.NetworkXError(f"node {t} not in graph")
        return local_node_connectivity(G, s, t)

    # Global node connectivity: set up direction-dependent helpers.
    if G.is_directed():
        is_conn = nx.is_weakly_connected
        pair_iter = itertools.permutations

        def nbrs(u):
            # In the directed case "neighbors" means both in- and out-neighbors.
            return itertools.chain(G.predecessors(u), G.successors(u))

    else:
        is_conn = nx.is_connected
        pair_iter = itertools.combinations
        nbrs = G.neighbors

    # A disconnected (or not weakly connected) graph has connectivity 0.
    if not is_conn(G):
        return 0

    # A minimum-degree node bounds the connectivity from above.
    v, deg_min = min(G.degree(), key=lambda nd: nd[1])
    best = deg_min

    # Local connectivity between v and every node that is not its neighbor,
    # keeping the running minimum (used as a cutoff to prune the search).
    for w in set(G) - set(nbrs(v)) - {v}:
        best = min(best, local_node_connectivity(G, v, w, cutoff=best))
    # Same for every nonadjacent pair of neighbors of v.
    for x, y in pair_iter(nbrs(v), 2):
        if y not in G[x] and x != y:
            best = min(best, local_node_connectivity(G, x, y, cutoff=best))
    return best
215
+
216
+
217
@nx._dispatchable(name="approximate_all_pairs_node_connectivity")
def all_pairs_node_connectivity(G, nbunch=None, cutoff=None):
    """Compute node connectivity between all pairs of nodes.

    Pairwise or local node connectivity between two distinct and nonadjacent
    nodes is the minimum number of nodes that must be removed (minimum
    separating cutset) to disconnect them. By Menger's theorem, this equals
    the number of node independent paths (paths that share no nodes other
    than source and target), which is what this function computes — using a
    fast approximation that gives a strict lower bound on the actual number
    of node independent paths between two nodes [1]_. Both directed and
    undirected graphs are supported.

    Parameters
    ----------
    G : NetworkX graph

    nbunch: container
        Container of nodes. If provided node connectivity will be computed
        only over pairs of nodes in nbunch.

    cutoff : integer
        Maximum node connectivity to consider. If None, the minimum degree
        of source or target is used as a cutoff in each pair of nodes.
        Default value None.

    Returns
    -------
    K : dictionary
        Dictionary, keyed by source and target, of pairwise node connectivity

    Examples
    --------
    A 3 node cycle with one extra node attached has connectivity 2 between all
    nodes in the cycle and connectivity 1 between the extra node and the rest:

    >>> G = nx.cycle_graph(3)
    >>> G.add_edge(2, 3)
    >>> import pprint  # for nice dictionary formatting
    >>> pprint.pprint(nx.all_pairs_node_connectivity(G))
    {0: {1: 2, 2: 2, 3: 1},
     1: {0: 2, 2: 2, 3: 1},
     2: {0: 2, 1: 2, 3: 1},
     3: {0: 1, 1: 1, 2: 1}}

    See Also
    --------
    local_node_connectivity
    node_connectivity

    References
    ----------
    .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
        Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
        http://eclectic.ss.uci.edu/~drwhite/working.pdf
    """
    # Default to every node in the graph; otherwise restrict to nbunch.
    nodes = G if nbunch is None else set(nbunch)

    # Ordered pairs for digraphs, unordered pairs otherwise.
    undirected = not G.is_directed()
    pair_iter = itertools.combinations if undirected else itertools.permutations

    result = {n: {} for n in nodes}
    for u, v in pair_iter(nodes, 2):
        conn = local_node_connectivity(G, u, v, cutoff=cutoff)
        result[u][v] = conn
        if undirected:
            # Symmetric in the undirected case; fill both directions at once.
            result[v][u] = conn

    return result
295
+
296
+
297
def _bidirectional_shortest_path(G, source, target, exclude):
    """Returns shortest path between source and target ignoring nodes in the
    container 'exclude'.

    Parameters
    ----------

    G : NetworkX graph

    source : node
        Starting node for path

    target : node
        Ending node for path

    exclude: container
        Container for nodes to exclude from the search for shortest paths

    Returns
    -------
    path: list
        Shortest path between source and target ignoring nodes in 'exclude'

    Raises
    ------
    NetworkXNoPath
        If there is no path or if nodes are adjacent and have only one path
        between them

    Notes
    -----
    This function and its helper are originally from
    networkx.algorithms.shortest_paths.unweighted and are modified to
    accept the extra parameter 'exclude', which is a container for nodes
    already used in other paths that should be ignored.

    References
    ----------
    .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
        Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
        http://eclectic.ss.uci.edu/~drwhite/working.pdf

    """
    # The helper performs the two-ended BFS and reports where the
    # frontiers met, plus the predecessor/successor trees.
    pred, succ, meeting = _bidirectional_pred_succ(G, source, target, exclude)

    # Walk the predecessor chain back from the meeting node to the source,
    # then flip it so it reads source -> meeting.
    path = []
    node = meeting
    while node is not None:
        path.append(node)
        node = pred[node]
    path.reverse()

    # Continue along the successor chain from the meeting node to the target.
    node = succ[meeting]
    while node is not None:
        path.append(node)
        node = succ[node]

    return path
358
+
359
+
360
def _bidirectional_pred_succ(G, source, target, exclude):
    """Bidirectional BFS from ``source`` and ``target``, skipping ``exclude``.

    Returns a 3-tuple ``(pred, succ, w)`` where ``pred`` is the predecessor
    tree grown from ``source``, ``succ`` is the successor tree grown from
    ``target``, and ``w`` is the node where the two searches met.

    Raises
    ------
    NetworkXNoPath
        If the two frontiers never meet.
    """
    # does BFS from both source and target and meets in the middle
    # excludes nodes in the container "exclude" from the search

    # handle either directed or undirected
    if G.is_directed():
        Gpred = G.predecessors
        Gsucc = G.successors
    else:
        # For undirected graphs both directions expand over the same neighbors.
        Gpred = G.neighbors
        Gsucc = G.neighbors

    # predecessor and successors in search
    # (None marks the root of each search tree)
    pred = {source: None}
    succ = {target: None}

    # initialize fringes, start with forward
    forward_fringe = [source]
    reverse_fringe = [target]

    level = 0

    while forward_fringe and reverse_fringe:
        # Make sure that we iterate one step forward and one step backwards
        # thus source and target will only trigger "found path" when they are
        # adjacent and then they can be safely included in the container 'exclude'
        level += 1
        if level % 2 != 0:
            # Odd levels expand the forward frontier by one BFS layer.
            this_level = forward_fringe
            forward_fringe = []
            for v in this_level:
                for w in Gsucc(v):
                    if w in exclude:
                        continue
                    if w not in pred:
                        forward_fringe.append(w)
                        pred[w] = v
                    # Checked even for already-seen nodes: the frontiers may
                    # meet on a node discovered in an earlier forward layer.
                    if w in succ:
                        return pred, succ, w  # found path
        else:
            # Even levels expand the reverse frontier by one BFS layer.
            this_level = reverse_fringe
            reverse_fringe = []
            for v in this_level:
                for w in Gpred(v):
                    if w in exclude:
                        continue
                    if w not in succ:
                        succ[w] = v
                        reverse_fringe.append(w)
                    if w in pred:
                        return pred, succ, w  # found path

    raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/density.py ADDED
@@ -0,0 +1,396 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Fast algorithms for the densest subgraph problem"""
2
+
3
+ import math
4
+
5
+ import networkx as nx
6
+
7
+ __all__ = ["densest_subgraph"]
8
+
9
+
10
def _greedy_plus_plus(G, iterations):
    """Approximate the densest subgraph of ``G`` with Greedy++ [Boob et al.].

    Runs ``iterations`` rounds of greedy "peeling": each round repeatedly
    removes the node with the smallest load-adjusted degree, tracking the
    densest intermediate subgraph seen. Loads accumulate across rounds,
    steering later rounds toward better solutions. With ``iterations == 1``
    this is exactly Charikar's 1/2-approximation peeling algorithm.

    Returns a ``(best_density, best_subgraph)`` pair; density is
    ``|E(S)| / |S|``.
    """
    if G.number_of_edges() == 0:
        return 0.0, set()
    if iterations < 1:
        raise ValueError(
            f"The number of iterations must be an integer >= 1. Provided: {iterations}"
        )

    loads = dict.fromkeys(G.nodes, 0)  # Load vector for Greedy++.
    best_density = 0.0  # Highest density encountered.
    best_subgraph = set()  # Nodes of the best subgraph found.

    for _ in range(iterations):
        # Initialize heap for fast access to minimum weighted degree.
        heap = nx.utils.BinaryHeap()

        # Compute initial weighted degrees and add nodes to the heap.
        for node, degree in G.degree:
            heap.insert(node, loads[node] + degree)
        # Set up tracking for current graph state.
        remaining_nodes = set(G.nodes)
        num_edges = G.number_of_edges()
        current_degrees = dict(G.degree)

        while remaining_nodes:
            num_nodes = len(remaining_nodes)

            # Current density of the (implicit) graph
            current_density = num_edges / num_nodes

            # Update the best density.
            if current_density > best_density:
                best_density = current_density
                best_subgraph = set(remaining_nodes)

            # Pop the node with the smallest weighted degree.
            # NOTE: the heap holds stale entries (re-inserts from the loop
            # below); lazy deletion discards them here.
            node, _ = heap.pop()
            if node not in remaining_nodes:
                continue  # Skip nodes already removed.

            # Update the load of the popped node.
            loads[node] += current_degrees[node]

            # Update neighbors' degrees and the heap.
            for neighbor in G.neighbors(node):
                if neighbor in remaining_nodes:
                    current_degrees[neighbor] -= 1
                    num_edges -= 1
                    heap.insert(neighbor, loads[neighbor] + current_degrees[neighbor])

            # Remove the node from the remaining nodes.
            remaining_nodes.remove(node)

    return best_density, best_subgraph
64
+
65
+
66
def _fractional_peeling(G, b, x, node_to_idx, edge_to_idx):
    """
    Optimized fractional peeling using NumPy arrays.

    Iteratively removes the node with the smallest induced load ``b_u``
    (instead of the smallest degree, as classic peeling does), updating its
    neighbors' loads by the fractional edge values ``x_{vu}``, and returns
    the densest intermediate subgraph encountered.

    Parameters
    ----------
    G : networkx.Graph
        The input graph.
    b : numpy.ndarray
        Induced load vector, indexed by ``node_to_idx``.
    x : numpy.ndarray
        Fractional edge values, indexed by ``edge_to_idx``.
    node_to_idx : dict
        Mapping from node to index.
    edge_to_idx : dict
        Mapping from (directed) edge to index.

    Returns
    -------
    best_density : float
        The best density found.
    best_subgraph : set
        The subset of nodes defining the densest subgraph.
    """
    heap = nx.utils.BinaryHeap()

    remaining_nodes = set(G.nodes)

    # Initialize heap with b values.
    # BUGFIX: nodes must be mapped through node_to_idx before indexing the
    # NumPy array `b`; using the node label directly only works by accident
    # when the nodes happen to be the integers 0..n-1.
    for node in remaining_nodes:
        heap.insert(node, b[node_to_idx[node]])

    num_edges = G.number_of_edges()

    best_density = 0.0
    best_subgraph = set()

    while remaining_nodes:
        num_nodes = len(remaining_nodes)
        current_density = num_edges / num_nodes

        # Track the densest intermediate subgraph seen so far.
        if current_density > best_density:
            best_density = current_density
            best_subgraph = set(remaining_nodes)

        # Pop the node with the smallest b
        node, _ = heap.pop()
        while node not in remaining_nodes:
            node, _ = heap.pop()  # Clean the heap from stale values

        # Update neighbors b values by subtracting fractional x value
        for neighbor in G.neighbors(node):
            if neighbor in remaining_nodes:
                neighbor_idx = node_to_idx[neighbor]
                # Take off fractional value
                b[neighbor_idx] -= x[edge_to_idx[(neighbor, node)]]
                num_edges -= 1
                heap.insert(neighbor, b[neighbor_idx])

        remaining_nodes.remove(node)  # peel off node

    return best_density, best_subgraph
128
+
129
+
130
def _fista(G, iterations):
    """Approximate the densest subgraph of ``G`` via FISTA [Harb et al.].

    Solves the densest-subgraph quadratic program with accelerated projected
    gradient descent (FISTA) over fractional edge orientations ``x_{uv}``,
    then rounds the fractional solution to a discrete node set with
    ``_fractional_peeling``.

    Returns a ``(best_density, best_subgraph)`` pair.
    """
    if G.number_of_edges() == 0:
        return 0.0, set()
    if iterations < 1:
        raise ValueError(
            f"The number of iterations must be an integer >= 1. Provided: {iterations}"
        )
    import numpy as np

    # 1. Node Mapping: Assign a unique index to each node and edge
    node_to_idx = {node: idx for idx, node in enumerate(G)}
    num_nodes = G.number_of_nodes()
    num_undirected_edges = G.number_of_edges()

    # 2. Edge Mapping: Assign a unique index to each bidirectional edge
    # (each undirected edge {u,v} yields two directed entries (u,v), (v,u))
    bidirectional_edges = [(u, v) for u, v in G.edges] + [(v, u) for u, v in G.edges]
    edge_to_idx = {edge: idx for idx, edge in enumerate(bidirectional_edges)}

    num_edges = len(bidirectional_edges)

    # 3. Reverse Edge Mapping: Map each (bidirectional) edge to its reverse edge index
    # ((u,v) at position i pairs with (v,u) at position i + num_undirected_edges)
    reverse_edge_idx = np.empty(num_edges, dtype=np.int32)
    for idx in range(num_undirected_edges):
        reverse_edge_idx[idx] = num_undirected_edges + idx
    for idx in range(num_undirected_edges, 2 * num_undirected_edges):
        reverse_edge_idx[idx] = idx - num_undirected_edges

    # 4. Initialize Variables as NumPy Arrays
    # x starts at the feasible point x_{uv} = x_{vu} = 0.5 for every edge.
    x = np.full(num_edges, 0.5, dtype=np.float32)
    y = x.copy()
    z = np.zeros(num_edges, dtype=np.float32)
    b = np.zeros(num_nodes, dtype=np.float32)  # Induced load vector
    tk = 1.0  # Momentum term

    # 5. Precompute Edge Source Indices
    edge_src_indices = np.array(
        [node_to_idx[u] for u, _ in bidirectional_edges], dtype=np.int32
    )

    # 6. Compute Learning Rate
    max_degree = max(deg for _, deg in G.degree)
    # 0.9 for floating point errs when max_degree is very large
    learning_rate = 0.9 / max_degree

    # 7. Iterative Updates
    for _ in range(iterations):
        # 7a. Update b: sum y over outgoing edges for each node
        b[:] = 0.0  # Reset b to zero
        np.add.at(b, edge_src_indices, y)  # b_u = \sum_{v : (u,v) \in E(G)} y_{uv}

        # 7b. Compute z, z_{uv} = y_{uv} - 2 * learning_rate * b_u
        z = y - 2.0 * learning_rate * b[edge_src_indices]

        # 7c. Update Momentum Term
        tknew = (1.0 + math.sqrt(1 + 4.0 * tk**2)) / 2.0

        # 7d. Update x in a vectorized manner, x_{uv} = (z_{uv} - z_{vu} + 1.0) / 2.0
        # (projection onto the constraint x_{uv} + x_{vu} = 1)
        new_xuv = (z - z[reverse_edge_idx] + 1.0) / 2.0
        clamped_x = np.clip(new_xuv, 0.0, 1.0)  # Clamp x_{uv} between 0 and 1

        # Update y using the FISTA update formula (similar to gradient descent)
        y = (
            clamped_x
            + ((tk - 1.0) / tknew) * (clamped_x - x)
            + (tk / tknew) * (clamped_x - y)
        )

        # Update x
        x = clamped_x

        # Update tk, the momemntum term
        tk = tknew

    # Rebalance the b values! Otherwise performance is a bit suboptimal.
    b[:] = 0.0
    np.add.at(b, edge_src_indices, x)  # b_u = \sum_{v : (u,v) \in E(G)} x_{uv}

    # Extract the actual (approximate) dense subgraph.
    return _fractional_peeling(G, b, x, node_to_idx, edge_to_idx)
209
+
210
+
211
# Dispatch table mapping the public `method` argument of `densest_subgraph`
# to its implementation.
ALGORITHMS = {"greedy++": _greedy_plus_plus, "fista": _fista}
212
+
213
+
214
@nx.utils.not_implemented_for("directed")
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable
def densest_subgraph(G, iterations=1, *, method="fista"):
    r"""Returns an approximate densest subgraph for a graph `G`.

    Runs an iterative algorithm and returns both the density achieved and
    the node set achieving it.

    For a node subset $S \subseteq V(G)$, let $E(S) = \{(u,v) : (u,v)\in
    E(G), u\in S, v\in S\}$ be the edges with both endpoints in $S$. The
    density of $S$ used here is $|E(S)|/|S|$ — the ratio of edges to nodes
    in the induced subgraph. (Note this differs, for historical reasons,
    from the standard graph-theoretic density $\frac{2|E(S)|}{|S|(|S|-1)}$.)
    The densest subgraph problem asks for the $S$ maximizing this ratio.

    The problem is solvable exactly in polynomial time via maximum flow
    (Goldberg's algorithm), but that approach does not scale to large
    graphs, hence the approximation algorithms offered here:

    - ``'greedy++'`` [2]_: iterated greedy peeling. With one iteration it is
      precisely Charikar's 1/2-approximation [1]_; it converges to a
      $(1-\epsilon)$ approximation in $O(\Delta(G)\log n/\epsilon^2)$
      iterations, where $\Delta(G)$ is the maximum degree. See also
      [4]_ and [5]_ for further properties.
    - ``'fista'`` [3]_: an accelerated projected-gradient (FISTA) solver for
      the quadratic-programming formulation of the problem, followed by
      fractional peeling to round the fractional solution to a discrete
      node set. Faster and more scalable in practice.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph.

    iterations : int, optional (default=1)
        Number of iterations to use for the iterative algorithm. Can be
        specified positionally or as a keyword argument.

    method : string, optional (default='fista')
        The algorithm to use to approximate the densest subgraph. Supported
        options: 'greedy++' by Boob et al. [2]_ and 'fista' by Harb et al.
        [3]_. Must be specified as a keyword argument. Other inputs produce
        a ValueError.

    Returns
    -------
    d : float
        The density of the approximate subgraph found.

    S : set
        The subset of nodes defining the approximate densest subgraph.

    Examples
    --------
    >>> G = nx.star_graph(4)
    >>> nx.approximation.densest_subgraph(G, iterations=1)
    (0.8, {0, 1, 2, 3, 4})

    References
    ----------
    .. [1] Charikar, Moses. "Greedy approximation algorithms for finding dense
        components in a graph." In International workshop on approximation
        algorithms for combinatorial optimization, pp. 84-95. Berlin, Heidelberg:
        Springer Berlin Heidelberg, 2000.

    .. [2] Boob, Digvijay, Yu Gao, Richard Peng, Saurabh Sawlani, Charalampos
        Tsourakakis, Di Wang, and Junxing Wang. "Flowless: Extracting densest
        subgraphs without flow computations." In Proceedings of The Web Conference
        2020, pp. 573-583. 2020.

    .. [3] Harb, Elfarouk, Kent Quanrud, and Chandra Chekuri. "Faster and scalable
        algorithms for densest subgraph and decomposition." Advances in Neural
        Information Processing Systems 35 (2022): 26966-26979.

    .. [4] Harb, Elfarouk, Kent Quanrud, and Chandra Chekuri. "Convergence to
        lexicographically optimal base in a (contra) polymatroid and applications
        to densest subgraph and tree packing." arXiv preprint arXiv:2305.02987
        (2023).

    .. [5] Chekuri, Chandra, Kent Quanrud, and Manuel R. Torres. "Densest
        subgraph: Supermodularity, iterative peeling, and flow." In Proceedings of
        the 2022 Annual ACM-SIAM Symposium on Discrete Algorithms (SODA), pp.
        1531-1555. Society for Industrial and Applied Mathematics, 2022.
    """
    # Look up the requested solver; reject unknown method names while
    # preserving the original KeyError as the exception's cause.
    try:
        solver = ALGORITHMS[method]
    except KeyError as err:
        raise ValueError(f"{method} is not a valid choice for an algorithm.") from err

    return solver(G, iterations)
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/distance_measures.py ADDED
@@ -0,0 +1,150 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Distance measures approximated metrics."""
2
+
3
+ import networkx as nx
4
+ from networkx.utils.decorators import py_random_state
5
+
6
+ __all__ = ["diameter"]
7
+
8
+
9
+ @py_random_state(1)
10
+ @nx._dispatchable(name="approximate_diameter")
11
+ def diameter(G, seed=None):
12
+ """Returns a lower bound on the diameter of the graph G.
13
+
14
+ The function computes a lower bound on the diameter (i.e., the maximum eccentricity)
15
+ of a directed or undirected graph G. The procedure used varies depending on the graph
16
+ being directed or not.
17
+
18
+ If G is an `undirected` graph, then the function uses the `2-sweep` algorithm [1]_.
19
+ The main idea is to pick the farthest node from a random node and return its eccentricity.
20
+
21
+ Otherwise, if G is a `directed` graph, the function uses the `2-dSweep` algorithm [2]_,
22
+ The procedure starts by selecting a random source node $s$ from which it performs a
23
+ forward and a backward BFS. Let $a_1$ and $a_2$ be the farthest nodes in the forward and
24
+ backward cases, respectively. Then, it computes the backward eccentricity of $a_1$ using
25
+ a backward BFS and the forward eccentricity of $a_2$ using a forward BFS.
26
+ Finally, it returns the best lower bound between the two.
27
+
28
+ In both cases, the time complexity is linear with respect to the size of G.
29
+
30
+ Parameters
31
+ ----------
32
+ G : NetworkX graph
33
+
34
+ seed : integer, random_state, or None (default)
35
+ Indicator of random number generation state.
36
+ See :ref:`Randomness<randomness>`.
37
+
38
+ Returns
39
+ -------
40
+ d : integer
41
+ Lower Bound on the Diameter of G
42
+
43
+ Examples
44
+ --------
45
+ >>> G = nx.path_graph(10) # undirected graph
46
+ >>> nx.diameter(G)
47
+ 9
48
+ >>> G = nx.cycle_graph(3, create_using=nx.DiGraph) # directed graph
49
+ >>> nx.diameter(G)
50
+ 2
51
+
52
+ Raises
53
+ ------
54
+ NetworkXError
55
+ If the graph is empty or
56
+ If the graph is undirected and not connected or
57
+ If the graph is directed and not strongly connected.
58
+
59
+ See Also
60
+ --------
61
+ networkx.algorithms.distance_measures.diameter
62
+
63
+ References
64
+ ----------
65
+ .. [1] Magnien, Clémence, Matthieu Latapy, and Michel Habib.
66
+ *Fast computation of empirically tight bounds for the diameter of massive graphs.*
67
+ Journal of Experimental Algorithmics (JEA), 2009.
68
+ https://arxiv.org/pdf/0904.2728.pdf
69
+ .. [2] Crescenzi, Pierluigi, Roberto Grossi, Leonardo Lanzi, and Andrea Marino.
70
+ *On computing the diameter of real-world directed (weighted) graphs.*
71
+ International Symposium on Experimental Algorithms. Springer, Berlin, Heidelberg, 2012.
72
+ https://courses.cs.ut.ee/MTAT.03.238/2014_fall/uploads/Main/diameter.pdf
73
+ """
74
+ # if G is empty
75
+ if not G:
76
+ raise nx.NetworkXError("Expected non-empty NetworkX graph!")
77
+ # if there's only a node
78
+ if G.number_of_nodes() == 1:
79
+ return 0
80
+ # if G is directed
81
+ if G.is_directed():
82
+ return _two_sweep_directed(G, seed)
83
+ # else if G is undirected
84
+ return _two_sweep_undirected(G, seed)
85
+
86
+
87
def _two_sweep_undirected(G, seed):
    """Helper function for finding a lower bound on the diameter
    for undirected Graphs.

    Picks a node at the maximum distance from a random source and returns
    that node's eccentricity, which lower-bounds the diameter.

    ``G`` is a NetworkX undirected graph.

    .. note::

        ``seed`` is a random.Random or numpy.random.RandomState instance
    """
    # Random starting point for the first sweep.
    start = seed.choice(list(G))
    # BFS distances from the start node.
    dist = nx.shortest_path_length(G, start)
    # Unreached nodes mean the graph is disconnected.
    if len(dist) != len(G):
        raise nx.NetworkXError("Graph not connected.")
    # BFS inserts keys in nondecreasing distance order, so the last key
    # is (one of) the farthest node(s) from the start.
    farthest = next(reversed(dist))
    # Its eccentricity is the 2-sweep lower bound.
    return nx.eccentricity(G, farthest)
111
+
112
+
113
def _two_sweep_directed(G, seed):
    """Helper function for finding a lower bound on the diameter
    for directed Graphs.

    Implements 2-dSweep, the directed version of the 2-sweep algorithm:
    1. Select a source node $s$ at random.
    2. Perform a forward BFS from $s$ to select a node $a_1$ at the maximum
       distance from the source, and compute $LB_1$, the backward eccentricity of $a_1$.
    3. Perform a backward BFS from $s$ to select a node $a_2$ at the maximum
       distance from the source, and compute $LB_2$, the forward eccentricity of $a_2$.
    4. Return the maximum between $LB_1$ and $LB_2$.

    ``G`` is a NetworkX directed graph.

    .. note::

        ``seed`` is a random.Random or numpy.random.RandomState instance
    """
    # Reversed copy of G: BFS on it gives backward distances in G.
    G_rev = G.reverse()
    # Random starting point for both sweeps.
    start = seed.choice(list(G))
    # Forward and backward BFS distances from the start node.
    fwd = nx.shortest_path_length(G, start)
    bwd = nx.shortest_path_length(G_rev, start)
    # If the start cannot reach every node, or not every node can reach
    # the start, G is not strongly connected.
    if not (len(fwd) == len(G) == len(bwd)):
        raise nx.NetworkXError("DiGraph not strongly connected.")
    # BFS inserts keys in nondecreasing distance order: the last keys are
    # the farthest nodes forward (a_1) and backward (a_2).
    a_1 = next(reversed(fwd))
    a_2 = next(reversed(bwd))
    # Best of the two eccentricity lower bounds.
    return max(nx.eccentricity(G_rev, a_1), nx.eccentricity(G, a_2))
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/dominating_set.py ADDED
@@ -0,0 +1,149 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for finding node and edge dominating sets.
2
+
3
+ A `dominating set`_ for an undirected graph *G* with vertex set *V*
4
+ and edge set *E* is a subset *D* of *V* such that every vertex not in
5
+ *D* is adjacent to at least one member of *D*. An `edge dominating set`_
6
+ is a subset *F* of *E* such that every edge not in *F* is
7
+ incident to an endpoint of at least one edge in *F*.
8
+
9
+ .. _dominating set: https://en.wikipedia.org/wiki/Dominating_set
10
+ .. _edge dominating set: https://en.wikipedia.org/wiki/Edge_dominating_set
11
+
12
+ """
13
+
14
+ import networkx as nx
15
+
16
+ from ...utils import not_implemented_for
17
+ from ..matching import maximal_matching
18
+
19
+ __all__ = ["min_weighted_dominating_set", "min_edge_dominating_set"]
20
+
21
+
22
+ # TODO Why doesn't this algorithm work for directed graphs?
23
+ @not_implemented_for("directed")
24
+ @nx._dispatchable(node_attrs="weight")
25
+ def min_weighted_dominating_set(G, weight=None):
26
+ r"""Returns a dominating set that approximates the minimum weight node
27
+ dominating set.
28
+
29
+ Parameters
30
+ ----------
31
+ G : NetworkX graph
32
+ Undirected graph.
33
+
34
+ weight : string
35
+ The node attribute storing the weight of an node. If provided,
36
+ the node attribute with this key must be a number for each
37
+ node. If not provided, each node is assumed to have weight one.
38
+
39
+ Returns
40
+ -------
41
+ min_weight_dominating_set : set
42
+ A set of nodes, the sum of whose weights is no more than `(\log
43
+ w(V)) w(V^*)`, where `w(V)` denotes the sum of the weights of
44
+ each node in the graph and `w(V^*)` denotes the sum of the
45
+ weights of each node in the minimum weight dominating set.
46
+
47
+ Examples
48
+ --------
49
+ >>> G = nx.Graph([(0, 1), (0, 4), (1, 4), (1, 2), (2, 3), (3, 4), (2, 5)])
50
+ >>> nx.approximation.min_weighted_dominating_set(G)
51
+ {1, 2, 4}
52
+
53
+ Raises
54
+ ------
55
+ NetworkXNotImplemented
56
+ If G is directed.
57
+
58
+ Notes
59
+ -----
60
+ This algorithm computes an approximate minimum weighted dominating
61
+ set for the graph `G`. The returned solution has weight `(\log
62
+ w(V)) w(V^*)`, where `w(V)` denotes the sum of the weights of each
63
+ node in the graph and `w(V^*)` denotes the sum of the weights of
64
+ each node in the minimum weight dominating set for the graph.
65
+
66
+ This implementation of the algorithm runs in $O(m)$ time, where $m$
67
+ is the number of edges in the graph.
68
+
69
+ References
70
+ ----------
71
+ .. [1] Vazirani, Vijay V.
72
+ *Approximation Algorithms*.
73
+ Springer Science & Business Media, 2001.
74
+
75
+ """
76
+ # The unique dominating set for the null graph is the empty set.
77
+ if len(G) == 0:
78
+ return set()
79
+
80
+ # This is the dominating set that will eventually be returned.
81
+ dom_set = set()
82
+
83
+ def _cost(node_and_neighborhood):
84
+ """Returns the cost-effectiveness of greedily choosing the given
85
+ node.
86
+
87
+ `node_and_neighborhood` is a two-tuple comprising a node and its
88
+ closed neighborhood.
89
+
90
+ """
91
+ v, neighborhood = node_and_neighborhood
92
+ return G.nodes[v].get(weight, 1) / len(neighborhood - dom_set)
93
+
94
+ # This is a set of all vertices not already covered by the
95
+ # dominating set.
96
+ vertices = set(G)
97
+ # This is a dictionary mapping each node to the closed neighborhood
98
+ # of that node.
99
+ neighborhoods = {v: {v} | set(G[v]) for v in G}
100
+
101
+ # Continue until all vertices are adjacent to some node in the
102
+ # dominating set.
103
+ while vertices:
104
+ # Find the most cost-effective node to add, along with its
105
+ # closed neighborhood.
106
+ dom_node, min_set = min(neighborhoods.items(), key=_cost)
107
+ # Add the node to the dominating set and reduce the remaining
108
+ # set of nodes to cover.
109
+ dom_set.add(dom_node)
110
+ del neighborhoods[dom_node]
111
+ vertices -= min_set
112
+
113
+ return dom_set
114
+
115
+
116
+ @nx._dispatchable
117
+ def min_edge_dominating_set(G):
118
+ r"""Returns minimum cardinality edge dominating set.
119
+
120
+ Parameters
121
+ ----------
122
+ G : NetworkX graph
123
+ Undirected graph
124
+
125
+ Returns
126
+ -------
127
+ min_edge_dominating_set : set
128
+ Returns a set of dominating edges whose size is no more than 2 * OPT.
129
+
130
+ Examples
131
+ --------
132
+ >>> G = nx.petersen_graph()
133
+ >>> nx.approximation.min_edge_dominating_set(G)
134
+ {(0, 1), (4, 9), (6, 8), (5, 7), (2, 3)}
135
+
136
+ Raises
137
+ ------
138
+ ValueError
139
+ If the input graph `G` is empty.
140
+
141
+ Notes
142
+ -----
143
+ The algorithm computes an approximate solution to the edge dominating set
144
+ problem. The result is no more than 2 * OPT in terms of size of the set.
145
+ Runtime of the algorithm is $O(|E|)$.
146
+ """
147
+ if not G:
148
+ raise ValueError("Expected non-empty NetworkX graph!")
149
+ return maximal_matching(G)
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/kcomponents.py ADDED
@@ -0,0 +1,367 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Fast approximation for k-component structure"""
2
+
3
+ import itertools
4
+ from collections import defaultdict
5
+ from collections.abc import Mapping
6
+ from functools import cached_property
7
+
8
+ import networkx as nx
9
+ from networkx.algorithms.approximation import local_node_connectivity
10
+ from networkx.exception import NetworkXError
11
+ from networkx.utils import not_implemented_for
12
+
13
+ __all__ = ["k_components"]
14
+
15
+
16
+ @not_implemented_for("directed")
17
+ @nx._dispatchable(name="approximate_k_components")
18
+ def k_components(G, min_density=0.95):
19
+ r"""Returns the approximate k-component structure of a graph G.
20
+
21
+ A `k`-component is a maximal subgraph of a graph G that has, at least,
22
+ node connectivity `k`: we need to remove at least `k` nodes to break it
23
+ into more components. `k`-components have an inherent hierarchical
24
+ structure because they are nested in terms of connectivity: a connected
25
+ graph can contain several 2-components, each of which can contain
26
+ one or more 3-components, and so forth.
27
+
28
+ This implementation is based on the fast heuristics to approximate
29
+ the `k`-component structure of a graph [1]_. Which, in turn, it is based on
30
+ a fast approximation algorithm for finding good lower bounds of the number
31
+ of node independent paths between two nodes [2]_.
32
+
33
+ Parameters
34
+ ----------
35
+ G : NetworkX graph
36
+ Undirected graph
37
+
38
+ min_density : Float
39
+ Density relaxation threshold. Default value 0.95
40
+
41
+ Returns
42
+ -------
43
+ k_components : dict
44
+ Dictionary with connectivity level `k` as key and a list of
45
+ sets of nodes that form a k-component of level `k` as values.
46
+
47
+ Raises
48
+ ------
49
+ NetworkXNotImplemented
50
+ If G is directed.
51
+
52
+ Examples
53
+ --------
54
+ >>> # Petersen graph has 10 nodes and it is triconnected, thus all
55
+ >>> # nodes are in a single component on all three connectivity levels
56
+ >>> from networkx.algorithms import approximation as apxa
57
+ >>> G = nx.petersen_graph()
58
+ >>> k_components = apxa.k_components(G)
59
+
60
+ Notes
61
+ -----
62
+ The logic of the approximation algorithm for computing the `k`-component
63
+ structure [1]_ is based on repeatedly applying simple and fast algorithms
64
+ for `k`-cores and biconnected components in order to narrow down the
65
+ number of pairs of nodes over which we have to compute White and Newman's
66
+ approximation algorithm for finding node independent paths [2]_. More
67
+ formally, this algorithm is based on Whitney's theorem, which states
68
+ an inclusion relation among node connectivity, edge connectivity, and
69
+ minimum degree for any graph G. This theorem implies that every
70
+ `k`-component is nested inside a `k`-edge-component, which in turn,
71
+ is contained in a `k`-core. Thus, this algorithm computes node independent
72
+ paths among pairs of nodes in each biconnected part of each `k`-core,
73
+ and repeats this procedure for each `k` from 3 to the maximal core number
74
+ of a node in the input graph.
75
+
76
+ Because, in practice, many nodes of the core of level `k` inside a
77
+ bicomponent actually are part of a component of level k, the auxiliary
78
+ graph needed for the algorithm is likely to be very dense. Thus, we use
79
+ a complement graph data structure (see `AntiGraph`) to save memory.
80
+ AntiGraph only stores information of the edges that are *not* present
81
+ in the actual auxiliary graph. When applying algorithms to this
82
+ complement graph data structure, it behaves as if it were the dense
83
+ version.
84
+
85
+ See also
86
+ --------
87
+ k_components
88
+
89
+ References
90
+ ----------
91
+ .. [1] Torrents, J. and F. Ferraro (2015) Structural Cohesion:
92
+ Visualization and Heuristics for Fast Computation.
93
+ https://arxiv.org/pdf/1503.04476v1
94
+
95
+ .. [2] White, Douglas R., and Mark Newman (2001) A Fast Algorithm for
96
+ Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
97
+ https://www.santafe.edu/research/results/working-papers/fast-approximation-algorithms-for-finding-node-ind
98
+
99
+ .. [3] Moody, J. and D. White (2003). Social cohesion and embeddedness:
100
+ A hierarchical conception of social groups.
101
+ American Sociological Review 68(1), 103--28.
102
+ https://doi.org/10.2307/3088904
103
+
104
+ """
105
+ # Dictionary with connectivity level (k) as keys and a list of
106
+ # sets of nodes that form a k-component as values
107
+ k_components = defaultdict(list)
108
+ # make a few functions local for speed
109
+ node_connectivity = local_node_connectivity
110
+ k_core = nx.k_core
111
+ core_number = nx.core_number
112
+ biconnected_components = nx.biconnected_components
113
+ combinations = itertools.combinations
114
+ # Exact solution for k = {1,2}
115
+ # There is a linear time algorithm for triconnectivity, if we had an
116
+ # implementation available we could start from k = 4.
117
+ for component in nx.connected_components(G):
118
+ # isolated nodes have connectivity 0
119
+ comp = set(component)
120
+ if len(comp) > 1:
121
+ k_components[1].append(comp)
122
+ for bicomponent in nx.biconnected_components(G):
123
+ # avoid considering dyads as bicomponents
124
+ bicomp = set(bicomponent)
125
+ if len(bicomp) > 2:
126
+ k_components[2].append(bicomp)
127
+ # There is no k-component of k > maximum core number
128
+ # \kappa(G) <= \lambda(G) <= \delta(G)
129
+ g_cnumber = core_number(G)
130
+ max_core = max(g_cnumber.values())
131
+ for k in range(3, max_core + 1):
132
+ C = k_core(G, k, core_number=g_cnumber)
133
+ for nodes in biconnected_components(C):
134
+ # Build a subgraph SG induced by the nodes that are part of
135
+ # each biconnected component of the k-core subgraph C.
136
+ if len(nodes) < k:
137
+ continue
138
+ SG = G.subgraph(nodes)
139
+ # Build auxiliary graph
140
+ H = _AntiGraph()
141
+ H.add_nodes_from(SG.nodes())
142
+ for u, v in combinations(SG, 2):
143
+ K = node_connectivity(SG, u, v, cutoff=k)
144
+ if k > K:
145
+ H.add_edge(u, v)
146
+ for h_nodes in biconnected_components(H):
147
+ if len(h_nodes) <= k:
148
+ continue
149
+ SH = H.subgraph(h_nodes)
150
+ for Gc in _cliques_heuristic(SG, SH, k, min_density):
151
+ for k_nodes in biconnected_components(Gc):
152
+ Gk = nx.k_core(SG.subgraph(k_nodes), k)
153
+ if len(Gk) <= k:
154
+ continue
155
+ k_components[k].append(set(Gk))
156
+ return k_components
157
+
158
+
159
+ def _cliques_heuristic(G, H, k, min_density):
160
+ h_cnumber = nx.core_number(H)
161
+ for i, c_value in enumerate(sorted(set(h_cnumber.values()), reverse=True)):
162
+ cands = {n for n, c in h_cnumber.items() if c == c_value}
163
+ # Skip checking for overlap for the highest core value
164
+ if i == 0:
165
+ overlap = False
166
+ else:
167
+ overlap = set.intersection(
168
+ *[{x for x in H[n] if x not in cands} for n in cands]
169
+ )
170
+ if overlap and len(overlap) < k:
171
+ SH = H.subgraph(cands | overlap)
172
+ else:
173
+ SH = H.subgraph(cands)
174
+ sh_cnumber = nx.core_number(SH)
175
+ SG = nx.k_core(G.subgraph(SH), k)
176
+ while not (_same(sh_cnumber) and nx.density(SH) >= min_density):
177
+ # This subgraph must be writable => .copy()
178
+ SH = H.subgraph(SG).copy()
179
+ if len(SH) <= k:
180
+ break
181
+ sh_cnumber = nx.core_number(SH)
182
+ sh_deg = dict(SH.degree())
183
+ min_deg = min(sh_deg.values())
184
+ SH.remove_nodes_from(n for n, d in sh_deg.items() if d == min_deg)
185
+ SG = nx.k_core(G.subgraph(SH), k)
186
+ else:
187
+ yield SG
188
+
189
+
190
+ def _same(measure, tol=0):
191
+ vals = set(measure.values())
192
+ if (max(vals) - min(vals)) <= tol:
193
+ return True
194
+ return False
195
+
196
+
197
+ class _AntiGraph(nx.Graph):
198
+ """
199
+ Class for complement graphs.
200
+
201
+ The main goal is to be able to work with big and dense graphs with
202
+ a low memory footprint.
203
+
204
+ In this class you add the edges that *do not exist* in the dense graph,
205
+ the report methods of the class return the neighbors, the edges and
206
+ the degree as if it was the dense graph. Thus it's possible to use
207
+ an instance of this class with some of NetworkX functions. In this
208
+ case we only use k-core, connected_components, and biconnected_components.
209
+ """
210
+
211
+ all_edge_dict = {"weight": 1}
212
+
213
+ def single_edge_dict(self):
214
+ return self.all_edge_dict
215
+
216
+ edge_attr_dict_factory = single_edge_dict # type: ignore[assignment]
217
+
218
+ def __getitem__(self, n):
219
+ """Returns a dict of neighbors of node n in the dense graph.
220
+
221
+ Parameters
222
+ ----------
223
+ n : node
224
+ A node in the graph.
225
+
226
+ Returns
227
+ -------
228
+ adj_dict : dictionary
229
+ The adjacency dictionary for nodes connected to n.
230
+
231
+ """
232
+ all_edge_dict = self.all_edge_dict
233
+ return dict.fromkeys(set(self._adj) - set(self._adj[n]) - {n}, all_edge_dict)
234
+
235
+ def neighbors(self, n):
236
+ """Returns an iterator over all neighbors of node n in the
237
+ dense graph.
238
+ """
239
+ try:
240
+ return iter(set(self._adj) - set(self._adj[n]) - {n})
241
+ except KeyError as err:
242
+ raise NetworkXError(f"The node {n} is not in the graph.") from err
243
+
244
+ class AntiAtlasView(Mapping):
245
+ """An adjacency inner dict for AntiGraph"""
246
+
247
+ def __init__(self, graph, node):
248
+ self._graph = graph
249
+ self._atlas = graph._adj[node]
250
+ self._node = node
251
+
252
+ def __len__(self):
253
+ return len(self._graph) - len(self._atlas) - 1
254
+
255
+ def __iter__(self):
256
+ return (n for n in self._graph if n not in self._atlas and n != self._node)
257
+
258
+ def __getitem__(self, nbr):
259
+ nbrs = set(self._graph._adj) - set(self._atlas) - {self._node}
260
+ if nbr in nbrs:
261
+ return self._graph.all_edge_dict
262
+ raise KeyError(nbr)
263
+
264
+ class AntiAdjacencyView(AntiAtlasView):
265
+ """An adjacency outer dict for AntiGraph"""
266
+
267
+ def __init__(self, graph):
268
+ self._graph = graph
269
+ self._atlas = graph._adj
270
+
271
+ def __len__(self):
272
+ return len(self._atlas)
273
+
274
+ def __iter__(self):
275
+ return iter(self._graph)
276
+
277
+ def __getitem__(self, node):
278
+ if node not in self._graph:
279
+ raise KeyError(node)
280
+ return self._graph.AntiAtlasView(self._graph, node)
281
+
282
+ @cached_property
283
+ def adj(self):
284
+ return self.AntiAdjacencyView(self)
285
+
286
+ def subgraph(self, nodes):
287
+ """This subgraph method returns a full AntiGraph. Not a View"""
288
+ nodes = set(nodes)
289
+ G = _AntiGraph()
290
+ G.add_nodes_from(nodes)
291
+ for n in G:
292
+ Gnbrs = G.adjlist_inner_dict_factory()
293
+ G._adj[n] = Gnbrs
294
+ for nbr, d in self._adj[n].items():
295
+ if nbr in G._adj:
296
+ Gnbrs[nbr] = d
297
+ G._adj[nbr][n] = d
298
+ G.graph = self.graph
299
+ return G
300
+
301
+ class AntiDegreeView(nx.reportviews.DegreeView):
302
+ def __iter__(self):
303
+ all_nodes = set(self._succ)
304
+ for n in self._nodes:
305
+ nbrs = all_nodes - set(self._succ[n]) - {n}
306
+ yield (n, len(nbrs))
307
+
308
+ def __getitem__(self, n):
309
+ nbrs = set(self._succ) - set(self._succ[n]) - {n}
310
+ # AntiGraph is a ThinGraph so all edges have weight 1
311
+ return len(nbrs) + (n in nbrs)
312
+
313
+ @cached_property
314
+ def degree(self):
315
+ """Returns an iterator for (node, degree) and degree for single node.
316
+
317
+ The node degree is the number of edges adjacent to the node.
318
+
319
+ Parameters
320
+ ----------
321
+ nbunch : iterable container, optional (default=all nodes)
322
+ A container of nodes. The container will be iterated
323
+ through once.
324
+
325
+ weight : string or None, optional (default=None)
326
+ The edge attribute that holds the numerical value used
327
+ as a weight. If None, then each edge has weight 1.
328
+ The degree is the sum of the edge weights adjacent to the node.
329
+
330
+ Returns
331
+ -------
332
+ deg:
333
+ Degree of the node, if a single node is passed as argument.
334
+ nd_iter : an iterator
335
+ The iterator returns two-tuples of (node, degree).
336
+
337
+ See Also
338
+ --------
339
+ degree
340
+
341
+ Examples
342
+ --------
343
+ >>> G = nx.path_graph(4)
344
+ >>> G.degree(0) # node 0 with degree 1
345
+ 1
346
+ >>> list(G.degree([0, 1]))
347
+ [(0, 1), (1, 2)]
348
+
349
+ """
350
+ return self.AntiDegreeView(self)
351
+
352
+ def adjacency(self):
353
+ """Returns an iterator of (node, adjacency set) tuples for all nodes
354
+ in the dense graph.
355
+
356
+ This is the fastest way to look at every edge.
357
+ For directed graphs, only outgoing adjacencies are included.
358
+
359
+ Returns
360
+ -------
361
+ adj_iter : iterator
362
+ An iterator of (node, adjacency set) for all nodes in
363
+ the graph.
364
+
365
+ """
366
+ for n in self._adj:
367
+ yield (n, set(self._adj) - set(self._adj[n]) - {n})
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/matching.py ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ **************
3
+ Graph Matching
4
+ **************
5
+
6
+ Given a graph G = (V,E), a matching M in G is a set of pairwise non-adjacent
7
+ edges; that is, no two edges share a common vertex.
8
+
9
+ `Wikipedia: Matching <https://en.wikipedia.org/wiki/Matching_(graph_theory)>`_
10
+ """
11
+
12
+ import networkx as nx
13
+
14
+ __all__ = ["min_maximal_matching"]
15
+
16
+
17
+ @nx._dispatchable
18
+ def min_maximal_matching(G):
19
+ r"""Returns the minimum maximal matching of G. That is, out of all maximal
20
+ matchings of the graph G, the smallest is returned.
21
+
22
+ Parameters
23
+ ----------
24
+ G : NetworkX graph
25
+ Undirected graph
26
+
27
+ Returns
28
+ -------
29
+ min_maximal_matching : set
30
+ Returns a set of edges such that no two edges share a common endpoint
31
+ and every edge not in the set shares some common endpoint in the set.
32
+ Cardinality will be 2*OPT in the worst case.
33
+
34
+ Notes
35
+ -----
36
+ The algorithm computes an approximate solution for the minimum maximal
37
+ cardinality matching problem. The solution is no more than 2 * OPT in size.
38
+ Runtime is $O(|E|)$.
39
+
40
+ References
41
+ ----------
42
+ .. [1] Vazirani, Vijay Approximation Algorithms (2001)
43
+ """
44
+ return nx.maximal_matching(G)
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/maxcut.py ADDED
@@ -0,0 +1,143 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import networkx as nx
2
+ from networkx.utils.decorators import not_implemented_for, py_random_state
3
+
4
+ __all__ = ["randomized_partitioning", "one_exchange"]
5
+
6
+
7
+ @not_implemented_for("directed")
8
+ @not_implemented_for("multigraph")
9
+ @py_random_state(1)
10
+ @nx._dispatchable(edge_attrs="weight")
11
+ def randomized_partitioning(G, seed=None, p=0.5, weight=None):
12
+ """Compute a random partitioning of the graph nodes and its cut value.
13
+
14
+ A partitioning is calculated by observing each node
15
+ and deciding to add it to the partition with probability `p`,
16
+ returning a random cut and its corresponding value (the
17
+ sum of weights of edges connecting different partitions).
18
+
19
+ Parameters
20
+ ----------
21
+ G : NetworkX graph
22
+
23
+ seed : integer, random_state, or None (default)
24
+ Indicator of random number generation state.
25
+ See :ref:`Randomness<randomness>`.
26
+
27
+ p : scalar
28
+ Probability for each node to be part of the first partition.
29
+ Should be in [0,1]
30
+
31
+ weight : object
32
+ Edge attribute key to use as weight. If not specified, edges
33
+ have weight one.
34
+
35
+ Returns
36
+ -------
37
+ cut_size : scalar
38
+ Value of the minimum cut.
39
+
40
+ partition : pair of node sets
41
+ A partitioning of the nodes that defines a minimum cut.
42
+
43
+ Examples
44
+ --------
45
+ >>> G = nx.complete_graph(5)
46
+ >>> cut_size, partition = nx.approximation.randomized_partitioning(G, seed=1)
47
+ >>> cut_size
48
+ 6
49
+ >>> partition
50
+ ({0, 3, 4}, {1, 2})
51
+
52
+ Raises
53
+ ------
54
+ NetworkXNotImplemented
55
+ If the graph is directed or is a multigraph.
56
+ """
57
+ cut = {node for node in G.nodes() if seed.random() < p}
58
+ cut_size = nx.algorithms.cut_size(G, cut, weight=weight)
59
+ partition = (cut, G.nodes - cut)
60
+ return cut_size, partition
61
+
62
+
63
+ def _swap_node_partition(cut, node):
64
+ return cut - {node} if node in cut else cut.union({node})
65
+
66
+
67
+ @not_implemented_for("directed")
68
+ @not_implemented_for("multigraph")
69
+ @py_random_state(2)
70
+ @nx._dispatchable(edge_attrs="weight")
71
+ def one_exchange(G, initial_cut=None, seed=None, weight=None):
72
+ """Compute a partitioning of the graphs nodes and the corresponding cut value.
73
+
74
+ Use a greedy one exchange strategy to find a locally maximal cut
75
+ and its value, it works by finding the best node (one that gives
76
+ the highest gain to the cut value) to add to the current cut
77
+ and repeats this process until no improvement can be made.
78
+
79
+ Parameters
80
+ ----------
81
+ G : networkx Graph
82
+ Graph to find a maximum cut for.
83
+
84
+ initial_cut : set
85
+ Cut to use as a starting point. If not supplied the algorithm
86
+ starts with an empty cut.
87
+
88
+ seed : integer, random_state, or None (default)
89
+ Indicator of random number generation state.
90
+ See :ref:`Randomness<randomness>`.
91
+
92
+ weight : object
93
+ Edge attribute key to use as weight. If not specified, edges
94
+ have weight one.
95
+
96
+ Returns
97
+ -------
98
+ cut_value : scalar
99
+ Value of the maximum cut.
100
+
101
+ partition : pair of node sets
102
+ A partitioning of the nodes that defines a maximum cut.
103
+
104
+ Examples
105
+ --------
106
+ >>> G = nx.complete_graph(5)
107
+ >>> curr_cut_size, partition = nx.approximation.one_exchange(G, seed=1)
108
+ >>> curr_cut_size
109
+ 6
110
+ >>> partition
111
+ ({0, 2}, {1, 3, 4})
112
+
113
+ Raises
114
+ ------
115
+ NetworkXNotImplemented
116
+ If the graph is directed or is a multigraph.
117
+ """
118
+ if initial_cut is None:
119
+ initial_cut = set()
120
+ cut = set(initial_cut)
121
+ current_cut_size = nx.algorithms.cut_size(G, cut, weight=weight)
122
+ while True:
123
+ nodes = list(G.nodes())
124
+ # Shuffling the nodes ensures random tie-breaks in the following call to max
125
+ seed.shuffle(nodes)
126
+ best_node_to_swap = max(
127
+ nodes,
128
+ key=lambda v: nx.algorithms.cut_size(
129
+ G, _swap_node_partition(cut, v), weight=weight
130
+ ),
131
+ default=None,
132
+ )
133
+ potential_cut = _swap_node_partition(cut, best_node_to_swap)
134
+ potential_cut_size = nx.algorithms.cut_size(G, potential_cut, weight=weight)
135
+
136
+ if potential_cut_size > current_cut_size:
137
+ cut = potential_cut
138
+ current_cut_size = potential_cut_size
139
+ else:
140
+ break
141
+
142
+ partition = (cut, G.nodes - cut)
143
+ return current_cut_size, partition
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/ramsey.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Ramsey numbers.
3
+ """
4
+
5
+ import networkx as nx
6
+ from networkx.utils import not_implemented_for
7
+
8
+ from ...utils import arbitrary_element
9
+
10
+ __all__ = ["ramsey_R2"]
11
+
12
+
13
+ @not_implemented_for("directed")
14
+ @not_implemented_for("multigraph")
15
+ @nx._dispatchable
16
+ def ramsey_R2(G):
17
+ r"""Compute the largest clique and largest independent set in `G`.
18
+
19
+ This can be used to estimate bounds for the 2-color
20
+ Ramsey number `R(2;s,t)` for `G`.
21
+
22
+ This is a recursive implementation which could run into trouble
23
+ for large recursions. Note that self-loop edges are ignored.
24
+
25
+ Parameters
26
+ ----------
27
+ G : NetworkX graph
28
+ Undirected graph
29
+
30
+ Returns
31
+ -------
32
+ max_pair : (set, set) tuple
33
+ Maximum clique, Maximum independent set.
34
+
35
+ Raises
36
+ ------
37
+ NetworkXNotImplemented
38
+ If the graph is directed or is a multigraph.
39
+ """
40
+ if not G:
41
+ return set(), set()
42
+
43
+ node = arbitrary_element(G)
44
+ nbrs = (nbr for nbr in nx.all_neighbors(G, node) if nbr != node)
45
+ nnbrs = nx.non_neighbors(G, node)
46
+ c_1, i_1 = ramsey_R2(G.subgraph(nbrs).copy())
47
+ c_2, i_2 = ramsey_R2(G.subgraph(nnbrs).copy())
48
+
49
+ c_1.add(node)
50
+ i_2.add(node)
51
+ # Choose the larger of the two cliques and the larger of the two
52
+ # independent sets, according to cardinality.
53
+ return max(c_1, c_2, key=len), max(i_1, i_2, key=len)
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/steinertree.py ADDED
@@ -0,0 +1,248 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from itertools import chain
2
+
3
+ import networkx as nx
4
+ from networkx.utils import not_implemented_for, pairwise
5
+
6
+ __all__ = ["metric_closure", "steiner_tree"]
7
+
8
+
9
+ @not_implemented_for("directed")
10
+ @nx._dispatchable(edge_attrs="weight", returns_graph=True)
11
+ def metric_closure(G, weight="weight"):
12
+ """Return the metric closure of a graph.
13
+
14
+ The metric closure of a graph *G* is the complete graph in which each edge
15
+ is weighted by the shortest path distance between the nodes in *G* .
16
+
17
+ Parameters
18
+ ----------
19
+ G : NetworkX graph
20
+
21
+ Returns
22
+ -------
23
+ NetworkX graph
24
+ Metric closure of the graph `G`.
25
+
26
+ """
27
+ M = nx.Graph()
28
+
29
+ Gnodes = set(G)
30
+
31
+ # check for connected graph while processing first node
32
+ all_paths_iter = nx.all_pairs_dijkstra(G, weight=weight)
33
+ u, (distance, path) = next(all_paths_iter)
34
+ if len(G) != len(distance):
35
+ msg = "G is not a connected graph. metric_closure is not defined."
36
+ raise nx.NetworkXError(msg)
37
+ Gnodes.remove(u)
38
+ for v in Gnodes:
39
+ M.add_edge(u, v, distance=distance[v], path=path[v])
40
+
41
+ # first node done -- now process the rest
42
+ for u, (distance, path) in all_paths_iter:
43
+ Gnodes.remove(u)
44
+ for v in Gnodes:
45
+ M.add_edge(u, v, distance=distance[v], path=path[v])
46
+
47
+ return M
48
+
49
+
50
+ def _mehlhorn_steiner_tree(G, terminal_nodes, weight):
51
+ paths = nx.multi_source_dijkstra_path(G, terminal_nodes)
52
+
53
+ d_1 = {}
54
+ s = {}
55
+ for v in G.nodes():
56
+ s[v] = paths[v][0]
57
+ d_1[(v, s[v])] = len(paths[v]) - 1
58
+
59
+ # G1-G4 names match those from the Mehlhorn 1988 paper.
60
+ G_1_prime = nx.Graph()
61
+ for u, v, data in G.edges(data=True):
62
+ su, sv = s[u], s[v]
63
+ weight_here = d_1[(u, su)] + data.get(weight, 1) + d_1[(v, sv)]
64
+ if not G_1_prime.has_edge(su, sv):
65
+ G_1_prime.add_edge(su, sv, weight=weight_here)
66
+ else:
67
+ new_weight = min(weight_here, G_1_prime[su][sv]["weight"])
68
+ G_1_prime.add_edge(su, sv, weight=new_weight)
69
+
70
+ G_2 = nx.minimum_spanning_edges(G_1_prime, data=True)
71
+
72
+ G_3 = nx.Graph()
73
+ for u, v, d in G_2:
74
+ path = nx.shortest_path(G, u, v, weight)
75
+ for n1, n2 in pairwise(path):
76
+ G_3.add_edge(n1, n2)
77
+
78
+ G_3_mst = list(nx.minimum_spanning_edges(G_3, data=False))
79
+ if G.is_multigraph():
80
+ G_3_mst = (
81
+ (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight])) for u, v in G_3_mst
82
+ )
83
+ G_4 = G.edge_subgraph(G_3_mst).copy()
84
+ _remove_nonterminal_leaves(G_4, terminal_nodes)
85
+ return G_4.edges()
86
+
87
+
88
def _kou_steiner_tree(G, terminal_nodes, weight):
    """Approximate a Steiner tree with the Kou-Markowsky-Berman algorithm.

    Builds the metric closure restricted to ``terminal_nodes`` (a complete
    graph whose edge weights are shortest-path distances in `G`), takes its
    minimum spanning tree, expands each MST edge back into its shortest path
    in `G`, takes an MST of that expanded edge set, and finally prunes
    non-terminal leaves.

    Parameters
    ----------
    G : NetworkX graph
        Connected, undirected (multi)graph.
    terminal_nodes : iterable
        Nodes the returned tree must span.
    weight : string
        Edge attribute used as the edge weight (missing values default to 1).

    Returns
    -------
    EdgeView
        Edges of the approximate Steiner tree (keyed for multigraphs).

    Raises
    ------
    NetworkXError
        If `G` is not connected.
    """
    # Compute the metric closure only for terminal nodes
    # Create a complete graph H from the metric edges
    H = nx.Graph()
    unvisited_terminals = set(terminal_nodes)

    # check for connected graph while processing first node: Dijkstra from a
    # single source reaches every node iff G is connected.
    u = unvisited_terminals.pop()
    distances, paths = nx.single_source_dijkstra(G, source=u, weight=weight)
    if len(G) != len(distances):
        msg = "G is not a connected graph."
        raise nx.NetworkXError(msg)
    for v in unvisited_terminals:
        H.add_edge(u, v, distance=distances[v], path=paths[v])

    # first node done -- now process the rest
    for u in unvisited_terminals.copy():
        distances, paths = nx.single_source_dijkstra(G, source=u, weight=weight)
        unvisited_terminals.remove(u)
        for v in unvisited_terminals:
            H.add_edge(u, v, distance=distances[v], path=paths[v])

    # Use the 'distance' attribute of each edge provided by H.
    mst_edges = nx.minimum_spanning_edges(H, weight="distance", data=True)

    # Create an iterator over each edge in each shortest path; repeats are okay
    mst_all_edges = chain.from_iterable(pairwise(d["path"]) for u, v, d in mst_edges)
    if G.is_multigraph():
        # Pick the minimum-weight parallel edge for each (u, v) pair.
        mst_all_edges = (
            (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight]))
            for u, v in mst_all_edges
        )

    # Find the MST again, over this new set of edges.
    # BUGFIX: use the caller-supplied `weight` key here instead of the
    # hardcoded "weight"; otherwise a custom weight attribute (e.g. "cost")
    # is ignored in this pass and every edge defaults to weight 1.
    G_S = G.edge_subgraph(mst_all_edges)
    T_S = nx.minimum_spanning_edges(G_S, weight=weight, data=False)

    # Leaf nodes that are not terminal might still remain; remove them here
    T_H = G.edge_subgraph(T_S).copy()
    _remove_nonterminal_leaves(T_H, terminal_nodes)

    return T_H.edges()
130
+
131
+
132
+ def _remove_nonterminal_leaves(G, terminals):
133
+ terminal_set = set(terminals)
134
+ leaves = {n for n in G if len(set(G[n]) - {n}) == 1}
135
+ nonterminal_leaves = leaves - terminal_set
136
+
137
+ while nonterminal_leaves:
138
+ # Removing a node may create new non-terminal leaves, so we limit
139
+ # search for candidate non-terminal nodes to neighbors of current
140
+ # non-terminal nodes
141
+ candidate_leaves = set.union(*(set(G[n]) for n in nonterminal_leaves))
142
+ candidate_leaves -= nonterminal_leaves | terminal_set
143
+ # Remove current set of non-terminal nodes
144
+ G.remove_nodes_from(nonterminal_leaves)
145
+ # Find any new non-terminal nodes from the set of candidates
146
+ leaves = {n for n in candidate_leaves if len(set(G[n]) - {n}) == 1}
147
+ nonterminal_leaves = leaves - terminal_set
148
+
149
+
150
# Dispatch table for `steiner_tree`'s ``method`` argument: maps each public
# algorithm name to its private implementation.
ALGORITHMS = {
    "kou": _kou_steiner_tree,
    "mehlhorn": _mehlhorn_steiner_tree,
}
154
+
155
+
156
@not_implemented_for("directed")
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def steiner_tree(G, terminal_nodes, weight="weight", method=None):
    r"""Return an approximation to the minimum Steiner tree of a graph.

    The minimum Steiner tree of `G` w.r.t a set of `terminal_nodes` (also *S*)
    is a tree within `G` that spans those nodes and has minimum size (sum of
    edge weights) among all such trees.

    The approximation algorithm is specified with the `method` keyword
    argument. All available algorithms produce a tree whose weight is
    within a ``(2 - (2 / l))`` factor of the weight of the optimal Steiner tree,
    where ``l`` is the minimum number of leaf nodes across all possible Steiner
    trees.

    * ``"kou"`` [2]_ (runtime $O(|S| |V|^2)$) computes the minimum spanning tree of
      the subgraph of the metric closure of *G* induced by the terminal nodes,
      where the metric closure of *G* is the complete graph in which each edge is
      weighted by the shortest path distance between the nodes in *G*.

    * ``"mehlhorn"`` [3]_ (runtime $O(|E|+|V|\log|V|)$) modifies Kou et al.'s
      algorithm, beginning by finding the closest terminal node for each
      non-terminal. This data is used to create a complete graph containing only
      the terminal nodes, in which edge is weighted with the shortest path
      distance between them. The algorithm then proceeds in the same way as Kou
      et al..

    Parameters
    ----------
    G : NetworkX graph

    terminal_nodes : list
        A list of terminal nodes for which minimum steiner tree is
        to be found.

    weight : string (default = 'weight')
        Use the edge attribute specified by this string as the edge weight.
        Any edge attribute not present defaults to 1.

    method : string, optional (default = 'mehlhorn')
        The algorithm to use to approximate the Steiner tree.
        Supported options: 'kou', 'mehlhorn'.
        Other inputs produce a ValueError.

    Returns
    -------
    NetworkX graph
        Approximation to the minimum steiner tree of `G` induced by
        `terminal_nodes` .

    Raises
    ------
    NetworkXNotImplemented
        If `G` is directed.

    ValueError
        If the specified `method` is not supported.

    Notes
    -----
    For multigraphs, the edge between two nodes with minimum weight is the
    edge put into the Steiner tree.


    References
    ----------
    .. [1] Steiner_tree_problem on Wikipedia.
       https://en.wikipedia.org/wiki/Steiner_tree_problem
    .. [2] Kou, L., G. Markowsky, and L. Berman. 1981.
       'A Fast Algorithm for Steiner Trees'.
       Acta Informatica 15 (2): 141-45.
       https://doi.org/10.1007/BF00288961.
    .. [3] Mehlhorn, Kurt. 1988.
       'A Faster Approximation Algorithm for the Steiner Problem in Graphs'.
       Information Processing Letters 27 (3): 125-28.
       https://doi.org/10.1016/0020-0190(88)90066-X.
    """
    # Default to Mehlhorn's algorithm (the asymptotically faster option).
    if method is None:
        method = "mehlhorn"

    # Look up the implementation in the module-level dispatch table; unknown
    # names become a ValueError chained from the KeyError.
    try:
        algo = ALGORITHMS[method]
    except KeyError as e:
        raise ValueError(f"{method} is not a valid choice for an algorithm.") from e

    edges = algo(G, terminal_nodes, weight)
    # For multigraph we should add the minimal weight edge keys, since the
    # algorithms report edges as (u, v) 2-tuples without keys.
    if G.is_multigraph():
        edges = (
            (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight])) for u, v in edges
        )
    # edge_subgraph shares node/edge data with G; attributes are preserved.
    T = G.edge_subgraph(edges)
    return T
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/traveling_salesman.py ADDED
@@ -0,0 +1,1508 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ =================================
3
+ Travelling Salesman Problem (TSP)
4
+ =================================
5
+
6
+ Implementation of approximate algorithms
7
+ for solving and approximating the TSP problem.
8
+
9
+ Categories of algorithms which are implemented:
10
+
11
+ - Christofides (provides a 3/2-approximation of TSP)
12
+ - Greedy
13
+ - Simulated Annealing (SA)
14
+ - Threshold Accepting (TA)
15
+ - Asadpour Asymmetric Traveling Salesman Algorithm
16
+
17
+ The Travelling Salesman Problem tries to find, given the weight
18
+ (distance) between all points where a salesman has to visit, the
19
+ route so that:
20
+
21
+ - The total distance (cost) which the salesman travels is minimized.
22
+ - The salesman returns to the starting point.
23
+ - Note that for a complete graph, the salesman visits each point once.
24
+
25
+ The function `travelling_salesman_problem` allows for incomplete
26
+ graphs by finding all-pairs shortest paths, effectively converting
27
+ the problem to a complete graph problem. It calls one of the
28
+ approximate methods on that problem and then converts the result
29
+ back to the original graph using the previously found shortest paths.
30
+
31
+ TSP is an NP-hard problem in combinatorial optimization,
32
+ important in operations research and theoretical computer science.
33
+
34
+ http://en.wikipedia.org/wiki/Travelling_salesman_problem
35
+ """
36
+
37
+ import math
38
+
39
+ import networkx as nx
40
+ from networkx.algorithms.tree.mst import random_spanning_tree
41
+ from networkx.utils import not_implemented_for, pairwise, py_random_state
42
+
43
+ __all__ = [
44
+ "traveling_salesman_problem",
45
+ "christofides",
46
+ "asadpour_atsp",
47
+ "greedy_tsp",
48
+ "simulated_annealing_tsp",
49
+ "threshold_accepting_tsp",
50
+ ]
51
+
52
+
53
def swap_two_nodes(soln, seed):
    """Exchange two randomly chosen interior nodes of the cycle `soln`.

    Parameters
    ----------
    soln : list of nodes
        Current cycle of nodes

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    list
        The solution after move is applied. (A neighbor solution.)

    Notes
    -----
    `soln` is expected to be a cycle: its first and last entries are the same
    node and are deliberately excluded from the swap so the cycle keeps its
    starting point. The list is mutated in place and also returned.

    See Also
    --------
    move_one_node
    """
    # Draw two distinct interior positions; endpoints stay fixed.
    i, j = seed.sample(range(1, len(soln) - 1), k=2)
    soln[i], soln[j] = soln[j], soln[i]
    return soln
86
+
87
+
88
def move_one_node(soln, seed):
    """Relocate one randomly chosen interior node of the cycle `soln`.

    Both the node to move and its destination index are picked at random
    from the interior of the list; the first and last entries (the cycle's
    fixed starting node) are never touched.

    Parameters
    ----------
    soln : list of nodes
        Current cycle of nodes

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    list
        The solution after move is applied. (A neighbor solution.)

    Notes
    -----
    `soln` is expected to be a cycle (first element == last element). The
    list is mutated in place and also returned.

    See Also
    --------
    swap_two_nodes
    """
    # Draw a source and destination position among the interior indices.
    src, dest = seed.sample(range(1, len(soln) - 1), k=2)
    node = soln.pop(src)
    soln.insert(dest, node)
    return soln
125
+
126
+
127
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def christofides(G, weight="weight", tree=None):
    """Approximate a solution of the traveling salesman problem

    Compute a 3/2-approximation of the traveling salesman problem
    in a complete undirected graph using Christofides [1]_ algorithm.

    Parameters
    ----------
    G : Graph
        `G` should be a complete weighted undirected graph.
        The distance between all pairs of nodes should be included.

    weight : string, optional (default="weight")
        Edge data key corresponding to the edge weight.
        If any edge does not have this attribute the weight is set to 1.

    tree : NetworkX graph or None (default: None)
        A minimum spanning tree of G. Or, if None, the minimum spanning
        tree is computed using :func:`networkx.minimum_spanning_tree`

    Returns
    -------
    list
        List of nodes in `G` along a cycle with a 3/2-approximation of
        the minimal Hamiltonian cycle.

    References
    ----------
    .. [1] Christofides, Nicos. "Worst-case analysis of a new heuristic for
       the travelling salesman problem." No. RR-388. Carnegie-Mellon Univ
       Pittsburgh Pa Management Sciences Research Group, 1976.
    """
    # Remove selfloops if necessary. We only copy G when at least one
    # selfloop exists.
    loop_nodes = nx.nodes_with_selfloops(G)
    try:
        node = next(loop_nodes)
    except StopIteration:
        pass
    else:
        G = G.copy()
        # `loop_nodes` is a generator whose first element was consumed by
        # next() above, so that node's selfloop is removed separately and
        # the remaining ones are handled by the loop below.
        G.remove_edge(node, node)
        G.remove_edges_from((n, n) for n in loop_nodes)
    # Check that G is a complete graph
    N = len(G) - 1
    # This check ignores selfloops which is what we want here.
    if any(len(nbrdict) != N for n, nbrdict in G.adj.items()):
        raise nx.NetworkXError("G must be a complete graph.")

    if tree is None:
        tree = nx.minimum_spanning_tree(G, weight=weight)
    # L: induced subgraph on the odd-degree nodes of the MST; Christofides
    # pairs these up with a minimum-weight perfect matching.
    L = G.copy()
    L.remove_nodes_from([v for v, degree in tree.degree if not (degree % 2)])
    # Multigraph union of MST edges and matching edges is Eulerian (every
    # node has even degree); shortcut its Euler circuit to a Hamiltonian one.
    MG = nx.MultiGraph()
    MG.add_edges_from(tree.edges)
    edges = nx.min_weight_matching(L, weight=weight)
    MG.add_edges_from(edges)
    return _shortcutting(nx.eulerian_circuit(MG))
186
+
187
+
188
+ def _shortcutting(circuit):
189
+ """Remove duplicate nodes in the path"""
190
+ nodes = []
191
+ for u, v in circuit:
192
+ if v in nodes:
193
+ continue
194
+ if not nodes:
195
+ nodes.append(u)
196
+ nodes.append(v)
197
+ nodes.append(nodes[0])
198
+ return nodes
199
+
200
+
201
@nx._dispatchable(edge_attrs="weight")
def traveling_salesman_problem(
    G, weight="weight", nodes=None, cycle=True, method=None, **kwargs
):
    """Find the shortest path in `G` connecting specified nodes

    This function allows approximate solution to the traveling salesman
    problem on networks that are not complete graphs and/or where the
    salesman does not need to visit all nodes.

    This function proceeds in two steps. First, it creates a complete
    graph using the all-pairs shortest_paths between nodes in `nodes`.
    Edge weights in the new graph are the lengths of the paths
    between each pair of nodes in the original graph.
    Second, an algorithm (default: `christofides` for undirected and
    `asadpour_atsp` for directed) is used to approximate the minimal Hamiltonian
    cycle on this new graph. The available algorithms are:

    - christofides
    - greedy_tsp
    - simulated_annealing_tsp
    - threshold_accepting_tsp
    - asadpour_atsp

    Once the Hamiltonian Cycle is found, this function post-processes to
    accommodate the structure of the original graph. If `cycle` is ``False``,
    the biggest weight edge is removed to make a Hamiltonian path.
    Then each edge on the new complete graph used for that analysis is
    replaced by the shortest_path between those nodes on the original graph.
    If the input graph `G` includes edges with weights that do not adhere to
    the triangle inequality, such as when `G` is not a complete graph (i.e
    length of non-existent edges is infinity), then the returned path may
    contain some repeating nodes (other than the starting node).

    Parameters
    ----------
    G : NetworkX graph
        A possibly weighted graph

    nodes : collection of nodes (default=G.nodes)
        collection (list, set, etc.) of nodes to visit

    weight : string, optional (default="weight")
        Edge data key corresponding to the edge weight.
        If any edge does not have this attribute the weight is set to 1.

    cycle : bool (default: True)
        Indicates whether a cycle should be returned, or a path.
        Note: the cycle is the approximate minimal cycle.
        The path simply removes the biggest edge in that cycle.

    method : function (default: None)
        A function that returns a cycle on all nodes and approximates
        the solution to the traveling salesman problem on a complete
        graph. The returned cycle is then used to find a corresponding
        solution on `G`. `method` should be callable; take inputs
        `G`, and `weight`; and return a list of nodes along the cycle.

        Provided options include :func:`christofides`, :func:`greedy_tsp`,
        :func:`simulated_annealing_tsp` and :func:`threshold_accepting_tsp`.

        If `method is None`: use :func:`christofides` for undirected `G` and
        :func:`asadpour_atsp` for directed `G`.

    **kwargs : dict
        Other keyword arguments to be passed to the `method` function passed in.

    Returns
    -------
    list
        List of nodes in `G` along a path with an approximation of the minimal
        path through `nodes`.

    Raises
    ------
    NetworkXError
        If `G` is a directed graph it has to be strongly connected or the
        complete version cannot be generated.

    Examples
    --------
    >>> tsp = nx.approximation.traveling_salesman_problem
    >>> G = nx.cycle_graph(9)
    >>> G[4][5]["weight"] = 5  # all other weights are 1
    >>> tsp(G, nodes=[3, 6])
    [3, 2, 1, 0, 8, 7, 6, 7, 8, 0, 1, 2, 3]
    >>> path = tsp(G, cycle=False)
    >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4])
    True

    While no longer required, you can still build (curry) your own function
    to provide parameter values to the methods.

    >>> SA_tsp = nx.approximation.simulated_annealing_tsp
    >>> method = lambda G, weight: SA_tsp(G, "greedy", weight=weight, temp=500)
    >>> path = tsp(G, cycle=False, method=method)
    >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4])
    True

    Otherwise, pass other keyword arguments directly into the tsp function.

    >>> path = tsp(
    ...     G,
    ...     cycle=False,
    ...     method=nx.approximation.simulated_annealing_tsp,
    ...     init_cycle="greedy",
    ...     temp=500,
    ... )
    >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4])
    True
    """
    # Pick the default solver based on directedness.
    if method is None:
        if G.is_directed():
            method = asadpour_atsp
        else:
            method = christofides
    if nodes is None:
        nodes = list(G.nodes)

    # All-pairs shortest path lengths (dist) and node sequences (path) in G;
    # these define the complete "metric closure" graph used below.
    dist = {}
    path = {}
    for n, (d, p) in nx.all_pairs_dijkstra(G, weight=weight):
        dist[n] = d
        path[n] = p

    if G.is_directed():
        # If the graph is not strongly connected, raise an exception
        if not nx.is_strongly_connected(G):
            raise nx.NetworkXError("G is not strongly connected")
        GG = nx.DiGraph()
    else:
        GG = nx.Graph()
    for u in nodes:
        for v in nodes:
            if u == v:
                continue
            # Ensure that the weight attribute on GG has the
            # same name as the input graph
            GG.add_edge(u, v, **{weight: dist[u][v]})

    best_GG = method(GG, weight=weight, **kwargs)

    if not cycle:
        # find and remove the biggest edge, then rotate the cycle so the
        # removed edge's endpoints become the ends of the resulting path.
        (u, v) = max(pairwise(best_GG), key=lambda x: dist[x[0]][x[1]])
        pos = best_GG.index(u) + 1
        while best_GG[pos] != v:
            pos = best_GG[pos:].index(u) + 1
        best_GG = best_GG[pos:-1] + best_GG[:pos]

    # Expand each metric-closure edge back into its shortest path in G.
    best_path = []
    for u, v in pairwise(best_GG):
        best_path.extend(path[u][v][:-1])
        best_path.append(v)
    return best_path
356
+
357
+
358
@not_implemented_for("undirected")
@py_random_state(2)
@nx._dispatchable(edge_attrs="weight", mutates_input=True)
def asadpour_atsp(G, weight="weight", seed=None, source=None):
    """
    Returns an approximate solution to the traveling salesman problem.

    This approximate solution is one of the best known approximations for the
    asymmetric traveling salesman problem developed by Asadpour et al,
    [1]_. The algorithm first solves the Held-Karp relaxation to find a lower
    bound for the weight of the cycle. Next, it constructs an exponential
    distribution of undirected spanning trees where the probability of an
    edge being in the tree corresponds to the weight of that edge using a
    maximum entropy rounding scheme. Next we sample that distribution
    $2 \\lceil \\ln n \\rceil$ times and save the minimum sampled tree once the
    direction of the arcs is added back to the edges. Finally, we augment
    then short circuit that graph to find the approximate tour for the
    salesman.

    Parameters
    ----------
    G : nx.DiGraph
        The graph should be a complete weighted directed graph. The
        distance between all paris of nodes should be included and the triangle
        inequality should hold. That is, the direct edge between any two nodes
        should be the path of least cost.

    weight : string, optional (default="weight")
        Edge data key corresponding to the edge weight.
        If any edge does not have this attribute the weight is set to 1.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    source : node label (default=`None`)
        If given, return the cycle starting and ending at the given node.

    Returns
    -------
    cycle : list of nodes
        Returns the cycle (list of nodes) that a salesman can follow to minimize
        the total weight of the trip.

    Raises
    ------
    NetworkXError
        If `G` is not complete or has less than two nodes, the algorithm raises
        an exception.

    NetworkXError
        If `source` is not `None` and is not a node in `G`, the algorithm raises
        an exception.

    NetworkXNotImplemented
        If `G` is an undirected graph.

    References
    ----------
    .. [1] A. Asadpour, M. X. Goemans, A. Madry, S. O. Gharan, and A. Saberi,
       An o(log n/log log n)-approximation algorithm for the asymmetric
       traveling salesman problem, Operations research, 65 (2017),
       pp. 1043-1061

    Examples
    --------
    >>> import networkx as nx
    >>> import networkx.algorithms.approximation as approx
    >>> G = nx.complete_graph(3, create_using=nx.DiGraph)
    >>> nx.set_edge_attributes(
    ...     G,
    ...     {(0, 1): 2, (1, 2): 2, (2, 0): 2, (0, 2): 1, (2, 1): 1, (1, 0): 1},
    ...     "weight",
    ... )
    >>> tour = approx.asadpour_atsp(G, source=0)
    >>> tour
    [0, 2, 1, 0]
    """
    from math import ceil, exp
    from math import log as ln

    # Check that G is a complete graph
    N = len(G) - 1
    if N < 1:
        raise nx.NetworkXError("G must have at least two nodes")
    # This check ignores selfloops which is what we want here.
    if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
        raise nx.NetworkXError("G is not a complete DiGraph")
    # Check that the source vertex, if given, is in the graph
    if source is not None and source not in G.nodes:
        raise nx.NetworkXError("Given source node not in G.")
    # handle 2 node case: the only tour is the pair of opposing arcs.
    if N == 1:
        if source is None:
            return list(G)
        return [source, next(n for n in G if n != source)]

    opt_hk, z_star = held_karp_ascent(G, weight)

    # Test to see if the ascent method found an integer solution or a fractional
    # solution. If it is integral then z_star is a nx.Graph, otherwise it is
    # a dict
    if not isinstance(z_star, dict):
        # Here we are using the shortcutting method to go from the list of edges
        # returned from eulerian_circuit to a list of nodes
        return _shortcutting(nx.eulerian_circuit(z_star, source=source))

    # Create the undirected support of z_star; for each arc pair keep the
    # cheaper direction's weight.
    z_support = nx.MultiGraph()
    for u, v in z_star:
        if (u, v) not in z_support.edges:
            edge_weight = min(G[u][v][weight], G[v][u][weight])
            z_support.add_edge(u, v, **{weight: edge_weight})

    # Create the exponential distribution of spanning trees
    gamma = spanning_tree_distribution(z_support, z_star)

    # Write the lambda values to the edges of z_support
    z_support = nx.Graph(z_support)
    lambda_dict = {(u, v): exp(gamma[(u, v)]) for u, v in z_support.edges()}
    nx.set_edge_attributes(z_support, lambda_dict, "weight")
    del gamma, lambda_dict

    # Sample 2 * ceil( ln(n) ) spanning trees and record the minimum one
    minimum_sampled_tree = None
    minimum_sampled_tree_weight = math.inf
    for _ in range(2 * ceil(ln(G.number_of_nodes()))):
        sampled_tree = random_spanning_tree(z_support, "weight", seed=seed)
        sampled_tree_weight = sampled_tree.size(weight)
        if sampled_tree_weight < minimum_sampled_tree_weight:
            minimum_sampled_tree = sampled_tree.copy()
            minimum_sampled_tree_weight = sampled_tree_weight

    # Orient the edges in that tree to keep the cost of the tree the same.
    t_star = nx.MultiDiGraph()
    for u, v, d in minimum_sampled_tree.edges(data=weight):
        if d == G[u][v][weight]:
            t_star.add_edge(u, v, **{weight: d})
        else:
            t_star.add_edge(v, u, **{weight: d})

    # Find the node demands needed to neutralize the flow of t_star in G.
    # NOTE: this writes a "demand" attribute onto the input graph G
    # (mutates_input=True above).
    node_demands = {n: t_star.out_degree(n) - t_star.in_degree(n) for n in t_star}
    nx.set_node_attributes(G, node_demands, "demand")

    # Find the min_cost_flow
    flow_dict = nx.min_cost_flow(G, "demand")

    # Build the flow into t_star
    for source, values in flow_dict.items():
        for target in values:
            if (source, target) not in t_star.edges and values[target] > 0:
                # IF values[target] > 0 we have to add that many edges
                for _ in range(values[target]):
                    t_star.add_edge(source, target)

    # Return the shortcut eulerian circuit
    circuit = nx.eulerian_circuit(t_star, source=source)
    return _shortcutting(circuit)
517
+
518
+
519
+ @nx._dispatchable(edge_attrs="weight", mutates_input=True, returns_graph=True)
520
+ def held_karp_ascent(G, weight="weight"):
521
+ """
522
+ Minimizes the Held-Karp relaxation of the TSP for `G`
523
+
524
+ Solves the Held-Karp relaxation of the input complete digraph and scales
525
+ the output solution for use in the Asadpour [1]_ ASTP algorithm.
526
+
527
+ The Held-Karp relaxation defines the lower bound for solutions to the
528
+ ATSP, although it does return a fractional solution. This is used in the
529
+ Asadpour algorithm as an initial solution which is later rounded to a
530
+ integral tree within the spanning tree polytopes. This function solves
531
+ the relaxation with the branch and bound method in [2]_.
532
+
533
+ Parameters
534
+ ----------
535
+ G : nx.DiGraph
536
+ The graph should be a complete weighted directed graph.
537
+ The distance between all paris of nodes should be included.
538
+
539
+ weight : string, optional (default="weight")
540
+ Edge data key corresponding to the edge weight.
541
+ If any edge does not have this attribute the weight is set to 1.
542
+
543
+ Returns
544
+ -------
545
+ OPT : float
546
+ The cost for the optimal solution to the Held-Karp relaxation
547
+ z : dict or nx.Graph
548
+ A symmetrized and scaled version of the optimal solution to the
549
+ Held-Karp relaxation for use in the Asadpour algorithm.
550
+
551
+ If an integral solution is found, then that is an optimal solution for
552
+ the ATSP problem and that is returned instead.
553
+
554
+ References
555
+ ----------
556
+ .. [1] A. Asadpour, M. X. Goemans, A. Madry, S. O. Gharan, and A. Saberi,
557
+ An o(log n/log log n)-approximation algorithm for the asymmetric
558
+ traveling salesman problem, Operations research, 65 (2017),
559
+ pp. 1043–1061
560
+
561
+ .. [2] M. Held, R. M. Karp, The traveling-salesman problem and minimum
562
+ spanning trees, Operations Research, 1970-11-01, Vol. 18 (6),
563
+ pp.1138-1162
564
+ """
565
+ import numpy as np
566
+ import scipy as sp
567
+
568
+ def k_pi():
569
+ """
570
+ Find the set of minimum 1-Arborescences for G at point pi.
571
+
572
+ Returns
573
+ -------
574
+ Set
575
+ The set of minimum 1-Arborescences
576
+ """
577
+ # Create a copy of G without vertex 1.
578
+ G_1 = G.copy()
579
+ minimum_1_arborescences = set()
580
+ minimum_1_arborescence_weight = math.inf
581
+
582
+ # node is node '1' in the Held and Karp paper
583
+ n = next(G.__iter__())
584
+ G_1.remove_node(n)
585
+
586
+ # Iterate over the spanning arborescences of the graph until we know
587
+ # that we have found the minimum 1-arborescences. My proposed strategy
588
+ # is to find the most extensive root to connect to from 'node 1' and
589
+ # the least expensive one. We then iterate over arborescences until
590
+ # the cost of the basic arborescence is the cost of the minimum one
591
+ # plus the difference between the most and least expensive roots,
592
+ # that way the cost of connecting 'node 1' will by definition not by
593
+ # minimum
594
+ min_root = {"node": None, weight: math.inf}
595
+ max_root = {"node": None, weight: -math.inf}
596
+ for u, v, d in G.edges(n, data=True):
597
+ if d[weight] < min_root[weight]:
598
+ min_root = {"node": v, weight: d[weight]}
599
+ if d[weight] > max_root[weight]:
600
+ max_root = {"node": v, weight: d[weight]}
601
+
602
+ min_in_edge = min(G.in_edges(n, data=True), key=lambda x: x[2][weight])
603
+ min_root[weight] = min_root[weight] + min_in_edge[2][weight]
604
+ max_root[weight] = max_root[weight] + min_in_edge[2][weight]
605
+
606
+ min_arb_weight = math.inf
607
+ for arb in nx.ArborescenceIterator(G_1):
608
+ arb_weight = arb.size(weight)
609
+ if min_arb_weight == math.inf:
610
+ min_arb_weight = arb_weight
611
+ elif arb_weight > min_arb_weight + max_root[weight] - min_root[weight]:
612
+ break
613
+ # We have to pick the root node of the arborescence for the out
614
+ # edge of the first vertex as that is the only node without an
615
+ # edge directed into it.
616
+ for N, deg in arb.in_degree:
617
+ if deg == 0:
618
+ # root found
619
+ arb.add_edge(n, N, **{weight: G[n][N][weight]})
620
+ arb_weight += G[n][N][weight]
621
+ break
622
+
623
+ # We can pick the minimum weight in-edge for the vertex with
624
+ # a cycle. If there are multiple edges with the same, minimum
625
+ # weight, We need to add all of them.
626
+ #
627
+ # Delete the edge (N, v) so that we cannot pick it.
628
+ edge_data = G[N][n]
629
+ G.remove_edge(N, n)
630
+ min_weight = min(G.in_edges(n, data=weight), key=lambda x: x[2])[2]
631
+ min_edges = [
632
+ (u, v, d) for u, v, d in G.in_edges(n, data=weight) if d == min_weight
633
+ ]
634
+ for u, v, d in min_edges:
635
+ new_arb = arb.copy()
636
+ new_arb.add_edge(u, v, **{weight: d})
637
+ new_arb_weight = arb_weight + d
638
+ # Check to see the weight of the arborescence, if it is a
639
+ # new minimum, clear all of the old potential minimum
640
+ # 1-arborescences and add this is the only one. If its
641
+ # weight is above the known minimum, do not add it.
642
+ if new_arb_weight < minimum_1_arborescence_weight:
643
+ minimum_1_arborescences.clear()
644
+ minimum_1_arborescence_weight = new_arb_weight
645
+ # We have a 1-arborescence, add it to the set
646
+ if new_arb_weight == minimum_1_arborescence_weight:
647
+ minimum_1_arborescences.add(new_arb)
648
+ G.add_edge(N, n, **edge_data)
649
+
650
+ return minimum_1_arborescences
651
+
652
+ def direction_of_ascent():
653
+ """
654
+ Find the direction of ascent at point pi.
655
+
656
+ See [1]_ for more information.
657
+
658
+ Returns
659
+ -------
660
+ dict
661
+ A mapping from the nodes of the graph which represents the direction
662
+ of ascent.
663
+
664
+ References
665
+ ----------
666
+ .. [1] M. Held, R. M. Karp, The traveling-salesman problem and minimum
667
+ spanning trees, Operations Research, 1970-11-01, Vol. 18 (6),
668
+ pp.1138-1162
669
+ """
670
+ # 1. Set d equal to the zero n-vector.
671
+ d = {}
672
+ for n in G:
673
+ d[n] = 0
674
+ del n
675
+ # 2. Find a 1-Arborescence T^k such that k is in K(pi, d).
676
+ minimum_1_arborescences = k_pi()
677
+ while True:
678
+ # Reduce K(pi) to K(pi, d)
679
+ # Find the arborescence in K(pi) which increases the lest in
680
+ # direction d
681
+ min_k_d_weight = math.inf
682
+ min_k_d = None
683
+ for arborescence in minimum_1_arborescences:
684
+ weighted_cost = 0
685
+ for n, deg in arborescence.degree:
686
+ weighted_cost += d[n] * (deg - 2)
687
+ if weighted_cost < min_k_d_weight:
688
+ min_k_d_weight = weighted_cost
689
+ min_k_d = arborescence
690
+
691
+ # 3. If sum of d_i * v_{i, k} is greater than zero, terminate
692
+ if min_k_d_weight > 0:
693
+ return d, min_k_d
694
+ # 4. d_i = d_i + v_{i, k}
695
+ for n, deg in min_k_d.degree:
696
+ d[n] += deg - 2
697
+ # Check that we do not need to terminate because the direction
698
+ # of ascent does not exist. This is done with linear
699
+ # programming.
700
+ c = np.full(len(minimum_1_arborescences), -1, dtype=int)
701
+ a_eq = np.empty((len(G) + 1, len(minimum_1_arborescences)), dtype=int)
702
+ b_eq = np.zeros(len(G) + 1, dtype=int)
703
+ b_eq[len(G)] = 1
704
+ for arb_count, arborescence in enumerate(minimum_1_arborescences):
705
+ n_count = len(G) - 1
706
+ for n, deg in arborescence.degree:
707
+ a_eq[n_count][arb_count] = deg - 2
708
+ n_count -= 1
709
+ a_eq[len(G)][arb_count] = 1
710
+ program_result = sp.optimize.linprog(
711
+ c, A_eq=a_eq, b_eq=b_eq, method="highs-ipm"
712
+ )
713
+ # If the constants exist, then the direction of ascent doesn't
714
+ if program_result.success:
715
+ # There is no direction of ascent
716
+ return None, minimum_1_arborescences
717
+
718
+ # 5. GO TO 2
719
+
720
+ def find_epsilon(k, d):
721
+ """
722
+ Given the direction of ascent at pi, find the maximum distance we can go
723
+ in that direction.
724
+
725
+ Parameters
726
+ ----------
727
+ k_xy : set
728
+ The set of 1-arborescences which have the minimum rate of increase
729
+ in the direction of ascent
730
+
731
+ d : dict
732
+ The direction of ascent
733
+
734
+ Returns
735
+ -------
736
+ float
737
+ The distance we can travel in direction `d`
738
+ """
739
+ min_epsilon = math.inf
740
+ for e_u, e_v, e_w in G.edges(data=weight):
741
+ if (e_u, e_v) in k.edges:
742
+ continue
743
+ # Now, I have found a condition which MUST be true for the edges to
744
+ # be a valid substitute. The edge in the graph which is the
745
+ # substitute is the one with the same terminated end. This can be
746
+ # checked rather simply.
747
+ #
748
+ # Find the edge within k which is the substitute. Because k is a
749
+ # 1-arborescence, we know that they is only one such edges
750
+ # leading into every vertex.
751
+ if len(k.in_edges(e_v, data=weight)) > 1:
752
+ raise Exception
753
+ sub_u, sub_v, sub_w = next(k.in_edges(e_v, data=weight).__iter__())
754
+ k.add_edge(e_u, e_v, **{weight: e_w})
755
+ k.remove_edge(sub_u, sub_v)
756
+ if (
757
+ max(d for n, d in k.in_degree()) <= 1
758
+ and len(G) == k.number_of_edges()
759
+ and nx.is_weakly_connected(k)
760
+ ):
761
+ # Ascent method calculation
762
+ if d[sub_u] == d[e_u] or sub_w == e_w:
763
+ # Revert to the original graph
764
+ k.remove_edge(e_u, e_v)
765
+ k.add_edge(sub_u, sub_v, **{weight: sub_w})
766
+ continue
767
+ epsilon = (sub_w - e_w) / (d[e_u] - d[sub_u])
768
+ if 0 < epsilon < min_epsilon:
769
+ min_epsilon = epsilon
770
+ # Revert to the original graph
771
+ k.remove_edge(e_u, e_v)
772
+ k.add_edge(sub_u, sub_v, **{weight: sub_w})
773
+
774
+ return min_epsilon
775
+
776
+ # I have to know that the elements in pi correspond to the correct elements
777
+ # in the direction of ascent, even if the node labels are not integers.
778
+ # Thus, I will use dictionaries to made that mapping.
779
+ pi_dict = {}
780
+ for n in G:
781
+ pi_dict[n] = 0
782
+ del n
783
+ original_edge_weights = {}
784
+ for u, v, d in G.edges(data=True):
785
+ original_edge_weights[(u, v)] = d[weight]
786
+ dir_ascent, k_d = direction_of_ascent()
787
+ while dir_ascent is not None:
788
+ max_distance = find_epsilon(k_d, dir_ascent)
789
+ for n, v in dir_ascent.items():
790
+ pi_dict[n] += max_distance * v
791
+ for u, v, d in G.edges(data=True):
792
+ d[weight] = original_edge_weights[(u, v)] + pi_dict[u]
793
+ dir_ascent, k_d = direction_of_ascent()
794
+ nx._clear_cache(G)
795
+ # k_d is no longer an individual 1-arborescence but rather a set of
796
+ # minimal 1-arborescences at the maximum point of the polytope and should
797
+ # be reflected as such
798
+ k_max = k_d
799
+
800
+ # Search for a cycle within k_max. If a cycle exists, return it as the
801
+ # solution
802
+ for k in k_max:
803
+ if len([n for n in k if k.degree(n) == 2]) == G.order():
804
+ # Tour found
805
+ # TODO: this branch does not restore original_edge_weights of G!
806
+ return k.size(weight), k
807
+
808
+ # Write the original edge weights back to G and every member of k_max at
809
+ # the maximum point. Also average the number of times that edge appears in
810
+ # the set of minimal 1-arborescences.
811
+ x_star = {}
812
+ size_k_max = len(k_max)
813
+ for u, v, d in G.edges(data=True):
814
+ edge_count = 0
815
+ d[weight] = original_edge_weights[(u, v)]
816
+ for k in k_max:
817
+ if (u, v) in k.edges():
818
+ edge_count += 1
819
+ k[u][v][weight] = original_edge_weights[(u, v)]
820
+ x_star[(u, v)] = edge_count / size_k_max
821
+ # Now symmetrize the edges in x_star and scale them according to (5) in
822
+ # reference [1]
823
+ z_star = {}
824
+ scale_factor = (G.order() - 1) / G.order()
825
+ for u, v in x_star:
826
+ frequency = x_star[(u, v)] + x_star[(v, u)]
827
+ if frequency > 0:
828
+ z_star[(u, v)] = scale_factor * frequency
829
+ del x_star
830
+ # Return the optimal weight and the z dict
831
+ return next(k_max.__iter__()).size(weight), z_star
832
+
833
+
834
@nx._dispatchable
def spanning_tree_distribution(G, z):
    """
    Find the asadpour exponential distribution of spanning trees.

    Solves the Maximum Entropy Convex Program in the Asadpour algorithm [1]_
    using the approach in section 7 to build an exponential distribution of
    undirected spanning trees.

    This algorithm ensures that the probability of any edge in a spanning
    tree is proportional to the sum of the probabilities of the trees
    containing that edge over the sum of the probabilities of all spanning
    trees of the graph.

    Parameters
    ----------
    G : nx.MultiGraph
        The undirected support graph for the Held Karp relaxation

    z : dict
        The output of `held_karp_ascent()`, a scaled version of the Held-Karp
        solution.

    Returns
    -------
    gamma : dict
        The probability distribution which approximately preserves the marginal
        probabilities of `z`.

    Notes
    -----
    `G` is temporarily mutated: a scratch edge attribute (``lambda_key``
    below) is written to every edge while the distribution is computed and
    is removed again before returning.
    """
    from math import exp
    from math import log as ln

    def q(e):
        """
        Compute q(e), described in the Asadpour paper as "the
        probability that edge e will be included in a spanning tree T that is
        chosen with probability proportional to exp(gamma(T))", which
        basically means that it is the total probability of the edge appearing
        across the whole distribution.

        Parameters
        ----------
        e : tuple
            The `(u, v)` tuple describing the edge we are interested in

        Returns
        -------
        float
            The probability that a spanning tree chosen according to the
            current values of gamma will include edge `e`.
        """
        # Write exp(gamma) onto every edge so that the weighted spanning
        # tree counts below (matrix-tree / Kirchhoff computations) become
        # sums of exp(gamma(T)) over spanning trees T.
        for u, v, d in G.edges(data=True):
            d[lambda_key] = exp(gamma[(u, v)])
        G_Kirchhoff = nx.number_of_spanning_trees(G, weight=lambda_key)
        # Spanning trees containing e correspond one-to-one with spanning
        # trees of G with e contracted.
        G_e = nx.contracted_edge(G, e, self_loops=False)
        G_e_Kirchhoff = nx.number_of_spanning_trees(G_e, weight=lambda_key)

        # Multiply by the weight of the contracted edge since it is not included
        # in the total weight of the contracted graph.
        return exp(gamma[(e[0], e[1])]) * G_e_Kirchhoff / G_Kirchhoff

    # initialize gamma to the zero dict
    gamma = {}
    for u, v, _ in G.edges:
        gamma[(u, v)] = 0

    # set epsilon
    EPSILON = 0.2

    # pick an edge attribute name that is unlikely to be in the graph
    lambda_key = "spanning_tree_distribution's secret attribute name for lambda"

    while True:
        # We need to know that no values of q_e are greater than
        # (1 + epsilon) * z_e, however changing one gamma value can increase the
        # value of a different q_e, so we have to complete the for loop without
        # changing anything for the condition to be met
        in_range_count = 0
        # Search for an edge with q_e > (1 + epsilon) * z_e
        for u, v in gamma:
            e = (u, v)
            q_e = q(e)
            z_e = z[e]
            if q_e > (1 + EPSILON) * z_e:
                # Decrease gamma[e] by the closed-form correction from
                # section 7 of [1]_ so q_e drops to (1 + EPSILON / 2) * z_e.
                delta = ln(
                    (q_e * (1 - (1 + EPSILON / 2) * z_e))
                    / ((1 - q_e) * (1 + EPSILON / 2) * z_e)
                )
                gamma[e] -= delta
                # Check that delta had the desired effect
                new_q_e = q(e)
                desired_q_e = (1 + EPSILON / 2) * z_e
                if round(new_q_e, 8) != round(desired_q_e, 8):
                    raise nx.NetworkXError(
                        f"Unable to modify probability for edge ({u}, {v})"
                    )
            else:
                in_range_count += 1
        # Check if the for loop terminated without changing any gamma
        if in_range_count == len(gamma):
            break

    # Remove the scratch edge attributes so G is returned in its input state
    for _, _, d in G.edges(data=True):
        if lambda_key in d:
            del d[lambda_key]

    return gamma
943
+
944
+
945
@nx._dispatchable(edge_attrs="weight")
def greedy_tsp(G, weight="weight", source=None):
    """Return a low cost cycle starting at `source` and its cost.

    This is a greedy approximation for the traveling salesman problem.
    Beginning at `source`, the tour repeatedly hops to the cheapest node
    that has not been visited yet; once every node has been used, the tour
    closes back at the start.

    Parameters
    ----------
    G : Graph
        The Graph should be a complete weighted undirected graph: the
        distance between all pairs of nodes should be included.

    weight : string, optional (default="weight")
        Edge data key corresponding to the edge weight.
        If any edge does not have this attribute the weight is set to 1.

    source : node, optional (default: first node in list(G))
        Starting node. If None, defaults to ``next(iter(G))``

    Returns
    -------
    cycle : list of nodes
        The cycle found by the greedy heuristic, starting and ending at
        `source`.

    Raises
    ------
    NetworkXError
        If `G` is not complete, the algorithm raises an exception.

    Examples
    --------
    >>> from networkx.algorithms import approximation as approx
    >>> G = nx.DiGraph()
    >>> G.add_weighted_edges_from(
    ...     {
    ...         ("A", "B", 3),
    ...         ("A", "C", 17),
    ...         ("A", "D", 14),
    ...         ("B", "A", 3),
    ...         ("B", "C", 12),
    ...         ("B", "D", 16),
    ...         ("C", "A", 13),
    ...         ("C", "B", 12),
    ...         ("C", "D", 4),
    ...         ("D", "A", 14),
    ...         ("D", "B", 15),
    ...         ("D", "C", 2),
    ...     }
    ... )
    >>> cycle = approx.greedy_tsp(G, source="D")
    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
    >>> cycle
    ['D', 'C', 'B', 'A', 'D']
    >>> cost
    31

    Notes
    -----
    At every step the algorithm extends the partial tour with the
    not-yet-visited node whose connection to the current endpoint is
    cheapest. Greedy construction does not always give the best solution,
    but the result is a feasible first cycle that can be fed to an
    iterative improvement method such as Simulated Annealing or Threshold
    Accepting.

    Time complexity: It has a running time $O(|V|^2)$
    """
    # Completeness check: in a complete graph every node has |V| - 1
    # neighbors. Self-loops are deliberately not counted.
    expected_degree = len(G) - 1
    for node, nbrs in G.adj.items():
        if len(nbrs) - (node in nbrs) != expected_degree:
            raise nx.NetworkXError("G must be a complete graph.")

    if source is None:
        source = nx.utils.arbitrary_element(G)

    # Two-node graphs have exactly one cycle; return it directly.
    if len(G) == 2:
        return [source, next(G.neighbors(source)), source]

    unvisited = set(G) - {source}
    tour = [source]
    current = source
    while unvisited:
        nbrs = G[current]
        # Greedy step: cheapest edge from the current endpoint into the
        # set of unvisited nodes (missing weights default to 1).
        current = min(unvisited, key=lambda u: nbrs[u].get(weight, 1))
        tour.append(current)
        unvisited.remove(current)
    # Close the cycle back at the source.
    tour.append(source)
    return tour
1046
+
1047
+
1048
@py_random_state(9)
@nx._dispatchable(edge_attrs="weight")
def simulated_annealing_tsp(
    G,
    init_cycle,
    weight="weight",
    source=None,
    temp=100,
    move="1-1",
    max_iterations=10,
    N_inner=100,
    alpha=0.01,
    seed=None,
):
    """Returns an approximate solution to the traveling salesman problem.

    This function uses simulated annealing to approximate the minimal cost
    cycle through the nodes. Starting from a suboptimal solution, simulated
    annealing perturbs that solution, occasionally accepting changes that make
    the solution worse to escape from a locally optimal solution. The chance
    of accepting such changes decreases over the iterations to encourage
    an optimal result. In summary, the function returns a cycle starting
    at `source` for which the total cost is minimized. It also returns the cost.

    The chance of accepting a proposed change is related to a parameter called
    the temperature (annealing has a physical analogue of steel hardening
    as it cools). As the temperature is reduced, the chance of moves that
    increase cost goes down.

    Parameters
    ----------
    G : Graph
        `G` should be a complete weighted graph.
        The distance between all pairs of nodes should be included.

    init_cycle : list of all nodes or "greedy"
        The initial solution (a cycle through all nodes returning to the start).
        This argument has no default to make you think about it.
        If "greedy", use `greedy_tsp(G, weight)`.
        Other common starting cycles are `list(G) + [next(iter(G))]` or the final
        result of `simulated_annealing_tsp` when doing `threshold_accepting_tsp`.

    weight : string, optional (default="weight")
        Edge data key corresponding to the edge weight.
        If any edge does not have this attribute the weight is set to 1.

    source : node, optional (default: first node in list(G))
        Starting node. If None, defaults to ``next(iter(G))``

    temp : int, optional (default=100)
        The algorithm's temperature parameter. It represents the initial
        value of temperature

    move : "1-1" or "1-0" or function, optional (default="1-1")
        Indicator of what move to use when finding new trial solutions.
        Strings indicate two special built-in moves:

        - "1-1": 1-1 exchange which transposes the position
          of two elements of the current solution.
          The function called is :func:`swap_two_nodes`.
          For example if we apply 1-1 exchange in the solution
          ``A = [3, 2, 1, 4, 3]``
          we can get the following by the transposition of 1 and 4 elements:
          ``A' = [3, 2, 4, 1, 3]``
        - "1-0": 1-0 exchange which moves a node in the solution
          to a new position.
          The function called is :func:`move_one_node`.
          For example if we apply 1-0 exchange in the solution
          ``A = [3, 2, 1, 4, 3]``
          we can transfer the fourth element to the second position:
          ``A' = [3, 4, 2, 1, 3]``

        You may provide your own functions to enact a move from
        one solution to a neighbor solution. The function must take
        the solution as input along with a `seed` input to control
        random number generation (see the `seed` input here).
        Your function should maintain the solution as a cycle with
        equal first and last node and all others appearing once.
        Your function should return the new solution.

    max_iterations : int, optional (default=10)
        Declared done when this number of consecutive iterations of
        the outer loop occurs without any change in the best cost solution.

    N_inner : int, optional (default=100)
        The number of iterations of the inner loop.

    alpha : float between (0, 1), optional (default=0.01)
        Percentage of temperature decrease in each iteration
        of outer loop

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    cycle : list of nodes
        Returns the cycle (list of nodes) that a salesman
        can follow to minimize total weight of the trip.

    Raises
    ------
    NetworkXError
        If `G` is not complete the algorithm raises an exception.
        Also raised when `init_cycle` is not a cycle over all nodes of `G`
        or does not start at `source`.

    Examples
    --------
    >>> from networkx.algorithms import approximation as approx
    >>> G = nx.DiGraph()
    >>> G.add_weighted_edges_from(
    ...     {
    ...         ("A", "B", 3),
    ...         ("A", "C", 17),
    ...         ("A", "D", 14),
    ...         ("B", "A", 3),
    ...         ("B", "C", 12),
    ...         ("B", "D", 16),
    ...         ("C", "A", 13),
    ...         ("C", "B", 12),
    ...         ("C", "D", 4),
    ...         ("D", "A", 14),
    ...         ("D", "B", 15),
    ...         ("D", "C", 2),
    ...     }
    ... )
    >>> cycle = approx.simulated_annealing_tsp(G, "greedy", source="D")
    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
    >>> cycle
    ['D', 'C', 'B', 'A', 'D']
    >>> cost
    31
    >>> incycle = ["D", "B", "A", "C", "D"]
    >>> cycle = approx.simulated_annealing_tsp(G, incycle, source="D")
    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
    >>> cycle
    ['D', 'C', 'B', 'A', 'D']
    >>> cost
    31

    Notes
    -----
    Simulated Annealing is a metaheuristic local search algorithm.
    The main characteristic of this algorithm is that it accepts
    even solutions which lead to the increase of the cost in order
    to escape from low quality local optimal solutions.

    This algorithm needs an initial solution. If not provided, it is
    constructed by a simple greedy algorithm. At every iteration, the
    algorithm selects thoughtfully a neighbor solution.
    Consider $c(x)$ cost of current solution and $c(x')$ cost of a
    neighbor solution.
    If $c(x') - c(x) <= 0$ then the neighbor solution becomes the current
    solution for the next iteration. Otherwise, the algorithm accepts
    the neighbor solution with probability $p = exp - ([c(x') - c(x)] / temp)$.
    Otherwise the current solution is retained.

    `temp` is a parameter of the algorithm and represents temperature.

    Time complexity:
    For $N_i$ iterations of the inner loop and $N_o$ iterations of the
    outer loop, this algorithm has running time $O(N_i * N_o * |V|)$.

    For more information and how the algorithm is inspired see:
    http://en.wikipedia.org/wiki/Simulated_annealing
    """
    # Resolve the built-in move names to their functions; any other value
    # is assumed to be a user-supplied callable with the documented contract.
    if move == "1-1":
        move = swap_two_nodes
    elif move == "1-0":
        move = move_one_node
    if init_cycle == "greedy":
        # Construct an initial solution using a greedy algorithm.
        cycle = greedy_tsp(G, weight=weight, source=source)
        # greedy_tsp already validated completeness; for 2 nodes its cycle
        # is the unique (and therefore optimal) tour, so return it as-is.
        if G.number_of_nodes() == 2:
            return cycle

    else:
        # Validate the user-supplied cycle: it must start (and end) at
        # `source` and visit every node of G exactly once.
        cycle = list(init_cycle)
        if source is None:
            source = cycle[0]
        elif source != cycle[0]:
            raise nx.NetworkXError("source must be first node in init_cycle")
        if cycle[0] != cycle[-1]:
            raise nx.NetworkXError("init_cycle must be a cycle. (return to start)")

        if len(cycle) - 1 != len(G) or len(set(G.nbunch_iter(cycle))) != len(G):
            raise nx.NetworkXError("init_cycle should be a cycle over all nodes in G.")

        # Check that G is a complete graph
        N = len(G) - 1
        # This check ignores selfloops which is what we want here.
        if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
            raise nx.NetworkXError("G must be a complete graph.")

        # With only 2 nodes there is a single possible tour; no search needed.
        if G.number_of_nodes() == 2:
            neighbor = next(G.neighbors(source))
            return [source, neighbor, source]

    # Find the cost of initial solution
    cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(cycle))

    # `count` tracks consecutive outer iterations without improvement of the
    # best solution; it resets to 0 whenever a new best cycle is found.
    count = 0
    best_cycle = cycle.copy()
    best_cost = cost
    while count <= max_iterations and temp > 0:
        count += 1
        for i in range(N_inner):
            # Propose a neighbor solution and evaluate its cost.
            adj_sol = move(cycle, seed)
            adj_cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(adj_sol))
            delta = adj_cost - cost
            if delta <= 0:
                # Set current solution the adjacent solution.
                cycle = adj_sol
                cost = adj_cost

                if cost < best_cost:
                    count = 0
                    best_cycle = cycle.copy()
                    best_cost = cost
            else:
                # Accept even a worse solution with probability p.
                # p decays as delta grows and as the temperature falls.
                p = math.exp(-delta / temp)
                if p >= seed.random():
                    cycle = adj_sol
                    cost = adj_cost
        # Geometric cooling schedule: temp shrinks by alpha fraction each
        # outer iteration, reducing the chance of accepting worse moves.
        temp -= temp * alpha

    return best_cycle
1276
+
1277
+
1278
@py_random_state(9)
@nx._dispatchable(edge_attrs="weight")
def threshold_accepting_tsp(
    G,
    init_cycle,
    weight="weight",
    source=None,
    threshold=1,
    move="1-1",
    max_iterations=10,
    N_inner=100,
    alpha=0.1,
    seed=None,
):
    """Returns an approximate solution to the traveling salesman problem.

    This function uses threshold accepting methods to approximate the minimal cost
    cycle through the nodes. Starting from a suboptimal solution, threshold
    accepting methods perturb that solution, accepting any changes that make
    the solution no worse than increasing by a threshold amount. Improvements
    in cost are accepted, but so are changes leading to small increases in cost.
    This allows the solution to leave suboptimal local minima in solution space.
    The threshold is decreased slowly as iterations proceed helping to ensure
    an optimum. In summary, the function returns a cycle starting at `source`
    for which the total cost is minimized.

    Parameters
    ----------
    G : Graph
        `G` should be a complete weighted graph.
        The distance between all pairs of nodes should be included.

    init_cycle : list or "greedy"
        The initial solution (a cycle through all nodes returning to the start).
        This argument has no default to make you think about it.
        If "greedy", use `greedy_tsp(G, weight)`.
        Other common starting cycles are `list(G) + [next(iter(G))]` or the final
        result of `simulated_annealing_tsp` when doing `threshold_accepting_tsp`.

    weight : string, optional (default="weight")
        Edge data key corresponding to the edge weight.
        If any edge does not have this attribute the weight is set to 1.

    source : node, optional (default: first node in list(G))
        Starting node. If None, defaults to ``next(iter(G))``

    threshold : int, optional (default=1)
        The algorithm's threshold parameter. It represents the initial
        threshold's value

    move : "1-1" or "1-0" or function, optional (default="1-1")
        Indicator of what move to use when finding new trial solutions.
        Strings indicate two special built-in moves:

        - "1-1": 1-1 exchange which transposes the position
          of two elements of the current solution.
          The function called is :func:`swap_two_nodes`.
          For example if we apply 1-1 exchange in the solution
          ``A = [3, 2, 1, 4, 3]``
          we can get the following by the transposition of 1 and 4 elements:
          ``A' = [3, 2, 4, 1, 3]``
        - "1-0": 1-0 exchange which moves a node in the solution
          to a new position.
          The function called is :func:`move_one_node`.
          For example if we apply 1-0 exchange in the solution
          ``A = [3, 2, 1, 4, 3]``
          we can transfer the fourth element to the second position:
          ``A' = [3, 4, 2, 1, 3]``

        You may provide your own functions to enact a move from
        one solution to a neighbor solution. The function must take
        the solution as input along with a `seed` input to control
        random number generation (see the `seed` input here).
        Your function should maintain the solution as a cycle with
        equal first and last node and all others appearing once.
        Your function should return the new solution.

    max_iterations : int, optional (default=10)
        Declared done when this number of consecutive iterations of
        the outer loop occurs without any change in the best cost solution.

    N_inner : int, optional (default=100)
        The number of iterations of the inner loop.

    alpha : float between (0, 1), optional (default=0.1)
        Percentage of threshold decrease when there is at
        least one acceptance of a neighbor solution.
        If no inner loop moves are accepted the threshold remains unchanged.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    cycle : list of nodes
        Returns the cycle (list of nodes) that a salesman
        can follow to minimize total weight of the trip.

    Raises
    ------
    NetworkXError
        If `G` is not complete the algorithm raises an exception.
        Also raised when `init_cycle` is not a cycle over all nodes of `G`
        or does not start at `source`.

    Examples
    --------
    >>> from networkx.algorithms import approximation as approx
    >>> G = nx.DiGraph()
    >>> G.add_weighted_edges_from(
    ...     {
    ...         ("A", "B", 3),
    ...         ("A", "C", 17),
    ...         ("A", "D", 14),
    ...         ("B", "A", 3),
    ...         ("B", "C", 12),
    ...         ("B", "D", 16),
    ...         ("C", "A", 13),
    ...         ("C", "B", 12),
    ...         ("C", "D", 4),
    ...         ("D", "A", 14),
    ...         ("D", "B", 15),
    ...         ("D", "C", 2),
    ...     }
    ... )
    >>> cycle = approx.threshold_accepting_tsp(G, "greedy", source="D")
    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
    >>> cycle
    ['D', 'C', 'B', 'A', 'D']
    >>> cost
    31
    >>> incycle = ["D", "B", "A", "C", "D"]
    >>> cycle = approx.threshold_accepting_tsp(G, incycle, source="D")
    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
    >>> cycle
    ['D', 'C', 'B', 'A', 'D']
    >>> cost
    31

    Notes
    -----
    Threshold Accepting is a metaheuristic local search algorithm.
    The main characteristic of this algorithm is that it accepts
    even solutions which lead to the increase of the cost in order
    to escape from low quality local optimal solutions.

    This algorithm needs an initial solution. This solution can be
    constructed by a simple greedy algorithm. At every iteration, it
    selects thoughtfully a neighbor solution.
    Consider $c(x)$ cost of current solution and $c(x')$ cost of
    neighbor solution.
    If $c(x') - c(x) <= threshold$ then the neighbor solution becomes the current
    solution for the next iteration, where the threshold is named threshold.

    In comparison to the Simulated Annealing algorithm, the Threshold
    Accepting algorithm does not accept very low quality solutions
    (due to the presence of the threshold value). In the case of
    Simulated Annealing, even a very low quality solution can
    be accepted with probability $p$.

    Time complexity:
    It has a running time $O(m * n * |V|)$ where $m$ and $n$ are the number
    of times the outer and inner loop run respectively.

    For more information and how the algorithm is inspired see:
    https://doi.org/10.1016/0021-9991(90)90201-B

    See Also
    --------
    simulated_annealing_tsp

    """
    # Resolve the built-in move names to their functions; any other value
    # is assumed to be a user-supplied callable with the documented contract.
    if move == "1-1":
        move = swap_two_nodes
    elif move == "1-0":
        move = move_one_node
    if init_cycle == "greedy":
        # Construct an initial solution using a greedy algorithm.
        cycle = greedy_tsp(G, weight=weight, source=source)
        # greedy_tsp already validated completeness; for 2 nodes its cycle
        # is the unique (and therefore optimal) tour, so return it as-is.
        if G.number_of_nodes() == 2:
            return cycle

    else:
        # Validate the user-supplied cycle: it must start (and end) at
        # `source` and visit every node of G exactly once.
        cycle = list(init_cycle)
        if source is None:
            source = cycle[0]
        elif source != cycle[0]:
            raise nx.NetworkXError("source must be first node in init_cycle")
        if cycle[0] != cycle[-1]:
            raise nx.NetworkXError("init_cycle must be a cycle. (return to start)")

        if len(cycle) - 1 != len(G) or len(set(G.nbunch_iter(cycle))) != len(G):
            raise nx.NetworkXError("init_cycle is not all and only nodes.")

        # Check that G is a complete graph
        N = len(G) - 1
        # This check ignores selfloops which is what we want here.
        if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
            raise nx.NetworkXError("G must be a complete graph.")

        # With only 2 nodes there is a single possible tour; no search needed.
        # Take the neighbor directly from the iterator instead of building a
        # throwaway list, matching simulated_annealing_tsp.
        if G.number_of_nodes() == 2:
            neighbor = next(G.neighbors(source))
            return [source, neighbor, source]

    # Find the cost of initial solution
    cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(cycle))

    # `count` tracks consecutive outer iterations without improvement of the
    # best solution; it resets to 0 whenever a new best cycle is found.
    count = 0
    best_cycle = cycle.copy()
    best_cost = cost
    while count <= max_iterations:
        count += 1
        accepted = False
        for _ in range(N_inner):
            # Propose a neighbor solution and evaluate its cost.
            adj_sol = move(cycle, seed)
            adj_cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(adj_sol))
            delta = adj_cost - cost
            if delta <= threshold:
                accepted = True

                # Set current solution the adjacent solution.
                cycle = adj_sol
                cost = adj_cost

                if cost < best_cost:
                    count = 0
                    best_cycle = cycle.copy()
                    best_cost = cost
        # Lower the acceptance threshold only if at least one neighbor was
        # accepted this round; otherwise keep it so progress stays possible.
        if accepted:
            threshold -= threshold * alpha

    return best_cycle
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/treewidth.py ADDED
@@ -0,0 +1,255 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for computing treewidth decomposition.
2
+
3
+ Treewidth of an undirected graph is a number associated with the graph.
4
+ It can be defined as the size of the largest vertex set (bag) in a tree
5
+ decomposition of the graph minus one.
6
+
7
+ `Wikipedia: Treewidth <https://en.wikipedia.org/wiki/Treewidth>`_
8
+
9
+ The notions of treewidth and tree decomposition have gained their
10
+ attractiveness partly because many graph and network problems that are
11
+ intractable (e.g., NP-hard) on arbitrary graphs become efficiently
12
+ solvable (e.g., with a linear time algorithm) when the treewidth of the
13
+ input graphs is bounded by a constant [1]_ [2]_.
14
+
15
+ There are two different functions for computing a tree decomposition:
16
+ :func:`treewidth_min_degree` and :func:`treewidth_min_fill_in`.
17
+
18
+ .. [1] Hans L. Bodlaender and Arie M. C. A. Koster. 2010. "Treewidth
19
+ computations I.Upper bounds". Inf. Comput. 208, 3 (March 2010),259-275.
20
+ http://dx.doi.org/10.1016/j.ic.2009.03.008
21
+
22
+ .. [2] Hans L. Bodlaender. "Discovering Treewidth". Institute of Information
23
+ and Computing Sciences, Utrecht University.
24
+ Technical Report UU-CS-2005-018.
25
+ http://www.cs.uu.nl
26
+
27
+ .. [3] K. Wang, Z. Lu, and J. Hicks *Treewidth*.
28
+ https://web.archive.org/web/20210507025929/http://web.eecs.utk.edu/~cphill25/cs594_spring2015_projects/treewidth.pdf
29
+
30
+ """
31
+
32
+ import itertools
33
+ import sys
34
+ from heapq import heapify, heappop, heappush
35
+
36
+ import networkx as nx
37
+ from networkx.utils import not_implemented_for
38
+
39
+ __all__ = ["treewidth_min_degree", "treewidth_min_fill_in"]
40
+
41
+
42
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(returns_graph=True)
def treewidth_min_degree(G):
    """Returns a treewidth decomposition using the Minimum Degree heuristic.

    At every elimination step the node with the smallest current degree is
    chosen, the graph is updated (node removed, neighborhood handled), and
    the process repeats on the reduced graph.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    Treewidth decomposition : (int, Graph) tuple
        2-tuple with treewidth and the corresponding decomposed tree.
    """
    # The heuristic keeps a degree heap across calls, so one instance must
    # drive the entire elimination process.
    heuristic = MinDegreeHeuristic(G)
    return treewidth_decomp(G, heuristic.best_node)
64
+
65
+
66
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(returns_graph=True)
def treewidth_min_fill_in(G):
    """Returns a treewidth decomposition using the Minimum Fill-in heuristic.

    At every elimination step the node whose removal requires the fewest
    fill-in edges (edges needed to turn its neighborhood into a clique)
    is chosen.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    Treewidth decomposition : (int, Graph) tuple
        2-tuple with treewidth and the corresponding decomposed tree.
    """
    return treewidth_decomp(G, heuristic=min_fill_in_heuristic)
86
+
87
+
88
class MinDegreeHeuristic:
    """Implements the Minimum Degree heuristic.

    Repeatedly yields the node with the smallest current degree.  A lazy
    heap of ``(degree, tiebreak, node)`` entries is maintained: entries
    whose recorded degree is out of date are simply discarded when popped
    rather than updated in place.
    """

    def __init__(self, graph):
        self._graph = graph
        # Nodes whose degree changed since the previous call; their heap
        # entries are refreshed at the start of the next `best_node` call.
        self._update_nodes = []
        self.count = itertools.count()
        # Seed the heap with the initial degrees; the counter breaks ties
        # deterministically between equal-degree nodes.
        self._degreeq = [(len(graph[node]), next(self.count), node) for node in graph]
        heapify(self._degreeq)

    def best_node(self, graph):
        # Refresh entries for the nodes touched by the last elimination.
        for node in self._update_nodes:
            heappush(self._degreeq, (len(graph[node]), next(self.count), node))

        # Pop until a valid (up-to-date, minimum degree) entry appears.
        while self._degreeq:
            degree, _, node = heappop(self._degreeq)
            if node not in graph or len(graph[node]) != degree:
                # Stale entry: the node was removed or its degree changed.
                continue
            if degree == len(graph) - 1:
                # The remaining graph is complete: abort condition.
                return None

            # The eliminated node's neighbors change degree; refresh them
            # lazily on the next call.
            self._update_nodes = graph[node]
            return node

        # Heap exhausted: nothing left to eliminate.
        return None
133
+
134
+
135
def min_fill_in_heuristic(graph_dict):
    """Implements the Minimum Fill-in heuristic.

    graph_dict: dict keyed by node to sets of neighbors (no self-loops)

    Returns the node for which turning its neighborhood into a clique adds
    the fewest edges, or ``None`` for an empty or complete graph.  The
    running time is :math:`O(V^3)` and only constant extra memory is used.
    """
    if not graph_dict:
        return None

    # Visit candidates from lowest to highest degree: low-degree nodes tend
    # to need little fill-in, which makes the early exit below effective.
    candidates = sorted(graph_dict, key=lambda node: len(graph_dict[node]))

    # A minimum-degree node adjacent to every other node means the graph is
    # complete: abort condition.
    if len(graph_dict[candidates[0]]) == len(graph_dict) - 1:
        return None

    best_node = None
    best_fill_in = sys.maxsize
    for node in candidates:
        nbrs = graph_dict[node]
        # Count each missing edge inside `nbrs` once from each endpoint
        # (so `doubled` is twice the fill-in of `node`).
        doubled = 0
        for nbr in nbrs:
            # Neighbors of `node` that `nbr` is not connected to; the -1
            # excludes `nbr` itself from the count.
            doubled += len(nbrs - graph_dict[nbr]) - 1
            if doubled >= 2 * best_fill_in:
                # Already no better than the current best: stop counting.
                break

        fill_in = doubled / 2  # undo the double counting

        if fill_in < best_fill_in:
            if fill_in == 0:
                return node  # cannot do better than zero fill-in
            best_fill_in = fill_in
            best_node = node

    return best_node
181
+
182
+
183
@nx._dispatchable(returns_graph=True)
def treewidth_decomp(G, heuristic=min_fill_in_heuristic):
    """Returns a treewidth decomposition using the passed heuristic.

    Parameters
    ----------
    G : NetworkX graph
    heuristic : heuristic function

    Returns
    -------
    Treewidth decomposition : (int, Graph) tuple
        2-tuple with treewidth and the corresponding decomposed tree.
    """
    # Work on a dict-of-sets copy (self-loops dropped) so G is not mutated.
    graph_dict = {node: set(G[node]) - {node} for node in G}

    # Elimination order, recorded as (node, neighbors-at-elimination) pairs.
    node_stack = []

    elim_node = heuristic(graph_dict)
    while elim_node is not None:
        nbrs = graph_dict[elim_node]

        # Turn the neighborhood of the eliminated node into a clique.
        for u in nbrs:
            graph_dict[u].update(nbrs - {u})

        # Remember the future bag for this node.
        node_stack.append((elim_node, nbrs))

        # Remove the node from the working graph.
        for u in nbrs:
            graph_dict[u].remove(elim_node)
        del graph_dict[elim_node]

        elim_node = heuristic(graph_dict)

    # Heuristic hit its abort condition; the remaining nodes form one bag.
    decomp = nx.Graph()
    first_bag = frozenset(graph_dict)
    decomp.add_node(first_bag)

    treewidth = len(first_bag) - 1

    # Unwind the elimination order, attaching each node's bag to some bag
    # that already contains all of its neighbors.
    while node_stack:
        curr_node, nbrs = node_stack.pop()

        # Fall back to the first bag when no containing bag exists.
        old_bag = next((bag for bag in decomp.nodes if nbrs <= bag), first_bag)

        # The new bag is the node together with its neighbors.
        nbrs.add(curr_node)
        new_bag = frozenset(nbrs)

        treewidth = max(treewidth, len(new_bag) - 1)

        # Adding the edge implicitly also adds the new bag as a node.
        decomp.add_edge(old_bag, new_bag)

    return treewidth, decomp
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/vertex_cover.py ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for computing an approximate minimum weight vertex cover.
2
+
3
+ A |vertex cover|_ is a subset of nodes such that each edge in the graph
4
+ is incident to at least one node in the subset.
5
+
6
+ .. _vertex cover: https://en.wikipedia.org/wiki/Vertex_cover
7
+ .. |vertex cover| replace:: *vertex cover*
8
+
9
+ """
10
+
11
+ import networkx as nx
12
+
13
+ __all__ = ["min_weighted_vertex_cover"]
14
+
15
+
16
@nx._dispatchable(node_attrs="weight")
def min_weighted_vertex_cover(G, weight=None):
    r"""Returns an approximate minimum weighted vertex cover.

    The set of nodes returned by this function is guaranteed to be a
    vertex cover, and the total weight of the set is guaranteed to be at
    most twice the total weight of the minimum weight vertex cover. In
    other words,

    .. math::

       w(S) \leq 2 * w(S^*),

    where $S$ is the vertex cover returned by this function,
    $S^*$ is the vertex cover of minimum weight out of all vertex
    covers of the graph, and $w$ is the function that computes the
    sum of the weights of each node in that given set.

    Parameters
    ----------
    G : NetworkX graph

    weight : string, optional (default = None)
        If None, every node has weight 1. If a string, use this node
        attribute as the node weight. A node without this attribute is
        assumed to have weight 1.

    Returns
    -------
    min_weighted_cover : set
        Returns a set of nodes whose weight sum is no more than twice
        the weight sum of the minimum weight vertex cover.

    Notes
    -----
    For a directed graph, a vertex cover has the same definition: a set
    of nodes such that each edge in the graph is incident to at least
    one node in the set. Whether the node is the head or tail of the
    directed edge is ignored.

    This is the local-ratio algorithm for computing an approximate
    vertex cover. It iterates over the edges, charging the cheaper
    endpoint's cost to the other endpoint and adding the cheaper one to
    the cover. The worst-case runtime of this implementation is
    $O(m \log n)$, where $n$ is the number of nodes and $m$ the number
    of edges in the graph.

    References
    ----------
    .. [1] Bar-Yehuda, R., and Even, S. (1985). "A local-ratio theorem for
       approximating the weighted vertex cover problem."
       *Annals of Discrete Mathematics*, 25, 27–46
       <http://www.cs.technion.ac.il/~reuven/PDF/vc_lr.pdf>

    """
    # Remaining (not yet charged) cost of each node.
    remaining = dict(G.nodes(data=weight, default=1))
    cover = set()
    for u, v in G.edges():
        # Skip edges already covered by an earlier choice.
        if u in cover or v in cover:
            continue
        # Pick the cheaper endpoint (ties go to u, matching edge order)
        # and charge its cost against the other endpoint.
        if remaining[u] <= remaining[v]:
            cheap, costly = u, v
        else:
            cheap, costly = v, u
        cover.add(cheap)
        remaining[costly] -= remaining[cheap]
    return cover
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__init__.py ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ from networkx.algorithms.assortativity.connectivity import *
2
+ from networkx.algorithms.assortativity.correlation import *
3
+ from networkx.algorithms.assortativity.mixing import *
4
+ from networkx.algorithms.assortativity.neighbor_degree import *
5
+ from networkx.algorithms.assortativity.pairs import *
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/connectivity.py ADDED
@@ -0,0 +1,122 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from collections import defaultdict
2
+
3
+ import networkx as nx
4
+
5
+ __all__ = ["average_degree_connectivity"]
6
+
7
+
8
@nx._dispatchable(edge_attrs="weight")
def average_degree_connectivity(
    G, source="in+out", target="in+out", nodes=None, weight=None
):
    r"""Compute the average degree connectivity of graph.

    The average degree connectivity is the average nearest neighbor degree of
    nodes with degree k. For weighted graphs, an analogous measure can
    be computed using the weighted average neighbors degree defined in
    [1]_, for a node `i`, as

    .. math::

        k_{nn,i}^{w} = \frac{1}{s_i} \sum_{j \in N(i)} w_{ij} k_j

    where `s_i` is the weighted degree of node `i`,
    `w_{ij}` is the weight of the edge that links `i` and `j`,
    and `N(i)` are the neighbors of node `i`.

    Parameters
    ----------
    G : NetworkX graph

    source :  "in"|"out"|"in+out" (default:"in+out")
       Directed graphs only. Use "in"- or "out"-degree for source node.

    target : "in"|"out"|"in+out" (default:"in+out")
       Directed graphs only. Use "in"- or "out"-degree for target node.

    nodes : list or iterable (optional)
        Compute neighbor connectivity for these nodes. The default is all
        nodes.

    weight : string or None, optional (default=None)
       The edge attribute that holds the numerical value used as a weight.
       If None, then each edge has weight 1.

    Returns
    -------
    d : dict
       A dictionary keyed by degree k with the value of average connectivity.

    Raises
    ------
    NetworkXError
        If either `source` or `target` are not one of 'in',
        'out', or 'in+out'.
        If either `source` or `target` is passed for an undirected graph.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> G.edges[1, 2]["weight"] = 3
    >>> nx.average_degree_connectivity(G)
    {1: 2.0, 2: 1.5}
    >>> nx.average_degree_connectivity(G, weight="weight")
    {1: 2.0, 2: 1.75}

    See Also
    --------
    average_neighbor_degree

    References
    ----------
    .. [1] A. Barrat, M. Barthélemy, R. Pastor-Satorras, and A. Vespignani,
       "The architecture of complex weighted networks".
       PNAS 101 (11): 3747–3752 (2004).
    """
    # First, determine the type of neighbors and the type of degree to use.
    if G.is_directed():
        if source not in ("in", "out", "in+out"):
            raise nx.NetworkXError('source must be one of "in", "out", or "in+out"')
        if target not in ("in", "out", "in+out"):
            raise nx.NetworkXError('target must be one of "in", "out", or "in+out"')
        direction = {"out": G.out_degree, "in": G.in_degree, "in+out": G.degree}
        neighbor_funcs = {
            "out": G.successors,
            "in": G.predecessors,
            "in+out": G.neighbors,
        }
        source_degree = direction[source]
        target_degree = direction[target]
        neighbors = neighbor_funcs[source]
        # `reverse` indicates whether to look at the in-edge when
        # computing the weight of an edge.
        reverse = source == "in"
    else:
        if source != "in+out" or target != "in+out":
            # Fixed: this message had a stray f-string prefix (no
            # placeholders), flagged by linters as F541.
            raise nx.NetworkXError(
                "source and target arguments are only supported for directed graphs"
            )
        source_degree = G.degree
        target_degree = G.degree
        neighbors = G.neighbors
        reverse = False
    dsum = defaultdict(int)
    dnorm = defaultdict(int)
    # Check if `source_nodes` is actually a single node in the graph.
    source_nodes = source_degree(nodes)
    if nodes in G:
        source_nodes = [(nodes, source_degree(nodes))]
    for n, k in source_nodes:
        nbrdeg = target_degree(neighbors(n))
        if weight is None:
            s = sum(d for n, d in nbrdeg)
        else:  # weight nbr degree by weight of (n,nbr) edge
            if reverse:
                s = sum(G[nbr][n].get(weight, 1) * d for nbr, d in nbrdeg)
            else:
                s = sum(G[n][nbr].get(weight, 1) * d for nbr, d in nbrdeg)
        dnorm[k] += source_degree(n, weight=weight)
        dsum[k] += s

    # normalize
    return {k: avg if dnorm[k] == 0 else avg / dnorm[k] for k, avg in dsum.items()}
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/correlation.py ADDED
@@ -0,0 +1,302 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Node assortativity coefficients and correlation measures."""
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms.assortativity.mixing import (
5
+ attribute_mixing_matrix,
6
+ degree_mixing_matrix,
7
+ )
8
+ from networkx.algorithms.assortativity.pairs import node_degree_xy
9
+
10
+ __all__ = [
11
+ "degree_pearson_correlation_coefficient",
12
+ "degree_assortativity_coefficient",
13
+ "attribute_assortativity_coefficient",
14
+ "numeric_assortativity_coefficient",
15
+ ]
16
+
17
+
18
@nx._dispatchable(edge_attrs="weight")
def degree_assortativity_coefficient(G, x="out", y="in", weight=None, nodes=None):
    """Compute degree assortativity of graph.

    Assortativity measures the similarity of connections
    in the graph with respect to the node degree.

    Parameters
    ----------
    G : NetworkX graph

    x: string ('in','out')
       The degree type for source node (directed graphs only).

    y: string ('in','out')
       The degree type for target node (directed graphs only).

    weight: string or None, optional (default=None)
       The edge attribute that holds the numerical value used
       as a weight.  If None, then each edge has weight 1.
       The degree is the sum of the edge weights adjacent to the node.

    nodes: list or iterable (optional)
        Compute degree assortativity only for nodes in container.
        The default is all nodes.

    Returns
    -------
    r : float
       Assortativity of graph by degree.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> r = nx.degree_assortativity_coefficient(G)
    >>> print(f"{r:3.1f}")
    -0.5

    See Also
    --------
    attribute_assortativity_coefficient
    numeric_assortativity_coefficient
    degree_mixing_dict
    degree_mixing_matrix

    Notes
    -----
    This computes Eq. (21) in Ref. [1]_ , where e is the joint
    probability distribution (mixing matrix) of the degrees.  If G is
    directed than the matrix e is the joint probability of the
    user-specified degree type for the source and target.

    References
    ----------
    .. [1] M. E. J. Newman, Mixing patterns in networks,
       Physical Review E, 67 026126, 2003
    .. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M.
       Edge direction and the structure of networks, PNAS 107, 10815-20 (2010).
    """
    if nodes is None:
        nodes = G.nodes

    # Collect every degree value that can appear on either end of an edge,
    # so the mixing matrix has a row/column for each.
    if G.is_directed():
        degrees = set()
        if "in" in (x, y):
            degrees.update(d for _, d in G.in_degree(nodes, weight=weight))
        if "out" in (x, y):
            degrees.update(d for _, d in G.out_degree(nodes, weight=weight))
    else:
        degrees = {d for _, d in G.degree(nodes, weight=weight)}

    mapping = {deg: idx for idx, deg in enumerate(degrees)}
    M = degree_mixing_matrix(G, x=x, y=y, nodes=nodes, weight=weight, mapping=mapping)

    return _numeric_ac(M, mapping=mapping)
101
+
102
+
103
@nx._dispatchable(edge_attrs="weight")
def degree_pearson_correlation_coefficient(G, x="out", y="in", weight=None, nodes=None):
    """Compute degree assortativity of graph.

    Assortativity measures the similarity of connections
    in the graph with respect to the node degree.

    This is the same as degree_assortativity_coefficient but uses the
    potentially faster scipy.stats.pearsonr function.

    Parameters
    ----------
    G : NetworkX graph

    x: string ('in','out')
       The degree type for source node (directed graphs only).

    y: string ('in','out')
       The degree type for target node (directed graphs only).

    weight: string or None, optional (default=None)
       The edge attribute that holds the numerical value used
       as a weight.  If None, then each edge has weight 1.
       The degree is the sum of the edge weights adjacent to the node.

    nodes: list or iterable (optional)
        Compute pearson correlation of degrees only for specified nodes.
        The default is all nodes.

    Returns
    -------
    r : float
       Assortativity of graph by degree.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> r = nx.degree_pearson_correlation_coefficient(G)
    >>> print(f"{r:3.1f}")
    -0.5

    Notes
    -----
    This calls scipy.stats.pearsonr.

    References
    ----------
    .. [1] M. E. J. Newman, Mixing patterns in networks
       Physical Review E, 67 026126, 2003
    .. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M.
       Edge direction and the structure of networks, PNAS 107, 10815-20 (2010).
    """
    import scipy as sp

    # One (source_degree, target_degree) pair per edge; correlate the two.
    pairs = node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight)
    src_degrees, tgt_degrees = zip(*pairs)
    return float(sp.stats.pearsonr(src_degrees, tgt_degrees)[0])
160
+
161
+
162
@nx._dispatchable(node_attrs="attribute")
def attribute_assortativity_coefficient(G, attribute, nodes=None):
    """Compute assortativity for node attributes.

    Assortativity measures the similarity of connections
    in the graph with respect to the given attribute.

    Parameters
    ----------
    G : NetworkX graph

    attribute : string
        Node attribute key

    nodes: list or iterable (optional)
        Compute attribute assortativity for nodes in container.
        The default is all nodes.

    Returns
    -------
    r: float
       Assortativity of graph for given attribute

    Examples
    --------
    >>> G = nx.Graph()
    >>> G.add_nodes_from([0, 1], color="red")
    >>> G.add_nodes_from([2, 3], color="blue")
    >>> G.add_edges_from([(0, 1), (2, 3)])
    >>> print(nx.attribute_assortativity_coefficient(G, "color"))
    1.0

    Notes
    -----
    This computes Eq. (2) in Ref. [1]_ , (trace(M)-sum(M^2))/(1-sum(M^2)),
    where M is the joint probability distribution (mixing matrix)
    of the specified attribute.

    References
    ----------
    .. [1] M. E. J. Newman, Mixing patterns in networks,
       Physical Review E, 67 026126, 2003
    """
    # The mixing matrix carries everything needed for Newman's Eq. (2).
    mixing = attribute_mixing_matrix(G, attribute, nodes)
    return attribute_ac(mixing)
207
+
208
+
209
@nx._dispatchable(node_attrs="attribute")
def numeric_assortativity_coefficient(G, attribute, nodes=None):
    """Compute assortativity for numerical node attributes.

    Assortativity measures the similarity of connections
    in the graph with respect to the given numeric attribute.

    Parameters
    ----------
    G : NetworkX graph

    attribute : string
        Node attribute key.

    nodes: list or iterable (optional)
        Compute numeric assortativity only for attributes of nodes in
        container. The default is all nodes.

    Returns
    -------
    r: float
       Assortativity of graph for given attribute

    Examples
    --------
    >>> G = nx.Graph()
    >>> G.add_nodes_from([0, 1], size=2)
    >>> G.add_nodes_from([2, 3], size=3)
    >>> G.add_edges_from([(0, 1), (2, 3)])
    >>> print(nx.numeric_assortativity_coefficient(G, "size"))
    1.0

    Notes
    -----
    This computes Eq. (21) in Ref. [1]_ , which is the Pearson correlation
    coefficient of the specified (scalar valued) attribute across edges.

    References
    ----------
    .. [1] M. E. J. Newman, Mixing patterns in networks
       Physical Review E, 67 026126, 2003
    """
    if nodes is None:
        nodes = G.nodes
    # One matrix row/column per distinct attribute value.
    values = {G.nodes[n][attribute] for n in nodes}
    mapping = {val: idx for idx, val in enumerate(values)}
    M = attribute_mixing_matrix(G, attribute, nodes, mapping)
    return _numeric_ac(M, mapping)
257
+
258
+
259
def attribute_ac(M):
    """Compute assortativity for attribute matrix M.

    Parameters
    ----------
    M : numpy.ndarray
        2D ndarray representing the attribute mixing matrix.

    Notes
    -----
    This computes Eq. (2) in Ref. [1]_ , (trace(e)-sum(e^2))/(1-sum(e^2)),
    where e is the joint probability distribution (mixing matrix)
    of the specified attribute.

    References
    ----------
    .. [1] M. E. J. Newman, Mixing patterns in networks,
       Physical Review E, 67 026126, 2003
    """
    total = M.sum()
    if total != 1.0:
        # Normalize counts into a joint probability distribution.
        M = M / total
    sq_sum = (M @ M).sum()
    return float((M.trace() - sq_sum) / (1 - sq_sum))
284
+
285
+
286
+ def _numeric_ac(M, mapping):
287
+ # M is a 2D numpy array
288
+ # numeric assortativity coefficient, pearsonr
289
+ import numpy as np
290
+
291
+ if M.sum() != 1.0:
292
+ M = M / M.sum()
293
+ x = np.array(list(mapping.keys()))
294
+ y = x # x and y have the same support
295
+ idx = list(mapping.values())
296
+ a = M.sum(axis=0)
297
+ b = M.sum(axis=1)
298
+ vara = (a[idx] * x**2).sum() - ((a[idx] * x).sum()) ** 2
299
+ varb = (b[idx] * y**2).sum() - ((b[idx] * y).sum()) ** 2
300
+ xy = np.outer(x, y)
301
+ ab = np.outer(a[idx], b[idx])
302
+ return float((xy * (M - ab)).sum() / np.sqrt(vara * varb))
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/mixing.py ADDED
@@ -0,0 +1,255 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Mixing matrices for node attributes and degree.
3
+ """
4
+
5
+ import networkx as nx
6
+ from networkx.algorithms.assortativity.pairs import node_attribute_xy, node_degree_xy
7
+ from networkx.utils import dict_to_numpy_array
8
+
9
+ __all__ = [
10
+ "attribute_mixing_matrix",
11
+ "attribute_mixing_dict",
12
+ "degree_mixing_matrix",
13
+ "degree_mixing_dict",
14
+ "mixing_dict",
15
+ ]
16
+
17
+
18
@nx._dispatchable(node_attrs="attribute")
def attribute_mixing_dict(G, attribute, nodes=None, normalized=False):
    """Returns dictionary representation of mixing matrix for attribute.

    Parameters
    ----------
    G : graph
       NetworkX graph object.

    attribute : string
       Node attribute key.

    nodes: list or iterable (optional)
        Use nodes in container to build the dict. The default is all nodes.

    normalized : bool (default=False)
       Return counts if False or probabilities if True.

    Examples
    --------
    >>> G = nx.Graph()
    >>> G.add_nodes_from([0, 1], color="red")
    >>> G.add_nodes_from([2, 3], color="blue")
    >>> G.add_edge(1, 3)
    >>> d = nx.attribute_mixing_dict(G, "color")
    >>> print(d["red"]["blue"])
    1
    >>> print(d["blue"]["red"])  # d symmetric for undirected graphs
    1

    Returns
    -------
    d : dictionary
       Counts or joint probability of occurrence of attribute pairs.
    """
    # Stream (attr_u, attr_v) pairs per edge straight into the dict builder.
    return mixing_dict(node_attribute_xy(G, attribute, nodes), normalized=normalized)
55
+
56
+
57
@nx._dispatchable(node_attrs="attribute")
def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, normalized=True):
    """Returns mixing matrix for attribute.

    Parameters
    ----------
    G : graph
       NetworkX graph object.

    attribute : string
       Node attribute key.

    nodes: list or iterable (optional)
        Use only nodes in container to build the matrix. The default is
        all nodes.

    mapping : dictionary, optional
       Mapping from node attribute to integer index in matrix.
       If not specified, an arbitrary ordering will be used.

    normalized : bool (default=True)
       Return counts if False or probabilities if True.

    Returns
    -------
    m: numpy array
       Counts or joint probability of occurrence of attribute pairs.

    Notes
    -----
    If each node has a unique attribute value, the unnormalized mixing matrix
    will be equal to the adjacency matrix. To get a denser mixing matrix,
    the rounding can be performed to form groups of nodes with equal values.
    For example, the exact height of persons in cm (180.79155222, 163.9080892,
    163.30095355, 167.99016217, 168.21590163, ...) can be rounded to (180, 163,
    163, 168, 168, ...).

    Definitions of attribute mixing matrix vary on whether the matrix
    should include rows for attribute values that don't arise. Here we
    do not include such empty-rows. But you can force them to appear
    by inputting a `mapping` that includes those values.

    Examples
    --------
    >>> G = nx.path_graph(3)
    >>> gender = {0: "male", 1: "female", 2: "female"}
    >>> nx.set_node_attributes(G, gender, "gender")
    >>> mapping = {"male": 0, "female": 1}
    >>> mix_mat = nx.attribute_mixing_matrix(G, "gender", mapping=mapping)
    >>> mix_mat
    array([[0.  , 0.25],
           [0.25, 0.5 ]])
    """
    # Build the count dict first, then lay it out as an array.
    counts = attribute_mixing_dict(G, attribute, nodes)
    matrix = dict_to_numpy_array(counts, mapping=mapping)
    if normalized:
        matrix = matrix / matrix.sum()
    return matrix
115
+
116
+
117
@nx._dispatchable(edge_attrs="weight")
def degree_mixing_dict(G, x="out", y="in", weight=None, nodes=None, normalized=False):
    """Returns dictionary representation of mixing matrix for degree.

    Parameters
    ----------
    G : graph
        NetworkX graph object.

    x: string ('in','out')
       The degree type for source node (directed graphs only).

    y: string ('in','out')
       The degree type for target node (directed graphs only).

    weight: string or None, optional (default=None)
       The edge attribute that holds the numerical value used
       as a weight.  If None, then each edge has weight 1.
       The degree is the sum of the edge weights adjacent to the node.

    normalized : bool (default=False)
        Return counts if False or probabilities if True.

    Returns
    -------
    d: dictionary
       Counts or joint probability of occurrence of degree pairs.
    """
    # Stream (deg_u, deg_v) pairs per edge straight into the dict builder.
    return mixing_dict(
        node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight),
        normalized=normalized,
    )
147
+
148
+
149
@nx._dispatchable(edge_attrs="weight")
def degree_mixing_matrix(
    G, x="out", y="in", weight=None, nodes=None, normalized=True, mapping=None
):
    """Returns mixing matrix for degree.

    Parameters
    ----------
    G : graph
       NetworkX graph object.

    x: string ('in','out')
       The degree type for source node (directed graphs only).

    y: string ('in','out')
       The degree type for target node (directed graphs only).

    nodes: list or iterable (optional)
        Build the matrix using only nodes in container.
        The default is all nodes.

    weight: string or None, optional (default=None)
       The edge attribute that holds the numerical value used
       as a weight. If None, then each edge has weight 1.
       The degree is the sum of the edge weights adjacent to the node.

    normalized : bool (default=True)
       Return counts if False or probabilities if True.

    mapping : dictionary, optional
       Mapping from node degree to integer index in matrix.
       If not specified, an arbitrary ordering will be used.

    Returns
    -------
    m: numpy array
       Counts, or joint probability, of occurrence of node degree.

    Notes
    -----
    Definitions of degree mixing matrix vary on whether the matrix
    should include rows for degree values that don't arise. Here we
    do not include such empty-rows. But you can force them to appear
    by inputting a `mapping` that includes those values. See examples.

    Examples
    --------
    >>> G = nx.star_graph(3)
    >>> mix_mat = nx.degree_mixing_matrix(G)
    >>> mix_mat
    array([[0. , 0.5],
           [0.5, 0. ]])

    If you want every possible degree to appear as a row, even if no nodes
    have that degree, use `mapping` as follows,

    >>> max_degree = max(deg for n, deg in G.degree)
    >>> mapping = {x: x for x in range(max_degree + 1)}  # identity mapping
    >>> mix_mat = nx.degree_mixing_matrix(G, mapping=mapping)
    >>> mix_mat
    array([[0. , 0. , 0. , 0. ],
           [0. , 0. , 0. , 0.5],
           [0. , 0. , 0. , 0. ],
           [0. , 0.5, 0. , 0. ]])
    """
    d = degree_mixing_dict(G, x=x, y=y, nodes=nodes, weight=weight)
    a = dict_to_numpy_array(d, mapping=mapping)
    if normalized:
        # Convert raw counts into a joint probability distribution.
        a = a / a.sum()
    return a
219
+
220
+
221
def mixing_dict(xy, normalized=False):
    """Returns a dictionary representation of mixing matrix.

    Parameters
    ----------
    xy : list or container of two-tuples
       Pairs of (x,y) items.

    normalized : bool (default=False)
       Return counts if False or probabilities if True.

    Returns
    -------
    d: dictionary
       Counts or joint probability of occurrence of values in xy.
    """
    d = {}
    psum = 0.0  # total number of pairs seen, used for normalization
    for x, y in xy:
        # Ensure both values appear as outer keys so the "matrix" is square.
        if x not in d:
            d[x] = {}
        if y not in d:
            d[y] = {}
        v = d[x].get(y, 0)
        d[x][y] = v + 1
        psum += 1

    if normalized:
        for _, jdict in d.items():
            for j in jdict:
                jdict[j] /= psum
    return d
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/neighbor_degree.py ADDED
@@ -0,0 +1,160 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import networkx as nx
2
+
3
+ __all__ = ["average_neighbor_degree"]
4
+
5
+
6
@nx._dispatchable(edge_attrs="weight")
def average_neighbor_degree(G, source="out", target="out", nodes=None, weight=None):
    r"""Returns the average degree of the neighborhood of each node.

    In an undirected graph, the neighborhood `N(i)` of node `i` contains the
    nodes that are connected to `i` by an edge.

    For directed graphs, `N(i)` is defined according to the parameter `source`:

        - if source is 'in', then `N(i)` consists of predecessors of node `i`.
        - if source is 'out', then `N(i)` consists of successors of node `i`.
        - if source is 'in+out', then `N(i)` is both predecessors and successors.

    The average neighborhood degree of a node `i` is

    .. math::

        k_{nn,i} = \frac{1}{|N(i)|} \sum_{j \in N(i)} k_j

    where `N(i)` are the neighbors of node `i` and `k_j` is
    the degree of node `j` which belongs to `N(i)`. For weighted
    graphs, an analogous measure can be defined [1]_,

    .. math::

        k_{nn,i}^{w} = \frac{1}{s_i} \sum_{j \in N(i)} w_{ij} k_j

    where `s_i` is the weighted degree of node `i`, `w_{ij}`
    is the weight of the edge that links `i` and `j` and
    `N(i)` are the neighbors of node `i`.


    Parameters
    ----------
    G : NetworkX graph

    source : string ("in"|"out"|"in+out"), optional (default="out")
       Directed graphs only.
       Use "in"- or "out"-neighbors of source node.

    target : string ("in"|"out"|"in+out"), optional (default="out")
       Directed graphs only.
       Use "in"- or "out"-degree for target node.

    nodes : list or iterable, optional (default=G.nodes)
        Compute neighbor degree only for specified nodes.

    weight : string or None, optional (default=None)
       The edge attribute that holds the numerical value used as a weight.
       If None, then each edge has weight 1.

    Returns
    -------
    d: dict
        A dictionary keyed by node to the average degree of its neighbors.

    Raises
    ------
    NetworkXError
        If either `source` or `target` are not one of 'in', 'out', or 'in+out'.
        If either `source` or `target` is given a non-default value
        for an undirected graph.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> G.edges[0, 1]["weight"] = 5
    >>> G.edges[2, 3]["weight"] = 3

    >>> nx.average_neighbor_degree(G)
    {0: 2.0, 1: 1.5, 2: 1.5, 3: 2.0}
    >>> nx.average_neighbor_degree(G, weight="weight")
    {0: 2.0, 1: 1.1666666666666667, 2: 1.25, 3: 2.0}

    >>> G = nx.DiGraph()
    >>> nx.add_path(G, [0, 1, 2, 3])
    >>> nx.average_neighbor_degree(G, source="in", target="in")
    {0: 0.0, 1: 0.0, 2: 1.0, 3: 1.0}

    >>> nx.average_neighbor_degree(G, source="out", target="out")
    {0: 1.0, 1: 1.0, 2: 0.0, 3: 0.0}

    See Also
    --------
    average_degree_connectivity

    References
    ----------
    .. [1] A. Barrat, M. Barthélemy, R. Pastor-Satorras, and A. Vespignani,
       "The architecture of complex weighted networks".
       PNAS 101 (11): 3747–3752 (2004).
    """
    if G.is_directed():
        if source == "in":
            source_degree = G.in_degree
        elif source == "out":
            source_degree = G.out_degree
        elif source == "in+out":
            source_degree = G.degree
        else:
            raise nx.NetworkXError(
                f"source argument {source} must be 'in', 'out' or 'in+out'"
            )

        if target == "in":
            target_degree = G.in_degree
        elif target == "out":
            target_degree = G.out_degree
        elif target == "in+out":
            target_degree = G.degree
        else:
            raise nx.NetworkXError(
                f"target argument {target} must be 'in', 'out' or 'in+out'"
            )
    else:
        if source != "out" or target != "out":
            # No f-string needed here: the message contains no placeholders.
            raise nx.NetworkXError(
                "source and target arguments are only supported for directed graphs"
            )
        source_degree = target_degree = G.degree

    # precompute target degrees -- should *not* be weighted degree
    t_deg = dict(target_degree())

    # Set up both predecessor and successor neighbor dicts leaving empty if not needed
    G_P = G_S = {n: {} for n in G}
    if G.is_directed():
        # "in" or "in+out" cases: G_P contains predecessors
        if "in" in source:
            G_P = G.pred
        # "out" or "in+out" cases: G_S contains successors
        if "out" in source:
            G_S = G.succ
    else:
        # undirected leave G_P empty but G_S is the adjacency
        G_S = G.adj

    # Main loop: Compute average degree of neighbors
    avg = {}
    for n, deg in source_degree(nodes, weight=weight):
        # handle degree zero average
        if deg == 0:
            avg[n] = 0.0
            continue

        # we sum over both G_P and G_S, but one of the two is usually empty.
        if weight is None:
            avg[n] = (
                sum(t_deg[nbr] for nbr in G_S[n]) + sum(t_deg[nbr] for nbr in G_P[n])
            ) / deg
        else:
            # Weighted case: weigh each neighbor's (unweighted) degree by the
            # connecting edge weight and divide by the weighted degree s_i.
            avg[n] = (
                sum(dd.get(weight, 1) * t_deg[nbr] for nbr, dd in G_S[n].items())
                + sum(dd.get(weight, 1) * t_deg[nbr] for nbr, dd in G_P[n].items())
            ) / deg
    return avg
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/pairs.py ADDED
@@ -0,0 +1,127 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Generators of x-y pairs of node data."""
2
+
3
+ import networkx as nx
4
+
5
+ __all__ = ["node_attribute_xy", "node_degree_xy"]
6
+
7
+
8
@nx._dispatchable(node_attrs="attribute")
def node_attribute_xy(G, attribute, nodes=None):
    """Yields 2-tuples of node attribute values for all edges in `G`.

    For every edge of `G` whose source endpoint lies in `nodes`, this
    generator produces a 2-tuple ``(attribute value, attribute value)``
    holding the given node-attribute of the two endpoints.

    Parameters
    ----------
    G: NetworkX graph

    attribute: key
       The node attribute key.

    nodes: list or iterable (optional)
        Use only edges that are incident to specified nodes.
        The default is all nodes.

    Yields
    ------
    (x, y): 2-tuple
        Generates 2-tuple of (attribute, attribute) values.

    Examples
    --------
    >>> G = nx.DiGraph()
    >>> G.add_node(1, color="red")
    >>> G.add_node(2, color="blue")
    >>> G.add_node(3, color="green")
    >>> G.add_edge(1, 2)
    >>> list(nx.node_attribute_xy(G, "color"))
    [('red', 'blue')]

    Notes
    -----
    For undirected graphs, each edge is produced twice, once for each edge
    representation (u, v) and (v, u), with the exception of self-loop edges
    which only appear once.
    """
    node_set = set(G) if nodes is None else set(nodes)
    node_data = G.nodes
    multigraph = G.is_multigraph()
    for u, nbrdict in G.adjacency():
        if u not in node_set:
            continue
        uattr = node_data[u].get(attribute, None)
        if multigraph:
            # Emit one pair per parallel edge (one per edge key).
            for v, keydict in nbrdict.items():
                vattr = node_data[v].get(attribute, None)
                for _ in keydict:
                    yield (uattr, vattr)
        else:
            for v in nbrdict:
                yield (uattr, node_data[v].get(attribute, None))
+
67
+
68
@nx._dispatchable(edge_attrs="weight")
def node_degree_xy(G, x="out", y="in", weight=None, nodes=None):
    """Yields 2-tuples of ``(degree, degree)`` values for edges in `G`.

    This generator yields, for each edge in `G` incident to a node in `nodes`,
    a 2-tuple of form ``(degree, degree)``. The node degrees are weighted
    when a `weight` attribute is specified.

    Parameters
    ----------
    G: NetworkX graph

    x: string ('in','out')
       The degree type for source node (directed graphs only).

    y: string ('in','out')
       The degree type for target node (directed graphs only).

    weight: string or None, optional (default=None)
       The edge attribute that holds the numerical value used
       as a weight. If None, then each edge has weight 1.
       The degree is the sum of the edge weights adjacent to the node.

    nodes: list or iterable (optional)
        Use only edges that are adjacent to specified nodes.
        The default is all nodes.

    Yields
    ------
    (x, y): 2-tuple
        Generates 2-tuple of (degree, degree) values.

    Examples
    --------
    >>> G = nx.DiGraph()
    >>> G.add_edge(1, 2)
    >>> list(nx.node_degree_xy(G, x="out", y="in"))
    [(1, 1)]
    >>> list(nx.node_degree_xy(G, x="in", y="out"))
    [(0, 0)]

    Notes
    -----
    For undirected graphs, each edge is produced twice, once for each edge
    representation (u, v) and (v, u), with the exception of self-loop edges
    which only appear once.
    """
    nodes = set(G) if nodes is None else set(nodes)
    if G.is_directed():
        direction = {"out": G.out_degree, "in": G.in_degree}
        xdeg = direction[x]
        ydeg = direction[y]
    else:
        xdeg = ydeg = G.degree

    for u, degu in xdeg(nodes, weight=weight):
        # use G.edges to treat multigraphs correctly
        neighbors = (nbr for _, nbr in G.edges(u) if nbr in nodes)
        for _, degv in ydeg(neighbors, weight=weight):
            yield degu, degv
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/asteroidal.py ADDED
@@ -0,0 +1,164 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Algorithms for asteroidal triples and asteroidal numbers in graphs.
3
+
4
+ An asteroidal triple in a graph G is a set of three non-adjacent vertices
5
+ u, v and w such that there exists a path between any two of them that avoids the
6
+ closed neighborhood of the third. More formally, v_j, v_k belongs to the same
7
+ connected component of G - N[v_i], where N[v_i] denotes the closed neighborhood
8
+ of v_i. A graph which does not contain any asteroidal triples is called
9
+ an AT-free graph. The class of AT-free graphs is a graph class for which
10
+ many NP-complete problems are solvable in polynomial time. Amongst them,
11
+ independent set and coloring.
12
+ """
13
+
14
+ import networkx as nx
15
+ from networkx.utils import not_implemented_for
16
+
17
+ __all__ = ["is_at_free", "find_asteroidal_triple"]
18
+
19
+
20
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def find_asteroidal_triple(G):
    r"""Find an asteroidal triple in the given graph.

    An asteroidal triple is a triple of non-adjacent vertices such that
    there exists a path between any two of them which avoids the closed
    neighborhood of the third. Every independent triple of vertices is
    examined with the help of a data structure called a *component
    structure*, which records which vertices fall into the same connected
    component once the closed neighborhood of a given vertex is removed
    from the graph. The trivial algorithm outlined in [1]_ is used; it
    runs in :math:`O(|V||\overline{E}| + |V||E|)` time, where the second
    term is the creation of the component structure.

    Parameters
    ----------
    G : NetworkX Graph
        The graph to check whether is AT-free or not

    Returns
    -------
    list or None
        An asteroidal triple is returned as a list of nodes. If no asteroidal
        triple exists, i.e. the graph is AT-free, then None is returned.

    Notes
    -----
    The component structure and the algorithm are described in [1]_. The
    current implementation implements the trivial algorithm for simple
    graphs.

    References
    ----------
    .. [1] Ekkehard Köhler,
       "Recognizing Graphs without asteroidal triples",
       Journal of Discrete Algorithms 2, pages 439-452, 2004.
       https://www.sciencedirect.com/science/article/pii/S157086670400019X
    """
    all_nodes = set(G.nodes)

    if len(all_nodes) < 6:
        # An asteroidal triple cannot exist in a graph with 5 or less vertices.
        return None

    comp = create_component_structure(G)

    # An asteroidal triple is an independent set, so it suffices to look at
    # every non-adjacent pair (u, v) plus a third vertex w lying outside
    # both closed neighborhoods.
    for u, v in nx.non_edges(G):
        joint_closed_nbhd = set(G[u]) | {u} | set(G[v]) | {v}
        for w in all_nodes - joint_closed_nbhd:
            # The triple qualifies iff each pair stays in one connected
            # component after removing the third vertex's closed neighborhood.
            uv_together = comp[u][v] == comp[u][w]
            vw_together = comp[v][u] == comp[v][w]
            wu_together = comp[w][u] == comp[w][v]
            if uv_together and vw_together and wu_together:
                return [u, v, w]
    return None
84
+
85
+
86
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def is_at_free(G):
    """Check if a graph is AT-free.

    A graph is AT-free when it contains no asteroidal triple. This
    function delegates to `find_asteroidal_triple`: if that search comes
    up empty the graph is AT-free and True is returned, otherwise False.

    Parameters
    ----------
    G : NetworkX Graph
        The graph to check whether is AT-free or not.

    Returns
    -------
    bool
        True if G is AT-free and False otherwise.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
    >>> nx.is_at_free(G)
    True

    >>> G = nx.cycle_graph(6)
    >>> nx.is_at_free(G)
    False
    """
    triple = find_asteroidal_triple(G)
    return triple is None
118
+
119
+
120
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def create_component_structure(G):
    r"""Create component structure for G.

    A *component structure* is an `nxn` array, denoted `c`, where `n` is
    the number of vertices, where each row and column corresponds to a vertex.

    .. math::
        c_{uv} = \begin{cases} 0, if v \in N[u] \\
            k, if v \in component k of G \setminus N[u] \end{cases}

    Where `k` is an arbitrary label for each component. The structure is used
    to simplify the detection of asteroidal triples.

    Parameters
    ----------
    G : NetworkX Graph
        Undirected, simple graph.

    Returns
    -------
    component_structure : dictionary
        A dictionary of dictionaries, keyed by pairs of vertices.

    """
    structure = {}
    for v in set(G.nodes):
        closed_nbhd = set(G[v]) | {v}
        # Vertices inside the closed neighborhood N[v] are labeled 0.
        row = dict.fromkeys(closed_nbhd, 0)

        # Label each connected component of G - N[v] with 1, 2, ...
        remainder = G.subgraph(set(G.nodes) - closed_nbhd)
        for label, component in enumerate(nx.connected_components(remainder), start=1):
            for u in component:
                row[u] = label

        structure[v] = row

    return structure
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__init__.py ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ r"""This module provides functions and operations for bipartite
2
+ graphs. Bipartite graphs `B = (U, V, E)` have two node sets `U,V` and edges in
3
+ `E` that only connect nodes from opposite sets. It is common in the literature
4
+ to use a spatial analogy referring to the two node sets as top and bottom nodes.
5
+
6
+ The bipartite algorithms are not imported into the networkx namespace
7
+ at the top level so the easiest way to use them is with:
8
+
9
+ >>> from networkx.algorithms import bipartite
10
+
11
+ NetworkX does not have a custom bipartite graph class but the Graph()
12
+ or DiGraph() classes can be used to represent bipartite graphs. However,
13
+ you have to keep track of which set each node belongs to, and make
14
+ sure that there is no edge between nodes of the same set. The convention used
15
+ in NetworkX is to use a node attribute named `bipartite` with values 0 or 1 to
16
+ identify the sets each node belongs to. This convention is not enforced in
17
+ the source code of bipartite functions, it's only a recommendation.
18
+
19
+ For example:
20
+
21
+ >>> B = nx.Graph()
22
+ >>> # Add nodes with the node attribute "bipartite"
23
+ >>> B.add_nodes_from([1, 2, 3, 4], bipartite=0)
24
+ >>> B.add_nodes_from(["a", "b", "c"], bipartite=1)
25
+ >>> # Add edges only between nodes of opposite node sets
26
+ >>> B.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
27
+
28
+ Many algorithms of the bipartite module of NetworkX require, as an argument, a
29
+ container with all the nodes that belong to one set, in addition to the bipartite
30
+ graph `B`. The functions in the bipartite package do not check that the node set
31
+ is actually correct nor that the input graph is actually bipartite.
32
+ If `B` is connected, you can find the two node sets using a two-coloring
33
+ algorithm:
34
+
35
+ >>> nx.is_connected(B)
36
+ True
37
+ >>> bottom_nodes, top_nodes = bipartite.sets(B)
38
+
39
+ However, if the input graph is not connected, there are more than one possible
40
+ colorations. This is the reason why we require the user to pass a container
41
+ with all nodes of one bipartite node set as an argument to most bipartite
42
+ functions. In the face of ambiguity, we refuse the temptation to guess and
43
+ raise an :exc:`AmbiguousSolution <networkx.AmbiguousSolution>`
44
+ Exception if the input graph for
45
+ :func:`bipartite.sets <networkx.algorithms.bipartite.basic.sets>`
46
+ is disconnected.
47
+
48
+ Using the `bipartite` node attribute, you can easily get the two node sets:
49
+
50
+ >>> top_nodes = {n for n, d in B.nodes(data=True) if d["bipartite"] == 0}
51
+ >>> bottom_nodes = set(B) - top_nodes
52
+
53
+ So you can easily use the bipartite algorithms that require, as an argument, a
54
+ container with all nodes that belong to one node set:
55
+
56
+ >>> print(round(bipartite.density(B, bottom_nodes), 2))
57
+ 0.5
58
+ >>> G = bipartite.projected_graph(B, top_nodes)
59
+
60
+ All bipartite graph generators in NetworkX build bipartite graphs with the
61
+ `bipartite` node attribute. Thus, you can use the same approach:
62
+
63
+ >>> RB = bipartite.random_graph(5, 7, 0.2)
64
+ >>> RB_top = {n for n, d in RB.nodes(data=True) if d["bipartite"] == 0}
65
+ >>> RB_bottom = set(RB) - RB_top
66
+ >>> list(RB_top)
67
+ [0, 1, 2, 3, 4]
68
+ >>> list(RB_bottom)
69
+ [5, 6, 7, 8, 9, 10, 11]
70
+
71
+ For other bipartite graph generators see
72
+ :mod:`Generators <networkx.algorithms.bipartite.generators>`.
73
+
74
+ """
75
+
76
+ from networkx.algorithms.bipartite.basic import *
77
+ from networkx.algorithms.bipartite.centrality import *
78
+ from networkx.algorithms.bipartite.cluster import *
79
+ from networkx.algorithms.bipartite.covering import *
80
+ from networkx.algorithms.bipartite.edgelist import *
81
+ from networkx.algorithms.bipartite.matching import *
82
+ from networkx.algorithms.bipartite.matrix import *
83
+ from networkx.algorithms.bipartite.projection import *
84
+ from networkx.algorithms.bipartite.redundancy import *
85
+ from networkx.algorithms.bipartite.spectral import *
86
+ from networkx.algorithms.bipartite.generators import *
87
+ from networkx.algorithms.bipartite.extendability import *
88
+ from networkx.algorithms.bipartite.link_analysis import *
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/basic.py ADDED
@@ -0,0 +1,322 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ ==========================
3
+ Bipartite Graph Algorithms
4
+ ==========================
5
+ """
6
+
7
+ import networkx as nx
8
+ from networkx.algorithms.components import connected_components
9
+ from networkx.exception import AmbiguousSolution
10
+
11
+ __all__ = [
12
+ "is_bipartite",
13
+ "is_bipartite_node_set",
14
+ "color",
15
+ "sets",
16
+ "density",
17
+ "degrees",
18
+ ]
19
+
20
+
21
@nx._dispatchable
def color(G):
    """Returns a two-coloring of the graph.

    Raises an exception if the graph is not bipartite.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    color : dictionary
        A dictionary keyed by node with a 1 or 0 as data for each node color.

    Raises
    ------
    NetworkXError
        If the graph is not two-colorable.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> c = bipartite.color(G)
    >>> print(c)
    {0: 1, 1: 0, 2: 1, 3: 0}

    You can use this to set a node attribute indicating the bipartite set:

    >>> nx.set_node_attributes(G, c, "bipartite")
    >>> print(G.nodes[0]["bipartite"])
    1
    >>> print(G.nodes[1]["bipartite"])
    0
    """
    if G.is_directed():
        import itertools

        # Edge direction is irrelevant for two-colorability, so for directed
        # graphs both predecessors and successors count as neighbors.
        def neighbors(v):
            return itertools.chain.from_iterable([G.predecessors(v), G.successors(v)])

    else:
        neighbors = G.neighbors

    color = {}
    for n in G:  # handle disconnected graphs
        # Skip nodes already colored in an earlier traversal and nodes with
        # no successors. (For directed graphs len(G[n]) counts successors
        # only; a node with only in-edges is still colored when reached from
        # its predecessor via neighbors().)
        if n in color or len(G[n]) == 0:  # skip isolates
            continue
        queue = [n]
        color[n] = 1  # nodes seen with color (1 or 0)
        while queue:
            v = queue.pop()  # list used as a stack: depth-first traversal
            c = 1 - color[v]  # opposite color of node v
            for w in neighbors(v):
                if w in color:
                    # An already-colored neighbor with the same color means
                    # an odd cycle exists, so no two-coloring is possible.
                    if color[w] == color[v]:
                        raise nx.NetworkXError("Graph is not bipartite.")
                else:
                    color[w] = c
                    queue.append(w)
    # color isolates with 0
    color.update(dict.fromkeys(nx.isolates(G), 0))
    return color
85
+
86
+
87
@nx._dispatchable
def is_bipartite(G):
    """Returns True if graph G is bipartite, False if not.

    Parameters
    ----------
    G : NetworkX graph

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> print(bipartite.is_bipartite(G))
    True

    See Also
    --------
    color, is_bipartite_node_set
    """
    # A graph is bipartite iff it admits a proper two-coloring;
    # color() raises NetworkXError exactly when no such coloring exists.
    try:
        color(G)
    except nx.NetworkXError:
        return False
    return True
111
+
112
+
113
@nx._dispatchable
def is_bipartite_node_set(G, nodes):
    """Returns True if nodes and G/nodes are a bipartition of G.

    Parameters
    ----------
    G : NetworkX graph

    nodes: list or container
      Check if nodes are one of the two bipartite node sets.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> X = set([1, 3])
    >>> bipartite.is_bipartite_node_set(G, X)
    True

    Notes
    -----
    An exception is raised if the input nodes are not distinct, because in this
    case some bipartite algorithms will yield incorrect results.
    For connected graphs the bipartite sets are unique. This function handles
    disconnected graphs.
    """
    S = set(nodes)

    # Duplicates vanish when converting to a set, so the caller's container
    # would silently disagree with the set used for the checks below.
    if len(S) < len(nodes):
        # this should maybe just return False?
        raise AmbiguousSolution(
            "The input node set contains duplicates.\n"
            "This may lead to incorrect results when using it in bipartite algorithms.\n"
            "Consider using set(nodes) as the input"
        )

    # Check each connected component separately: within a component the
    # two-coloring is unique, and S must contain exactly one whole side.
    for CC in (G.subgraph(c).copy() for c in connected_components(G)):
        X, Y = sets(CC)
        if not (
            (X.issubset(S) and Y.isdisjoint(S)) or (Y.issubset(S) and X.isdisjoint(S))
        ):
            return False
    return True
156
+
157
+
158
@nx._dispatchable
def sets(G, top_nodes=None):
    """Returns bipartite node sets of graph G.

    Raises an exception if the graph is not bipartite or if the input
    graph is disconnected and thus more than one valid solution exists.
    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    Parameters
    ----------
    G : NetworkX graph

    top_nodes : container, optional
      Container with all nodes in one bipartite node set. If not supplied
      it will be computed. But if more than one solution exists an exception
      will be raised.

    Returns
    -------
    X : set
      Nodes from one side of the bipartite graph.
    Y : set
      Nodes from the other side.

    Raises
    ------
    AmbiguousSolution
      Raised if the input bipartite graph is disconnected and no container
      with all nodes in one bipartite set is provided. When determining
      the nodes in each bipartite set more than one valid solution is
      possible if the input graph is disconnected.
    NetworkXError
      Raised if the input graph is not bipartite.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> X, Y = bipartite.sets(G)
    >>> list(X)
    [0, 2]
    >>> list(Y)
    [1, 3]

    See Also
    --------
    color

    """
    # Directed graphs are bipartitioned on their undirected shadow, so
    # weak connectivity is the relevant notion there.
    is_connected = nx.is_weakly_connected if G.is_directed() else nx.is_connected
    if top_nodes is not None:
        # Trust the caller's partition: everything not in top_nodes is bottom.
        X = set(top_nodes)
        Y = set(G) - X
        return (X, Y)
    # Without a hint the coloring is only unique on a connected graph.
    if not is_connected(G):
        raise nx.AmbiguousSolution(
            "Disconnected graph: Ambiguous solution for bipartite sets."
        )
    node_color = color(G)
    X = {n for n, is_top in node_color.items() if is_top}
    Y = {n for n, is_top in node_color.items() if not is_top}
    return (X, Y)
223
+
224
+
225
@nx._dispatchable(graphs="B")
def density(B, nodes):
    """Returns density of bipartite graph B.

    Parameters
    ----------
    B : NetworkX graph

    nodes: list or container
      Nodes in one node set of the bipartite graph.

    Returns
    -------
    d : float
       The bipartite density

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.complete_bipartite_graph(3, 2)
    >>> X = set([0, 1, 2])
    >>> bipartite.density(G, X)
    1.0
    >>> Y = set([3, 4])
    >>> bipartite.density(G, Y)
    1.0

    Notes
    -----
    The container of nodes passed as argument must contain all nodes
    in one of the two bipartite node sets to avoid ambiguity in the
    case of disconnected graphs.
    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    color
    """
    n_edges = nx.number_of_edges(B)
    if n_edges == 0:  # also covers the empty and single-node graphs
        return 0.0
    n_bottom = len(nodes)
    n_top = len(B) - n_bottom
    # The maximum possible edge count is |bottom| * |top|, doubled for
    # directed graphs where each pair admits an edge in both directions.
    possible = n_bottom * n_top
    if B.is_directed():
        possible *= 2
    return n_edges / possible
+
277
+
278
@nx._dispatchable(graphs="B", edge_attrs="weight")
def degrees(B, nodes, weight=None):
    """Returns the degrees of the two node sets in the bipartite graph B.

    Parameters
    ----------
    B : NetworkX graph

    nodes: list or container
      Nodes in one node set of the bipartite graph.

    weight : string or None, optional (default=None)
       The edge attribute that holds the numerical value used as a weight.
       If None, then each edge has weight 1.
       The degree is the sum of the edge weights adjacent to the node.

    Returns
    -------
    (degX,degY) : tuple of dictionaries
       The degrees of the two bipartite sets as dictionaries keyed by node.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.complete_bipartite_graph(3, 2)
    >>> Y = set([3, 4])
    >>> degX, degY = bipartite.degrees(G, Y)
    >>> dict(degX)
    {0: 2, 1: 2, 2: 2}

    Notes
    -----
    The container of nodes passed as argument must contain all nodes
    in one of the two bipartite node sets to avoid ambiguity in the
    case of disconnected graphs.
    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    color, density
    """
    # The complement of the supplied set is the other bipartite side.
    bottom_nodes = set(nodes)
    top_nodes = set(B) - bottom_nodes
    return (B.degree(top_nodes, weight), B.degree(bottom_nodes, weight))
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/centrality.py ADDED
@@ -0,0 +1,290 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import networkx as nx
2
+
3
+ __all__ = ["degree_centrality", "betweenness_centrality", "closeness_centrality"]
4
+
5
+
6
@nx._dispatchable(name="bipartite_degree_centrality")
def degree_centrality(G, nodes):
    r"""Compute the degree centrality for nodes in a bipartite network.

    The degree centrality of a node is its degree divided by the size of
    the *opposite* bipartite set, which is the maximum degree the node
    could possibly have [1]_. For a node `v` in bipartite sets `U`
    (with `n` nodes) and `V` (with `m` nodes):

    .. math::

        d_{v} = \frac{deg(v)}{m}, \mbox{for} v \in U ,

        d_{v} = \frac{deg(v)}{n}, \mbox{for} v \in V ,

    where `deg(v)` is the degree of node `v`.

    Parameters
    ----------
    G : graph
        A bipartite network

    nodes : list or container
        Container with all nodes in one bipartite node set.

    Returns
    -------
    centrality : dictionary
        Dictionary keyed by node with bipartite degree centrality as the
        value. Contains the nodes of *both* bipartite sets.

    Examples
    --------
    >>> G = nx.wheel_graph(5)
    >>> top_nodes = {0, 1, 2}
    >>> nx.bipartite.degree_centrality(G, nodes=top_nodes)
    {0: 2.0, 1: 1.5, 2: 1.5, 3: 1.0, 4: 1.0}

    See Also
    --------
    betweenness_centrality
    closeness_centrality
    :func:`~networkx.algorithms.bipartite.basic.sets`
    :func:`~networkx.algorithms.bipartite.basic.is_bipartite`

    Notes
    -----
    The nodes input parameter must contain all nodes in one bipartite node
    set. See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    References
    ----------
    .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
       Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
       of Social Network Analysis. Sage Publications.
       https://dx.doi.org/10.4135/9781446294413.n28
    """
    top_nodes = set(nodes)
    bottom_nodes = set(G) - top_nodes
    # Each side is normalized by the size of the opposite side.
    scale_top = 1.0 / len(bottom_nodes)
    scale_bottom = 1.0 / len(top_nodes)
    centrality = {}
    for node, deg in G.degree(top_nodes):
        centrality[node] = deg * scale_top
    for node, deg in G.degree(bottom_nodes):
        centrality[node] = deg * scale_bottom
    return centrality
79
+
80
+
81
@nx._dispatchable(name="bipartite_betweenness_centrality")
def betweenness_centrality(G, nodes):
    r"""Compute betweenness centrality for nodes in a bipartite network.

    Betweenness centrality of a node `v` is the sum of the
    fraction of all-pairs shortest paths that pass through `v`.

    Values of betweenness are normalized by the maximum possible
    value which for bipartite graphs is limited by the relative size
    of the two node sets [1]_.

    Let `n` be the number of nodes in the node set `U` and
    `m` be the number of nodes in the node set `V`, then
    nodes in `U` are normalized by dividing by

    .. math::

       \frac{1}{2} [m^2 (s + 1)^2 + m (s + 1)(2t - s - 1) - t (2s - t + 3)] ,

    where

    .. math::

        s = (n - 1) \div m , t = (n - 1) \mod m ,

    and nodes in `V` are normalized by the symmetric expression with
    `n` and `m` exchanged.

    Parameters
    ----------
    G : graph
        A bipartite graph

    nodes : list or container
        Container with all nodes in one bipartite node set.

    Returns
    -------
    betweenness : dictionary
        Dictionary keyed by node with bipartite betweenness centrality
        as the value. Contains the nodes of *both* bipartite sets.

    Examples
    --------
    >>> G = nx.cycle_graph(4)
    >>> top_nodes = {1, 2}
    >>> nx.bipartite.betweenness_centrality(G, nodes=top_nodes)
    {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25}

    See Also
    --------
    degree_centrality
    closeness_centrality
    :func:`~networkx.algorithms.bipartite.basic.sets`
    :func:`~networkx.algorithms.bipartite.basic.is_bipartite`

    Notes
    -----
    The nodes input parameter must contain all nodes in one bipartite node
    set. See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    References
    ----------
    .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
       Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
       of Social Network Analysis. Sage Publications.
       https://dx.doi.org/10.4135/9781446294413.n28
    """
    top = set(nodes)
    bottom = set(G) - top
    n = len(top)
    m = len(bottom)

    def _max_betweenness(own_size, other_size):
        # Borgatti-Halgin maximum betweenness for a node whose own bipartite
        # set has `own_size` nodes and whose opposite set has `other_size`.
        quot, rem = divmod(own_size - 1, other_size)
        return (
            (other_size**2) * ((quot + 1) ** 2)
            + other_size * (quot + 1) * (2 * rem - quot - 1)
            - rem * (2 * quot - rem + 3)
        ) / 2.0

    norm_top = _max_betweenness(n, m)
    norm_bottom = _max_betweenness(m, n)
    betweenness = nx.betweenness_centrality(G, normalized=False, weight=None)
    for node in betweenness:
        betweenness[node] /= norm_top if node in top else norm_bottom
    return betweenness
183
+
184
+
185
@nx._dispatchable(name="bipartite_closeness_centrality")
def closeness_centrality(G, nodes, normalized=True):
    r"""Compute the closeness centrality for nodes in a bipartite network.

    The closeness of a node is the distance to all other nodes in the
    graph or in the case that the graph is not connected to all other nodes
    in the connected component containing that node.

    Closeness centrality is normalized by the minimum distance possible.
    In the bipartite case the minimum distance for a node in one bipartite
    node set is 1 from all nodes in the other node set and 2 from all
    other nodes in its own set [1]_. Thus the closeness centrality
    for node `v` in the two bipartite sets `U` with `n` nodes and
    `V` with `m` nodes is

    .. math::

        c_{v} = \frac{m + 2(n - 1)}{d}, \mbox{for} v \in U,

        c_{v} = \frac{n + 2(m - 1)}{d}, \mbox{for} v \in V,

    where `d` is the sum of the distances from `v` to all other nodes.

    Parameters
    ----------
    G : graph
        A bipartite network

    nodes : list or container
        Container with all nodes in one bipartite node set.

    normalized : bool, optional
        If True (default) normalize by connected component size.

    Returns
    -------
    closeness : dictionary
        Dictionary keyed by node with bipartite closeness centrality
        as the value. Contains the nodes of *both* bipartite sets.

    Examples
    --------
    >>> G = nx.wheel_graph(5)
    >>> top_nodes = {0, 1, 2}
    >>> nx.bipartite.closeness_centrality(G, nodes=top_nodes)
    {0: 1.5, 1: 1.2, 2: 1.2, 3: 1.0, 4: 1.0}

    See Also
    --------
    betweenness_centrality
    degree_centrality
    :func:`~networkx.algorithms.bipartite.basic.sets`
    :func:`~networkx.algorithms.bipartite.basic.is_bipartite`

    Notes
    -----
    The nodes input parameter must contain all nodes in one bipartite node
    set. See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    Higher values of closeness indicate higher centrality.

    As in the unipartite case, setting normalized=True further scales the
    values by (number of reachable nodes - 1) / (size(G) - 1), so that the
    centrality of each connected component is computed separately when the
    graph is disconnected.

    References
    ----------
    .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
       Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
       of Social Network Analysis. Sage Publications.
       https://dx.doi.org/10.4135/9781446294413.n28
    """
    shortest_lengths = nx.single_source_shortest_path_length
    top = set(nodes)
    bottom = set(G) - top
    n = len(top)
    m = len(bottom)
    order = len(G)
    closeness = {}
    # The ideal (minimum possible) total distance differs per side; process
    # both sides with the same loop.
    for side, ideal_total in ((top, m + 2 * (n - 1)), (bottom, n + 2 * (m - 1))):
        for node in side:
            dists = dict(shortest_lengths(G, node))
            total = sum(dists.values())
            if total > 0.0 and order > 1:
                value = ideal_total / total
                if normalized:
                    value *= (len(dists) - 1) / (order - 1)
                closeness[node] = value
            else:
                # Isolated node (or trivial graph): closeness is zero.
                closeness[node] = 0.0
    return closeness
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/cluster.py ADDED
@@ -0,0 +1,289 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for computing clustering of pairs"""
2
+
3
+ import itertools
4
+
5
+ import networkx as nx
6
+
7
+ __all__ = [
8
+ "clustering",
9
+ "average_clustering",
10
+ "latapy_clustering",
11
+ "robins_alexander_clustering",
12
+ ]
13
+
14
+
15
def cc_dot(nu, nv):
    # Jaccard-style pairwise coefficient: shared neighbors over combined
    # neighborhood.
    shared = nu & nv
    combined = nu | nv
    return len(shared) / len(combined)


def cc_max(nu, nv):
    # Shared neighbors relative to the larger of the two neighborhoods.
    return len(nu & nv) / max(len(nu), len(nv))


def cc_min(nu, nv):
    # Shared neighbors relative to the smaller of the two neighborhoods.
    return len(nu & nv) / min(len(nu), len(nv))


# Lookup table mapping the `mode` string to its pairwise coefficient.
modes = {"dot": cc_dot, "min": cc_min, "max": cc_max}
28
+
29
+
30
@nx._dispatchable
def latapy_clustering(G, nodes=None, mode="dot"):
    r"""Compute a bipartite clustering coefficient for nodes.

    The bipartite clustering coefficient is a measure of local density
    of connections defined as [1]_:

    .. math::

       c_u = \frac{\sum_{v \in N(N(u))} c_{uv} }{|N(N(u))|}

    where `N(N(u))` are the second order neighbors of `u` in `G` excluding `u`,
    and `c_{uv}` is the pairwise clustering coefficient between nodes
    `u` and `v`.

    The mode selects the function for `c_{uv}` which can be:

    `dot`:

    .. math::

       c_{uv}=\frac{|N(u)\cap N(v)|}{|N(u) \cup N(v)|}

    `min`:

    .. math::

       c_{uv}=\frac{|N(u)\cap N(v)|}{min(|N(u)|,|N(v)|)}

    `max`:

    .. math::

       c_{uv}=\frac{|N(u)\cap N(v)|}{max(|N(u)|,|N(v)|)}


    Parameters
    ----------
    G : graph
        A bipartite graph

    nodes : list or iterable (optional)
        Compute bipartite clustering for these nodes. The default
        is all nodes in G.

    mode : string
        The pairwise bipartite clustering method to be used in the computation.
        It must be "dot", "max", or "min".

    Returns
    -------
    clustering : dictionary
        A dictionary keyed by node with the clustering coefficient value.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)  # path graphs are bipartite
    >>> c = bipartite.clustering(G)
    >>> c[0]
    0.5
    >>> c = bipartite.clustering(G, mode="min")
    >>> c[0]
    1.0

    See Also
    --------
    robins_alexander_clustering
    average_clustering
    networkx.algorithms.cluster.square_clustering

    References
    ----------
    .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
       Basic notions for the analysis of large two-mode networks.
       Social Networks 30(1), 31--48.
    """
    if not nx.algorithms.bipartite.is_bipartite(G):
        raise nx.NetworkXError("Graph is not bipartite")

    try:
        pair_cc = modes[mode]
    except KeyError as err:
        raise nx.NetworkXError(
            "Mode for bipartite clustering must be: dot, min or max"
        ) from err

    node_iter = G if nodes is None else nodes
    ccs = {}
    for v in node_iter:
        # Second-order neighbors of v (same bipartite side), excluding v.
        second_nbrs = {w for nbr in G[v] for w in G[nbr]} - {v}
        # Each second neighbor shares at least one neighbor with v, so the
        # sum is positive whenever second_nbrs is non-empty.
        total = sum(pair_cc(set(G[u]), set(G[v])) for u in second_nbrs)
        ccs[v] = total / len(second_nbrs) if second_nbrs else 0.0
    return ccs


# `clustering` is the public alias for the Latapy et al. coefficient.
clustering = latapy_clustering
133
+
134
+
135
@nx._dispatchable(name="bipartite_average_clustering")
def average_clustering(G, nodes=None, mode="dot"):
    r"""Compute the average bipartite clustering coefficient.

    A clustering coefficient for the whole graph is the average,

    .. math::

       C = \frac{1}{n}\sum_{v \in G} c_v,

    where `n` is the number of nodes in `G`.

    Similar measures for the two bipartite sets can be defined [1]_

    .. math::

       C_X = \frac{1}{|X|}\sum_{v \in X} c_v,

    where `X` is a bipartite set of `G`.

    Parameters
    ----------
    G : graph
        a bipartite graph

    nodes : list or iterable, optional
        A container of nodes to use in computing the average.
        The nodes should be either the entire graph (the default) or one of the
        bipartite sets.

    mode : string
        The pairwise bipartite clustering method.
        It must be "dot", "max", or "min"

    Returns
    -------
    clustering : float
        The average bipartite clustering for the given set of nodes or the
        entire graph if no nodes are specified.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.star_graph(3)  # star graphs are bipartite
    >>> bipartite.average_clustering(G)
    0.75
    >>> X, Y = bipartite.sets(G)
    >>> bipartite.average_clustering(G, X)
    0.0
    >>> bipartite.average_clustering(G, Y)
    1.0

    See Also
    --------
    clustering

    Notes
    -----
    The container of nodes passed to this function must contain all of the
    nodes in one of the bipartite sets ("top" or "bottom") in order to compute
    the correct average bipartite clustering coefficients.
    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.


    References
    ----------
    .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
       Basic notions for the analysis of large two-mode networks.
       Social Networks 30(1), 31--48.
    """
    # Materialize `nodes` once so that one-shot iterators (e.g. generators)
    # work correctly: the previous implementation iterated `nodes` twice
    # (once inside latapy_clustering, once for the sum), which silently
    # produced a wrong result or raised for exhausted iterators, and
    # len() does not exist for plain iterators at all.
    nodes = list(G) if nodes is None else list(nodes)
    ccs = latapy_clustering(G, nodes=nodes, mode=mode)
    return sum(ccs[v] for v in nodes) / len(nodes)
210
+
211
+
212
@nx._dispatchable
def robins_alexander_clustering(G):
    r"""Compute the bipartite clustering of G.

    Robins and Alexander [1]_ defined bipartite clustering coefficient as
    four times the number of four cycles `C_4` divided by the number of
    three paths `L_3` in a bipartite graph:

    .. math::

       CC_4 = \frac{4 * C_4}{L_3}

    Parameters
    ----------
    G : graph
        a bipartite graph

    Returns
    -------
    clustering : float
        The Robins and Alexander bipartite clustering for the input graph.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.davis_southern_women_graph()
    >>> print(round(bipartite.robins_alexander_clustering(G), 3))
    0.468

    See Also
    --------
    latapy_clustering
    networkx.algorithms.cluster.square_clustering

    References
    ----------
    .. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking
           directors: Network structure and distance in bipartite graphs.
           Computational & Mathematical Organization Theory 10(1), 69–94.

    """
    # A four-cycle needs at least 4 nodes and a three-path at least 3 edges;
    # anything smaller trivially has zero clustering.
    if G.order() < 4 or G.size() < 3:
        return 0
    three_paths = _threepaths(G)
    if three_paths == 0:  # avoid dividing by zero below
        return 0
    four_cycles = _four_cycles(G)
    return (4.0 * four_cycles) / three_paths
260
+
261
+
262
+ def _four_cycles(G):
263
+ # Also see `square_clustering` which counts squares in a similar way
264
+ cycles = 0
265
+ seen = set()
266
+ G_adj = G._adj
267
+ for v in G:
268
+ seen.add(v)
269
+ v_neighbors = set(G_adj[v])
270
+ if len(v_neighbors) < 2:
271
+ # Can't form a square without at least two neighbors
272
+ continue
273
+ two_hop_neighbors = set().union(*(G_adj[u] for u in v_neighbors))
274
+ two_hop_neighbors -= seen
275
+ for x in two_hop_neighbors:
276
+ p2 = len(v_neighbors.intersection(G_adj[x]))
277
+ cycles += p2 * (p2 - 1)
278
+ return cycles / 4
279
+
280
+
281
+ def _threepaths(G):
282
+ paths = 0
283
+ for v in G:
284
+ for u in G[v]:
285
+ for w in set(G[u]) - {v}:
286
+ paths += len(set(G[w]) - {v, u})
287
+ # Divide by two because we count each three path twice
288
+ # one for each possible starting point
289
+ return paths / 2
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/covering.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions related to graph covers."""
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms.bipartite.matching import hopcroft_karp_matching
5
+ from networkx.algorithms.covering import min_edge_cover as _min_edge_cover
6
+ from networkx.utils import not_implemented_for
7
+
8
+ __all__ = ["min_edge_cover"]
9
+
10
+
11
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(name="bipartite_min_edge_cover")
def min_edge_cover(G, matching_algorithm=None):
    """Returns a set of edges which constitutes
    the minimum edge cover of the graph.

    The smallest edge cover can be found in polynomial time by finding
    a maximum matching and extending it greedily so that all nodes
    are covered.

    Parameters
    ----------
    G : NetworkX graph
        An undirected bipartite graph.

    matching_algorithm : function
        A function that returns a maximum cardinality matching in a
        given bipartite graph. The function must take one input, the
        graph ``G``, and return a dictionary mapping each node to its
        mate. If not specified,
        :func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching`
        will be used. Other possibilities include
        :func:`~networkx.algorithms.bipartite.matching.eppstein_matching`,

    Returns
    -------
    set
        A set of the edges in a minimum edge cover of the graph, given as
        pairs of nodes. It contains both the edges `(u, v)` and `(v, u)`
        for given nodes `u` and `v` among the edges of minimum edge cover.

    Notes
    -----
    An edge cover of a graph is a set of edges such that every node of
    the graph is incident to at least one edge of the set.
    A minimum edge cover is an edge covering of smallest cardinality.

    Due to its implementation, the worst-case running time of this algorithm
    is bounded by the worst-case running time of the function
    ``matching_algorithm``.
    """
    if matching_algorithm is None:
        matching_algorithm = hopcroft_karp_matching
    if len(G) == 0:
        # The empty graph needs no edges to cover it.
        return set()
    # Delegate to the generic implementation with the bipartite matcher.
    return _min_edge_cover(G, matching_algorithm=matching_algorithm)
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/edgelist.py ADDED
@@ -0,0 +1,360 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ ********************
3
+ Bipartite Edge Lists
4
+ ********************
5
+ Read and write NetworkX graphs as bipartite edge lists.
6
+
7
+ Format
8
+ ------
9
+ You can read or write three formats of edge lists with these functions.
10
+
11
+ Node pairs with no data::
12
+
13
+ 1 2
14
+
15
+ Python dictionary as data::
16
+
17
+ 1 2 {'weight':7, 'color':'green'}
18
+
19
+ Arbitrary data::
20
+
21
+ 1 2 7 green
22
+
23
+ For each edge (u, v) the node u is assigned to part 0 and the node v to part 1.
24
+ """
25
+
26
+ __all__ = ["generate_edgelist", "write_edgelist", "parse_edgelist", "read_edgelist"]
27
+
28
+ import networkx as nx
29
+ from networkx.utils import not_implemented_for, open_file
30
+
31
+
32
@open_file(1, mode="wb")
def write_edgelist(G, path, comments="#", delimiter=" ", data=True, encoding="utf-8"):
    """Write a bipartite graph as a list of edges.

    Parameters
    ----------
    G : Graph
        A NetworkX bipartite graph
    path : file or string
        File or filename to write. If a file is provided, it must be
        opened in 'wb' mode. Filenames ending in .gz or .bz2 will be compressed.
    comments : string, optional
        The character used to indicate the start of a comment
    delimiter : string, optional
        The string used to separate values. The default is whitespace.
    data : bool or list, optional
        If False write no edge data.
        If True write a string representation of the edge data dictionary.
        If a list (or other iterable) is provided, write the keys specified
        in the list.
    encoding: string, optional
        Specify which encoding to use when writing file.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> G.add_nodes_from([0, 2], bipartite=0)
    >>> G.add_nodes_from([1, 3], bipartite=1)
    >>> nx.write_edgelist(G, "test.edgelist")
    >>> fh = open("test.edgelist_open", "wb")
    >>> nx.write_edgelist(G, fh)
    >>> nx.write_edgelist(G, "test.edgelist.gz")
    >>> nx.write_edgelist(G, "test.edgelist_nodata.gz", data=False)

    >>> G = nx.Graph()
    >>> G.add_edge(1, 2, weight=7, color="red")
    >>> nx.write_edgelist(G, "test.edgelist_bigger_nodata", data=False)
    >>> nx.write_edgelist(G, "test.edgelist_color", data=["color"])
    >>> nx.write_edgelist(G, "test.edgelist_color_weight", data=["color", "weight"])

    See Also
    --------
    write_edgelist
    generate_edgelist
    """
    # Delegate line formatting to generate_edgelist; this function only
    # handles newline termination and byte encoding.
    for edge_line in generate_edgelist(G, delimiter, data):
        path.write(f"{edge_line}\n".encode(encoding))
80
+
81
+
82
@not_implemented_for("directed")
def generate_edgelist(G, delimiter=" ", data=True):
    """Generate a single line of the bipartite graph G in edge list format.

    Parameters
    ----------
    G : NetworkX graph
        The graph is assumed to have node attribute `part` set to 0,1 representing
        the two graph parts

    delimiter : string, optional
        Separator for node labels

    data : bool or list of keys
        If False generate no edge data. If True use a dictionary
        representation of edge data. If a list of keys use a list of data
        values corresponding to the keys.

    Returns
    -------
    lines : string
        Lines of data in adjlist format.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> G.add_nodes_from([0, 2], bipartite=0)
    >>> G.add_nodes_from([1, 3], bipartite=1)
    >>> G[1][2]["weight"] = 3
    >>> G[2][3]["capacity"] = 12
    >>> for line in bipartite.generate_edgelist(G, data=False):
    ...     print(line)
    0 1
    2 1
    2 3

    >>> for line in bipartite.generate_edgelist(G):
    ...     print(line)
    0 1 {}
    2 1 {'weight': 3}
    2 3 {'capacity': 12}

    >>> for line in bipartite.generate_edgelist(G, data=["weight"]):
    ...     print(line)
    0 1
    2 1 3
    2 3
    """
    try:
        part0 = [n for n, d in G.nodes.items() if d["bipartite"] == 0]
    # Only a missing "bipartite" node attribute raises KeyError here.
    # Catching BaseException (as before) would also swallow
    # KeyboardInterrupt/SystemExit and misreport them as AttributeError.
    except KeyError as err:
        raise AttributeError("Missing node attribute `bipartite`") from err
    if data is True or data is False:
        for n in part0:
            for edge in G.edges(n, data=data):
                yield delimiter.join(map(str, edge))
    else:
        for n in part0:
            for u, v, d in G.edges(n, data=True):
                edge = [u, v]
                try:
                    edge.extend(d[k] for k in data)
                except KeyError:
                    pass  # missing data for this edge, should warn?
                yield delimiter.join(map(str, edge))
148
+
149
+
150
@nx._dispatchable(name="bipartite_parse_edgelist", graphs=None, returns_graph=True)
def parse_edgelist(
    lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True
):
    """Parse lines of an edge list representation of a bipartite graph.

    Parameters
    ----------
    lines : list or iterator of strings
        Input data in edgelist format
    comments : string, optional
        Marker for comment lines
    delimiter : string, optional
        Separator for node labels
    create_using: NetworkX graph container, optional
        Use given NetworkX graph for holding nodes or edges.
    nodetype : Python type, optional
        Convert nodes to this type.
    data : bool or list of (label,type) tuples
        If False generate no edge data or if True use a dictionary
        representation of edge data or a list tuples specifying dictionary
        key names and types for edge data.

    Returns
    -------
    G: NetworkX Graph
        The bipartite graph corresponding to lines

    Examples
    --------
    Edgelist with no data:

    >>> from networkx.algorithms import bipartite
    >>> lines = ["1 2", "2 3", "3 4"]
    >>> G = bipartite.parse_edgelist(lines, nodetype=int)
    >>> sorted(G.nodes())
    [1, 2, 3, 4]
    >>> sorted(G.nodes(data=True))
    [(1, {'bipartite': 0}), (2, {'bipartite': 0}), (3, {'bipartite': 0}), (4, {'bipartite': 1})]
    >>> sorted(G.edges())
    [(1, 2), (2, 3), (3, 4)]

    Edgelist with data in Python dictionary representation:

    >>> lines = ["1 2 {'weight':3}", "2 3 {'weight':27}", "3 4 {'weight':3.0}"]
    >>> G = bipartite.parse_edgelist(lines, nodetype=int)
    >>> sorted(G.nodes())
    [1, 2, 3, 4]
    >>> sorted(G.edges(data=True))
    [(1, 2, {'weight': 3}), (2, 3, {'weight': 27}), (3, 4, {'weight': 3.0})]

    Edgelist with data in a list:

    >>> lines = ["1 2 3", "2 3 27", "3 4 3.0"]
    >>> G = bipartite.parse_edgelist(lines, nodetype=int, data=(("weight", float),))
    >>> sorted(G.nodes())
    [1, 2, 3, 4]
    >>> sorted(G.edges(data=True))
    [(1, 2, {'weight': 3.0}), (2, 3, {'weight': 27.0}), (3, 4, {'weight': 3.0})]

    See Also
    --------
    """
    from ast import literal_eval

    G = nx.empty_graph(0, create_using)
    for line in lines:
        # Strip trailing comments, then skip blank lines.
        p = line.find(comments)
        if p >= 0:
            line = line[:p]
        if not line:
            continue
        # Split line; a valid edge needs at least two fields (u, v).
        s = line.rstrip("\n").split(delimiter)
        if len(s) < 2:
            continue
        u = s.pop(0)
        v = s.pop(0)
        d = s
        if nodetype is not None:
            try:
                u = nodetype(u)
                v = nodetype(v)
            # Exception (not BaseException) so KeyboardInterrupt/SystemExit
            # propagate instead of being misreported as conversion errors.
            except Exception as err:
                raise TypeError(
                    f"Failed to convert nodes {u},{v} to type {nodetype}."
                ) from err

        if len(d) == 0 or data is False:
            # no data or data type specified
            edgedata = {}
        elif data is True:
            # no edge types specified
            try:  # try to evaluate as dictionary
                edgedata = dict(literal_eval(" ".join(d)))
            except Exception as err:
                raise TypeError(
                    f"Failed to convert edge data ({d}) to dictionary."
                ) from err
        else:
            # convert edge data to dictionary with specified keys and type
            if len(d) != len(data):
                raise IndexError(
                    f"Edge data {d} and data_keys {data} are not the same length"
                )
            edgedata = {}
            for (edge_key, edge_type), edge_value in zip(data, d):
                try:
                    edge_value = edge_type(edge_value)
                except Exception as err:
                    raise TypeError(
                        f"Failed to convert {edge_key} data "
                        f"{edge_value} to type {edge_type}."
                    ) from err
                edgedata[edge_key] = edge_value
        # By convention the first column goes to part 0, the second to part 1.
        G.add_node(u, bipartite=0)
        G.add_node(v, bipartite=1)
        G.add_edge(u, v, **edgedata)
    return G
269
+
270
+
271
@open_file(0, mode="rb")
@nx._dispatchable(name="bipartite_read_edgelist", graphs=None, returns_graph=True)
def read_edgelist(
    path,
    comments="#",
    delimiter=None,
    create_using=None,
    nodetype=None,
    data=True,
    edgetype=None,
    encoding="utf-8",
):
    """Read a bipartite graph from a list of edges.

    Parameters
    ----------
    path : file or string
        File or filename to read. If a file is provided, it must be
        opened in 'rb' mode.
        Filenames ending in .gz or .bz2 will be decompressed.
    comments : string, optional
        The character used to indicate the start of a comment.
    delimiter : string, optional
        The string used to separate values. The default is whitespace.
    create_using : Graph container, optional,
        Use specified container to build graph. The default is networkx.Graph,
        an undirected graph.
    nodetype : int, float, str, Python type, optional
        Convert node data from strings to specified type
    data : bool or list of (label,type) tuples
        Tuples specifying dictionary key names and types for edge data
    edgetype : int, float, str, Python type, optional OBSOLETE
        Convert edge data from strings to specified type and use as 'weight'
    encoding: string, optional
        Specify which encoding to use when reading file.

    Returns
    -------
    G : graph
        A networkx Graph or other type specified with create_using

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> G.add_nodes_from([0, 2], bipartite=0)
    >>> G.add_nodes_from([1, 3], bipartite=1)
    >>> bipartite.write_edgelist(G, "test.edgelist")
    >>> G = bipartite.read_edgelist("test.edgelist")

    >>> fh = open("test.edgelist", "rb")
    >>> G = bipartite.read_edgelist(fh)
    >>> fh.close()

    >>> G = bipartite.read_edgelist("test.edgelist", nodetype=int)

    Edgelist with data in a list:

    >>> textline = "1 2 3"
    >>> fh = open("test.edgelist", "w")
    >>> d = fh.write(textline)
    >>> fh.close()
    >>> G = bipartite.read_edgelist(
    ...     "test.edgelist", nodetype=int, data=(("weight", float),)
    ... )
    >>> list(G)
    [1, 2]
    >>> list(G.edges(data=True))
    [(1, 2, {'weight': 3.0})]

    See parse_edgelist() for more examples of formatting.

    See Also
    --------
    parse_edgelist

    Notes
    -----
    Since nodes must be hashable, the function nodetype must return hashable
    types (e.g. int, float, str, frozenset - or tuples of those, etc.)
    """
    # Decode the binary stream lazily, then hand the text lines to the parser.
    # NOTE: `edgetype` is accepted for backward compatibility but unused.
    decoded_lines = (raw_line.decode(encoding) for raw_line in path)
    return parse_edgelist(
        decoded_lines,
        comments=comments,
        delimiter=delimiter,
        create_using=create_using,
        nodetype=nodetype,
        data=data,
    )
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/extendability.py ADDED
@@ -0,0 +1,105 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Provides a function for computing the extendability of a graph which is
2
+ undirected, simple, connected and bipartite and contains at least one perfect matching."""
3
+
4
+ import networkx as nx
5
+ from networkx.utils import not_implemented_for
6
+
7
+ __all__ = ["maximal_extendability"]
8
+
9
+
10
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def maximal_extendability(G):
    """Computes the extendability of a graph.

    The extendability of a graph is defined as the maximum $k$ for which `G`
    is $k$-extendable. Graph `G` is $k$-extendable if and only if `G` has a
    perfect matching and every set of $k$ independent edges can be extended
    to a perfect matching in `G`.

    Parameters
    ----------
    G : NetworkX Graph
        A fully-connected bipartite graph without self-loops

    Returns
    -------
    extendability : int

    Raises
    ------
    NetworkXError
        If the graph `G` is disconnected.
        If the graph `G` is not bipartite.
        If the graph `G` does not contain a perfect matching.
        If the residual graph of `G` is not strongly connected.

    Notes
    -----
    Definition:
    Let `G` be a simple, connected, undirected and bipartite graph with a perfect
    matching M and bipartition (U,V). The residual graph of `G`, denoted by $G_M$,
    is the graph obtained from G by directing the edges of M from V to U and the
    edges that do not belong to M from U to V.

    Lemma [1]_ :
    Let M be a perfect matching of `G`. `G` is $k$-extendable if and only if its residual
    graph $G_M$ is strongly connected and there are $k$ vertex-disjoint directed
    paths between every vertex of U and every vertex of V.

    Assuming that input graph `G` is undirected, simple, connected, bipartite and contains
    a perfect matching M, this function constructs the residual graph $G_M$ of G and
    returns the minimum value among the maximum vertex-disjoint directed paths between
    every vertex of U and every vertex of V in $G_M$. By combining the definitions
    and the lemma, this value represents the extendability of the graph `G`.

    Time complexity O($n^3$ $m^2$)) where $n$ is the number of vertices
    and $m$ is the number of edges.

    References
    ----------
    .. [1] "A polynomial algorithm for the extendability problem in bipartite graphs",
       J. Lakhal, L. Litzler, Information Processing Letters, 1998.
    .. [2] "On n-extendible graphs", M. D. Plummer, Discrete Mathematics, 31:201–210, 1980
       https://doi.org/10.1016/0012-365X(80)90037-0

    """
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph G is not connected")

    if not nx.bipartite.is_bipartite(G):
        raise nx.NetworkXError("Graph G is not bipartite")

    U, V = nx.bipartite.sets(G)

    maximum_matching = nx.bipartite.hopcroft_karp_matching(G)

    if not nx.is_perfect_matching(G, maximum_matching):
        raise nx.NetworkXError("Graph G does not contain a perfect matching")

    # Matching edges, directed from V to U. A set makes each membership
    # test below O(1); a list would cost O(|V|) per edge of G.
    pm = {(node, maximum_matching[node]) for node in V & maximum_matching.keys()}

    # Direct all the edges of G: from V to U if in the matching, else from U to V
    directed_edges = [
        (x, y) if (x in V and (x, y) in pm) or (x in U and (y, x) not in pm) else (y, x)
        for x, y in G.edges
    ]

    # Construct the residual graph of G
    residual_G = nx.DiGraph()
    residual_G.add_nodes_from(G)
    residual_G.add_edges_from(directed_edges)

    if not nx.is_strongly_connected(residual_G):
        raise nx.NetworkXError("The residual graph of G is not strongly connected")

    # The extendability is the minimum, over all pairs (u, v) with u in U and
    # v in V, of the maximum number of vertex-disjoint directed u->v paths.
    # U and V are non-empty here: a perfect matching exists, so both sides
    # of the bipartition contain matched vertices.
    return min(
        sum(1 for _ in nx.node_disjoint_paths(residual_G, u, v))
        for u in U
        for v in V
    )
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/generators.py ADDED
@@ -0,0 +1,603 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Generators and functions for bipartite graphs.
3
+ """
4
+
5
+ import math
6
+ import numbers
7
+ from functools import reduce
8
+
9
+ import networkx as nx
10
+ from networkx.utils import nodes_or_number, py_random_state
11
+
12
+ __all__ = [
13
+ "configuration_model",
14
+ "havel_hakimi_graph",
15
+ "reverse_havel_hakimi_graph",
16
+ "alternating_havel_hakimi_graph",
17
+ "preferential_attachment_graph",
18
+ "random_graph",
19
+ "gnmk_random_graph",
20
+ "complete_bipartite_graph",
21
+ ]
22
+
23
+
24
@nx._dispatchable(graphs=None, returns_graph=True)
@nodes_or_number([0, 1])
def complete_bipartite_graph(n1, n2, create_using=None):
    """Returns the complete bipartite graph `K_{n_1,n_2}`.

    Two partitions are built — nodes 0 through (n1 - 1) on one side and
    nodes n1 through (n1 + n2 - 1) on the other — and every node of the
    first partition is joined to every node of the second.

    Parameters
    ----------
    n1, n2 : integer or iterable container of nodes
        If integers, nodes are from `range(n1)` and `range(n1, n1 + n2)`.
        If a container, the elements are the nodes.
    create_using : NetworkX graph instance, (default: nx.Graph)
        Return graph of this type.

    Notes
    -----
    Unless n1 or n2 is a container of nodes, the node labels are the
    integers 0 to `n1 + n2 - 1`. When only one of the two arguments is an
    integer it is treated as a `range` of that length.

    Every node receives a 'bipartite' attribute of 0 or 1 recording which
    partition it belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.complete_bipartite_graph
    """
    G = nx.empty_graph(0, create_using)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    n1, top = n1
    n2, bottom = n2
    # When both sizes are plain integers, shift the bottom labels past the
    # top partition so the two ranges do not collide.
    if isinstance(n1, numbers.Integral) and isinstance(n2, numbers.Integral):
        bottom = [i + n1 for i in bottom]
    G.add_nodes_from(top, bipartite=0)
    G.add_nodes_from(bottom, bipartite=1)
    if len(G) != len(top) + len(bottom):
        raise nx.NetworkXError("Inputs n1 and n2 must contain distinct nodes")
    for left in top:
        G.add_edges_from((left, right) for right in bottom)
    G.graph["name"] = f"complete_bipartite_graph({len(top)}, {len(bottom)})"
    return G
68
+
69
+
70
@py_random_state(3)
@nx._dispatchable(name="bipartite_configuration_model", graphs=None, returns_graph=True)
def configuration_model(aseq, bseq, create_using=None, seed=None):
    """Returns a random bipartite graph from two given degree sequences.

    Parameters
    ----------
    aseq : list
        Degree sequence for node set A.
    bseq : list
        Degree sequence for node set B.
    create_using : NetworkX graph instance, optional
        Return graph of this type.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    The graph has two partitions: set A holds nodes 0 to (len(aseq) - 1)
    and set B holds nodes len(aseq) to (len(aseq) + len(bseq) - 1).
    Edges are formed by pairing randomly shuffled stubs, one from A and
    one from B.

    Notes
    -----
    The two sequences must have equal sums: sum(aseq) == sum(bseq).
    Without an explicit graph type a MultiGraph (allowing parallel edges)
    is produced. Passing create_using=Graph() suppresses parallel edges,
    but then the realized degree sequences may deviate from the input.

    Every node receives a 'bipartite' attribute of 0 or 1 recording which
    partition it belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.configuration_model
    """
    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    lena = len(aseq)
    lenb = len(bseq)
    suma = sum(aseq)
    sumb = sum(bseq)

    if suma != sumb:
        raise nx.NetworkXError(
            f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
        )

    G = _add_nodes_with_bipartite_label(G, lena, lenb)

    if not aseq or max(aseq) == 0:
        # no stubs on the A side means no edges at all
        return G

    # Each node label repeated once per unit of degree ("stubs").
    astubs = [v for v in range(lena) for _ in range(aseq[v])]
    bstubs = [v for v in range(lena, lena + lenb) for _ in range(bseq[v - lena])]

    # Random pairing: shuffle both stub lists, then join them positionally.
    seed.shuffle(astubs)
    seed.shuffle(bstubs)
    G.add_edges_from(zip(astubs, bstubs))

    G.name = "bipartite_configuration_model"
    return G
140
+
141
+
142
@nx._dispatchable(name="bipartite_havel_hakimi_graph", graphs=None, returns_graph=True)
def havel_hakimi_graph(aseq, bseq, create_using=None):
    """Returns a bipartite graph from two given degree sequences using a
    Havel-Hakimi style construction.

    The graph is composed of two partitions. Set A has nodes 0 to
    (len(aseq) - 1) and set B has nodes len(aseq) to
    (len(aseq) + len(bseq) - 1). Nodes from the set A are connected to
    nodes in the set B by connecting the highest degree nodes in set A
    to the highest degree nodes in set B until all stubs are connected.

    Parameters
    ----------
    aseq : list
        Degree sequence for node set A.
    bseq : list
        Degree sequence for node set B.
    create_using : NetworkX graph instance, optional
        Return graph of this type.

    Notes
    -----
    The sum of the two sequences must be equal: sum(aseq)=sum(bseq)
    If no graph type is specified use MultiGraph with parallel edges.
    If you want a graph with no parallel edges use create_using=Graph()
    but then the resulting degree sequences might not be exact.

    The nodes are assigned the attribute 'bipartite' with the value 0 or 1
    to indicate which bipartite set the node belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.havel_hakimi_graph
    """
    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    # length of each sequence
    naseq = len(aseq)
    nbseq = len(bseq)

    suma = sum(aseq)
    sumb = sum(bseq)

    if not suma == sumb:
        raise nx.NetworkXError(
            f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
        )

    G = _add_nodes_with_bipartite_label(G, naseq, nbseq)

    if len(aseq) == 0 or max(aseq) == 0:
        return G  # done if no edges

    # [remaining degree, node] pairs; the inner lists are shared, mutable
    # state — decrementing one below is visible through bstubs as well.
    astubs = [[aseq[v], v] for v in range(naseq)]
    bstubs = [[bseq[v - naseq], v] for v in range(naseq, naseq + nbseq)]
    astubs.sort()
    while astubs:
        (degree, u) = astubs.pop()  # take off largest degree node in the a set
        if degree == 0:
            break  # done, all are zero
        # connect the source to largest degree nodes in the b set;
        # re-sorting each round keeps the largest remaining degrees at the end
        bstubs.sort()
        for target in bstubs[-degree:]:
            v = target[1]
            G.add_edge(u, v)
            target[0] -= 1  # note this updates bstubs too (shared list object)
            if target[0] == 0:
                # exhausted stubs: drop the pair so it is never re-selected
                bstubs.remove(target)

    G.name = "bipartite_havel_hakimi_graph"
    return G
215
+
216
+
217
@nx._dispatchable(graphs=None, returns_graph=True)
def reverse_havel_hakimi_graph(aseq, bseq, create_using=None):
    """Returns a bipartite graph from two given degree sequences using a
    Havel-Hakimi style construction.

    Two partitions are built: set A with nodes 0 to (len(aseq) - 1) and
    set B with nodes starting at len(aseq). Repeatedly, the A-node with
    the highest remaining degree is connected to the B-nodes with the
    *lowest* remaining degrees, until all stubs are used up.

    Parameters
    ----------
    aseq : list
        Degree sequence for node set A.
    bseq : list
        Degree sequence for node set B.
    create_using : NetworkX graph instance, optional
        Return graph of this type.

    Notes
    -----
    The two sequences must have equal sums: sum(aseq) == sum(bseq).
    Without an explicit graph type a MultiGraph (allowing parallel edges)
    is produced. Passing create_using=Graph() suppresses parallel edges,
    but then the realized degree sequences may deviate from the input.

    Every node receives a 'bipartite' attribute of 0 or 1 recording which
    partition it belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.reverse_havel_hakimi_graph
    """
    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    lena = len(aseq)
    lenb = len(bseq)
    suma = sum(aseq)
    sumb = sum(bseq)

    if suma != sumb:
        raise nx.NetworkXError(
            f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
        )

    G = _add_nodes_with_bipartite_label(G, lena, lenb)

    if not aseq or max(aseq) == 0:
        # no stubs on the A side means no edges at all
        return G

    # [remaining degree, node] pairs, ordered ascending by degree.
    # The inner lists are mutated in place below, so updates made through
    # a slice element are visible in bstubs as well.
    astubs = sorted([aseq[v], v] for v in range(lena))
    bstubs = sorted([bseq[v - lena], v] for v in range(lena, lena + lenb))
    while astubs:
        degree, u = astubs.pop()  # A-node with the largest remaining degree
        if degree == 0:
            break  # every remaining degree is zero — nothing left to wire
        # attach u to the `degree` lowest-degree B-nodes
        for slot in bstubs[:degree]:
            G.add_edge(u, slot[1])
            slot[0] -= 1  # shared list object: bstubs sees this decrement
            if slot[0] == 0:
                # all of this B-node's stubs consumed; retire it
                bstubs.remove(slot)

    G.name = "bipartite_reverse_havel_hakimi_graph"
    return G
289
+
290
+
291
@nx._dispatchable(graphs=None, returns_graph=True)
def alternating_havel_hakimi_graph(aseq, bseq, create_using=None):
    """Returns a bipartite graph from two given degree sequences using
    an alternating Havel-Hakimi style construction.

    The graph is composed of two partitions. Set A has nodes 0 to
    (len(aseq) - 1) and set B has nodes len(aseq) to
    (len(aseq) + len(bseq) - 1). Nodes from the set A are connected to
    nodes in the set B by connecting the highest degree nodes in set A
    to alternatively the highest and the lowest degree nodes in set B
    until all stubs are connected.

    Parameters
    ----------
    aseq : list
        Degree sequence for node set A.
    bseq : list
        Degree sequence for node set B.
    create_using : NetworkX graph instance, optional
        Return graph of this type.

    Notes
    -----
    The sum of the two sequences must be equal: sum(aseq)=sum(bseq)
    If no graph type is specified use MultiGraph with parallel edges.
    If you want a graph with no parallel edges use create_using=Graph()
    but then the resulting degree sequences might not be exact.

    The nodes are assigned the attribute 'bipartite' with the value 0 or 1
    to indicate which bipartite set the node belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.alternating_havel_hakimi_graph
    """
    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    # length of each sequence
    naseq = len(aseq)
    nbseq = len(bseq)
    suma = sum(aseq)
    sumb = sum(bseq)

    if not suma == sumb:
        raise nx.NetworkXError(
            f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
        )

    G = _add_nodes_with_bipartite_label(G, naseq, nbseq)

    if len(aseq) == 0 or max(aseq) == 0:
        return G  # done if no edges
    # [remaining degree, node] pairs; the inner lists are shared, mutable
    # state — decrementing one below is visible through bstubs as well.
    astubs = [[aseq[v], v] for v in range(naseq)]
    bstubs = [[bseq[v - naseq], v] for v in range(naseq, naseq + nbseq)]
    while astubs:
        astubs.sort()
        (degree, u) = astubs.pop()  # take off largest degree node in the a set
        if degree == 0:
            break  # done, all are zero
        bstubs.sort()
        small = bstubs[0 : degree // 2]  # add these low degree targets
        large = bstubs[(-degree + degree // 2) :]  # now high degree targets
        # interleave high/low targets: zip truncates to the shorter list,
        # so for odd `degree` (len(large) == len(small) + 1) one element
        # of `large` is dropped here and re-appended just below
        stubs = [x for z in zip(large, small) for x in z]  # combine, sorry
        if len(stubs) < len(small) + len(large):  # check for zip truncation
            stubs.append(large.pop())
        for target in stubs:
            v = target[1]
            G.add_edge(u, v)
            target[0] -= 1  # note this updates bstubs too (shared list object)
            if target[0] == 0:
                # exhausted stubs: drop the pair so it is never re-selected
                bstubs.remove(target)

    G.name = "bipartite_alternating_havel_hakimi_graph"
    return G
367
+
368
+
369
@py_random_state(3)
@nx._dispatchable(graphs=None, returns_graph=True)
def preferential_attachment_graph(aseq, p, create_using=None, seed=None):
    """Create a bipartite graph with a preferential attachment model from
    a given single degree sequence.

    Two partitions are built: set A with nodes 0 to (len(aseq) - 1) and
    set B with nodes starting at len(aseq). How many B-nodes appear is
    random.

    Parameters
    ----------
    aseq : list
        Degree sequence for node set A.
    p : float
        Probability that a new bottom node is added.
    create_using : NetworkX graph instance, optional
        Return graph of this type.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    References
    ----------
    .. [1] Guillaume, J.L. and Latapy, M.,
       Bipartite graphs as models of complex networks.
       Physica A: Statistical Mechanics and its Applications,
       2006, 371(2), pp.795-813.
    .. [2] Jean-Loup Guillaume and Matthieu Latapy,
       Bipartite structure of all complex networks,
       Inf. Process. Lett. 90, 2004, pg. 215-221
       https://doi.org/10.1016/j.ipl.2004.03.007

    Notes
    -----
    Every node receives a 'bipartite' attribute of 0 or 1 recording which
    partition it belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.preferential_attachment_graph
    """
    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
    if G.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    if p > 1:
        raise nx.NetworkXError(f"probability {p} > 1")

    top_n = len(aseq)
    G = _add_nodes_with_bipartite_label(G, top_n, 0)
    # one inner list per A-node, the node label repeated once per stub
    pending = [[u] * aseq[u] for u in range(top_n)]
    while pending:
        stubs = pending[0]
        while stubs:
            source = stubs.pop(0)
            # NOTE: seed.random() must be drawn before the short-circuit
            # check so the random stream matches for any seed.
            if seed.random() < p or len(G) == top_n:
                # open a brand-new bottom node
                target = len(G)
            else:
                # pick an existing bottom node with probability
                # proportional to its current degree
                degree_stubs = reduce(
                    lambda acc, cur: acc + cur,
                    [[b] * G.degree(b) for b in range(top_n, len(G))],
                )
                target = seed.choice(degree_stubs)
            G.add_node(target, bipartite=1)
            G.add_edge(source, target)
        pending.pop(0)
    G.name = "bipartite_preferential_attachment_model"
    return G
439
+
440
+
441
@py_random_state(3)
@nx._dispatchable(graphs=None, returns_graph=True)
def random_graph(n, m, p, seed=None, directed=False):
    """Returns a bipartite random graph.

    This is a bipartite version of the binomial (Erdős-Rényi) graph.
    The graph is composed of two partitions. Set A has nodes 0 to
    (n - 1) and set B has nodes n to (n + m - 1).

    Parameters
    ----------
    n : int
        The number of nodes in the first bipartite set.
    m : int
        The number of nodes in the second bipartite set.
    p : float
        Probability for edge creation.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    directed : bool, optional (default=False)
        If True return a directed graph

    Notes
    -----
    The bipartite random graph algorithm chooses each of the n*m (undirected)
    or 2*nm (directed) possible edges with probability p.

    This algorithm is $O(n+m)$ where $m$ is the expected number of edges.

    The nodes are assigned the attribute 'bipartite' with the value 0 or 1
    to indicate which bipartite set the node belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.random_graph

    See Also
    --------
    gnp_random_graph, configuration_model

    References
    ----------
    .. [1] Vladimir Batagelj and Ulrik Brandes,
       "Efficient generation of large random networks",
       Phys. Rev. E, 71, 036113, 2005.
    """
    G = nx.Graph()
    G = _add_nodes_with_bipartite_label(G, n, m)
    if directed:
        G = nx.DiGraph(G)
    G.name = f"fast_gnp_random_graph({n},{m},{p})"

    if p <= 0:
        return G
    if p >= 1:
        # NOTE(review): this returns an undirected complete bipartite graph
        # even when directed=True — confirm whether that is intended.
        return nx.complete_bipartite_graph(n, m)

    lp = math.log(1.0 - p)

    def _add_geometric_edges(reverse):
        """Batagelj-Brandes geometric skipping over the n*m candidate
        pairs: jump ahead by a geometrically distributed gap instead of
        flipping a coin for each pair. With ``reverse`` the edge is added
        from the B-node to the A-node (directed second pass)."""
        v = 0
        w = -1
        while v < n:
            lr = math.log(1.0 - seed.random())
            w = w + 1 + int(lr / lp)
            # carry the overflow of w past the end of a row into v
            while w >= m and v < n:
                w = w - m
                v = v + 1
            if v < n:
                if reverse:
                    G.add_edge(n + w, v)
                else:
                    G.add_edge(v, n + w)

    # edges from the "n" set to the "m" set
    _add_geometric_edges(reverse=False)
    if directed:
        # same procedure for edges from the "m" set back to the "n" set
        _add_geometric_edges(reverse=True)

    return G
526
+
527
+
528
@py_random_state(3)
@nx._dispatchable(graphs=None, returns_graph=True)
def gnmk_random_graph(n, m, k, seed=None, directed=False):
    """Returns a random bipartite graph G_{n,m,k}.

    Produces a bipartite graph chosen randomly out of the set of all graphs
    with n top nodes, m bottom nodes, and k edges.
    The graph is composed of two sets of nodes.
    Set A has nodes 0 to (n - 1) and set B has nodes n to (n + m - 1).

    Parameters
    ----------
    n : int
        The number of nodes in the first bipartite set.
    m : int
        The number of nodes in the second bipartite set.
    k : int
        The number of edges
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    directed : bool, optional (default=False)
        If True return a directed graph

    Examples
    --------
    >>> G = nx.bipartite.gnmk_random_graph(10, 20, 50)

    See Also
    --------
    gnm_random_graph

    Notes
    -----
    If k > m * n then a complete bipartite graph is returned.

    This graph is a bipartite version of the `G_{nm}` random graph model.

    The nodes are assigned the attribute 'bipartite' with the value 0 or 1
    to indicate which bipartite set the node belongs to.

    This function is not imported in the main namespace.
    To use it use nx.bipartite.gnmk_random_graph
    """
    G = nx.Graph()
    G = _add_nodes_with_bipartite_label(G, n, m)
    if directed:
        G = nx.DiGraph(G)
    G.name = f"bipartite_gnm_random_graph({n},{m},{k})"
    # NOTE(review): with n == 1 or m == 1 this returns the graph with NO
    # edges regardless of k — confirm this early exit is intentional.
    if n == 1 or m == 1:
        return G
    max_edges = n * m  # max_edges for bipartite networks
    if k >= max_edges:  # Maybe we should raise an exception here
        return nx.complete_bipartite_graph(n, m, create_using=G)

    top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0]
    bottom = list(set(G) - set(top))
    edge_count = 0
    # rejection sampling: draw random (top, bottom) pairs, discarding
    # duplicates, until k distinct edges have been placed
    while edge_count < k:
        # generate random edge,u,v
        u = seed.choice(top)
        v = seed.choice(bottom)
        if v in G[u]:
            continue
        else:
            G.add_edge(u, v)
            edge_count += 1
    return G
596
+
597
+
598
def _add_nodes_with_bipartite_label(G, lena, lenb):
    """Add nodes 0..(lena + lenb - 1) to `G`, labeling the first `lena`
    nodes with ``bipartite=0`` and the remaining `lenb` nodes with
    ``bipartite=1``. Returns `G`."""
    G.add_nodes_from(range(lena + lenb))
    labels = {node: 0 for node in range(lena)}
    labels.update({node: 1 for node in range(lena, lena + lenb)})
    nx.set_node_attributes(G, labels, "bipartite")
    return G
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/boundary.py ADDED
@@ -0,0 +1,168 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Routines to find the boundary of a set of nodes.
2
+
3
+ An edge boundary is a set of edges, each of which has exactly one
4
+ endpoint in a given set of nodes (or, in the case of directed graphs,
5
+ the set of edges whose source node is in the set).
6
+
7
+ A node boundary of a set *S* of nodes is the set of (out-)neighbors of
8
+ nodes in *S* that are outside *S*.
9
+
10
+ """
11
+
12
+ from itertools import chain
13
+
14
+ import networkx as nx
15
+
16
+ __all__ = ["edge_boundary", "node_boundary"]
17
+
18
+
19
+ @nx._dispatchable(edge_attrs={"data": "default"}, preserve_edge_attrs="data")
20
+ def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, default=None):
21
+ """Returns the edge boundary of `nbunch1`.
22
+
23
+ The *edge boundary* of a set *S* with respect to a set *T* is the
24
+ set of edges (*u*, *v*) such that *u* is in *S* and *v* is in *T*.
25
+ If *T* is not specified, it is assumed to be the set of all nodes
26
+ not in *S*.
27
+
28
+ Parameters
29
+ ----------
30
+ G : NetworkX graph
31
+
32
+ nbunch1 : iterable
33
+ Iterable of nodes in the graph representing the set of nodes
34
+ whose edge boundary will be returned. (This is the set *S* from
35
+ the definition above.)
36
+
37
+ nbunch2 : iterable
38
+ Iterable of nodes representing the target (or "exterior") set of
39
+ nodes. (This is the set *T* from the definition above.) If not
40
+ specified, this is assumed to be the set of all nodes in `G`
41
+ not in `nbunch1`.
42
+
43
+ keys : bool
44
+ This parameter has the same meaning as in
45
+ :meth:`MultiGraph.edges`.
46
+
47
+ data : bool or object
48
+ This parameter has the same meaning as in
49
+ :meth:`MultiGraph.edges`.
50
+
51
+ default : object
52
+ This parameter has the same meaning as in
53
+ :meth:`MultiGraph.edges`.
54
+
55
+ Returns
56
+ -------
57
+ iterator
58
+ An iterator over the edges in the boundary of `nbunch1` with
59
+ respect to `nbunch2`. If `keys`, `data`, or `default`
60
+ are specified and `G` is a multigraph, then edges are returned
61
+ with keys and/or data, as in :meth:`MultiGraph.edges`.
62
+
63
+ Examples
64
+ --------
65
+ >>> G = nx.wheel_graph(6)
66
+
67
+ When nbunch2=None:
68
+
69
+ >>> list(nx.edge_boundary(G, (1, 3)))
70
+ [(1, 0), (1, 2), (1, 5), (3, 0), (3, 2), (3, 4)]
71
+
72
+ When nbunch2 is given:
73
+
74
+ >>> list(nx.edge_boundary(G, (1, 3), (2, 0)))
75
+ [(1, 0), (1, 2), (3, 0), (3, 2)]
76
+
77
+ Notes
78
+ -----
79
+ Any element of `nbunch` that is not in the graph `G` will be
80
+ ignored.
81
+
82
+ `nbunch1` and `nbunch2` are usually meant to be disjoint, but in
83
+ the interest of speed and generality, that is not required here.
84
+
85
+ """
86
+ nset1 = {n for n in nbunch1 if n in G}
87
+ # Here we create an iterator over edges incident to nodes in the set
88
+ # `nset1`. The `Graph.edges()` method does not provide a guarantee
89
+ # on the orientation of the edges, so our algorithm below must
90
+ # handle the case in which exactly one orientation, either (u, v) or
91
+ # (v, u), appears in this iterable.
92
+ if G.is_multigraph():
93
+ edges = G.edges(nset1, data=data, keys=keys, default=default)
94
+ else:
95
+ edges = G.edges(nset1, data=data, default=default)
96
+ # If `nbunch2` is not provided, then it is assumed to be the set
97
+ # complement of `nbunch1`. For the sake of efficiency, this is
98
+ # implemented by using the `not in` operator, instead of by creating
99
+ # an additional set and using the `in` operator.
100
+ if nbunch2 is None:
101
+ return (e for e in edges if (e[0] in nset1) ^ (e[1] in nset1))
102
+ nset2 = set(nbunch2)
103
+ return (
104
+ e
105
+ for e in edges
106
+ if (e[0] in nset1 and e[1] in nset2) or (e[1] in nset1 and e[0] in nset2)
107
+ )
108
+
109
+
110
+ @nx._dispatchable
111
+ def node_boundary(G, nbunch1, nbunch2=None):
112
+ """Returns the node boundary of `nbunch1`.
113
+
114
+ The *node boundary* of a set *S* with respect to a set *T* is the
115
+ set of nodes *v* in *T* such that for some *u* in *S*, there is an
116
+ edge joining *u* to *v*. If *T* is not specified, it is assumed to
117
+ be the set of all nodes not in *S*.
118
+
119
+ Parameters
120
+ ----------
121
+ G : NetworkX graph
122
+
123
+ nbunch1 : iterable
124
+ Iterable of nodes in the graph representing the set of nodes
125
+ whose node boundary will be returned. (This is the set *S* from
126
+ the definition above.)
127
+
128
+ nbunch2 : iterable
129
+ Iterable of nodes representing the target (or "exterior") set of
130
+ nodes. (This is the set *T* from the definition above.) If not
131
+ specified, this is assumed to be the set of all nodes in `G`
132
+ not in `nbunch1`.
133
+
134
+ Returns
135
+ -------
136
+ set
137
+ The node boundary of `nbunch1` with respect to `nbunch2`.
138
+
139
+ Examples
140
+ --------
141
+ >>> G = nx.wheel_graph(6)
142
+
143
+ When nbunch2=None:
144
+
145
+ >>> list(nx.node_boundary(G, (3, 4)))
146
+ [0, 2, 5]
147
+
148
+ When nbunch2 is given:
149
+
150
+ >>> list(nx.node_boundary(G, (3, 4), (0, 1, 5)))
151
+ [0, 5]
152
+
153
+ Notes
154
+ -----
155
+ Any element of `nbunch` that is not in the graph `G` will be
156
+ ignored.
157
+
158
+ `nbunch1` and `nbunch2` are usually meant to be disjoint, but in
159
+ the interest of speed and generality, that is not required here.
160
+
161
+ """
162
+ nset1 = {n for n in nbunch1 if n in G}
163
+ bdy = set(chain.from_iterable(G[v] for v in nset1)) - nset1
164
+ # If `nbunch2` is not specified, it is assumed to be the set
165
+ # complement of `nbunch1`.
166
+ if nbunch2 is not None:
167
+ bdy &= set(nbunch2)
168
+ return bdy
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/bridges.py ADDED
@@ -0,0 +1,205 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Bridge-finding algorithms."""
2
+
3
+ from itertools import chain
4
+
5
+ import networkx as nx
6
+ from networkx.utils import not_implemented_for
7
+
8
+ __all__ = ["bridges", "has_bridges", "local_bridges"]
9
+
10
+
11
+ @not_implemented_for("directed")
12
+ @nx._dispatchable
13
+ def bridges(G, root=None):
14
+ """Generate all bridges in a graph.
15
+
16
+ A *bridge* in a graph is an edge whose removal causes the number of
17
+ connected components of the graph to increase. Equivalently, a bridge is an
18
+ edge that does not belong to any cycle. Bridges are also known as cut-edges,
19
+ isthmuses, or cut arcs.
20
+
21
+ Parameters
22
+ ----------
23
+ G : undirected graph
24
+
25
+ root : node (optional)
26
+ A node in the graph `G`. If specified, only the bridges in the
27
+ connected component containing this node will be returned.
28
+
29
+ Yields
30
+ ------
31
+ e : edge
32
+ An edge in the graph whose removal disconnects the graph (or
33
+ causes the number of connected components to increase).
34
+
35
+ Raises
36
+ ------
37
+ NodeNotFound
38
+ If `root` is not in the graph `G`.
39
+
40
+ NetworkXNotImplemented
41
+ If `G` is a directed graph.
42
+
43
+ Examples
44
+ --------
45
+ The barbell graph with parameter zero has a single bridge:
46
+
47
+ >>> G = nx.barbell_graph(10, 0)
48
+ >>> list(nx.bridges(G))
49
+ [(9, 10)]
50
+
51
+ Notes
52
+ -----
53
+ This is an implementation of the algorithm described in [1]_. An edge is a
54
+ bridge if and only if it is not contained in any chain. Chains are found
55
+ using the :func:`networkx.chain_decomposition` function.
56
+
57
+ The algorithm described in [1]_ requires a simple graph. If the provided
58
+ graph is a multigraph, we convert it to a simple graph and verify that any
59
+ bridges discovered by the chain decomposition algorithm are not multi-edges.
60
+
61
+ Ignoring polylogarithmic factors, the worst-case time complexity is the
62
+ same as the :func:`networkx.chain_decomposition` function,
63
+ $O(m + n)$, where $n$ is the number of nodes in the graph and $m$ is
64
+ the number of edges.
65
+
66
+ References
67
+ ----------
68
+ .. [1] https://en.wikipedia.org/wiki/Bridge_%28graph_theory%29#Bridge-Finding_with_Chain_Decompositions
69
+ """
70
+ multigraph = G.is_multigraph()
71
+ H = nx.Graph(G) if multigraph else G
72
+ chains = nx.chain_decomposition(H, root=root)
73
+ chain_edges = set(chain.from_iterable(chains))
74
+ if root is not None:
75
+ H = H.subgraph(nx.node_connected_component(H, root)).copy()
76
+ for u, v in H.edges():
77
+ if (u, v) not in chain_edges and (v, u) not in chain_edges:
78
+ if multigraph and len(G[u][v]) > 1:
79
+ continue
80
+ yield u, v
81
+
82
+
83
+ @not_implemented_for("directed")
84
+ @nx._dispatchable
85
+ def has_bridges(G, root=None):
86
+ """Decide whether a graph has any bridges.
87
+
88
+ A *bridge* in a graph is an edge whose removal causes the number of
89
+ connected components of the graph to increase.
90
+
91
+ Parameters
92
+ ----------
93
+ G : undirected graph
94
+
95
+ root : node (optional)
96
+ A node in the graph `G`. If specified, only the bridges in the
97
+ connected component containing this node will be considered.
98
+
99
+ Returns
100
+ -------
101
+ bool
102
+ Whether the graph (or the connected component containing `root`)
103
+ has any bridges.
104
+
105
+ Raises
106
+ ------
107
+ NodeNotFound
108
+ If `root` is not in the graph `G`.
109
+
110
+ NetworkXNotImplemented
111
+ If `G` is a directed graph.
112
+
113
+ Examples
114
+ --------
115
+ The barbell graph with parameter zero has a single bridge::
116
+
117
+ >>> G = nx.barbell_graph(10, 0)
118
+ >>> nx.has_bridges(G)
119
+ True
120
+
121
+ On the other hand, the cycle graph has no bridges::
122
+
123
+ >>> G = nx.cycle_graph(5)
124
+ >>> nx.has_bridges(G)
125
+ False
126
+
127
+ Notes
128
+ -----
129
+ This implementation uses the :func:`networkx.bridges` function, so
130
+ it shares its worst-case time complexity, $O(m + n)$, ignoring
131
+ polylogarithmic factors, where $n$ is the number of nodes in the
132
+ graph and $m$ is the number of edges.
133
+
134
+ """
135
+ try:
136
+ next(bridges(G, root=root))
137
+ except StopIteration:
138
+ return False
139
+ else:
140
+ return True
141
+
142
+
143
+ @not_implemented_for("multigraph")
144
+ @not_implemented_for("directed")
145
+ @nx._dispatchable(edge_attrs="weight")
146
+ def local_bridges(G, with_span=True, weight=None):
147
+ """Iterate over local bridges of `G` optionally computing the span
148
+
149
+ A *local bridge* is an edge whose endpoints have no common neighbors.
150
+ That is, the edge is not part of a triangle in the graph.
151
+
152
+ The *span* of a *local bridge* is the shortest path length between
153
+ the endpoints if the local bridge is removed.
154
+
155
+ Parameters
156
+ ----------
157
+ G : undirected graph
158
+
159
+ with_span : bool
160
+ If True, yield a 3-tuple `(u, v, span)`
161
+
162
+ weight : function, string or None (default: None)
163
+ If function, used to compute edge weights for the span.
164
+ If string, the edge data attribute used in calculating span.
165
+ If None, all edges have weight 1.
166
+
167
+ Yields
168
+ ------
169
+ e : edge
170
+ The local bridges as an edge 2-tuple of nodes `(u, v)` or
171
+ as a 3-tuple `(u, v, span)` when `with_span is True`.
172
+
173
+ Raises
174
+ ------
175
+ NetworkXNotImplemented
176
+ If `G` is a directed graph or multigraph.
177
+
178
+ Examples
179
+ --------
180
+ A cycle graph has every edge a local bridge with span N-1.
181
+
182
+ >>> G = nx.cycle_graph(9)
183
+ >>> (0, 8, 8) in set(nx.local_bridges(G))
184
+ True
185
+ """
186
+ if with_span is not True:
187
+ for u, v in G.edges:
188
+ if not (set(G[u]) & set(G[v])):
189
+ yield u, v
190
+ else:
191
+ wt = nx.weighted._weight_function(G, weight)
192
+ for u, v in G.edges:
193
+ if not (set(G[u]) & set(G[v])):
194
+ enodes = {u, v}
195
+
196
+ def hide_edge(n, nbr, d):
197
+ if n not in enodes or nbr not in enodes:
198
+ return wt(n, nbr, d)
199
+ return None
200
+
201
+ try:
202
+ span = nx.shortest_path_length(G, u, v, weight=hide_edge)
203
+ yield u, v, span
204
+ except nx.NetworkXNoPath:
205
+ yield u, v, float("inf")
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/broadcasting.py ADDED
@@ -0,0 +1,155 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Routines to calculate the broadcast time of certain graphs.
2
+
3
+ Broadcasting is an information dissemination problem in which a node in a graph,
4
+ called the originator, must distribute a message to all other nodes by placing
5
+ a series of calls along the edges of the graph. Once informed, other nodes aid
6
+ the originator in distributing the message.
7
+
8
+ The broadcasting must be completed as quickly as possible subject to the
9
+ following constraints:
10
+ - Each call requires one unit of time.
11
+ - A node can only participate in one call per unit of time.
12
+ - Each call only involves two adjacent nodes: a sender and a receiver.
13
+ """
14
+
15
+ import networkx as nx
16
+ from networkx import NetworkXError
17
+ from networkx.utils import not_implemented_for
18
+
19
+ __all__ = [
20
+ "tree_broadcast_center",
21
+ "tree_broadcast_time",
22
+ ]
23
+
24
+
25
def _get_max_broadcast_value(G, U, v, values):
    """Return the broadcast time of `v` toward its already-processed
    neighbors in `U`, given their broadcast values `values`.

    Neighbors are informed in decreasing order of their own value; the
    i-th call (1-based) adds i time units on top of that neighbor's value.
    """
    ranked = sorted(set(G.neighbors(v)) & U, key=values.get, reverse=True)
    return max(position + values[u] for position, u in enumerate(ranked, start=1))
28
+
29
+
30
def _get_broadcast_centers(G, v, values, target):
    """Return `v` plus the prefix of its ranked neighbors whose
    broadcast value first attains `target`.
    """
    ranked = sorted(G.neighbors(v), key=values.get, reverse=True)
    cutoff = next(i for i, u in enumerate(ranked, start=1) if values[u] + i == target)
    return {v, *ranked[:cutoff]}
34
+
35
+
36
+ @not_implemented_for("directed")
37
+ @not_implemented_for("multigraph")
38
+ @nx._dispatchable
39
+ def tree_broadcast_center(G):
40
+ """Return the Broadcast Center of the tree `G`.
41
+
42
+ The broadcast center of a graph G denotes the set of nodes having
43
+ minimum broadcast time [1]_. This is a linear algorithm for determining
44
+ the broadcast center of a tree with ``N`` nodes, as a by-product it also
45
+ determines the broadcast time from the broadcast center.
46
+
47
+ Parameters
48
+ ----------
49
+ G : undirected graph
50
+ The graph should be an undirected tree
51
+
52
+ Returns
53
+ -------
54
+ BC : (int, set) tuple
55
+ minimum broadcast number of the tree, set of broadcast centers
56
+
57
+ Raises
58
+ ------
59
+ NetworkXNotImplemented
60
+ If the graph is directed or is a multigraph.
61
+
62
+ References
63
+ ----------
64
+ .. [1] Slater, P.J., Cockayne, E.J., Hedetniemi, S.T,
65
+ Information dissemination in trees. SIAM J.Comput. 10(4), 692–701 (1981)
66
+ """
67
+ # Assert that the graph G is a tree
68
+ if not nx.is_tree(G):
69
+ NetworkXError("Input graph is not a tree")
70
+ # step 0
71
+ if G.number_of_nodes() == 2:
72
+ return 1, set(G.nodes())
73
+ if G.number_of_nodes() == 1:
74
+ return 0, set(G.nodes())
75
+
76
+ # step 1
77
+ U = {node for node, deg in G.degree if deg == 1}
78
+ values = dict.fromkeys(U, 0)
79
+ T = G.copy()
80
+ T.remove_nodes_from(U)
81
+
82
+ # step 2
83
+ W = {node for node, deg in T.degree if deg == 1}
84
+ values.update((w, G.degree[w] - 1) for w in W)
85
+
86
+ # step 3
87
+ while T.number_of_nodes() >= 2:
88
+ # step 4
89
+ w = min(W, key=lambda n: values[n])
90
+ v = next(T.neighbors(w))
91
+
92
+ # step 5
93
+ U.add(w)
94
+ W.remove(w)
95
+ T.remove_node(w)
96
+
97
+ # step 6
98
+ if T.degree(v) == 1:
99
+ # update t(v)
100
+ values.update({v: _get_max_broadcast_value(G, U, v, values)})
101
+ W.add(v)
102
+
103
+ # step 7
104
+ v = nx.utils.arbitrary_element(T)
105
+ b_T = _get_max_broadcast_value(G, U, v, values)
106
+ return b_T, _get_broadcast_centers(G, v, values, b_T)
107
+
108
+
109
+ @not_implemented_for("directed")
110
+ @not_implemented_for("multigraph")
111
+ @nx._dispatchable
112
+ def tree_broadcast_time(G, node=None):
113
+ """Return the Broadcast Time of the tree `G`.
114
+
115
+ The minimum broadcast time of a node is defined as the minimum amount
116
+ of time required to complete broadcasting starting from the
117
+ originator. The broadcast time of a graph is the maximum over
118
+ all nodes of the minimum broadcast time from that node [1]_.
119
+ This function returns the minimum broadcast time of `node`.
120
+ If `node` is None the broadcast time for the graph is returned.
121
+
122
+ Parameters
123
+ ----------
124
+ G : undirected graph
125
+ The graph should be an undirected tree
126
+ node: int, optional
127
+ index of starting node. If `None`, the algorithm returns the broadcast
128
+ time of the tree.
129
+
130
+ Returns
131
+ -------
132
+ BT : int
133
+ Broadcast Time of a node in a tree
134
+
135
+ Raises
136
+ ------
137
+ NetworkXNotImplemented
138
+ If the graph is directed or is a multigraph.
139
+
140
+ References
141
+ ----------
142
+ .. [1] Harutyunyan, H. A. and Li, Z.
143
+ "A Simple Construction of Broadcast Graphs."
144
+ In Computing and Combinatorics. COCOON 2019
145
+ (Ed. D. Z. Du and C. Tian.) Springer, pp. 240-253, 2019.
146
+ """
147
+ b_T, b_C = tree_broadcast_center(G)
148
+ if node is not None:
149
+ return b_T + min(nx.shortest_path_length(G, node, u) for u in b_C)
150
+ dist_from_center = dict.fromkeys(G, len(G))
151
+ for u in b_C:
152
+ for v, dist in nx.shortest_path_length(G, u).items():
153
+ if dist < dist_from_center[v]:
154
+ dist_from_center[v] = dist
155
+ return b_T + max(dist_from_center.values())
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/chains.py ADDED
@@ -0,0 +1,172 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for finding chains in a graph."""
2
+
3
+ import networkx as nx
4
+ from networkx.utils import not_implemented_for
5
+
6
+ __all__ = ["chain_decomposition"]
7
+
8
+
9
+ @not_implemented_for("directed")
10
+ @not_implemented_for("multigraph")
11
+ @nx._dispatchable
12
+ def chain_decomposition(G, root=None):
13
+ """Returns the chain decomposition of a graph.
14
+
15
+ The *chain decomposition* of a graph with respect a depth-first
16
+ search tree is a set of cycles or paths derived from the set of
17
+ fundamental cycles of the tree in the following manner. Consider
18
+ each fundamental cycle with respect to the given tree, represented
19
+ as a list of edges beginning with the nontree edge oriented away
20
+ from the root of the tree. For each fundamental cycle, if it
21
+ overlaps with any previous fundamental cycle, just take the initial
22
+ non-overlapping segment, which is a path instead of a cycle. Each
23
+ cycle or path is called a *chain*. For more information, see [1]_.
24
+
25
+ Parameters
26
+ ----------
27
+ G : undirected graph
28
+
29
+ root : node (optional)
30
+ A node in the graph `G`. If specified, only the chain
31
+ decomposition for the connected component containing this node
32
+ will be returned. This node indicates the root of the depth-first
33
+ search tree.
34
+
35
+ Yields
36
+ ------
37
+ chain : list
38
+ A list of edges representing a chain. There is no guarantee on
39
+ the orientation of the edges in each chain (for example, if a
40
+ chain includes the edge joining nodes 1 and 2, the chain may
41
+ include either (1, 2) or (2, 1)).
42
+
43
+ Raises
44
+ ------
45
+ NodeNotFound
46
+ If `root` is not in the graph `G`.
47
+
48
+ Examples
49
+ --------
50
+ >>> G = nx.Graph([(0, 1), (1, 4), (3, 4), (3, 5), (4, 5)])
51
+ >>> list(nx.chain_decomposition(G))
52
+ [[(4, 5), (5, 3), (3, 4)]]
53
+
54
+ Notes
55
+ -----
56
+ The worst-case running time of this implementation is linear in the
57
+ number of nodes and number of edges [1]_.
58
+
59
+ References
60
+ ----------
61
+ .. [1] Jens M. Schmidt (2013). "A simple test on 2-vertex-
62
+ and 2-edge-connectivity." *Information Processing Letters*,
63
+ 113, 241–244. Elsevier. <https://doi.org/10.1016/j.ipl.2013.01.016>
64
+
65
+ """
66
+
67
+ def _dfs_cycle_forest(G, root=None):
68
+ """Builds a directed graph composed of cycles from the given graph.
69
+
70
+ `G` is an undirected simple graph. `root` is a node in the graph
71
+ from which the depth-first search is started.
72
+
73
+ This function returns both the depth-first search cycle graph
74
+ (as a :class:`~networkx.DiGraph`) and the list of nodes in
75
+ depth-first preorder. The depth-first search cycle graph is a
76
+ directed graph whose edges are the edges of `G` oriented toward
77
+ the root if the edge is a tree edge and away from the root if
78
+ the edge is a non-tree edge. If `root` is not specified, this
79
+ performs a depth-first search on each connected component of `G`
80
+ and returns a directed forest instead.
81
+
82
+ If `root` is not in the graph, this raises :exc:`KeyError`.
83
+
84
+ """
85
+ # Create a directed graph from the depth-first search tree with
86
+ # root node `root` in which tree edges are directed toward the
87
+ # root and nontree edges are directed away from the root. For
88
+ # each node with an incident nontree edge, this creates a
89
+ # directed cycle starting with the nontree edge and returning to
90
+ # that node.
91
+ #
92
+ # The `parent` node attribute stores the parent of each node in
93
+ # the DFS tree. The `nontree` edge attribute indicates whether
94
+ # the edge is a tree edge or a nontree edge.
95
+ #
96
+ # We also store the order of the nodes found in the depth-first
97
+ # search in the `nodes` list.
98
+ H = nx.DiGraph()
99
+ nodes = []
100
+ for u, v, d in nx.dfs_labeled_edges(G, source=root):
101
+ if d == "forward":
102
+ # `dfs_labeled_edges()` yields (root, root, 'forward')
103
+ # if it is beginning the search on a new connected
104
+ # component.
105
+ if u == v:
106
+ H.add_node(v, parent=None)
107
+ nodes.append(v)
108
+ else:
109
+ H.add_node(v, parent=u)
110
+ H.add_edge(v, u, nontree=False)
111
+ nodes.append(v)
112
+ # `dfs_labeled_edges` considers nontree edges in both
113
+ # orientations, so we need to not add the edge if it its
114
+ # other orientation has been added.
115
+ elif d == "nontree" and v not in H[u]:
116
+ H.add_edge(v, u, nontree=True)
117
+ else:
118
+ # Do nothing on 'reverse' edges; we only care about
119
+ # forward and nontree edges.
120
+ pass
121
+ return H, nodes
122
+
123
+ def _build_chain(G, u, v, visited):
124
+ """Generate the chain starting from the given nontree edge.
125
+
126
+ `G` is a DFS cycle graph as constructed by
127
+ :func:`_dfs_cycle_graph`. The edge (`u`, `v`) is a nontree edge
128
+ that begins a chain. `visited` is a set representing the nodes
129
+ in `G` that have already been visited.
130
+
131
+ This function yields the edges in an initial segment of the
132
+ fundamental cycle of `G` starting with the nontree edge (`u`,
133
+ `v`) that includes all the edges up until the first node that
134
+ appears in `visited`. The tree edges are given by the 'parent'
135
+ node attribute. The `visited` set is updated to add each node in
136
+ an edge yielded by this function.
137
+
138
+ """
139
+ while v not in visited:
140
+ yield u, v
141
+ visited.add(v)
142
+ u, v = v, G.nodes[v]["parent"]
143
+ yield u, v
144
+
145
+ # Check if the root is in the graph G. If not, raise NodeNotFound
146
+ if root is not None and root not in G:
147
+ raise nx.NodeNotFound(f"Root node {root} is not in graph")
148
+
149
+ # Create a directed version of H that has the DFS edges directed
150
+ # toward the root and the nontree edges directed away from the root
151
+ # (in each connected component).
152
+ H, nodes = _dfs_cycle_forest(G, root)
153
+
154
+ # Visit the nodes again in DFS order. For each node, and for each
155
+ # nontree edge leaving that node, compute the fundamental cycle for
156
+ # that nontree edge starting with that edge. If the fundamental
157
+ # cycle overlaps with any visited nodes, just take the prefix of the
158
+ # cycle up to the point of visited nodes.
159
+ #
160
+ # We repeat this process for each connected component (implicitly,
161
+ # since `nodes` already has a list of the nodes grouped by connected
162
+ # component).
163
+ visited = set()
164
+ for u in nodes:
165
+ visited.add(u)
166
+ # For each nontree edge going out of node u...
167
+ edges = ((u, v) for u, v, d in H.out_edges(u, data="nontree") if d)
168
+ for u, v in edges:
169
+ # Create the cycle or cycle prefix starting with the
170
+ # nontree edge.
171
+ chain = list(_build_chain(H, u, v, visited))
172
+ yield chain
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/chordal.py ADDED
@@ -0,0 +1,443 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Algorithms for chordal graphs.
3
+
4
+ A graph is chordal if every cycle of length at least 4 has a chord
5
+ (an edge joining two nodes not adjacent in the cycle).
6
+ https://en.wikipedia.org/wiki/Chordal_graph
7
+ """
8
+
9
+ import sys
10
+
11
+ import networkx as nx
12
+ from networkx.algorithms.components import connected_components
13
+ from networkx.utils import arbitrary_element, not_implemented_for
14
+
15
+ __all__ = [
16
+ "is_chordal",
17
+ "find_induced_nodes",
18
+ "chordal_graph_cliques",
19
+ "chordal_graph_treewidth",
20
+ "NetworkXTreewidthBoundExceeded",
21
+ "complete_to_chordal_graph",
22
+ ]
23
+
24
+
25
class NetworkXTreewidthBoundExceeded(nx.NetworkXException):
    """Exception raised when a provided treewidth bound has been exceeded.

    Raised by :func:`_find_chordality_breaker` (and hence by
    :func:`find_induced_nodes`) when the running treewidth estimate grows
    past the caller-supplied ``treewidth_bound``.
    """
28
+
29
+
30
+ @not_implemented_for("directed")
31
+ @not_implemented_for("multigraph")
32
+ @nx._dispatchable
33
+ def is_chordal(G):
34
+ """Checks whether G is a chordal graph.
35
+
36
+ A graph is chordal if every cycle of length at least 4 has a chord
37
+ (an edge joining two nodes not adjacent in the cycle).
38
+
39
+ Parameters
40
+ ----------
41
+ G : graph
42
+ A NetworkX graph.
43
+
44
+ Returns
45
+ -------
46
+ chordal : bool
47
+ True if G is a chordal graph and False otherwise.
48
+
49
+ Raises
50
+ ------
51
+ NetworkXNotImplemented
52
+ The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
53
+
54
+ Examples
55
+ --------
56
+ >>> e = [
57
+ ... (1, 2),
58
+ ... (1, 3),
59
+ ... (2, 3),
60
+ ... (2, 4),
61
+ ... (3, 4),
62
+ ... (3, 5),
63
+ ... (3, 6),
64
+ ... (4, 5),
65
+ ... (4, 6),
66
+ ... (5, 6),
67
+ ... ]
68
+ >>> G = nx.Graph(e)
69
+ >>> nx.is_chordal(G)
70
+ True
71
+
72
+ Notes
73
+ -----
74
+ The routine tries to go through every node following maximum cardinality
75
+ search. It returns False when it finds that the separator for any node
76
+ is not a clique. Based on the algorithms in [1]_.
77
+
78
+ Self loops are ignored.
79
+
80
+ References
81
+ ----------
82
+ .. [1] R. E. Tarjan and M. Yannakakis, Simple linear-time algorithms
83
+ to test chordality of graphs, test acyclicity of hypergraphs, and
84
+ selectively reduce acyclic hypergraphs, SIAM J. Comput., 13 (1984),
85
+ pp. 566–579.
86
+ """
87
+ if len(G.nodes) <= 3:
88
+ return True
89
+ return len(_find_chordality_breaker(G)) == 0
90
+
91
+
92
@nx._dispatchable
def find_induced_nodes(G, s, t, treewidth_bound=sys.maxsize):
    """Returns the set of induced nodes in the path from s to t.

    Parameters
    ----------
    G : graph
        A chordal NetworkX graph
    s : node
        Source node to look for induced nodes
    t : node
        Destination node to look for induced nodes
    treewidth_bound: float
        Maximum treewidth acceptable for the graph H. The search
        for induced nodes will end as soon as the treewidth_bound is exceeded.

    Returns
    -------
    induced_nodes : Set of nodes
        The set of induced nodes in the path from s to t in G

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        If the input graph is an instance of one of these classes, a
        :exc:`NetworkXError` is raised.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> G = nx.Graph()
    >>> G = nx.generators.classic.path_graph(10)
    >>> induced_nodes = nx.find_induced_nodes(G, 1, 9, 2)
    >>> sorted(induced_nodes)
    [1, 2, 3, 4, 5, 6, 7, 8, 9]

    Notes
    -----
    G must be a chordal graph and (s,t) an edge that is not in G.

    If a treewidth_bound is provided, the search for induced nodes will end
    as soon as the treewidth_bound is exceeded.

    The algorithm is inspired by Algorithm 4 in [1]_.
    A formal definition of induced node can also be found on that reference.

    Self Loops are ignored

    References
    ----------
    .. [1] Learning Bounded Treewidth Bayesian Networks.
       Gal Elidan, Stephen Gould; JMLR, 9(Dec):2699--2731, 2008.
       http://jmlr.csail.mit.edu/papers/volume9/elidan08a/elidan08a.pdf
    """
    if not is_chordal(G):
        raise nx.NetworkXError("Input graph is not chordal.")

    # Work on a copy of G augmented with the edge (s, t); this edge
    # creates the non-chordal cycles the search below exploits.
    H = nx.Graph(G)
    H.add_edge(s, t)
    induced_nodes = set()
    triplet = _find_chordality_breaker(H, s, treewidth_bound)
    # Repeatedly find a chordality-breaking triple, record its nodes,
    # and shortcut them to s so the next iteration digs deeper along
    # the induced path.
    while triplet:
        (u, v, w) = triplet
        induced_nodes.update(triplet)
        for n in triplet:
            if n != s:
                H.add_edge(s, n)
        triplet = _find_chordality_breaker(H, s, treewidth_bound)
    if induced_nodes:
        # Add t and the second node in the induced path from s to t.
        induced_nodes.add(t)
        for u in G[s]:
            if len(induced_nodes & set(G[u])) == 2:
                induced_nodes.add(u)
                break
    return induced_nodes
170
+
171
+
172
@nx._dispatchable
def chordal_graph_cliques(G):
    """Returns all maximal cliques of a chordal graph.

    The algorithm breaks the graph in connected components and performs a
    maximum cardinality search in each component to get the cliques.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    Yields
    ------
    frozenset of nodes
        Maximal cliques, each of which is a frozenset of
        nodes in `G`. The order of cliques is arbitrary.

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> e = [
    ...     (1, 2),
    ...     (1, 3),
    ...     (2, 3),
    ...     (2, 4),
    ...     (3, 4),
    ...     (3, 5),
    ...     (3, 6),
    ...     (4, 5),
    ...     (4, 6),
    ...     (5, 6),
    ...     (7, 8),
    ... ]
    >>> G = nx.Graph(e)
    >>> G.add_node(9)
    >>> cliques = [c for c in chordal_graph_cliques(G)]
    >>> cliques[0]
    frozenset({1, 2, 3})
    """
    # Process each connected component independently.
    for C in (G.subgraph(c).copy() for c in connected_components(G)):
        if C.number_of_nodes() == 1:
            # A single node with a self loop would make the graph
            # non-chordal by this routine's convention.
            if nx.number_of_selfloops(C) > 0:
                raise nx.NetworkXError("Input graph is not chordal.")
            yield frozenset(C.nodes())
        else:
            # Maximum cardinality search: number nodes one at a time,
            # always picking the unnumbered node with the most numbered
            # neighbors, and track the clique being grown.
            unnumbered = set(C.nodes())
            v = arbitrary_element(C)
            unnumbered.remove(v)
            numbered = {v}
            clique_wanna_be = {v}
            while unnumbered:
                v = _max_cardinality_node(C, unnumbered, numbered)
                unnumbered.remove(v)
                numbered.add(v)
                new_clique_wanna_be = set(C.neighbors(v)) & numbered
                sg = C.subgraph(clique_wanna_be)
                if _is_complete_graph(sg):
                    new_clique_wanna_be.add(v)
                    # Emit the old clique only if it is not contained in
                    # the new one (i.e. it was maximal).
                    if not new_clique_wanna_be >= clique_wanna_be:
                        yield frozenset(clique_wanna_be)
                    clique_wanna_be = new_clique_wanna_be
                else:
                    # In a chordal graph the numbered neighbors of each
                    # MCS node always form a clique.
                    raise nx.NetworkXError("Input graph is not chordal.")
            yield frozenset(clique_wanna_be)
243
+
244
+
245
@nx._dispatchable
def chordal_graph_treewidth(G):
    """Returns the treewidth of the chordal graph G.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    Returns
    -------
    treewidth : int
        The size of the largest clique in the graph minus one.

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> e = [
    ...     (1, 2),
    ...     (1, 3),
    ...     (2, 3),
    ...     (2, 4),
    ...     (3, 4),
    ...     (3, 5),
    ...     (3, 6),
    ...     (4, 5),
    ...     (4, 6),
    ...     (5, 6),
    ...     (7, 8),
    ... ]
    >>> G = nx.Graph(e)
    >>> G.add_node(9)
    >>> nx.chordal_graph_treewidth(G)
    3

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Tree_decomposition#Treewidth
    """
    if not is_chordal(G):
        raise nx.NetworkXError("Input graph is not chordal.")

    # For a chordal graph the treewidth equals the size of its largest
    # maximal clique minus one.
    return max((len(clique) for clique in nx.chordal_graph_cliques(G)), default=-1) - 1
297
+
298
+
299
def _is_complete_graph(G):
    """Returns True if G is a complete graph."""
    if nx.number_of_selfloops(G) > 0:
        raise nx.NetworkXError("Self loop found in _is_complete_graph()")
    n = G.number_of_nodes()
    # Fewer than two nodes is trivially complete; otherwise compare the
    # edge count against n choose 2.
    return n < 2 or G.number_of_edges() == n * (n - 1) / 2
309
+
310
+
311
+ def _find_missing_edge(G):
312
+ """Given a non-complete graph G, returns a missing edge."""
313
+ nodes = set(G)
314
+ for u in G:
315
+ missing = nodes - set(list(G[u].keys()) + [u])
316
+ if missing:
317
+ return (u, missing.pop())
318
+
319
+
320
+ def _max_cardinality_node(G, choices, wanna_connect):
321
+ """Returns a the node in choices that has more connections in G
322
+ to nodes in wanna_connect.
323
+ """
324
+ max_number = -1
325
+ for x in choices:
326
+ number = len([y for y in G[x] if y in wanna_connect])
327
+ if number > max_number:
328
+ max_number = number
329
+ max_cardinality_node = x
330
+ return max_cardinality_node
331
+
332
+
333
def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize):
    """Given a graph G, starts a max cardinality search
    (starting from s if s is given and from an arbitrary node otherwise)
    trying to find a non-chordal cycle.

    If it does find one, it returns (u,v,w) where u,v,w are the three
    nodes that together with s are involved in the cycle.

    Returns an empty tuple if no violation is found.

    Raises
    ------
    NetworkXPointlessConcept
        If `G` has no nodes.
    NetworkXTreewidthBoundExceeded
        If the running treewidth estimate exceeds `treewidth_bound`.

    It ignores any self loops.
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("Graph has no nodes.")
    unnumbered = set(G)
    if s is None:
        s = arbitrary_element(G)
    unnumbered.remove(s)
    numbered = {s}
    current_treewidth = -1
    # NOTE(review): the bound check is done by raising inside the loop
    # body rather than in this (commented-out) loop condition.
    while unnumbered:  # and current_treewidth <= treewidth_bound:
        v = _max_cardinality_node(G, unnumbered, numbered)
        unnumbered.remove(v)
        numbered.add(v)
        # In a chordal graph, the already-numbered neighbors of each MCS
        # node must form a clique.
        clique_wanna_be = set(G[v]) & numbered
        sg = G.subgraph(clique_wanna_be)
        if _is_complete_graph(sg):
            # The graph seems to be chordal by now. We update the treewidth
            current_treewidth = max(current_treewidth, len(clique_wanna_be))
            if current_treewidth > treewidth_bound:
                raise nx.NetworkXTreewidthBoundExceeded(
                    f"treewidth_bound exceeded: {current_treewidth}"
                )
        else:
            # sg is not a clique,
            # look for an edge that is not included in sg
            (u, w) = _find_missing_edge(sg)
            return (u, v, w)
    return ()
370
+
371
+
372
+ @not_implemented_for("directed")
373
+ @nx._dispatchable(returns_graph=True)
374
+ def complete_to_chordal_graph(G):
375
+ """Return a copy of G completed to a chordal graph
376
+
377
+ Adds edges to a copy of G to create a chordal graph. A graph G=(V,E) is
378
+ called chordal if for each cycle with length bigger than 3, there exist
379
+ two non-adjacent nodes connected by an edge (called a chord).
380
+
381
+ Parameters
382
+ ----------
383
+ G : NetworkX graph
384
+ Undirected graph
385
+
386
+ Returns
387
+ -------
388
+ H : NetworkX graph
389
+ The chordal enhancement of G
390
+ alpha : Dictionary
391
+ The elimination ordering of nodes of G
392
+
393
+ Notes
394
+ -----
395
+ There are different approaches to calculate the chordal
396
+ enhancement of a graph. The algorithm used here is called
397
+ MCS-M and gives at least minimal (local) triangulation of graph. Note
398
+ that this triangulation is not necessarily a global minimum.
399
+
400
+ https://en.wikipedia.org/wiki/Chordal_graph
401
+
402
+ References
403
+ ----------
404
+ .. [1] Berry, Anne & Blair, Jean & Heggernes, Pinar & Peyton, Barry. (2004)
405
+ Maximum Cardinality Search for Computing Minimal Triangulations of
406
+ Graphs. Algorithmica. 39. 287-298. 10.1007/s00453-004-1084-3.
407
+
408
+ Examples
409
+ --------
410
+ >>> from networkx.algorithms.chordal import complete_to_chordal_graph
411
+ >>> G = nx.wheel_graph(10)
412
+ >>> H, alpha = complete_to_chordal_graph(G)
413
+ """
414
+ H = G.copy()
415
+ alpha = dict.fromkeys(H, 0)
416
+ if nx.is_chordal(H):
417
+ return H, alpha
418
+ chords = set()
419
+ weight = dict.fromkeys(H.nodes(), 0)
420
+ unnumbered_nodes = list(H.nodes())
421
+ for i in range(len(H.nodes()), 0, -1):
422
+ # get the node in unnumbered_nodes with the maximum weight
423
+ z = max(unnumbered_nodes, key=lambda node: weight[node])
424
+ unnumbered_nodes.remove(z)
425
+ alpha[z] = i
426
+ update_nodes = []
427
+ for y in unnumbered_nodes:
428
+ if G.has_edge(y, z):
429
+ update_nodes.append(y)
430
+ else:
431
+ # y_weight will be bigger than node weights between y and z
432
+ y_weight = weight[y]
433
+ lower_nodes = [
434
+ node for node in unnumbered_nodes if weight[node] < y_weight
435
+ ]
436
+ if nx.has_path(H.subgraph(lower_nodes + [z, y]), y, z):
437
+ update_nodes.append(y)
438
+ chords.add((z, y))
439
+ # during calculation of paths the weights should not be updated
440
+ for node in update_nodes:
441
+ weight[node] += 1
442
+ H.add_edges_from(chords)
443
+ return H, alpha
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/clique.py ADDED
@@ -0,0 +1,757 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for finding and manipulating cliques.
2
+
3
+ Finding the largest clique in a graph is NP-complete problem, so most of
4
+ these algorithms have an exponential running time; for more information,
5
+ see the Wikipedia article on the clique problem [1]_.
6
+
7
+ .. [1] clique problem:: https://en.wikipedia.org/wiki/Clique_problem
8
+
9
+ """
10
+
11
+ from collections import Counter, defaultdict, deque
12
+ from itertools import chain, combinations, islice
13
+
14
+ import networkx as nx
15
+ from networkx.utils import not_implemented_for
16
+
17
+ __all__ = [
18
+ "find_cliques",
19
+ "find_cliques_recursive",
20
+ "make_max_clique_graph",
21
+ "make_clique_bipartite",
22
+ "node_clique_number",
23
+ "number_of_cliques",
24
+ "enumerate_all_cliques",
25
+ "max_weight_clique",
26
+ ]
27
+
28
+
29
+ @not_implemented_for("directed")
30
+ @nx._dispatchable
31
+ def enumerate_all_cliques(G):
32
+ """Returns all cliques in an undirected graph.
33
+
34
+ This function returns an iterator over cliques, each of which is a
35
+ list of nodes. The iteration is ordered by cardinality of the
36
+ cliques: first all cliques of size one, then all cliques of size
37
+ two, etc.
38
+
39
+ Parameters
40
+ ----------
41
+ G : NetworkX graph
42
+ An undirected graph.
43
+
44
+ Returns
45
+ -------
46
+ iterator
47
+ An iterator over cliques, each of which is a list of nodes in
48
+ `G`. The cliques are ordered according to size.
49
+
50
+ Notes
51
+ -----
52
+ To obtain a list of all cliques, use
53
+ `list(enumerate_all_cliques(G))`. However, be aware that in the
54
+ worst-case, the length of this list can be exponential in the number
55
+ of nodes in the graph (for example, when the graph is the complete
56
+ graph). This function avoids storing all cliques in memory by only
57
+ keeping current candidate node lists in memory during its search.
58
+
59
+ The implementation is adapted from the algorithm by Zhang, et
60
+ al. (2005) [1]_ to output all cliques discovered.
61
+
62
+ This algorithm ignores self-loops and parallel edges, since cliques
63
+ are not conventionally defined with such edges.
64
+
65
+ References
66
+ ----------
67
+ .. [1] Yun Zhang, Abu-Khzam, F.N., Baldwin, N.E., Chesler, E.J.,
68
+ Langston, M.A., Samatova, N.F.,
69
+ "Genome-Scale Computational Approaches to Memory-Intensive
70
+ Applications in Systems Biology".
71
+ *Supercomputing*, 2005. Proceedings of the ACM/IEEE SC 2005
72
+ Conference, pp. 12, 12--18 Nov. 2005.
73
+ <https://doi.org/10.1109/SC.2005.29>.
74
+
75
+ """
76
+ index = {}
77
+ nbrs = {}
78
+ for u in G:
79
+ index[u] = len(index)
80
+ # Neighbors of u that appear after u in the iteration order of G.
81
+ nbrs[u] = {v for v in G[u] if v not in index}
82
+
83
+ queue = deque(([u], sorted(nbrs[u], key=index.__getitem__)) for u in G)
84
+ # Loop invariants:
85
+ # 1. len(base) is nondecreasing.
86
+ # 2. (base + cnbrs) is sorted with respect to the iteration order of G.
87
+ # 3. cnbrs is a set of common neighbors of nodes in base.
88
+ while queue:
89
+ base, cnbrs = map(list, queue.popleft())
90
+ yield base
91
+ for i, u in enumerate(cnbrs):
92
+ # Use generators to reduce memory consumption.
93
+ queue.append(
94
+ (
95
+ chain(base, [u]),
96
+ filter(nbrs[u].__contains__, islice(cnbrs, i + 1, None)),
97
+ )
98
+ )
99
+
100
+
101
+ @not_implemented_for("directed")
102
+ @nx._dispatchable
103
+ def find_cliques(G, nodes=None):
104
+ """Returns all maximal cliques in an undirected graph.
105
+
106
+ For each node *n*, a *maximal clique for n* is a largest complete
107
+ subgraph containing *n*. The largest maximal clique is sometimes
108
+ called the *maximum clique*.
109
+
110
+ This function returns an iterator over cliques, each of which is a
111
+ list of nodes. It is an iterative implementation, so should not
112
+ suffer from recursion depth issues.
113
+
114
+ This function accepts a list of `nodes` and only the maximal cliques
115
+ containing all of these `nodes` are returned. It can considerably speed up
116
+ the running time if some specific cliques are desired.
117
+
118
+ Parameters
119
+ ----------
120
+ G : NetworkX graph
121
+ An undirected graph.
122
+
123
+ nodes : list, optional (default=None)
124
+ If provided, only yield *maximal cliques* containing all nodes in `nodes`.
125
+ If `nodes` isn't a clique itself, a ValueError is raised.
126
+
127
+ Returns
128
+ -------
129
+ iterator
130
+ An iterator over maximal cliques, each of which is a list of
131
+ nodes in `G`. If `nodes` is provided, only the maximal cliques
132
+ containing all the nodes in `nodes` are returned. The order of
133
+ cliques is arbitrary.
134
+
135
+ Raises
136
+ ------
137
+ ValueError
138
+ If `nodes` is not a clique.
139
+
140
+ Examples
141
+ --------
142
+ >>> from pprint import pprint # For nice dict formatting
143
+ >>> G = nx.karate_club_graph()
144
+ >>> sum(1 for c in nx.find_cliques(G)) # The number of maximal cliques in G
145
+ 36
146
+ >>> max(nx.find_cliques(G), key=len) # The largest maximal clique in G
147
+ [0, 1, 2, 3, 13]
148
+
149
+ The size of the largest maximal clique is known as the *clique number* of
150
+ the graph, which can be found directly with:
151
+
152
+ >>> max(len(c) for c in nx.find_cliques(G))
153
+ 5
154
+
155
+ One can also compute the number of maximal cliques in `G` that contain a given
156
+ node. The following produces a dictionary keyed by node whose
157
+ values are the number of maximal cliques in `G` that contain the node:
158
+
159
+ >>> from collections import Counter
160
+ >>> from itertools import chain
161
+ >>> counts = Counter(chain.from_iterable(nx.find_cliques(G)))
162
+ >>> pprint(dict(counts))
163
+ {0: 13,
164
+ 1: 6,
165
+ 2: 7,
166
+ 3: 3,
167
+ 4: 2,
168
+ 5: 3,
169
+ 6: 3,
170
+ 7: 1,
171
+ 8: 3,
172
+ 9: 2,
173
+ 10: 2,
174
+ 11: 1,
175
+ 12: 1,
176
+ 13: 2,
177
+ 14: 1,
178
+ 15: 1,
179
+ 16: 1,
180
+ 17: 1,
181
+ 18: 1,
182
+ 19: 2,
183
+ 20: 1,
184
+ 21: 1,
185
+ 22: 1,
186
+ 23: 3,
187
+ 24: 2,
188
+ 25: 2,
189
+ 26: 1,
190
+ 27: 3,
191
+ 28: 2,
192
+ 29: 2,
193
+ 30: 2,
194
+ 31: 4,
195
+ 32: 9,
196
+ 33: 14}
197
+
198
+ Or, similarly, the maximal cliques in `G` that contain a given node.
199
+ For example, the 4 maximal cliques that contain node 31:
200
+
201
+ >>> [c for c in nx.find_cliques(G) if 31 in c]
202
+ [[0, 31], [33, 32, 31], [33, 28, 31], [24, 25, 31]]
203
+
204
+ See Also
205
+ --------
206
+ find_cliques_recursive
207
+ A recursive version of the same algorithm.
208
+
209
+ Notes
210
+ -----
211
+ To obtain a list of all maximal cliques, use
212
+ `list(find_cliques(G))`. However, be aware that in the worst-case,
213
+ the length of this list can be exponential in the number of nodes in
214
+ the graph. This function avoids storing all cliques in memory by
215
+ only keeping current candidate node lists in memory during its search.
216
+
217
+ This implementation is based on the algorithm published by Bron and
218
+ Kerbosch (1973) [1]_, as adapted by Tomita, Tanaka and Takahashi
219
+ (2006) [2]_ and discussed in Cazals and Karande (2008) [3]_. It
220
+ essentially unrolls the recursion used in the references to avoid
221
+ issues of recursion stack depth (for a recursive implementation, see
222
+ :func:`find_cliques_recursive`).
223
+
224
+ This algorithm ignores self-loops and parallel edges, since cliques
225
+ are not conventionally defined with such edges.
226
+
227
+ References
228
+ ----------
229
+ .. [1] Bron, C. and Kerbosch, J.
230
+ "Algorithm 457: finding all cliques of an undirected graph".
231
+ *Communications of the ACM* 16, 9 (Sep. 1973), 575--577.
232
+ <http://portal.acm.org/citation.cfm?doid=362342.362367>
233
+
234
+ .. [2] Etsuji Tomita, Akira Tanaka, Haruhisa Takahashi,
235
+ "The worst-case time complexity for generating all maximal
236
+ cliques and computational experiments",
237
+ *Theoretical Computer Science*, Volume 363, Issue 1,
238
+ Computing and Combinatorics,
239
+ 10th Annual International Conference on
240
+ Computing and Combinatorics (COCOON 2004), 25 October 2006, Pages 28--42
241
+ <https://doi.org/10.1016/j.tcs.2006.06.015>
242
+
243
+ .. [3] F. Cazals, C. Karande,
244
+ "A note on the problem of reporting maximal cliques",
245
+ *Theoretical Computer Science*,
246
+ Volume 407, Issues 1--3, 6 November 2008, Pages 564--568,
247
+ <https://doi.org/10.1016/j.tcs.2008.05.010>
248
+
249
+ """
250
+ if len(G) == 0:
251
+ return
252
+
253
+ adj = {u: {v for v in G[u] if v != u} for u in G}
254
+
255
+ # Initialize Q with the given nodes and subg, cand with their nbrs
256
+ Q = nodes[:] if nodes is not None else []
257
+ cand = set(G)
258
+ for node in Q:
259
+ if node not in cand:
260
+ raise ValueError(f"The given `nodes` {nodes} do not form a clique")
261
+ cand &= adj[node]
262
+
263
+ if not cand:
264
+ yield Q[:]
265
+ return
266
+
267
+ subg = cand.copy()
268
+ stack = []
269
+ Q.append(None)
270
+
271
+ u = max(subg, key=lambda u: len(cand & adj[u]))
272
+ ext_u = cand - adj[u]
273
+
274
+ try:
275
+ while True:
276
+ if ext_u:
277
+ q = ext_u.pop()
278
+ cand.remove(q)
279
+ Q[-1] = q
280
+ adj_q = adj[q]
281
+ subg_q = subg & adj_q
282
+ if not subg_q:
283
+ yield Q[:]
284
+ else:
285
+ cand_q = cand & adj_q
286
+ if cand_q:
287
+ stack.append((subg, cand, ext_u))
288
+ Q.append(None)
289
+ subg = subg_q
290
+ cand = cand_q
291
+ u = max(subg, key=lambda u: len(cand & adj[u]))
292
+ ext_u = cand - adj[u]
293
+ else:
294
+ Q.pop()
295
+ subg, cand, ext_u = stack.pop()
296
+ except IndexError:
297
+ pass
298
+
299
+
300
+ # TODO Should this also be not implemented for directed graphs?
301
+ @nx._dispatchable
302
+ def find_cliques_recursive(G, nodes=None):
303
+ """Returns all maximal cliques in a graph.
304
+
305
+ For each node *v*, a *maximal clique for v* is a largest complete
306
+ subgraph containing *v*. The largest maximal clique is sometimes
307
+ called the *maximum clique*.
308
+
309
+ This function returns an iterator over cliques, each of which is a
310
+ list of nodes. It is a recursive implementation, so may suffer from
311
+ recursion depth issues, but is included for pedagogical reasons.
312
+ For a non-recursive implementation, see :func:`find_cliques`.
313
+
314
+ This function accepts a list of `nodes` and only the maximal cliques
315
+ containing all of these `nodes` are returned. It can considerably speed up
316
+ the running time if some specific cliques are desired.
317
+
318
+ Parameters
319
+ ----------
320
+ G : NetworkX graph
321
+
322
+ nodes : list, optional (default=None)
323
+ If provided, only yield *maximal cliques* containing all nodes in `nodes`.
324
+ If `nodes` isn't a clique itself, a ValueError is raised.
325
+
326
+ Returns
327
+ -------
328
+ iterator
329
+ An iterator over maximal cliques, each of which is a list of
330
+ nodes in `G`. If `nodes` is provided, only the maximal cliques
331
+ containing all the nodes in `nodes` are yielded. The order of
332
+ cliques is arbitrary.
333
+
334
+ Raises
335
+ ------
336
+ ValueError
337
+ If `nodes` is not a clique.
338
+
339
+ See Also
340
+ --------
341
+ find_cliques
342
+ An iterative version of the same algorithm. See docstring for examples.
343
+
344
+ Notes
345
+ -----
346
+ To obtain a list of all maximal cliques, use
347
+ `list(find_cliques_recursive(G))`. However, be aware that in the
348
+ worst-case, the length of this list can be exponential in the number
349
+ of nodes in the graph. This function avoids storing all cliques in memory
350
+ by only keeping current candidate node lists in memory during its search.
351
+
352
+ This implementation is based on the algorithm published by Bron and
353
+ Kerbosch (1973) [1]_, as adapted by Tomita, Tanaka and Takahashi
354
+ (2006) [2]_ and discussed in Cazals and Karande (2008) [3]_. For a
355
+ non-recursive implementation, see :func:`find_cliques`.
356
+
357
+ This algorithm ignores self-loops and parallel edges, since cliques
358
+ are not conventionally defined with such edges.
359
+
360
+ References
361
+ ----------
362
+ .. [1] Bron, C. and Kerbosch, J.
363
+ "Algorithm 457: finding all cliques of an undirected graph".
364
+ *Communications of the ACM* 16, 9 (Sep. 1973), 575--577.
365
+ <http://portal.acm.org/citation.cfm?doid=362342.362367>
366
+
367
+ .. [2] Etsuji Tomita, Akira Tanaka, Haruhisa Takahashi,
368
+ "The worst-case time complexity for generating all maximal
369
+ cliques and computational experiments",
370
+ *Theoretical Computer Science*, Volume 363, Issue 1,
371
+ Computing and Combinatorics,
372
+ 10th Annual International Conference on
373
+ Computing and Combinatorics (COCOON 2004), 25 October 2006, Pages 28--42
374
+ <https://doi.org/10.1016/j.tcs.2006.06.015>
375
+
376
+ .. [3] F. Cazals, C. Karande,
377
+ "A note on the problem of reporting maximal cliques",
378
+ *Theoretical Computer Science*,
379
+ Volume 407, Issues 1--3, 6 November 2008, Pages 564--568,
380
+ <https://doi.org/10.1016/j.tcs.2008.05.010>
381
+
382
+ """
383
+ if len(G) == 0:
384
+ return iter([])
385
+
386
+ adj = {u: {v for v in G[u] if v != u} for u in G}
387
+
388
+ # Initialize Q with the given nodes and subg, cand with their nbrs
389
+ Q = nodes[:] if nodes is not None else []
390
+ cand_init = set(G)
391
+ for node in Q:
392
+ if node not in cand_init:
393
+ raise ValueError(f"The given `nodes` {nodes} do not form a clique")
394
+ cand_init &= adj[node]
395
+
396
+ if not cand_init:
397
+ return iter([Q])
398
+
399
+ subg_init = cand_init.copy()
400
+
401
+ def expand(subg, cand):
402
+ u = max(subg, key=lambda u: len(cand & adj[u]))
403
+ for q in cand - adj[u]:
404
+ cand.remove(q)
405
+ Q.append(q)
406
+ adj_q = adj[q]
407
+ subg_q = subg & adj_q
408
+ if not subg_q:
409
+ yield Q[:]
410
+ else:
411
+ cand_q = cand & adj_q
412
+ if cand_q:
413
+ yield from expand(subg_q, cand_q)
414
+ Q.pop()
415
+
416
+ return expand(subg_init, cand_init)
417
+
418
+
419
+ @nx._dispatchable(returns_graph=True)
420
+ def make_max_clique_graph(G, create_using=None):
421
+ """Returns the maximal clique graph of the given graph.
422
+
423
+ The nodes of the maximal clique graph of `G` are the cliques of
424
+ `G` and an edge joins two cliques if the cliques are not disjoint.
425
+
426
+ Parameters
427
+ ----------
428
+ G : NetworkX graph
429
+
430
+ create_using : NetworkX graph constructor, optional (default=nx.Graph)
431
+ Graph type to create. If graph instance, then cleared before populated.
432
+
433
+ Returns
434
+ -------
435
+ NetworkX graph
436
+ A graph whose nodes are the cliques of `G` and whose edges
437
+ join two cliques if they are not disjoint.
438
+
439
+ Notes
440
+ -----
441
+ This function behaves like the following code::
442
+
443
+ import networkx as nx
444
+
445
+ G = nx.make_clique_bipartite(G)
446
+ cliques = [v for v in G.nodes() if G.nodes[v]["bipartite"] == 0]
447
+ G = nx.bipartite.projected_graph(G, cliques)
448
+ G = nx.relabel_nodes(G, {-v: v - 1 for v in G})
449
+
450
+ It should be faster, though, since it skips all the intermediate
451
+ steps.
452
+
453
+ """
454
+ if create_using is None:
455
+ B = G.__class__()
456
+ else:
457
+ B = nx.empty_graph(0, create_using)
458
+ cliques = list(enumerate(set(c) for c in find_cliques(G)))
459
+ # Add a numbered node for each clique.
460
+ B.add_nodes_from(i for i, c in cliques)
461
+ # Join cliques by an edge if they share a node.
462
+ clique_pairs = combinations(cliques, 2)
463
+ B.add_edges_from((i, j) for (i, c1), (j, c2) in clique_pairs if c1 & c2)
464
+ return B
465
+
466
+
467
+ @nx._dispatchable(returns_graph=True)
468
+ def make_clique_bipartite(G, fpos=None, create_using=None, name=None):
469
+ """Returns the bipartite clique graph corresponding to `G`.
470
+
471
+ In the returned bipartite graph, the "bottom" nodes are the nodes of
472
+ `G` and the "top" nodes represent the maximal cliques of `G`.
473
+ There is an edge from node *v* to clique *C* in the returned graph
474
+ if and only if *v* is an element of *C*.
475
+
476
+ Parameters
477
+ ----------
478
+ G : NetworkX graph
479
+ An undirected graph.
480
+
481
+ fpos : bool
482
+ If True or not None, the returned graph will have an
483
+ additional attribute, `pos`, a dictionary mapping node to
484
+ position in the Euclidean plane.
485
+
486
+ create_using : NetworkX graph constructor, optional (default=nx.Graph)
487
+ Graph type to create. If graph instance, then cleared before populated.
488
+
489
+ Returns
490
+ -------
491
+ NetworkX graph
492
+ A bipartite graph whose "bottom" set is the nodes of the graph
493
+ `G`, whose "top" set is the cliques of `G`, and whose edges
494
+ join nodes of `G` to the cliques that contain them.
495
+
496
+ The nodes of the graph `G` have the node attribute
497
+ 'bipartite' set to 1 and the nodes representing cliques
498
+ have the node attribute 'bipartite' set to 0, as is the
499
+ convention for bipartite graphs in NetworkX.
500
+
501
+ """
502
+ B = nx.empty_graph(0, create_using)
503
+ B.clear()
504
+ # The "bottom" nodes in the bipartite graph are the nodes of the
505
+ # original graph, G.
506
+ B.add_nodes_from(G, bipartite=1)
507
+ for i, cl in enumerate(find_cliques(G)):
508
+ # The "top" nodes in the bipartite graph are the cliques. These
509
+ # nodes get negative numbers as labels.
510
+ name = -i - 1
511
+ B.add_node(name, bipartite=0)
512
+ B.add_edges_from((v, name) for v in cl)
513
+ return B
514
+
515
+
516
+ @nx._dispatchable
517
+ def node_clique_number(G, nodes=None, cliques=None, separate_nodes=False):
518
+ """Returns the size of the largest maximal clique containing each given node.
519
+
520
+ Returns a single or list depending on input nodes.
521
+ An optional list of cliques can be input if already computed.
522
+
523
+ Parameters
524
+ ----------
525
+ G : NetworkX graph
526
+ An undirected graph.
527
+
528
+ cliques : list, optional (default=None)
529
+ A list of cliques, each of which is itself a list of nodes.
530
+ If not specified, the list of all cliques will be computed
531
+ using :func:`find_cliques`.
532
+
533
+ Returns
534
+ -------
535
+ int or dict
536
+ If `nodes` is a single node, returns the size of the
537
+ largest maximal clique in `G` containing that node.
538
+ Otherwise return a dict keyed by node to the size
539
+ of the largest maximal clique containing that node.
540
+
541
+ See Also
542
+ --------
543
+ find_cliques
544
+ find_cliques yields the maximal cliques of G.
545
+ It accepts a `nodes` argument which restricts consideration to
546
+ maximal cliques containing all the given `nodes`.
547
+ The search for the cliques is optimized for `nodes`.
548
+ """
549
+ if cliques is None:
550
+ if nodes is not None:
551
+ # Use ego_graph to decrease size of graph
552
+ # check for single node
553
+ if nodes in G:
554
+ return max(len(c) for c in find_cliques(nx.ego_graph(G, nodes)))
555
+ # handle multiple nodes
556
+ return {
557
+ n: max(len(c) for c in find_cliques(nx.ego_graph(G, n))) for n in nodes
558
+ }
559
+
560
+ # nodes is None--find all cliques
561
+ cliques = list(find_cliques(G))
562
+
563
+ # single node requested
564
+ if nodes in G:
565
+ return max(len(c) for c in cliques if nodes in c)
566
+
567
+ # multiple nodes requested
568
+ # preprocess all nodes (faster than one at a time for even 2 nodes)
569
+ size_for_n = defaultdict(int)
570
+ for c in cliques:
571
+ size_of_c = len(c)
572
+ for n in c:
573
+ if size_for_n[n] < size_of_c:
574
+ size_for_n[n] = size_of_c
575
+ if nodes is None:
576
+ return size_for_n
577
+ return {n: size_for_n[n] for n in nodes}
578
+
579
+
580
+ def number_of_cliques(G, nodes=None, cliques=None):
581
+ """Returns the number of maximal cliques for each node.
582
+
583
+ Returns a single or list depending on input nodes.
584
+ Optional list of cliques can be input if already computed.
585
+ """
586
+ if cliques is None:
587
+ cliques = find_cliques(G)
588
+
589
+ if nodes is None:
590
+ nodes = list(G.nodes()) # none, get entire graph
591
+
592
+ if not isinstance(nodes, list): # check for a list
593
+ v = nodes
594
+ # assume it is a single value
595
+ numcliq = sum(1 for c in cliques if v in c)
596
+ else:
597
+ numcliq = Counter(chain.from_iterable(cliques))
598
+ numcliq = {v: numcliq[v] for v in nodes} # return a dict
599
+ return numcliq
600
+
601
+
602
+ class MaxWeightClique:
603
+ """A class for the maximum weight clique algorithm.
604
+
605
+ This class is a helper for the `max_weight_clique` function. The class
606
+ should not normally be used directly.
607
+
608
+ Parameters
609
+ ----------
610
+ G : NetworkX graph
611
+ The undirected graph for which a maximum weight clique is sought
612
+ weight : string or None, optional (default='weight')
613
+ The node attribute that holds the integer value used as a weight.
614
+ If None, then each node has weight 1.
615
+
616
+ Attributes
617
+ ----------
618
+ G : NetworkX graph
619
+ The undirected graph for which a maximum weight clique is sought
620
+ node_weights: dict
621
+ The weight of each node
622
+ incumbent_nodes : list
623
+ The nodes of the incumbent clique (the best clique found so far)
624
+ incumbent_weight: int
625
+ The weight of the incumbent clique
626
+ """
627
+
628
+ def __init__(self, G, weight):
629
+ self.G = G
630
+ self.incumbent_nodes = []
631
+ self.incumbent_weight = 0
632
+
633
+ if weight is None:
634
+ self.node_weights = dict.fromkeys(G.nodes(), 1)
635
+ else:
636
+ for v in G.nodes():
637
+ if weight not in G.nodes[v]:
638
+ errmsg = f"Node {v!r} does not have the requested weight field."
639
+ raise KeyError(errmsg)
640
+ if not isinstance(G.nodes[v][weight], int):
641
+ errmsg = f"The {weight!r} field of node {v!r} is not an integer."
642
+ raise ValueError(errmsg)
643
+ self.node_weights = {v: G.nodes[v][weight] for v in G.nodes()}
644
+
645
+ def update_incumbent_if_improved(self, C, C_weight):
646
+ """Update the incumbent if the node set C has greater weight.
647
+
648
+ C is assumed to be a clique.
649
+ """
650
+ if C_weight > self.incumbent_weight:
651
+ self.incumbent_nodes = C[:]
652
+ self.incumbent_weight = C_weight
653
+
654
+ def greedily_find_independent_set(self, P):
655
+ """Greedily find an independent set of nodes from a set of
656
+ nodes P."""
657
+ independent_set = []
658
+ P = P[:]
659
+ while P:
660
+ v = P[0]
661
+ independent_set.append(v)
662
+ P = [w for w in P if v != w and not self.G.has_edge(v, w)]
663
+ return independent_set
664
+
665
+ def find_branching_nodes(self, P, target):
666
+ """Find a set of nodes to branch on."""
667
+ residual_wt = {v: self.node_weights[v] for v in P}
668
+ total_wt = 0
669
+ P = P[:]
670
+ while P:
671
+ independent_set = self.greedily_find_independent_set(P)
672
+ min_wt_in_class = min(residual_wt[v] for v in independent_set)
673
+ total_wt += min_wt_in_class
674
+ if total_wt > target:
675
+ break
676
+ for v in independent_set:
677
+ residual_wt[v] -= min_wt_in_class
678
+ P = [v for v in P if residual_wt[v] != 0]
679
+ return P
680
+
681
+ def expand(self, C, C_weight, P):
682
+ """Look for the best clique that contains all the nodes in C and zero or
683
+ more of the nodes in P, backtracking if it can be shown that no such
684
+ clique has greater weight than the incumbent.
685
+ """
686
+ self.update_incumbent_if_improved(C, C_weight)
687
+ branching_nodes = self.find_branching_nodes(P, self.incumbent_weight - C_weight)
688
+ while branching_nodes:
689
+ v = branching_nodes.pop()
690
+ P.remove(v)
691
+ new_C = C + [v]
692
+ new_C_weight = C_weight + self.node_weights[v]
693
+ new_P = [w for w in P if self.G.has_edge(v, w)]
694
+ self.expand(new_C, new_C_weight, new_P)
695
+
696
+ def find_max_weight_clique(self):
697
+ """Find a maximum weight clique."""
698
+ # Sort nodes in reverse order of degree for speed
699
+ nodes = sorted(self.G.nodes(), key=lambda v: self.G.degree(v), reverse=True)
700
+ nodes = [v for v in nodes if self.node_weights[v] > 0]
701
+ self.expand([], 0, nodes)
702
+
703
+
704
+ @not_implemented_for("directed")
705
+ @nx._dispatchable(node_attrs="weight")
706
+ def max_weight_clique(G, weight="weight"):
707
+ """Find a maximum weight clique in G.
708
+
709
+ A *clique* in a graph is a set of nodes such that every two distinct nodes
710
+ are adjacent. The *weight* of a clique is the sum of the weights of its
711
+ nodes. A *maximum weight clique* of graph G is a clique C in G such that
712
+ no clique in G has weight greater than the weight of C.
713
+
714
+ Parameters
715
+ ----------
716
+ G : NetworkX graph
717
+ Undirected graph
718
+ weight : string or None, optional (default='weight')
719
+ The node attribute that holds the integer value used as a weight.
720
+ If None, then each node has weight 1.
721
+
722
+ Returns
723
+ -------
724
+ clique : list
725
+ the nodes of a maximum weight clique
726
+ weight : int
727
+ the weight of a maximum weight clique
728
+
729
+ Notes
730
+ -----
731
+ The implementation is recursive, and therefore it may run into recursion
732
+ depth issues if G contains a clique whose number of nodes is close to the
733
+ recursion depth limit.
734
+
735
+ At each search node, the algorithm greedily constructs a weighted
736
+ independent set cover of part of the graph in order to find a small set of
737
+ nodes on which to branch. The algorithm is very similar to the algorithm
738
+ of Tavares et al. [1]_, other than the fact that the NetworkX version does
739
+ not use bitsets. This style of algorithm for maximum weight clique (and
740
+ maximum weight independent set, which is the same problem but on the
741
+ complement graph) has a decades-long history. See Algorithm B of Warren
742
+ and Hicks [2]_ and the references in that paper.
743
+
744
+ References
745
+ ----------
746
+ .. [1] Tavares, W.A., Neto, M.B.C., Rodrigues, C.D., Michelon, P.: Um
747
+ algoritmo de branch and bound para o problema da clique máxima
748
+ ponderada. Proceedings of XLVII SBPO 1 (2015).
749
+
750
+ .. [2] Warren, Jeffrey S, Hicks, Illya V.: Combinatorial Branch-and-Bound
751
+ for the Maximum Weight Independent Set Problem. Technical Report,
752
+ Texas A&M University (2016).
753
+ """
754
+
755
+ mwc = MaxWeightClique(G, weight)
756
+ mwc.find_max_weight_clique()
757
+ return mwc.incumbent_nodes, mwc.incumbent_weight
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/cluster.py ADDED
@@ -0,0 +1,658 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Algorithms to characterize the number of triangles in a graph."""
2
+
3
+ from collections import Counter
4
+ from itertools import chain, combinations
5
+
6
+ import networkx as nx
7
+ from networkx.utils import not_implemented_for
8
+
9
+ __all__ = [
10
+ "triangles",
11
+ "average_clustering",
12
+ "clustering",
13
+ "transitivity",
14
+ "square_clustering",
15
+ "generalized_degree",
16
+ ]
17
+
18
+
19
+ @not_implemented_for("directed")
20
+ @nx._dispatchable
21
+ def triangles(G, nodes=None):
22
+ """Compute the number of triangles.
23
+
24
+ Finds the number of triangles that include a node as one vertex.
25
+
26
+ Parameters
27
+ ----------
28
+ G : graph
29
+ A networkx graph
30
+
31
+ nodes : node, iterable of nodes, or None (default=None)
32
+ If a singleton node, return the number of triangles for that node.
33
+ If an iterable, compute the number of triangles for each of those nodes.
34
+ If `None` (the default) compute the number of triangles for all nodes in `G`.
35
+
36
+ Returns
37
+ -------
38
+ out : dict or int
39
+ If `nodes` is a container of nodes, returns number of triangles keyed by node (dict).
40
+ If `nodes` is a specific node, returns number of triangles for the node (int).
41
+
42
+ Examples
43
+ --------
44
+ >>> G = nx.complete_graph(5)
45
+ >>> print(nx.triangles(G, 0))
46
+ 6
47
+ >>> print(nx.triangles(G))
48
+ {0: 6, 1: 6, 2: 6, 3: 6, 4: 6}
49
+ >>> print(list(nx.triangles(G, [0, 1]).values()))
50
+ [6, 6]
51
+
52
+ Notes
53
+ -----
54
+ Self loops are ignored.
55
+
56
+ """
57
+ if nodes is not None:
58
+ # If `nodes` represents a single node, return only its number of triangles
59
+ if nodes in G:
60
+ return next(_triangles_and_degree_iter(G, nodes))[2] // 2
61
+
62
+ # if `nodes` is a container of nodes, then return a
63
+ # dictionary mapping node to number of triangles.
64
+ return {v: t // 2 for v, d, t, _ in _triangles_and_degree_iter(G, nodes)}
65
+
66
+ # if nodes is None, then compute triangles for the complete graph
67
+
68
+ # dict used to avoid visiting the same nodes twice
69
+ # this allows calculating/counting each triangle only once
70
+ later_nbrs = {}
71
+
72
+ # iterate over the nodes in a graph
73
+ for node, neighbors in G.adjacency():
74
+ later_nbrs[node] = {n for n in neighbors if n not in later_nbrs and n != node}
75
+
76
+ # instantiate Counter for each node to include isolated nodes
77
+ # add 1 to the count if a nodes neighbor's neighbor is also a neighbor
78
+ triangle_counts = Counter(dict.fromkeys(G, 0))
79
+ for node1, neighbors in later_nbrs.items():
80
+ for node2 in neighbors:
81
+ third_nodes = neighbors & later_nbrs[node2]
82
+ m = len(third_nodes)
83
+ triangle_counts[node1] += m
84
+ triangle_counts[node2] += m
85
+ triangle_counts.update(third_nodes)
86
+
87
+ return dict(triangle_counts)
88
+
89
+
90
@not_implemented_for("multigraph")
def _triangles_and_degree_iter(G, nodes=None):
    """Yield (node, degree, triangles, generalized degree) for each node.

    This double counts triangles so you may want to divide by 2.
    See degree(), triangles() and generalized_degree() for definitions
    and details.

    """
    if nodes is None:
        node_nbr_pairs = G.adj.items()
    else:
        node_nbr_pairs = ((n, G[n]) for n in G.nbunch_iter(nodes))

    for node, nbr_dict in node_nbr_pairs:
        nbrs = set(nbr_dict)
        nbrs.discard(node)  # ignore self-loops
        # gen_degree[m] == number of neighbors sharing exactly m common
        # neighbors with `node` (i.e. edges of triangle multiplicity m).
        gen_degree = Counter(
            len(nbrs.intersection(set(G[w]) - {w})) for w in nbrs
        )
        # Each triangle through `node` is counted once per participating
        # edge, hence twice in total.
        twice_triangles = sum(mult * count for mult, count in gen_degree.items())
        yield (node, len(nbrs), twice_triangles, gen_degree)
109
+
110
+
111
@not_implemented_for("multigraph")
def _weighted_triangles_and_degree_iter(G, nodes=None, weight="weight"):
    """Return an iterator of (node, degree, weighted_triangles).

    Used for weighted clustering.
    Note: this returns the geometric average weight of edges in the triangle.
    Also, each triangle is counted twice (each direction).
    So you may want to divide by 2.

    """
    import numpy as np

    # Normalize edge weights by the maximum weight in the graph; edges
    # missing the weight attribute default to 1.
    if weight is None or G.number_of_edges() == 0:
        max_weight = 1
    else:
        max_weight = max(d.get(weight, 1) for u, v, d in G.edges(data=True))
    if nodes is None:
        nodes_nbrs = G.adj.items()
    else:
        nodes_nbrs = ((n, G[n]) for n in G.nbunch_iter(nodes))

    def wt(u, v):
        # Normalized weight of edge (u, v).
        return G[u][v].get(weight, 1) / max_weight

    for i, nbrs in nodes_nbrs:
        inbrs = set(nbrs) - {i}  # neighbors of i, excluding self-loops
        weighted_triangles = 0
        seen = set()
        for j in inbrs:
            seen.add(j)
            # This avoids counting twice -- we double at the end.
            jnbrs = set(G[j]) - seen
            # Only compute the edge weight once, before the inner inner
            # loop.
            wij = wt(i, j)
            # Each common neighbor k of i and j closes a triangle whose
            # contribution is the geometric mean of its three edge weights.
            weighted_triangles += np.cbrt(
                [(wij * wt(j, k) * wt(k, i)) for k in inbrs & jnbrs]
            ).sum()
        yield (i, len(inbrs), 2 * float(weighted_triangles))
150
+
151
+
152
@not_implemented_for("multigraph")
def _directed_triangles_and_degree_iter(G, nodes=None):
    """Yield (node, total_degree, reciprocal_degree, directed_triangles).

    Used for directed clustering.
    Note that unlike `_triangles_and_degree_iter()`, this function counts
    directed triangles so does not count triangles twice.

    """
    node_pred_succ = ((n, G._pred[n], G._succ[n]) for n in G.nbunch_iter(nodes))

    for node, preds, succs in node_pred_succ:
        in_nbrs = set(preds) - {node}    # predecessors, excluding self-loops
        out_nbrs = set(succs) - {node}   # successors, excluding self-loops

        n_triangles = 0
        for other in chain(in_nbrs, out_nbrs):
            other_preds = set(G._pred[other]) - {other}
            other_succs = set(G._succ[other]) - {other}
            # Every common neighbor, under each of the four possible edge
            # orientations, closes one directed triangle.
            n_triangles += len(in_nbrs & other_preds)
            n_triangles += len(in_nbrs & other_succs)
            n_triangles += len(out_nbrs & other_preds)
            n_triangles += len(out_nbrs & other_succs)

        total_deg = len(in_nbrs) + len(out_nbrs)
        reciprocal_deg = len(in_nbrs & out_nbrs)
        yield (node, total_deg, reciprocal_deg, n_triangles)
184
+
185
+
186
@not_implemented_for("multigraph")
def _directed_weighted_triangles_and_degree_iter(G, nodes=None, weight="weight"):
    """Return an iterator of
    (node, total_degree, reciprocal_degree, directed_weighted_triangles).

    Used for directed weighted clustering.
    Note that unlike `_weighted_triangles_and_degree_iter()`, this function counts
    directed triangles so does not count triangles twice.

    """
    import numpy as np

    # Normalize edge weights by the maximum weight in the graph; edges
    # missing the weight attribute default to 1.
    if weight is None or G.number_of_edges() == 0:
        max_weight = 1
    else:
        max_weight = max(d.get(weight, 1) for u, v, d in G.edges(data=True))

    nodes_nbrs = ((n, G._pred[n], G._succ[n]) for n in G.nbunch_iter(nodes))

    def wt(u, v):
        # Normalized weight of edge (u, v).
        return G[u][v].get(weight, 1) / max_weight

    for i, preds, succs in nodes_nbrs:
        ipreds = set(preds) - {i}   # predecessors of i, excluding self-loops
        isuccs = set(succs) - {i}   # successors of i, excluding self-loops

        # Each term below is the geometric mean of the three edge weights of
        # one directed triangle; the four intersections per neighbor j cover
        # the four possible orientations of the closing pair of edges.
        directed_triangles = 0
        # Triangles whose first edge points into i (j -> i).
        for j in ipreds:
            jpreds = set(G._pred[j]) - {j}
            jsuccs = set(G._succ[j]) - {j}
            directed_triangles += np.cbrt(
                [(wt(j, i) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(j, i) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(j, i) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(j, i) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs]
            ).sum()

        # Triangles whose first edge points out of i (i -> j).
        for j in isuccs:
            jpreds = set(G._pred[j]) - {j}
            jsuccs = set(G._succ[j]) - {j}
            directed_triangles += np.cbrt(
                [(wt(i, j) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(i, j) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(i, j) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(i, j) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs]
            ).sum()

        dtotal = len(ipreds) + len(isuccs)
        dbidirectional = len(ipreds & isuccs)
        yield (i, dtotal, dbidirectional, float(directed_triangles))
248
+
249
+
250
@nx._dispatchable(edge_attrs="weight")
def average_clustering(G, nodes=None, weight=None, count_zeros=True):
    r"""Compute the average clustering coefficient for the graph G.

    The clustering coefficient for the graph is the average,

    .. math::

       C = \frac{1}{n}\sum_{v \in G} c_v,

    where :math:`n` is the number of nodes in `G`.

    Parameters
    ----------
    G : graph

    nodes : container of nodes, optional (default=all nodes in G)
       Compute average clustering for nodes in this container.

    weight : string or None, optional (default=None)
       The edge attribute that holds the numerical value used as a weight.
       If None, then each edge has weight 1.

    count_zeros : bool
       If False include only the nodes with nonzero clustering in the average.

    Returns
    -------
    avg : float
       Average clustering

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.average_clustering(G))
    1.0

    Notes
    -----
    This is a space saving routine; it might be faster
    to use the clustering function to get a list and then take the average.

    Self loops are ignored.

    References
    ----------
    .. [1] Generalizations of the clustering coefficient to weighted
       complex networks by J. Saramäki, M. Kivelä, J.-P. Onnela,
       K. Kaski, and J. Kertész, Physical Review E, 75 027105 (2007).
       http://jponnela.com/web_documents/a9.pdf
    .. [2] Marcus Kaiser,  Mean clustering coefficients: the role of isolated
       nodes and leafs on clustering measures for small-world networks.
       https://arxiv.org/abs/0802.2512
    """
    coefficients = list(clustering(G, nodes, weight=weight).values())
    if not count_zeros:
        # Keep only nodes with a nonzero coefficient in the average.
        coefficients = [v for v in coefficients if abs(v) > 0]
    return sum(coefficients) / len(coefficients)
308
+
309
+
310
@nx._dispatchable(edge_attrs="weight")
def clustering(G, nodes=None, weight=None):
    r"""Compute the clustering coefficient for nodes.

    For unweighted graphs, the clustering of a node :math:`u`
    is the fraction of possible triangles through that node that exist,

    .. math::

      c_u = \frac{2 T(u)}{deg(u)(deg(u)-1)},

    where :math:`T(u)` is the number of triangles through node :math:`u` and
    :math:`deg(u)` is the degree of :math:`u`.

    For weighted graphs, there are several ways to define clustering [1]_.
    the one used here is defined
    as the geometric average of the subgraph edge weights [2]_,

    .. math::

       c_u = \frac{1}{deg(u)(deg(u)-1))}
             \sum_{vw} (\hat{w}_{uv} \hat{w}_{uw} \hat{w}_{vw})^{1/3}.

    The edge weights :math:`\hat{w}_{uv}` are normalized by the maximum weight
    in the network :math:`\hat{w}_{uv} = w_{uv}/\max(w)`.

    The value of :math:`c_u` is assigned to 0 if :math:`deg(u) < 2`.

    Additionally, this weighted definition has been generalized to support negative edge weights [3]_.

    For directed graphs, the clustering is similarly defined as the fraction
    of all possible directed triangles or geometric average of the subgraph
    edge weights for unweighted and weighted directed graph respectively [4]_.

    .. math::

       c_u = \frac{T(u)}{2(deg^{tot}(u)(deg^{tot}(u)-1) - 2deg^{\leftrightarrow}(u))},

    where :math:`T(u)` is the number of directed triangles through node
    :math:`u`, :math:`deg^{tot}(u)` is the sum of in degree and out degree of
    :math:`u` and :math:`deg^{\leftrightarrow}(u)` is the reciprocal degree of
    :math:`u`.


    Parameters
    ----------
    G : graph

    nodes : node, iterable of nodes, or None (default=None)
        If a singleton node, return the number of triangles for that node.
        If an iterable, compute the number of triangles for each of those nodes.
        If `None` (the default) compute the number of triangles for all nodes in `G`.

    weight : string or None, optional (default=None)
       The edge attribute that holds the numerical value used as a weight.
       If None, then each edge has weight 1.

    Returns
    -------
    out : float, or dictionary
       Clustering coefficient at specified nodes

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.clustering(G, 0))
    1.0
    >>> print(nx.clustering(G))
    {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}

    Notes
    -----
    Self loops are ignored.

    References
    ----------
    .. [1] Generalizations of the clustering coefficient to weighted
       complex networks by J. Saramäki, M. Kivelä, J.-P. Onnela,
       K. Kaski, and J. Kertész, Physical Review E, 75 027105 (2007).
       http://jponnela.com/web_documents/a9.pdf
    .. [2] Intensity and coherence of motifs in weighted complex
       networks by J. P. Onnela, J. Saramäki, J. Kertész, and K. Kaski,
       Physical Review E, 71(6), 065103 (2005).
    .. [3] Generalization of Clustering Coefficients to Signed Correlation Networks
       by G. Costantini and M. Perugini, PloS one, 9(2), e88669 (2014).
    .. [4] Clustering in complex directed networks by G. Fagiolo,
       Physical Review E, 76(2), 026107 (2007).
    """
    if G.is_directed():
        # Directed: c = T / (2 * (d_tot * (d_tot - 1) - 2 * d_reciprocal))
        if weight is not None:
            td_iter = _directed_weighted_triangles_and_degree_iter(G, nodes, weight)
        else:
            td_iter = _directed_triangles_and_degree_iter(G, nodes)
        clusterc = {
            v: t / ((dt * (dt - 1) - 2 * db) * 2) if t != 0 else 0
            for v, dt, db, t in td_iter
        }
    else:
        # The formula 2*T/(d*(d-1)) from docs is t/(d*(d-1)) here b/c t==2*T
        if weight is not None:
            clusterc = {
                v: t / (d * (d - 1)) if t != 0 else 0
                for v, d, t in _weighted_triangles_and_degree_iter(G, nodes, weight)
            }
        else:
            clusterc = {
                v: t / (d * (d - 1)) if t != 0 else 0
                for v, d, t, _ in _triangles_and_degree_iter(G, nodes)
            }
    if nodes in G:
        # A single node was requested: unwrap the one-entry dictionary.
        return clusterc[nodes]
    return clusterc
423
+
424
+
425
@nx._dispatchable
def transitivity(G):
    r"""Compute graph transitivity, the fraction of all possible triangles
    present in G.

    Possible triangles are identified by the number of "triads"
    (two edges with a shared vertex).

    The transitivity is

    .. math::

        T = 3\frac{\#triangles}{\#triads}.

    Parameters
    ----------
    G : graph

    Returns
    -------
    out : float
       Transitivity

    Notes
    -----
    Self loops are ignored.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.transitivity(G))
    1.0
    """
    # Accumulate 2*T(v) and d(v)*(d(v)-1) over all nodes in a single pass.
    triangles = 0  # 6 * number of triangles (each counted per node and direction)
    contri = 0  # 2 * number of connected triples ("triads")
    for _v, d, t, _gd in _triangles_and_degree_iter(G):
        triangles += t
        contri += d * (d - 1)
    # An empty graph (or one with no triangles) has transitivity 0.
    return 0 if triangles == 0 else triangles / contri
466
+
467
+
468
@nx._dispatchable
def square_clustering(G, nodes=None):
    r"""Compute the squares clustering coefficient for nodes.

    For each node return the fraction of possible squares that exist at
    the node [1]_

    .. math::
       C_4(v) = \frac{ \sum_{u=1}^{k_v}
       \sum_{w=u+1}^{k_v} q_v(u,w) }{ \sum_{u=1}^{k_v}
       \sum_{w=u+1}^{k_v} [a_v(u,w) + q_v(u,w)]},

    where :math:`q_v(u,w)` are the number of common neighbors of :math:`u` and
    :math:`w` other than :math:`v` (ie squares), and :math:`a_v(u,w) = (k_u -
    (1+q_v(u,w)+\theta_{uv})) + (k_w - (1+q_v(u,w)+\theta_{uw}))`, where
    :math:`\theta_{uw} = 1` if :math:`u` and :math:`w` are connected and 0
    otherwise. [2]_

    Parameters
    ----------
    G : graph

    nodes : container of nodes, optional (default=all nodes in G)
       Compute clustering for nodes in this container.

    Returns
    -------
    c4 : dictionary
       A dictionary keyed by node with the square clustering coefficient value.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.square_clustering(G, 0))
    1.0
    >>> print(nx.square_clustering(G))
    {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}

    Notes
    -----
    Self loops are ignored.

    While :math:`C_3(v)` (triangle clustering) gives the probability that
    two neighbors of node v are connected with each other, :math:`C_4(v)` is
    the probability that two neighbors of node v share a common
    neighbor different from v. This algorithm can be applied to both
    bipartite and unipartite networks.

    References
    ----------
    .. [1] Pedro G. Lind, Marta C. González, and Hans J. Herrmann. 2005
        Cycles and clustering in bipartite networks.
        Physical Review E (72) 056127.
    .. [2] Zhang, Peng et al. Clustering Coefficient and Community Structure of
        Bipartite Networks. Physica A: Statistical Mechanics and its Applications 387.27 (2008): 6869–6875.
        https://arxiv.org/abs/0710.0117v1
    """
    if nodes is None:
        node_iter = G
    else:
        node_iter = G.nbunch_iter(nodes)
    clustering = {}
    _G_adj = G._adj

    class GAdj(dict):
        """Calculate (and cache) node neighbor sets excluding self-loops."""

        def __missing__(self, v):
            # First access for v: build and memoize its neighbor set.
            v_neighbors = self[v] = set(_G_adj[v])
            v_neighbors.discard(v)  # Ignore self-loops
            return v_neighbors

    G_adj = GAdj()  # Values are sets of neighbors (no self-loops)

    for v in node_iter:
        v_neighbors = G_adj[v]
        v_degrees_m1 = len(v_neighbors) - 1  # degrees[v] - 1 (used below)
        if v_degrees_m1 <= 0:
            # Can't form a square without at least two neighbors
            clustering[v] = 0
            continue

        # Count squares with nodes u-v-w-x from the current node v.
        # Terms of the denominator: potential = uw_degrees - uw_count - triangles - squares
        # uw_degrees: degrees[u] + degrees[w] for each u-w combo
        uw_degrees = 0
        # uw_count: 1 for each u and 1 for each w for all combos (degrees * (degrees - 1))
        uw_count = len(v_neighbors) * v_degrees_m1
        # triangles: 1 for each edge where u-w or w-u are connected (i.e. triangles)
        triangles = 0
        # squares: the number of squares (also the numerator)
        squares = 0

        # Iterate over all neighbors
        for u in v_neighbors:
            u_neighbors = G_adj[u]
            uw_degrees += len(u_neighbors) * v_degrees_m1
            # P2 from https://arxiv.org/abs/2007.11111
            p2 = len(u_neighbors & v_neighbors)
            # triangles is C_3, sigma_4 from https://arxiv.org/abs/2007.11111
            # This double-counts triangles compared to `triangles` function
            triangles += p2
            # squares is C_4, sigma_12 from https://arxiv.org/abs/2007.11111
            # Include this term, b/c a neighbor u can also be a neighbor of neighbor x
            squares += p2 * (p2 - 1)  # Will divide by 2 later

        # And iterate over all neighbors of neighbors.
        # These nodes x may be the corners opposite v in squares u-v-w-x.
        two_hop_neighbors = set.union(*(G_adj[u] for u in v_neighbors))
        two_hop_neighbors -= v_neighbors  # Neighbors already counted above
        two_hop_neighbors.discard(v)
        for x in two_hop_neighbors:
            p2 = len(v_neighbors & G_adj[x])
            squares += p2 * (p2 - 1)  # Will divide by 2 later

        # Each square was counted once per unordered (u, w) pair orientation.
        squares //= 2
        potential = uw_degrees - uw_count - triangles - squares
        if potential > 0:
            clustering[v] = squares / potential
        else:
            clustering[v] = 0
    if nodes in G:
        # Return the value of the sole entry in the dictionary.
        return clustering[nodes]
    return clustering
593
+
594
+
595
@not_implemented_for("directed")
@nx._dispatchable
def generalized_degree(G, nodes=None):
    r"""Compute the generalized degree for nodes.

    For each node, the generalized degree shows how many edges of given
    triangle multiplicity the node is connected to. The triangle multiplicity
    of an edge is the number of triangles an edge participates in. The
    generalized degree of node :math:`i` can be written as a vector
    :math:`\mathbf{k}_i=(k_i^{(0)}, \dotsc, k_i^{(N-2)})` where
    :math:`k_i^{(j)}` is the number of edges attached to node :math:`i` that
    participate in :math:`j` triangles.

    Parameters
    ----------
    G : graph

    nodes : container of nodes, optional (default=all nodes in G)
       Compute the generalized degree for nodes in this container.

    Returns
    -------
    out : Counter, or dictionary of Counters
       Generalized degree of specified nodes. The Counter is keyed by edge
       triangle multiplicity.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.generalized_degree(G, 0))
    Counter({3: 4})
    >>> print(nx.generalized_degree(G))
    {0: Counter({3: 4}), 1: Counter({3: 4}), 2: Counter({3: 4}), 3: Counter({3: 4}), 4: Counter({3: 4})}

    To recover the number of triangles attached to a node:

    >>> k1 = nx.generalized_degree(G, 0)
    >>> sum([k * v for k, v in k1.items()]) / 2 == nx.triangles(G, 0)
    True

    Notes
    -----
    Self loops are ignored.

    In a network of N nodes, the highest triangle multiplicity an edge can have
    is N-2.

    The return value does not include a `zero` entry if no edges of a
    particular triangle multiplicity are present.

    The number of triangles node :math:`i` is attached to can be recovered from
    the generalized degree :math:`\mathbf{k}_i=(k_i^{(0)}, \dotsc,
    k_i^{(N-2)})` by :math:`(k_i^{(1)}+2k_i^{(2)}+\dotsc +(N-2)k_i^{(N-2)})/2`.

    References
    ----------
    .. [1] Networks with arbitrary edge multiplicities by V. Zlatić,
        D. Garlaschelli and G. Caldarelli, EPL (Europhysics Letters),
        Volume 97, Number 2 (2012).
        https://iopscience.iop.org/article/10.1209/0295-5075/97/28005
    """
    if nodes in G:
        # Single node: return its generalized-degree Counter directly.
        _node, _deg, _tri, gen_deg = next(_triangles_and_degree_iter(G, nodes))
        return gen_deg
    return {
        node: gen_deg
        for node, _deg, _tri, gen_deg in _triangles_and_degree_iter(G, nodes)
    }
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/communicability_alg.py ADDED
@@ -0,0 +1,163 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Communicability.
3
+ """
4
+
5
+ import networkx as nx
6
+ from networkx.utils import not_implemented_for
7
+
8
+ __all__ = ["communicability", "communicability_exp"]
9
+
10
+
11
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def communicability(G):
    r"""Returns communicability between all pairs of nodes in G.

    The communicability between pairs of nodes in G is the sum of
    walks of different lengths starting at node u and ending at node v.

    Parameters
    ----------
    G: graph

    Returns
    -------
    comm: dictionary of dictionaries
        Dictionary of dictionaries keyed by nodes with communicability
        as the value.

    Raises
    ------
    NetworkXError
       If the graph is not undirected and simple.

    See Also
    --------
    communicability_exp:
       Communicability between all pairs of nodes in G  using spectral
       decomposition.
    communicability_betweenness_centrality:
       Communicability betweenness centrality for each node in G.

    Notes
    -----
    This algorithm uses a spectral decomposition of the adjacency matrix.
    Let G=(V,E) be a simple undirected graph.  Using the connection between
    the powers  of the adjacency matrix and the number of walks in the graph,
    the communicability  between nodes `u` and `v` based on the graph spectrum
    is [1]_

    .. math::
        C(u,v)=\sum_{j=1}^{n}\phi_{j}(u)\phi_{j}(v)e^{\lambda_{j}},

    where `\phi_{j}(u)` is the `u\rm{th}` element of the `j\rm{th}` orthonormal
    eigenvector of the adjacency matrix associated with the eigenvalue
    `\lambda_{j}`.

    References
    ----------
    .. [1] Ernesto Estrada, Naomichi Hatano,
       "Communicability in complex networks",
       Phys. Rev. E 77, 036111 (2008).
       https://arxiv.org/abs/0707.0756

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
    >>> c = nx.communicability(G)
    """
    import numpy as np

    nodelist = list(G)  # ordering of nodes in matrix
    A = nx.to_numpy_array(G, nodelist)
    # convert to 0-1 matrix
    A[A != 0.0] = 1
    w, vec = np.linalg.eigh(A)
    expw = np.exp(w)
    mapping = dict(zip(nodelist, range(len(nodelist))))
    # Reconstruct e^A from the spectrum in one vectorized matrix product,
    # exp(A) = V diag(e^w) V^T, instead of a pure-Python O(n^3) triple loop
    # over (u, v, j).  Scaling the columns of `vec` by `expw` applies the
    # diagonal factor.
    expA = (vec * expw) @ vec.T
    # Unpack the matrix into the documented dict-of-dicts return shape.
    c = {}
    for u in G:
        pu = mapping[u]
        c[u] = {v: float(expA[pu, mapping[v]]) for v in G}
    return c
91
+
92
+
93
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def communicability_exp(G):
    r"""Returns communicability between all pairs of nodes in G.

    Communicability between pair of node (u,v) of node in G is the sum of
    walks of different lengths starting at node u and ending at node v.

    Parameters
    ----------
    G: graph

    Returns
    -------
    comm: dictionary of dictionaries
        Dictionary of dictionaries keyed by nodes with communicability
        as the value.

    Raises
    ------
    NetworkXError
        If the graph is not undirected and simple.

    See Also
    --------
    communicability:
       Communicability between pairs of nodes in G.
    communicability_betweenness_centrality:
       Communicability betweenness centrality for each node in G.

    Notes
    -----
    This algorithm uses matrix exponentiation of the adjacency matrix.

    Let G=(V,E) be a simple undirected graph.  Using the connection between
    the powers  of the adjacency matrix and the number of walks in the graph,
    the communicability between nodes u and v is [1]_,

    .. math::
        C(u,v) = (e^A)_{uv},

    where `A` is the adjacency matrix of G.

    References
    ----------
    .. [1] Ernesto Estrada, Naomichi Hatano,
       "Communicability in complex networks",
       Phys. Rev. E 77, 036111 (2008).
       https://arxiv.org/abs/0707.0756

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
    >>> c = nx.communicability_exp(G)
    """
    import scipy as sp

    nodelist = list(G)  # ordering of nodes in matrix
    A = nx.to_numpy_array(G, nodelist)
    A[A != 0.0] = 1  # convert to 0-1 matrix
    expA = sp.linalg.expm(A)  # communicability matrix e^A
    mapping = {node: idx for idx, node in enumerate(nodelist)}
    # Unpack the matrix into the documented dict-of-dicts return shape.
    return {
        u: {v: float(expA[mapping[u], mapping[v]]) for v in G} for u in G
    }
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/core.py ADDED
@@ -0,0 +1,588 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Find the k-cores of a graph.
3
+
4
+ The k-core is found by recursively pruning nodes with degrees less than k.
5
+
6
+ See the following references for details:
7
+
8
+ An O(m) Algorithm for Cores Decomposition of Networks
9
+ Vladimir Batagelj and Matjaz Zaversnik, 2003.
10
+ https://arxiv.org/abs/cs.DS/0310049
11
+
12
+ Generalized Cores
13
+ Vladimir Batagelj and Matjaz Zaversnik, 2002.
14
+ https://arxiv.org/pdf/cs/0202039
15
+
16
+ For directed graphs a more general notion is that of D-cores which
17
+ looks at (k, l) restrictions on (in, out) degree. The (k, k) D-core
18
+ is the k-core.
19
+
20
+ D-cores: Measuring Collaboration of Directed Graphs Based on Degeneracy
21
+ Christos Giatsidis, Dimitrios M. Thilikos, Michalis Vazirgiannis, ICDM 2011.
22
+ http://www.graphdegeneracy.org/dcores_ICDM_2011.pdf
23
+
24
+ Multi-scale structure and topological anomaly detection via a new network \
25
+ statistic: The onion decomposition
26
+ L. Hébert-Dufresne, J. A. Grochow, and A. Allard
27
+ Scientific Reports 6, 31708 (2016)
28
+ http://doi.org/10.1038/srep31708
29
+
30
+ """
31
+
32
+ import networkx as nx
33
+
34
+ __all__ = [
35
+ "core_number",
36
+ "k_core",
37
+ "k_shell",
38
+ "k_crust",
39
+ "k_corona",
40
+ "k_truss",
41
+ "onion_layers",
42
+ ]
43
+
44
+
45
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable
def core_number(G):
    """Returns the core number for each node.

    A k-core is a maximal subgraph that contains nodes of degree k or more.

    The core number of a node is the largest value k of a k-core containing
    that node.

    Parameters
    ----------
    G : NetworkX graph
       An undirected or directed graph

    Returns
    -------
    core_number : dictionary
       A dictionary keyed by node to the core number.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a multigraph or contains self loops.

    Notes
    -----
    For directed graphs the node degree is defined to be the
    in-degree + out-degree.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> nx.core_number(H)
    {0: 1, 1: 2, 2: 2, 3: 2, 4: 1, 5: 2, 6: 0}
    >>> G = nx.DiGraph()
    >>> G.add_edges_from([(1, 2), (2, 1), (2, 3), (2, 4), (3, 4), (4, 3)])
    >>> nx.core_number(G)
    {1: 2, 2: 2, 3: 2, 4: 2}

    References
    ----------
    .. [1] An O(m) Algorithm for Cores Decomposition of Networks
       Vladimir Batagelj and Matjaz Zaversnik, 2003.
       https://arxiv.org/abs/cs.DS/0310049
    """
    if nx.number_of_selfloops(G) > 0:
        msg = (
            "Input graph has self loops which is not permitted; "
            "Consider using G.remove_edges_from(nx.selfloop_edges(G))."
        )
        raise nx.NetworkXNotImplemented(msg)
    degrees = dict(G.degree())
    # Sort nodes by degree.
    nodes = sorted(degrees, key=degrees.get)
    # bin_boundaries[d] is the index in `nodes` where the run of nodes with
    # (current) degree d starts -- the bucket layout of the Batagelj-Zaversnik
    # O(m) algorithm [1].
    bin_boundaries = [0]
    curr_degree = 0
    for i, v in enumerate(nodes):
        if degrees[v] > curr_degree:
            bin_boundaries.extend([i] * (degrees[v] - curr_degree))
            curr_degree = degrees[v]
    # Position of each node within the sorted `nodes` list.
    node_pos = {v: pos for pos, v in enumerate(nodes)}
    # The initial guess for the core number of a node is its degree.
    core = degrees
    nbrs = {v: list(nx.all_neighbors(G, v)) for v in G}
    # Process nodes in increasing degree order; each pass finalizes v's core
    # number and decrements the provisional core of its higher-core neighbors.
    for v in nodes:
        for u in nbrs[v]:
            if core[u] > core[v]:
                nbrs[u].remove(v)
                # Move u to the front of its current degree bucket, then
                # shrink the bucket by one -- an O(1) "decrement degree".
                pos = node_pos[u]
                bin_start = bin_boundaries[core[u]]
                node_pos[u] = bin_start
                node_pos[nodes[bin_start]] = pos
                nodes[bin_start], nodes[pos] = nodes[pos], nodes[bin_start]
                bin_boundaries[core[u]] += 1
                core[u] -= 1
    return core
+ return core
123
+
124
+
125
def _core_subgraph(G, k_filter, k=None, core=None):
    """Returns the subgraph induced by nodes passing filter `k_filter`.

    Parameters
    ----------
    G : NetworkX graph
       The graph or directed graph to process
    k_filter : filter function
       This function filters the nodes chosen. It takes three inputs:
       A node of G, the filter's cutoff, and the core dict of the graph.
       The function should return a Boolean value.
    k : int, optional
      The order of the core. If not specified use the max core number.
      This value is used as the cutoff for the filter.
    core : dict, optional
      Precomputed core numbers keyed by node for the graph `G`.
      If not specified, the core numbers will be computed from `G`.

    """
    # Fall back to computing core numbers / the maximum core when not given.
    if core is None:
        core = core_number(G)
    if k is None:
        k = max(core.values())
    keep = [node for node in core if k_filter(node, k, core)]
    return G.subgraph(keep).copy()
+ return G.subgraph(nodes).copy()
150
+
151
+
152
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def k_core(G, k=None, core_number=None):
    """Returns the k-core of G.

    A k-core is a maximal subgraph that contains nodes of degree `k` or more.

    Parameters
    ----------
    G : NetworkX graph
      A graph or directed graph
    k : int, optional
      The order of the core.  If not specified return the main core.
    core_number : dictionary, optional
      Precomputed core numbers for the graph G.

    Returns
    -------
    G : NetworkX graph
      The k-core subgraph

    Raises
    ------
    NetworkXNotImplemented
      The k-core is not defined for multigraphs or graphs with self loops.

    Notes
    -----
    The main core is the core with `k` as the largest core_number.

    For directed graphs the node degree is defined to be the
    in-degree + out-degree.

    Graph, node, and edge attributes are copied to the subgraph.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.k_core(H).nodes
    NodeView((1, 2, 3, 5))

    See Also
    --------
    core_number

    References
    ----------
    .. [1] An O(m) Algorithm for Cores Decomposition of Networks
       Vladimir Batagelj and Matjaz Zaversnik,  2003.
       https://arxiv.org/abs/cs.DS/0310049
    """
    # A node belongs to the k-core iff its core number is at least k.
    return _core_subgraph(G, lambda v, k, c: c[v] >= k, k, core_number)
+ return _core_subgraph(G, k_filter, k, core_number)
211
+
212
+
213
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def k_shell(G, k=None, core_number=None):
    """Returns the k-shell of G.

    The k-shell is the subgraph induced by nodes with core number exactly
    `k` -- the nodes in the k-core that are not in the (k+1)-core.

    Parameters
    ----------
    G : NetworkX graph
        A graph or directed graph.
    k : int, optional
        The order of the shell. When omitted, the outermost shell is
        returned.
    core_number : dictionary, optional
        Precomputed core numbers keyed by node for the graph `G`.

    Returns
    -------
    G : NetworkX graph
        The k-shell subgraph. Graph, node, and edge attributes are copied.

    Raises
    ------
    NetworkXNotImplemented
        The k-shell is not implemented for multigraphs or graphs with
        self loops.

    Notes
    -----
    This is similar to k_corona, but k_corona considers only neighbors
    that are themselves in the k-core.

    For directed graphs the node degree is taken to be the
    in-degree + out-degree.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.k_shell(H, k=1).nodes
    NodeView((0, 4))

    See Also
    --------
    core_number
    k_corona

    References
    ----------
    .. [1] A model of Internet topology using k-shell decomposition
       Shai Carmi, Shlomo Havlin, Scott Kirkpatrick, Yuval Shavitt,
       and Eran Shir, PNAS July 3, 2007 vol. 104 no. 27 11150-11154
       http://www.pnas.org/content/104/27/11150.full
    """
    # Keep only nodes whose core number equals the cutoff exactly.
    return _core_subgraph(G, lambda v, cutoff, core: core[v] == cutoff, k, core_number)
278
+
279
+
280
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def k_crust(G, k=None, core_number=None):
    """Returns the k-crust of G.

    The k-crust is the graph G with the edges of the k-core removed,
    also dropping any nodes left isolated by that removal.

    Parameters
    ----------
    G : NetworkX graph
        A graph or directed graph.
    k : int, optional
        The order of the crust. When omitted, the main crust is returned.
    core_number : dictionary, optional
        Precomputed core numbers keyed by node for the graph `G`.

    Returns
    -------
    G : NetworkX graph
        The k-crust subgraph. Graph, node, and edge attributes are copied.

    Raises
    ------
    NetworkXNotImplemented
        The k-crust is not implemented for multigraphs or graphs with
        self loops.

    Notes
    -----
    This definition of k-crust differs from the one in [1]_: the
    k-crust of [1]_ is equivalent to the (k+1)-crust of this algorithm.

    For directed graphs the node degree is taken to be the
    in-degree + out-degree.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.k_crust(H, k=1).nodes
    NodeView((0, 4, 6))

    See Also
    --------
    core_number

    References
    ----------
    .. [1] A model of Internet topology using k-shell decomposition
       Shai Carmi, Shlomo Havlin, Scott Kirkpatrick, Yuval Shavitt,
       and Eran Shir, PNAS July 3, 2007 vol. 104 no. 27 11150-11154
       http://www.pnas.org/content/104/27/11150.full
    """
    # The default for `k` is one less than in _core_subgraph, so the
    # filtering (core number <= k) is done inline here instead.
    if core_number is None:
        core_number = nx.core_number(G)
    threshold = k if k is not None else max(core_number.values()) - 1
    crust_nodes = [v for v, cn in core_number.items() if cn <= threshold]
    return G.subgraph(crust_nodes).copy()
345
+
346
+
347
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def k_corona(G, k, core_number=None):
    """Returns the k-corona of G.

    The k-corona is the subgraph of nodes in the k-core that have
    exactly k neighbors in the k-core.

    Parameters
    ----------
    G : NetworkX graph
        A graph or directed graph.
    k : int
        The order of the corona.
    core_number : dictionary, optional
        Precomputed core numbers keyed by node for the graph `G`.

    Returns
    -------
    G : NetworkX graph
        The k-corona subgraph. Graph, node, and edge attributes are copied.

    Raises
    ------
    NetworkXNotImplemented
        The k-corona is not defined for multigraphs or graphs with
        self loops.

    Notes
    -----
    For directed graphs the node degree is taken to be the
    in-degree + out-degree.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.k_corona(H, k=2).nodes
    NodeView((1, 2, 3, 5))

    See Also
    --------
    core_number

    References
    ----------
    .. [1] k -core (bootstrap) percolation on complex networks:
       Critical phenomena and nonlocal effects,
       A. V. Goltsev, S. N. Dorogovtsev, and J. F. F. Mendes,
       Phys. Rev. E 73, 056101 (2006)
       http://link.aps.org/doi/10.1103/PhysRevE.73.056101
    """

    def in_corona(v, cutoff, core):
        # Membership check first (cheap) short-circuits the neighbor count.
        if core[v] != cutoff:
            return False
        return sum(1 for w in G[v] if core[w] >= cutoff) == cutoff

    return _core_subgraph(G, in_corona, k, core_number)
407
+
408
+
409
@nx.utils.not_implemented_for("directed")
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def k_truss(G, k):
    """Returns the k-truss of `G`.

    The k-truss is the maximal induced subgraph of `G` containing at
    least three vertices in which every edge is incident to at least
    `k - 2` triangles.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.
    k : int
        The order of the truss.

    Returns
    -------
    H : NetworkX graph
        The k-truss subgraph. Graph, node, and edge attributes are copied.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a multigraph or directed graph or if it contains
        self loops.

    Notes
    -----
    A k-clique is a (k-2)-truss and a k-truss is a (k+1)-core.

    K-trusses were originally defined in [2] which states that the k-truss
    is the maximal induced subgraph where each edge belongs to at least
    `k-2` triangles. A more recent paper, [1], uses a slightly different
    definition requiring that each edge belong to at least `k` triangles.
    This implementation uses the original definition of `k-2` triangles.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.k_truss(H, k=2).nodes
    NodeView((0, 1, 2, 3, 4, 5))

    References
    ----------
    .. [1] Bounds and Algorithms for k-truss. Paul Burkhardt, Vance Faber,
       David G. Harris, 2018. https://arxiv.org/abs/1806.05523v2
    .. [2] Trusses: Cohesive Subgraphs for Social Network Analysis. Jonathan
       Cohen, 2005.
    """
    if nx.number_of_selfloops(G) > 0:
        msg = (
            "Input graph has self loops which is not permitted; "
            "Consider using G.remove_edges_from(nx.selfloop_edges(G))."
        )
        raise nx.NetworkXNotImplemented(msg)

    H = G.copy()

    # Repeatedly sweep all edges, dropping those supported by fewer than
    # k - 2 triangles, until one full sweep drops nothing.
    dropped_some = True
    while dropped_some:
        doomed = []
        visited = set()
        for u in H:
            u_nbrs = set(H[u])
            visited.add(u)
            # Each undirected edge is examined exactly once per sweep.
            for v in u_nbrs - visited:
                # |N(u) & N(v)| is the number of triangles on edge (u, v).
                if len(u_nbrs & set(H[v])) < (k - 2):
                    doomed.append((u, v))
        H.remove_edges_from(doomed)
        dropped_some = len(doomed) > 0
        # Nodes left without edges cannot be part of the truss.
        H.remove_nodes_from(list(nx.isolates(H)))

    return H
489
+
490
+
491
@nx.utils.not_implemented_for("multigraph")
@nx.utils.not_implemented_for("directed")
@nx._dispatchable
def onion_layers(G):
    """Returns the layer of each vertex in an onion decomposition of the graph.

    The onion decomposition refines the k-core decomposition by providing
    information on the internal organization of each k-shell. It is usually
    used alongside the `core numbers`.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph without self loops.

    Returns
    -------
    od_layers : dictionary
        A dictionary keyed by node to the onion layer. The layers are
        contiguous integers starting at 1.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a multigraph or directed graph or if it contains
        self loops.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.onion_layers(H)
    {6: 1, 0: 2, 4: 3, 1: 4, 2: 4, 3: 4, 5: 4}

    See Also
    --------
    core_number

    References
    ----------
    .. [1] Multi-scale structure and topological anomaly detection via a new
       network statistic: The onion decomposition
       L. Hébert-Dufresne, J. A. Grochow, and A. Allard
       Scientific Reports 6, 31708 (2016)
       http://doi.org/10.1038/srep31708
    .. [2] Percolation and the effective structure of complex networks
       A. Allard and L. Hébert-Dufresne
       Physical Review X 9, 011023 (2019)
       http://doi.org/10.1103/PhysRevX.9.011023
    """
    if nx.number_of_selfloops(G) > 0:
        msg = (
            "Input graph contains self loops which is not permitted; "
            "Consider using G.remove_edges_from(nx.selfloop_edges(G))."
        )
        raise nx.NetworkXNotImplemented(msg)
    # Result: node -> onion layer.
    od_layers = {}
    # Mutable adjacency lists; edges are deleted as vertices are peeled.
    adjacency = {v: list(nx.all_neighbors(G, v)) for v in G}
    # Effective (remaining) degree of each unpeeled vertex.
    remaining_degree = dict(G.degree())
    current_core = 1
    current_layer = 1
    # Degree-0 vertices, if any, make up layer 1 on their own.
    lone_vertices = list(nx.isolates(G))
    if len(lone_vertices) > 0:
        for v in lone_vertices:
            od_layers[v] = current_layer
            remaining_degree.pop(v)
        current_layer = 2
    # Peel the remaining vertices layer by layer.
    while len(remaining_degree) > 0:
        # Visit vertices in order of ascending effective degree.
        ordering = sorted(remaining_degree, key=remaining_degree.get)
        # The core only ever grows; bump it when the minimum degree exceeds it.
        smallest = remaining_degree[ordering[0]]
        if smallest > current_core:
            current_core = smallest
        # The current layer is the prefix of vertices whose effective
        # degree does not exceed the current core.
        this_layer = []
        for v in ordering:
            if remaining_degree[v] > current_core:
                break
            this_layer.append(v)
        # Record the layer and remove the peeled vertices from the graph.
        for v in this_layer:
            od_layers[v] = current_layer
            for w in adjacency[v]:
                # Mutual removal keeps the adjacency lists of vertices
                # peeled later in this same layer consistent.
                adjacency[w].remove(v)
                remaining_degree[w] -= 1
            remaining_degree.pop(v)
        current_layer += 1
    return od_layers
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/covering.py ADDED
@@ -0,0 +1,142 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions related to graph covers."""
2
+
3
+ from functools import partial
4
+ from itertools import chain
5
+
6
+ import networkx as nx
7
+ from networkx.utils import arbitrary_element, not_implemented_for
8
+
9
+ __all__ = ["min_edge_cover", "is_edge_cover"]
10
+
11
+
12
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def min_edge_cover(G, matching_algorithm=None):
    """Returns the min cardinality edge cover of the graph as a set of edges.

    A smallest edge cover can be found in polynomial time by finding
    a maximum matching and extending it greedily so that all nodes
    are covered. This function follows that process. A maximum matching
    algorithm can be specified for the first step of the algorithm.
    The resulting set may return a set with one 2-tuple for each edge,
    (the usual case) or with both 2-tuples `(u, v)` and `(v, u)` for
    each edge. The latter is only done when a bipartite matching algorithm
    is specified as `matching_algorithm`.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    matching_algorithm : function
        A function that returns a maximum cardinality matching for `G`.
        The function must take one input, the graph `G`, and return
        either a set of edges (with only one direction for the pair of
        nodes) or a dictionary mapping each node to its mate. If not
        specified, :func:`~networkx.algorithms.matching.max_weight_matching`
        is used. Common bipartite matching functions include
        :func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching`
        or
        :func:`~networkx.algorithms.bipartite.matching.eppstein_matching`.

    Returns
    -------
    min_cover : set
        A set of the edges in a minimum edge cover in the form of tuples.
        It contains only one of the equivalent 2-tuples `(u, v)` and
        `(v, u)` for each edge. If a bipartite method is used to compute
        the matching, the returned set contains both the 2-tuples `(u, v)`
        and `(v, u)` for each edge of a minimum edge cover.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> sorted(nx.min_edge_cover(G))
    [(2, 1), (3, 0)]

    Notes
    -----
    An edge cover of a graph is a set of edges such that every node of
    the graph is incident to at least one edge of the set.
    The minimum edge cover is an edge covering of smallest cardinality.

    Due to its implementation, the worst-case running time of this
    algorithm is bounded by the worst-case running time of the function
    ``matching_algorithm``.

    Minimum edge cover for `G` can also be found using
    :func:`~networkx.algorithms.bipartite.covering.min_edge_covering` which
    is simply this function with a default matching algorithm of
    :func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching`
    """
    if len(G) == 0:
        return set()
    if nx.number_of_isolates(G) > 0:
        # An isolated node can never be covered, so no edge cover exists.
        raise nx.NetworkXException(
            "Graph has a node with no edge incident on it, so no edge cover exists."
        )
    if matching_algorithm is None:
        matching_algorithm = partial(nx.max_weight_matching, maxcardinality=True)
    matching = matching_algorithm(G)
    # Start the cover from the maximum matching; it is always a superset.
    try:
        # Bipartite matching algorithms return a dict (node -> mate), so
        # convert if needed. The dict form yields both edge directions.
        cover = set(matching.items())
        keep_both_directions = True
    except AttributeError:
        cover = matching
        keep_both_directions = False
    # Greedily extend the cover to the nodes the matching missed.
    endpoints = {n for edge in cover for n in edge}
    for v in set(G) - endpoints:
        # Since `v` is uncovered, each edge incident to `v` joins it with a
        # covered node (an edge between two uncovered nodes would have been
        # found by the maximum matching algorithm), so any incident edge
        # will do. (This applies only in a simple graph, not a multigraph.)
        u = arbitrary_element(G[v])
        cover.add((u, v))
        if keep_both_directions:
            cover.add((v, u))
    return cover
106
+
107
+
108
@not_implemented_for("directed")
@nx._dispatchable
def is_edge_cover(G, cover):
    """Decides whether a set of edges is a valid edge cover of the graph.

    A set of edges is an edge cover exactly when every node of the graph
    is incident to at least one edge of the set, so it suffices to check
    that the edge endpoints include every node.

    Parameters
    ----------
    G : NetworkX graph
        An undirected bipartite graph.

    cover : set
        Set of edges to be checked.

    Returns
    -------
    bool
        Whether the set of edges is a valid edge cover of the graph.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> cover = {(2, 1), (3, 0)}
    >>> nx.is_edge_cover(G, cover)
    True

    Notes
    -----
    An edge cover of a graph is a set of edges such that every node of
    the graph is incident to at least one edge of the set.
    """
    covered_nodes = set(chain.from_iterable(cover))
    return set(G).issubset(covered_nodes)
tool_server/.venv/lib/python3.12/site-packages/networkx/algorithms/cuts.py ADDED
@@ -0,0 +1,398 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions for finding and evaluating cuts in a graph."""
2
+
3
+ from itertools import chain
4
+
5
+ import networkx as nx
6
+
7
+ __all__ = [
8
+ "boundary_expansion",
9
+ "conductance",
10
+ "cut_size",
11
+ "edge_expansion",
12
+ "mixing_expansion",
13
+ "node_expansion",
14
+ "normalized_cut_size",
15
+ "volume",
16
+ ]
17
+
18
+
19
+ # TODO STILL NEED TO UPDATE ALL THE DOCUMENTATION!
20
+
21
+
22
@nx._dispatchable(edge_attrs="weight")
def cut_size(G, S, T=None, weight=None):
    """Returns the size of the cut between two sets of nodes.

    A *cut* is a partition of the nodes of a graph into two sets. The
    *cut size* is the sum of the weights of the edges "between" the two
    sets of nodes.

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    T : collection
        A collection of nodes in `G`. If not specified, this is taken to
        be the set complement of `S`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        Total weight of all edges from nodes in set `S` to nodes in
        set `T` (and, in the case of directed graphs, all edges from
        nodes in `T` to nodes in `S`).

    Examples
    --------
    In the graph with two cliques joined by a single edges, the natural
    bipartition of the graph into two blocks, one for each clique,
    yields a cut of weight one::

        >>> G = nx.barbell_graph(3, 0)
        >>> S = {0, 1, 2}
        >>> T = {3, 4, 5}
        >>> nx.cut_size(G, S, T)
        1

    Each parallel edge in a multigraph is counted when determining the
    cut size::

        >>> G = nx.MultiGraph(["ab", "ab"])
        >>> S = {"a"}
        >>> T = {"b"}
        >>> nx.cut_size(G, S, T)
        2

    Notes
    -----
    In a multigraph, the cut size is the total weight of edges including
    multiplicity.

    """
    boundary = nx.edge_boundary(G, S, T, data=weight, default=1)
    if G.is_directed():
        # A directed cut counts edges crossing in both directions.
        boundary = chain(boundary, nx.edge_boundary(G, T, S, data=weight, default=1))
    return sum(w for _, _, w in boundary)
83
+
84
+
85
@nx._dispatchable(edge_attrs="weight")
def volume(G, S, weight=None):
    """Returns the volume of a set of nodes.

    The *volume* of a set *S* is the sum of the (out-)degrees of nodes
    in *S* (taking into account parallel edges in multigraphs). [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        The volume of the set of nodes represented by `S` in the graph
        `G`.

    See also
    --------
    conductance
    cut_size
    edge_expansion
    edge_boundary
    normalized_cut_size

    References
    ----------
    .. [1] David Gleich.
           *Hierarchical Directed Spectral Graph Partitioning*.
           <https://www.cs.purdue.edu/homes/dgleich/publications/Gleich%202005%20-%20hierarchical%20directed%20spectral.pdf>

    """
    # Out-degree for directed graphs, plain degree otherwise.
    degree_view = G.out_degree if G.is_directed() else G.degree
    return sum(deg for _, deg in degree_view(S, weight=weight))
126
+
127
+
128
@nx._dispatchable(edge_attrs="weight")
def normalized_cut_size(G, S, T=None, weight=None):
    """Returns the normalized size of the cut between two sets of nodes.

    The *normalized cut size* is the cut size times the sum of the
    reciprocal sizes of the volumes of the two sets. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    T : collection
        A collection of nodes in `G`. If not specified, this is taken to
        be the set complement of `S`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        The normalized cut size between the two sets `S` and `T`.

    Notes
    -----
    In a multigraph, the cut size is the total weight of edges including
    multiplicity.

    See also
    --------
    conductance
    cut_size
    edge_expansion
    volume

    References
    ----------
    .. [1] David Gleich.
           *Hierarchical Directed Spectral Graph Partitioning*.
           <https://www.cs.purdue.edu/homes/dgleich/publications/Gleich%202005%20-%20hierarchical%20directed%20spectral.pdf>

    """
    if T is None:
        T = set(G) - set(S)
    crossing_weight = cut_size(G, S, T=T, weight=weight)
    return crossing_weight * (
        (1 / volume(G, S, weight=weight)) + (1 / volume(G, T, weight=weight))
    )
179
+
180
+
181
@nx._dispatchable(edge_attrs="weight")
def conductance(G, S, T=None, weight=None):
    """Returns the conductance of two sets of nodes.

    The *conductance* is the quotient of the cut size and the smaller of
    the volumes of the two sets. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    T : collection
        A collection of nodes in `G`. If not specified, this is taken to
        be the set complement of `S`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        The conductance between the two sets `S` and `T`.

    See also
    --------
    cut_size
    edge_expansion
    normalized_cut_size
    volume

    References
    ----------
    .. [1] David Gleich.
           *Hierarchical Directed Spectral Graph Partitioning*.
           <https://www.cs.purdue.edu/homes/dgleich/publications/Gleich%202005%20-%20hierarchical%20directed%20spectral.pdf>

    """
    if T is None:
        T = set(G) - set(S)
    crossing_weight = cut_size(G, S, T, weight=weight)
    smaller_volume = min(volume(G, S, weight=weight), volume(G, T, weight=weight))
    return crossing_weight / smaller_volume
227
+
228
+
229
@nx._dispatchable(edge_attrs="weight")
def edge_expansion(G, S, T=None, weight=None):
    """Returns the edge expansion between two node sets.

    The *edge expansion* is the quotient of the cut size and the smaller
    of the cardinalities of the two sets. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    T : collection
        A collection of nodes in `G`. If not specified, this is taken to
        be the set complement of `S`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        The edge expansion between the two sets `S` and `T`.

    See also
    --------
    boundary_expansion
    mixing_expansion
    node_expansion

    References
    ----------
    .. [1] Fan Chung.
           *Spectral Graph Theory*.
           (CBMS Regional Conference Series in Mathematics, No. 92),
           American Mathematical Society, 1997, ISBN 0-8218-0315-8
           <http://www.math.ucsd.edu/~fan/research/revised.html>

    """
    if T is None:
        T = set(G) - set(S)
    crossing_weight = cut_size(G, S, T=T, weight=weight)
    return crossing_weight / min(len(S), len(T))
274
+
275
+
276
@nx._dispatchable(edge_attrs="weight")
def mixing_expansion(G, S, T=None, weight=None):
    """Returns the mixing expansion between two node sets.

    The *mixing expansion* is the quotient of the cut size and twice the
    number of edges in the graph. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    T : collection
        A collection of nodes in `G`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        The mixing expansion between the two sets `S` and `T`.

    See also
    --------
    boundary_expansion
    edge_expansion
    node_expansion

    References
    ----------
    .. [1] Vadhan, Salil P.
           "Pseudorandomness."
           *Foundations and Trends
           in Theoretical Computer Science* 7.1–3 (2011): 1–336.
           <https://doi.org/10.1561/0400000010>

    """
    crossing_weight = cut_size(G, S, T=T, weight=weight)
    return crossing_weight / (2 * G.number_of_edges())
320
+
321
+
322
+ # TODO What is the generalization to two arguments, S and T? Does the
323
+ # denominator become `min(len(S), len(T))`?
324
@nx._dispatchable
def node_expansion(G, S):
    """Returns the node expansion of the set `S`.

    The *node expansion* is the quotient of the size of the node
    boundary of *S* and the cardinality of *S*. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    Returns
    -------
    number
        The node expansion of the set `S`.

    See also
    --------
    boundary_expansion
    edge_expansion
    mixing_expansion

    References
    ----------
    .. [1] Vadhan, Salil P.
           "Pseudorandomness."
           *Foundations and Trends
           in Theoretical Computer Science* 7.1–3 (2011): 1–336.
           <https://doi.org/10.1561/0400000010>

    """
    # Union of all neighborhoods of nodes in S (may include nodes of S).
    neighborhood = {w for v in S for w in G.neighbors(v)}
    return len(neighborhood) / len(S)
360
+
361
+
362
+ # TODO What is the generalization to two arguments, S and T? Does the
363
+ # denominator become `min(len(S), len(T))`?
364
@nx._dispatchable
def boundary_expansion(G, S):
    """Returns the boundary expansion of the set `S`.

    The *boundary expansion* is the quotient of the size
    of the node boundary and the cardinality of *S*. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : collection
        A collection of nodes in `G`.

    Returns
    -------
    number
        The boundary expansion of the set `S`.

    See also
    --------
    edge_expansion
    mixing_expansion
    node_expansion

    References
    ----------
    .. [1] Vadhan, Salil P.
           "Pseudorandomness."
           *Foundations and Trends in Theoretical Computer Science*
           7.1–3 (2011): 1–336.
           <https://doi.org/10.1561/0400000010>

    """
    boundary = nx.node_boundary(G, S)
    return len(boundary) / len(S)