Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- wemm/lib/python3.10/site-packages/botocore/data/m2/2021-04-28/endpoint-rule-set-1.json.gz +3 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__init__.py +87 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/__init__.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/generators.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/centrality.py +290 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/edgelist.py +360 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/projection.py +526 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/redundancy.py +112 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/spectral.py +69 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/__init__.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_basic.py +125 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_edgelist.py +240 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_matching.py +327 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/dispersion.py +107 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/flow_matrix.py +130 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/laplacian.py +150 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/__init__.py +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/test_closeness_centrality.py +307 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/test_laplacian_centrality.py +221 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/clique.py +755 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/cycles.py +1230 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/distance_regular.py +238 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/dominating.py +95 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/euler.py +470 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__init__.py +7 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/__init__.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/ismags.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorph.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/ismags.py +1163 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorph.py +249 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/__init__.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphism.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphvf2.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_tree_isomorphism.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp_helpers.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2userfunc.cpython-310.pyc +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99 +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99 +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99 +0 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py +327 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py +48 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py +212 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py +292 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp.py +1608 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py +200 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/vf2pp.py +1075 -0
- wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py +192 -0
wemm/lib/python3.10/site-packages/botocore/data/m2/2021-04-28/endpoint-rule-set-1.json.gz
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:88c6e0a16a1567c4c2925bb8a62d4d85d50d8d666ab5d0a0341e0de61892b4a5
|
| 3 |
+
size 1134
|
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__init__.py
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
r"""This module provides functions and operations for bipartite
|
| 2 |
+
graphs. Bipartite graphs `B = (U, V, E)` have two node sets `U,V` and edges in
|
| 3 |
+
`E` that only connect nodes from opposite sets. It is common in the literature
|
| 4 |
+
to use an spatial analogy referring to the two node sets as top and bottom nodes.
|
| 5 |
+
|
| 6 |
+
The bipartite algorithms are not imported into the networkx namespace
|
| 7 |
+
at the top level so the easiest way to use them is with:
|
| 8 |
+
|
| 9 |
+
>>> from networkx.algorithms import bipartite
|
| 10 |
+
|
| 11 |
+
NetworkX does not have a custom bipartite graph class but the Graph()
|
| 12 |
+
or DiGraph() classes can be used to represent bipartite graphs. However,
|
| 13 |
+
you have to keep track of which set each node belongs to, and make
|
| 14 |
+
sure that there is no edge between nodes of the same set. The convention used
|
| 15 |
+
in NetworkX is to use a node attribute named `bipartite` with values 0 or 1 to
|
| 16 |
+
identify the sets each node belongs to. This convention is not enforced in
|
| 17 |
+
the source code of bipartite functions, it's only a recommendation.
|
| 18 |
+
|
| 19 |
+
For example:
|
| 20 |
+
|
| 21 |
+
>>> B = nx.Graph()
|
| 22 |
+
>>> # Add nodes with the node attribute "bipartite"
|
| 23 |
+
>>> B.add_nodes_from([1, 2, 3, 4], bipartite=0)
|
| 24 |
+
>>> B.add_nodes_from(["a", "b", "c"], bipartite=1)
|
| 25 |
+
>>> # Add edges only between nodes of opposite node sets
|
| 26 |
+
>>> B.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
|
| 27 |
+
|
| 28 |
+
Many algorithms of the bipartite module of NetworkX require, as an argument, a
|
| 29 |
+
container with all the nodes that belong to one set, in addition to the bipartite
|
| 30 |
+
graph `B`. The functions in the bipartite package do not check that the node set
|
| 31 |
+
is actually correct nor that the input graph is actually bipartite.
|
| 32 |
+
If `B` is connected, you can find the two node sets using a two-coloring
|
| 33 |
+
algorithm:
|
| 34 |
+
|
| 35 |
+
>>> nx.is_connected(B)
|
| 36 |
+
True
|
| 37 |
+
>>> bottom_nodes, top_nodes = bipartite.sets(B)
|
| 38 |
+
|
| 39 |
+
However, if the input graph is not connected, there are more than one possible
|
| 40 |
+
colorations. This is the reason why we require the user to pass a container
|
| 41 |
+
with all nodes of one bipartite node set as an argument to most bipartite
|
| 42 |
+
functions. In the face of ambiguity, we refuse the temptation to guess and
|
| 43 |
+
raise an :exc:`AmbiguousSolution <networkx.AmbiguousSolution>`
|
| 44 |
+
Exception if the input graph for
|
| 45 |
+
:func:`bipartite.sets <networkx.algorithms.bipartite.basic.sets>`
|
| 46 |
+
is disconnected.
|
| 47 |
+
|
| 48 |
+
Using the `bipartite` node attribute, you can easily get the two node sets:
|
| 49 |
+
|
| 50 |
+
>>> top_nodes = {n for n, d in B.nodes(data=True) if d["bipartite"] == 0}
|
| 51 |
+
>>> bottom_nodes = set(B) - top_nodes
|
| 52 |
+
|
| 53 |
+
So you can easily use the bipartite algorithms that require, as an argument, a
|
| 54 |
+
container with all nodes that belong to one node set:
|
| 55 |
+
|
| 56 |
+
>>> print(round(bipartite.density(B, bottom_nodes), 2))
|
| 57 |
+
0.5
|
| 58 |
+
>>> G = bipartite.projected_graph(B, top_nodes)
|
| 59 |
+
|
| 60 |
+
All bipartite graph generators in NetworkX build bipartite graphs with the
|
| 61 |
+
`bipartite` node attribute. Thus, you can use the same approach:
|
| 62 |
+
|
| 63 |
+
>>> RB = bipartite.random_graph(5, 7, 0.2)
|
| 64 |
+
>>> RB_top = {n for n, d in RB.nodes(data=True) if d["bipartite"] == 0}
|
| 65 |
+
>>> RB_bottom = set(RB) - RB_top
|
| 66 |
+
>>> list(RB_top)
|
| 67 |
+
[0, 1, 2, 3, 4]
|
| 68 |
+
>>> list(RB_bottom)
|
| 69 |
+
[5, 6, 7, 8, 9, 10, 11]
|
| 70 |
+
|
| 71 |
+
For other bipartite graph generators see
|
| 72 |
+
:mod:`Generators <networkx.algorithms.bipartite.generators>`.
|
| 73 |
+
|
| 74 |
+
"""
|
| 75 |
+
|
| 76 |
+
from networkx.algorithms.bipartite.basic import *
|
| 77 |
+
from networkx.algorithms.bipartite.centrality import *
|
| 78 |
+
from networkx.algorithms.bipartite.cluster import *
|
| 79 |
+
from networkx.algorithms.bipartite.covering import *
|
| 80 |
+
from networkx.algorithms.bipartite.edgelist import *
|
| 81 |
+
from networkx.algorithms.bipartite.matching import *
|
| 82 |
+
from networkx.algorithms.bipartite.matrix import *
|
| 83 |
+
from networkx.algorithms.bipartite.projection import *
|
| 84 |
+
from networkx.algorithms.bipartite.redundancy import *
|
| 85 |
+
from networkx.algorithms.bipartite.spectral import *
|
| 86 |
+
from networkx.algorithms.bipartite.generators import *
|
| 87 |
+
from networkx.algorithms.bipartite.extendability import *
|
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (3.98 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/__pycache__/generators.cpython-310.pyc
ADDED
|
Binary file (18.8 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/centrality.py
ADDED
|
@@ -0,0 +1,290 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import networkx as nx
|
| 2 |
+
|
| 3 |
+
__all__ = ["degree_centrality", "betweenness_centrality", "closeness_centrality"]
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
@nx._dispatchable(name="bipartite_degree_centrality")
|
| 7 |
+
def degree_centrality(G, nodes):
|
| 8 |
+
r"""Compute the degree centrality for nodes in a bipartite network.
|
| 9 |
+
|
| 10 |
+
The degree centrality for a node `v` is the fraction of nodes
|
| 11 |
+
connected to it.
|
| 12 |
+
|
| 13 |
+
Parameters
|
| 14 |
+
----------
|
| 15 |
+
G : graph
|
| 16 |
+
A bipartite network
|
| 17 |
+
|
| 18 |
+
nodes : list or container
|
| 19 |
+
Container with all nodes in one bipartite node set.
|
| 20 |
+
|
| 21 |
+
Returns
|
| 22 |
+
-------
|
| 23 |
+
centrality : dictionary
|
| 24 |
+
Dictionary keyed by node with bipartite degree centrality as the value.
|
| 25 |
+
|
| 26 |
+
Examples
|
| 27 |
+
--------
|
| 28 |
+
>>> G = nx.wheel_graph(5)
|
| 29 |
+
>>> top_nodes = {0, 1, 2}
|
| 30 |
+
>>> nx.bipartite.degree_centrality(G, nodes=top_nodes)
|
| 31 |
+
{0: 2.0, 1: 1.5, 2: 1.5, 3: 1.0, 4: 1.0}
|
| 32 |
+
|
| 33 |
+
See Also
|
| 34 |
+
--------
|
| 35 |
+
betweenness_centrality
|
| 36 |
+
closeness_centrality
|
| 37 |
+
:func:`~networkx.algorithms.bipartite.basic.sets`
|
| 38 |
+
:func:`~networkx.algorithms.bipartite.basic.is_bipartite`
|
| 39 |
+
|
| 40 |
+
Notes
|
| 41 |
+
-----
|
| 42 |
+
The nodes input parameter must contain all nodes in one bipartite node set,
|
| 43 |
+
but the dictionary returned contains all nodes from both bipartite node
|
| 44 |
+
sets. See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
| 45 |
+
for further details on how bipartite graphs are handled in NetworkX.
|
| 46 |
+
|
| 47 |
+
For unipartite networks, the degree centrality values are
|
| 48 |
+
normalized by dividing by the maximum possible degree (which is
|
| 49 |
+
`n-1` where `n` is the number of nodes in G).
|
| 50 |
+
|
| 51 |
+
In the bipartite case, the maximum possible degree of a node in a
|
| 52 |
+
bipartite node set is the number of nodes in the opposite node set
|
| 53 |
+
[1]_. The degree centrality for a node `v` in the bipartite
|
| 54 |
+
sets `U` with `n` nodes and `V` with `m` nodes is
|
| 55 |
+
|
| 56 |
+
.. math::
|
| 57 |
+
|
| 58 |
+
d_{v} = \frac{deg(v)}{m}, \mbox{for} v \in U ,
|
| 59 |
+
|
| 60 |
+
d_{v} = \frac{deg(v)}{n}, \mbox{for} v \in V ,
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
where `deg(v)` is the degree of node `v`.
|
| 64 |
+
|
| 65 |
+
References
|
| 66 |
+
----------
|
| 67 |
+
.. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
|
| 68 |
+
Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
|
| 69 |
+
of Social Network Analysis. Sage Publications.
|
| 70 |
+
https://dx.doi.org/10.4135/9781446294413.n28
|
| 71 |
+
"""
|
| 72 |
+
top = set(nodes)
|
| 73 |
+
bottom = set(G) - top
|
| 74 |
+
s = 1.0 / len(bottom)
|
| 75 |
+
centrality = {n: d * s for n, d in G.degree(top)}
|
| 76 |
+
s = 1.0 / len(top)
|
| 77 |
+
centrality.update({n: d * s for n, d in G.degree(bottom)})
|
| 78 |
+
return centrality
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
@nx._dispatchable(name="bipartite_betweenness_centrality")
|
| 82 |
+
def betweenness_centrality(G, nodes):
|
| 83 |
+
r"""Compute betweenness centrality for nodes in a bipartite network.
|
| 84 |
+
|
| 85 |
+
Betweenness centrality of a node `v` is the sum of the
|
| 86 |
+
fraction of all-pairs shortest paths that pass through `v`.
|
| 87 |
+
|
| 88 |
+
Values of betweenness are normalized by the maximum possible
|
| 89 |
+
value which for bipartite graphs is limited by the relative size
|
| 90 |
+
of the two node sets [1]_.
|
| 91 |
+
|
| 92 |
+
Let `n` be the number of nodes in the node set `U` and
|
| 93 |
+
`m` be the number of nodes in the node set `V`, then
|
| 94 |
+
nodes in `U` are normalized by dividing by
|
| 95 |
+
|
| 96 |
+
.. math::
|
| 97 |
+
|
| 98 |
+
\frac{1}{2} [m^2 (s + 1)^2 + m (s + 1)(2t - s - 1) - t (2s - t + 3)] ,
|
| 99 |
+
|
| 100 |
+
where
|
| 101 |
+
|
| 102 |
+
.. math::
|
| 103 |
+
|
| 104 |
+
s = (n - 1) \div m , t = (n - 1) \mod m ,
|
| 105 |
+
|
| 106 |
+
and nodes in `V` are normalized by dividing by
|
| 107 |
+
|
| 108 |
+
.. math::
|
| 109 |
+
|
| 110 |
+
\frac{1}{2} [n^2 (p + 1)^2 + n (p + 1)(2r - p - 1) - r (2p - r + 3)] ,
|
| 111 |
+
|
| 112 |
+
where,
|
| 113 |
+
|
| 114 |
+
.. math::
|
| 115 |
+
|
| 116 |
+
p = (m - 1) \div n , r = (m - 1) \mod n .
|
| 117 |
+
|
| 118 |
+
Parameters
|
| 119 |
+
----------
|
| 120 |
+
G : graph
|
| 121 |
+
A bipartite graph
|
| 122 |
+
|
| 123 |
+
nodes : list or container
|
| 124 |
+
Container with all nodes in one bipartite node set.
|
| 125 |
+
|
| 126 |
+
Returns
|
| 127 |
+
-------
|
| 128 |
+
betweenness : dictionary
|
| 129 |
+
Dictionary keyed by node with bipartite betweenness centrality
|
| 130 |
+
as the value.
|
| 131 |
+
|
| 132 |
+
Examples
|
| 133 |
+
--------
|
| 134 |
+
>>> G = nx.cycle_graph(4)
|
| 135 |
+
>>> top_nodes = {1, 2}
|
| 136 |
+
>>> nx.bipartite.betweenness_centrality(G, nodes=top_nodes)
|
| 137 |
+
{0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25}
|
| 138 |
+
|
| 139 |
+
See Also
|
| 140 |
+
--------
|
| 141 |
+
degree_centrality
|
| 142 |
+
closeness_centrality
|
| 143 |
+
:func:`~networkx.algorithms.bipartite.basic.sets`
|
| 144 |
+
:func:`~networkx.algorithms.bipartite.basic.is_bipartite`
|
| 145 |
+
|
| 146 |
+
Notes
|
| 147 |
+
-----
|
| 148 |
+
The nodes input parameter must contain all nodes in one bipartite node set,
|
| 149 |
+
but the dictionary returned contains all nodes from both node sets.
|
| 150 |
+
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
| 151 |
+
for further details on how bipartite graphs are handled in NetworkX.
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
References
|
| 155 |
+
----------
|
| 156 |
+
.. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
|
| 157 |
+
Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
|
| 158 |
+
of Social Network Analysis. Sage Publications.
|
| 159 |
+
https://dx.doi.org/10.4135/9781446294413.n28
|
| 160 |
+
"""
|
| 161 |
+
top = set(nodes)
|
| 162 |
+
bottom = set(G) - top
|
| 163 |
+
n = len(top)
|
| 164 |
+
m = len(bottom)
|
| 165 |
+
s, t = divmod(n - 1, m)
|
| 166 |
+
bet_max_top = (
|
| 167 |
+
((m**2) * ((s + 1) ** 2))
|
| 168 |
+
+ (m * (s + 1) * (2 * t - s - 1))
|
| 169 |
+
- (t * ((2 * s) - t + 3))
|
| 170 |
+
) / 2.0
|
| 171 |
+
p, r = divmod(m - 1, n)
|
| 172 |
+
bet_max_bot = (
|
| 173 |
+
((n**2) * ((p + 1) ** 2))
|
| 174 |
+
+ (n * (p + 1) * (2 * r - p - 1))
|
| 175 |
+
- (r * ((2 * p) - r + 3))
|
| 176 |
+
) / 2.0
|
| 177 |
+
betweenness = nx.betweenness_centrality(G, normalized=False, weight=None)
|
| 178 |
+
for node in top:
|
| 179 |
+
betweenness[node] /= bet_max_top
|
| 180 |
+
for node in bottom:
|
| 181 |
+
betweenness[node] /= bet_max_bot
|
| 182 |
+
return betweenness
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
@nx._dispatchable(name="bipartite_closeness_centrality")
|
| 186 |
+
def closeness_centrality(G, nodes, normalized=True):
|
| 187 |
+
r"""Compute the closeness centrality for nodes in a bipartite network.
|
| 188 |
+
|
| 189 |
+
The closeness of a node is the distance to all other nodes in the
|
| 190 |
+
graph or in the case that the graph is not connected to all other nodes
|
| 191 |
+
in the connected component containing that node.
|
| 192 |
+
|
| 193 |
+
Parameters
|
| 194 |
+
----------
|
| 195 |
+
G : graph
|
| 196 |
+
A bipartite network
|
| 197 |
+
|
| 198 |
+
nodes : list or container
|
| 199 |
+
Container with all nodes in one bipartite node set.
|
| 200 |
+
|
| 201 |
+
normalized : bool, optional
|
| 202 |
+
If True (default) normalize by connected component size.
|
| 203 |
+
|
| 204 |
+
Returns
|
| 205 |
+
-------
|
| 206 |
+
closeness : dictionary
|
| 207 |
+
Dictionary keyed by node with bipartite closeness centrality
|
| 208 |
+
as the value.
|
| 209 |
+
|
| 210 |
+
Examples
|
| 211 |
+
--------
|
| 212 |
+
>>> G = nx.wheel_graph(5)
|
| 213 |
+
>>> top_nodes = {0, 1, 2}
|
| 214 |
+
>>> nx.bipartite.closeness_centrality(G, nodes=top_nodes)
|
| 215 |
+
{0: 1.5, 1: 1.2, 2: 1.2, 3: 1.0, 4: 1.0}
|
| 216 |
+
|
| 217 |
+
See Also
|
| 218 |
+
--------
|
| 219 |
+
betweenness_centrality
|
| 220 |
+
degree_centrality
|
| 221 |
+
:func:`~networkx.algorithms.bipartite.basic.sets`
|
| 222 |
+
:func:`~networkx.algorithms.bipartite.basic.is_bipartite`
|
| 223 |
+
|
| 224 |
+
Notes
|
| 225 |
+
-----
|
| 226 |
+
The nodes input parameter must contain all nodes in one bipartite node set,
|
| 227 |
+
but the dictionary returned contains all nodes from both node sets.
|
| 228 |
+
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
| 229 |
+
for further details on how bipartite graphs are handled in NetworkX.
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
Closeness centrality is normalized by the minimum distance possible.
|
| 233 |
+
In the bipartite case the minimum distance for a node in one bipartite
|
| 234 |
+
node set is 1 from all nodes in the other node set and 2 from all
|
| 235 |
+
other nodes in its own set [1]_. Thus the closeness centrality
|
| 236 |
+
for node `v` in the two bipartite sets `U` with
|
| 237 |
+
`n` nodes and `V` with `m` nodes is
|
| 238 |
+
|
| 239 |
+
.. math::
|
| 240 |
+
|
| 241 |
+
c_{v} = \frac{m + 2(n - 1)}{d}, \mbox{for} v \in U,
|
| 242 |
+
|
| 243 |
+
c_{v} = \frac{n + 2(m - 1)}{d}, \mbox{for} v \in V,
|
| 244 |
+
|
| 245 |
+
where `d` is the sum of the distances from `v` to all
|
| 246 |
+
other nodes.
|
| 247 |
+
|
| 248 |
+
Higher values of closeness indicate higher centrality.
|
| 249 |
+
|
| 250 |
+
As in the unipartite case, setting normalized=True causes the
|
| 251 |
+
values to normalized further to n-1 / size(G)-1 where n is the
|
| 252 |
+
number of nodes in the connected part of graph containing the
|
| 253 |
+
node. If the graph is not completely connected, this algorithm
|
| 254 |
+
computes the closeness centrality for each connected part
|
| 255 |
+
separately.
|
| 256 |
+
|
| 257 |
+
References
|
| 258 |
+
----------
|
| 259 |
+
.. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
|
| 260 |
+
Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
|
| 261 |
+
of Social Network Analysis. Sage Publications.
|
| 262 |
+
https://dx.doi.org/10.4135/9781446294413.n28
|
| 263 |
+
"""
|
| 264 |
+
closeness = {}
|
| 265 |
+
path_length = nx.single_source_shortest_path_length
|
| 266 |
+
top = set(nodes)
|
| 267 |
+
bottom = set(G) - top
|
| 268 |
+
n = len(top)
|
| 269 |
+
m = len(bottom)
|
| 270 |
+
for node in top:
|
| 271 |
+
sp = dict(path_length(G, node))
|
| 272 |
+
totsp = sum(sp.values())
|
| 273 |
+
if totsp > 0.0 and len(G) > 1:
|
| 274 |
+
closeness[node] = (m + 2 * (n - 1)) / totsp
|
| 275 |
+
if normalized:
|
| 276 |
+
s = (len(sp) - 1) / (len(G) - 1)
|
| 277 |
+
closeness[node] *= s
|
| 278 |
+
else:
|
| 279 |
+
closeness[node] = 0.0
|
| 280 |
+
for node in bottom:
|
| 281 |
+
sp = dict(path_length(G, node))
|
| 282 |
+
totsp = sum(sp.values())
|
| 283 |
+
if totsp > 0.0 and len(G) > 1:
|
| 284 |
+
closeness[node] = (n + 2 * (m - 1)) / totsp
|
| 285 |
+
if normalized:
|
| 286 |
+
s = (len(sp) - 1) / (len(G) - 1)
|
| 287 |
+
closeness[node] *= s
|
| 288 |
+
else:
|
| 289 |
+
closeness[node] = 0.0
|
| 290 |
+
return closeness
|
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/edgelist.py
ADDED
|
@@ -0,0 +1,360 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
********************
|
| 3 |
+
Bipartite Edge Lists
|
| 4 |
+
********************
|
| 5 |
+
Read and write NetworkX graphs as bipartite edge lists.
|
| 6 |
+
|
| 7 |
+
Format
|
| 8 |
+
------
|
| 9 |
+
You can read or write three formats of edge lists with these functions.
|
| 10 |
+
|
| 11 |
+
Node pairs with no data::
|
| 12 |
+
|
| 13 |
+
1 2
|
| 14 |
+
|
| 15 |
+
Python dictionary as data::
|
| 16 |
+
|
| 17 |
+
1 2 {'weight':7, 'color':'green'}
|
| 18 |
+
|
| 19 |
+
Arbitrary data::
|
| 20 |
+
|
| 21 |
+
1 2 7 green
|
| 22 |
+
|
| 23 |
+
For each edge (u, v) the node u is assigned to part 0 and the node v to part 1.
|
| 24 |
+
"""
|
| 25 |
+
|
| 26 |
+
__all__ = ["generate_edgelist", "write_edgelist", "parse_edgelist", "read_edgelist"]
|
| 27 |
+
|
| 28 |
+
import networkx as nx
|
| 29 |
+
from networkx.utils import not_implemented_for, open_file
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
@open_file(1, mode="wb")
|
| 33 |
+
def write_edgelist(G, path, comments="#", delimiter=" ", data=True, encoding="utf-8"):
|
| 34 |
+
"""Write a bipartite graph as a list of edges.
|
| 35 |
+
|
| 36 |
+
Parameters
|
| 37 |
+
----------
|
| 38 |
+
G : Graph
|
| 39 |
+
A NetworkX bipartite graph
|
| 40 |
+
path : file or string
|
| 41 |
+
File or filename to write. If a file is provided, it must be
|
| 42 |
+
opened in 'wb' mode. Filenames ending in .gz or .bz2 will be compressed.
|
| 43 |
+
comments : string, optional
|
| 44 |
+
The character used to indicate the start of a comment
|
| 45 |
+
delimiter : string, optional
|
| 46 |
+
The string used to separate values. The default is whitespace.
|
| 47 |
+
data : bool or list, optional
|
| 48 |
+
If False write no edge data.
|
| 49 |
+
If True write a string representation of the edge data dictionary..
|
| 50 |
+
If a list (or other iterable) is provided, write the keys specified
|
| 51 |
+
in the list.
|
| 52 |
+
encoding: string, optional
|
| 53 |
+
Specify which encoding to use when writing file.
|
| 54 |
+
|
| 55 |
+
Examples
|
| 56 |
+
--------
|
| 57 |
+
>>> G = nx.path_graph(4)
|
| 58 |
+
>>> G.add_nodes_from([0, 2], bipartite=0)
|
| 59 |
+
>>> G.add_nodes_from([1, 3], bipartite=1)
|
| 60 |
+
>>> nx.write_edgelist(G, "test.edgelist")
|
| 61 |
+
>>> fh = open("test.edgelist", "wb")
|
| 62 |
+
>>> nx.write_edgelist(G, fh)
|
| 63 |
+
>>> nx.write_edgelist(G, "test.edgelist.gz")
|
| 64 |
+
>>> nx.write_edgelist(G, "test.edgelist.gz", data=False)
|
| 65 |
+
|
| 66 |
+
>>> G = nx.Graph()
|
| 67 |
+
>>> G.add_edge(1, 2, weight=7, color="red")
|
| 68 |
+
>>> nx.write_edgelist(G, "test.edgelist", data=False)
|
| 69 |
+
>>> nx.write_edgelist(G, "test.edgelist", data=["color"])
|
| 70 |
+
>>> nx.write_edgelist(G, "test.edgelist", data=["color", "weight"])
|
| 71 |
+
|
| 72 |
+
See Also
|
| 73 |
+
--------
|
| 74 |
+
write_edgelist
|
| 75 |
+
generate_edgelist
|
| 76 |
+
"""
|
| 77 |
+
for line in generate_edgelist(G, delimiter, data):
|
| 78 |
+
line += "\n"
|
| 79 |
+
path.write(line.encode(encoding))
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
@not_implemented_for("directed")
|
| 83 |
+
def generate_edgelist(G, delimiter=" ", data=True):
|
| 84 |
+
"""Generate a single line of the bipartite graph G in edge list format.
|
| 85 |
+
|
| 86 |
+
Parameters
|
| 87 |
+
----------
|
| 88 |
+
G : NetworkX graph
|
| 89 |
+
The graph is assumed to have node attribute `part` set to 0,1 representing
|
| 90 |
+
the two graph parts
|
| 91 |
+
|
| 92 |
+
delimiter : string, optional
|
| 93 |
+
Separator for node labels
|
| 94 |
+
|
| 95 |
+
data : bool or list of keys
|
| 96 |
+
If False generate no edge data. If True use a dictionary
|
| 97 |
+
representation of edge data. If a list of keys use a list of data
|
| 98 |
+
values corresponding to the keys.
|
| 99 |
+
|
| 100 |
+
Returns
|
| 101 |
+
-------
|
| 102 |
+
lines : string
|
| 103 |
+
Lines of data in adjlist format.
|
| 104 |
+
|
| 105 |
+
Examples
|
| 106 |
+
--------
|
| 107 |
+
>>> from networkx.algorithms import bipartite
|
| 108 |
+
>>> G = nx.path_graph(4)
|
| 109 |
+
>>> G.add_nodes_from([0, 2], bipartite=0)
|
| 110 |
+
>>> G.add_nodes_from([1, 3], bipartite=1)
|
| 111 |
+
>>> G[1][2]["weight"] = 3
|
| 112 |
+
>>> G[2][3]["capacity"] = 12
|
| 113 |
+
>>> for line in bipartite.generate_edgelist(G, data=False):
|
| 114 |
+
... print(line)
|
| 115 |
+
0 1
|
| 116 |
+
2 1
|
| 117 |
+
2 3
|
| 118 |
+
|
| 119 |
+
>>> for line in bipartite.generate_edgelist(G):
|
| 120 |
+
... print(line)
|
| 121 |
+
0 1 {}
|
| 122 |
+
2 1 {'weight': 3}
|
| 123 |
+
2 3 {'capacity': 12}
|
| 124 |
+
|
| 125 |
+
>>> for line in bipartite.generate_edgelist(G, data=["weight"]):
|
| 126 |
+
... print(line)
|
| 127 |
+
0 1
|
| 128 |
+
2 1 3
|
| 129 |
+
2 3
|
| 130 |
+
"""
|
| 131 |
+
try:
|
| 132 |
+
part0 = [n for n, d in G.nodes.items() if d["bipartite"] == 0]
|
| 133 |
+
except BaseException as err:
|
| 134 |
+
raise AttributeError("Missing node attribute `bipartite`") from err
|
| 135 |
+
if data is True or data is False:
|
| 136 |
+
for n in part0:
|
| 137 |
+
for edge in G.edges(n, data=data):
|
| 138 |
+
yield delimiter.join(map(str, edge))
|
| 139 |
+
else:
|
| 140 |
+
for n in part0:
|
| 141 |
+
for u, v, d in G.edges(n, data=True):
|
| 142 |
+
edge = [u, v]
|
| 143 |
+
try:
|
| 144 |
+
edge.extend(d[k] for k in data)
|
| 145 |
+
except KeyError:
|
| 146 |
+
pass # missing data for this edge, should warn?
|
| 147 |
+
yield delimiter.join(map(str, edge))
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
@nx._dispatchable(name="bipartite_parse_edgelist", graphs=None, returns_graph=True)
def parse_edgelist(
    lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True
):
    """Parse lines of an edge list representation of a bipartite graph.

    Each parsed edge ``u v ...`` adds ``u`` to the ``bipartite=0`` node set
    and ``v`` to the ``bipartite=1`` node set.

    Parameters
    ----------
    lines : list or iterator of strings
        Input data in edgelist format
    comments : string, optional
        Marker for comment lines
    delimiter : string, optional
        Separator for node labels
    create_using : NetworkX graph container, optional
        Use given NetworkX graph for holding nodes or edges.
    nodetype : Python type, optional
        Convert nodes to this type.
    data : bool or list of (label,type) tuples
        If False generate no edge data or if True use a dictionary
        representation of edge data or a list tuples specifying dictionary
        key names and types for edge data.

    Returns
    -------
    G : NetworkX Graph
        The bipartite graph corresponding to lines

    Examples
    --------
    Edgelist with no data:

    >>> from networkx.algorithms import bipartite
    >>> lines = ["1 2", "2 3", "3 4"]
    >>> G = bipartite.parse_edgelist(lines, nodetype=int)
    >>> sorted(G.nodes())
    [1, 2, 3, 4]
    >>> sorted(G.nodes(data=True))
    [(1, {'bipartite': 0}), (2, {'bipartite': 0}), (3, {'bipartite': 0}), (4, {'bipartite': 1})]
    >>> sorted(G.edges())
    [(1, 2), (2, 3), (3, 4)]

    Edgelist with data in Python dictionary representation:

    >>> lines = ["1 2 {'weight':3}", "2 3 {'weight':27}", "3 4 {'weight':3.0}"]
    >>> G = bipartite.parse_edgelist(lines, nodetype=int)
    >>> sorted(G.edges(data=True))
    [(1, 2, {'weight': 3}), (2, 3, {'weight': 27}), (3, 4, {'weight': 3.0})]

    Edgelist with data in a list:

    >>> lines = ["1 2 3", "2 3 27", "3 4 3.0"]
    >>> G = bipartite.parse_edgelist(lines, nodetype=int, data=(("weight", float),))
    >>> sorted(G.edges(data=True))
    [(1, 2, {'weight': 3.0}), (2, 3, {'weight': 27.0}), (3, 4, {'weight': 3.0})]

    See Also
    --------
    read_edgelist
    """
    from ast import literal_eval

    G = nx.empty_graph(0, create_using)
    for line in lines:
        # Strip any trailing comment, then skip lines that become empty.
        p = line.find(comments)
        if p >= 0:
            line = line[:p]
        if not line:
            continue
        # Split line; an edge needs at least two fields (u and v).
        s = line.rstrip("\n").split(delimiter)
        if len(s) < 2:
            continue
        u = s.pop(0)
        v = s.pop(0)
        d = s
        if nodetype is not None:
            try:
                u = nodetype(u)
                v = nodetype(v)
            # Catch Exception (not BaseException) so KeyboardInterrupt and
            # SystemExit are not swallowed and re-raised as TypeError.
            except Exception as err:
                raise TypeError(
                    f"Failed to convert nodes {u},{v} to type {nodetype}."
                ) from err

        if len(d) == 0 or data is False:
            # no data or data type specified
            edgedata = {}
        elif data is True:
            # no edge types specified
            try:  # try to evaluate as dictionary
                edgedata = dict(literal_eval(" ".join(d)))
            except Exception as err:
                raise TypeError(
                    f"Failed to convert edge data ({d}) to dictionary."
                ) from err
        else:
            # convert edge data to dictionary with specified keys and type
            if len(d) != len(data):
                raise IndexError(
                    f"Edge data {d} and data_keys {data} are not the same length"
                )
            edgedata = {}
            for (edge_key, edge_type), edge_value in zip(data, d):
                try:
                    edge_value = edge_type(edge_value)
                except Exception as err:
                    raise TypeError(
                        f"Failed to convert {edge_key} data "
                        f"{edge_value} to type {edge_type}."
                    ) from err
                edgedata.update({edge_key: edge_value})
        # Edge endpoints are assigned to opposite bipartite node sets.
        G.add_node(u, bipartite=0)
        G.add_node(v, bipartite=1)
        G.add_edge(u, v, **edgedata)
    return G
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
@open_file(0, mode="rb")
@nx._dispatchable(name="bipartite_read_edgelist", graphs=None, returns_graph=True)
def read_edgelist(
    path,
    comments="#",
    delimiter=None,
    create_using=None,
    nodetype=None,
    data=True,
    edgetype=None,
    encoding="utf-8",
):
    """Read a bipartite graph from a list of edges.

    Parameters
    ----------
    path : file or string
        File or filename to read. If a file is provided, it must be
        opened in 'rb' mode. Filenames ending in .gz or .bz2 will be
        uncompressed.
    comments : string, optional
        The character used to indicate the start of a comment.
    delimiter : string, optional
        The string used to separate values. The default is whitespace.
    create_using : Graph container, optional
        Use specified container to build graph. The default is
        networkx.Graph, an undirected graph.
    nodetype : int, float, str, Python type, optional
        Convert node data from strings to specified type
    data : bool or list of (label,type) tuples
        Tuples specifying dictionary key names and types for edge data
    edgetype : int, float, str, Python type, optional OBSOLETE
        Convert edge data from strings to specified type and use as 'weight'
    encoding : string, optional
        Specify which encoding to use when reading file.

    Returns
    -------
    G : graph
        A networkx Graph or other type specified with create_using

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> G.add_nodes_from([0, 2], bipartite=0)
    >>> G.add_nodes_from([1, 3], bipartite=1)
    >>> bipartite.write_edgelist(G, "test.edgelist")
    >>> G = bipartite.read_edgelist("test.edgelist")

    See parse_edgelist() for more examples of formatting.

    See Also
    --------
    parse_edgelist

    Notes
    -----
    Since nodes must be hashable, the function nodetype must return hashable
    types (e.g. int, float, str, frozenset - or tuples of those, etc.)
    """
    # Lazily decode the raw byte stream; all parsing is delegated to
    # parse_edgelist.
    decoded_lines = (raw_line.decode(encoding) for raw_line in path)
    return parse_edgelist(
        decoded_lines,
        comments=comments,
        delimiter=delimiter,
        create_using=create_using,
        nodetype=nodetype,
        data=data,
    )
|
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/projection.py
ADDED
|
@@ -0,0 +1,526 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""One-mode (unipartite) projections of bipartite graphs."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.exception import NetworkXAlgorithmError
|
| 5 |
+
from networkx.utils import not_implemented_for
|
| 6 |
+
|
| 7 |
+
__all__ = [
|
| 8 |
+
"projected_graph",
|
| 9 |
+
"weighted_projected_graph",
|
| 10 |
+
"collaboration_weighted_projected_graph",
|
| 11 |
+
"overlap_weighted_projected_graph",
|
| 12 |
+
"generic_weighted_projected_graph",
|
| 13 |
+
]
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@nx._dispatchable(
    graphs="B", preserve_node_attrs=True, preserve_graph_attrs=True, returns_graph=True
)
def projected_graph(B, nodes, multigraph=False):
    r"""Returns the projection of B onto one of its node sets.

    Returns the graph G that is the projection of the bipartite graph B
    onto the specified nodes. They retain their attributes and are connected
    in G if they have a common neighbor in B.

    Parameters
    ----------
    B : NetworkX graph
        The input graph should be bipartite.

    nodes : list or iterable
        Nodes to project onto (the "bottom" nodes).

    multigraph : bool (default=False)
        If True return a multigraph where the multiple edges represent
        multiple shared neighbors. The edge key in the multigraph is
        assigned to the label of the neighbor.

    Returns
    -------
    Graph : NetworkX graph or multigraph
        A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> B = nx.path_graph(4)
    >>> G = bipartite.projected_graph(B, [1, 3])
    >>> list(G)
    [1, 3]
    >>> list(G.edges())
    [(1, 3)]

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite.
    If multigraph=True then a multigraph is returned with an edge for
    every shared neighbor.

    Directed graphs are allowed as input. The output will also then
    be a directed graph with edges if there is a directed path between
    the nodes.

    The graph and node properties are (shallow) copied to the projected
    graph.

    See Also
    --------
    is_bipartite, is_bipartite_node_set, sets, weighted_projected_graph,
    collaboration_weighted_projected_graph, overlap_weighted_projected_graph,
    generic_weighted_projected_graph
    """
    if B.is_multigraph():
        raise nx.NetworkXError("not defined for multigraphs")
    directed = B.is_directed()
    # Choose the output container matching B's directedness and the
    # requested multigraph-ness.
    if directed:
        G = nx.MultiDiGraph() if multigraph else nx.DiGraph()
    else:
        G = nx.MultiGraph() if multigraph else nx.Graph()
    G.graph.update(B.graph)
    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
    for u in nodes:
        # Second neighbors of u: nodes reachable through exactly one
        # intermediate ("top") node, excluding u itself.
        second_nbrs = {w for top in B[u] for w in B[top] if w != u}
        if multigraph:
            for w in second_nbrs:
                # In the directed case, a shared top node must be a
                # successor of u and a predecessor of w.
                if directed:
                    shared = set(B[u]) & set(B.pred[w])
                else:
                    shared = set(B[u]) & set(B[w])
                for key in shared:
                    if not G.has_edge(u, w, key):
                        G.add_edge(u, w, key=key)
        else:
            G.add_edges_from((u, w) for w in second_nbrs)
    return G
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
@not_implemented_for("multigraph")
@nx._dispatchable(graphs="B", returns_graph=True)
def weighted_projected_graph(B, nodes, ratio=False):
    r"""Returns a weighted projection of B onto one of its node sets.

    The weighted projected graph is the projection of the bipartite
    network B onto the specified nodes with weights representing the
    number of shared neighbors or the ratio between actual shared
    neighbors and possible shared neighbors if ``ratio is True`` [1]_.
    The nodes retain their attributes and are connected in the resulting
    graph if they have an edge to a common node in the original graph.

    Parameters
    ----------
    B : NetworkX graph
        The input graph should be bipartite.

    nodes : list or iterable
        Distinct nodes to project onto (the "bottom" nodes).

    ratio : Bool (default=False)
        If True, edge weight is the ratio between actual shared neighbors
        and maximum possible shared neighbors (i.e., the size of the other
        node set). If False, edges weight is the number of shared neighbors.

    Returns
    -------
    Graph : NetworkX graph
        A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> B = nx.path_graph(4)
    >>> G = bipartite.weighted_projected_graph(B, [1, 3])
    >>> list(G.edges(data=True))
    [(1, 3, {'weight': 1})]
    >>> G = bipartite.weighted_projected_graph(B, [1, 3], ratio=True)
    >>> list(G.edges(data=True))
    [(1, 3, {'weight': 0.5})]

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite, or
    that the input nodes are distinct. However, if the length of the input
    nodes is greater than or equal to the nodes in the graph B, an
    exception is raised. If the nodes are not distinct but don't raise
    this error, the output weights will be incorrect.
    The graph and node properties are (shallow) copied to the projected
    graph.

    See Also
    --------
    is_bipartite, is_bipartite_node_set, sets,
    collaboration_weighted_projected_graph, overlap_weighted_projected_graph,
    generic_weighted_projected_graph, projected_graph

    References
    ----------
    .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
        Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
        of Social Network Analysis. Sage Publications.
    """
    if B.is_directed():
        pred = B.pred
        G = nx.DiGraph()
    else:
        pred = B.adj
        G = nx.Graph()
    G.graph.update(B.graph)
    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
    # Size of the other ("top") node set: denominator for ratio weights
    # and a sanity check on the projection node set.
    n_top = len(B) - len(nodes)

    if n_top < 1:
        raise NetworkXAlgorithmError(
            f"the size of the nodes to project onto ({len(nodes)}) is >= the graph size ({len(B)}).\n"
            "They are either not a valid bipartite partition or contain duplicates"
        )

    for u in nodes:
        u_nbrs = set(B[u])
        # Candidate partners: second neighbors of u (excluding u itself).
        candidates = {w for top in u_nbrs for w in B[top]} - {u}
        for v in candidates:
            shared = u_nbrs & set(pred[v])
            G.add_edge(u, v, weight=len(shared) / n_top if ratio else len(shared))
    return G
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
@not_implemented_for("multigraph")
@nx._dispatchable(graphs="B", returns_graph=True)
def collaboration_weighted_projected_graph(B, nodes):
    r"""Newman's weighted projection of B onto one of its node sets.

    The collaboration weighted projection is the projection of the
    bipartite network B onto the specified nodes with weights assigned
    using Newman's collaboration model [1]_:

    .. math::

        w_{u, v} = \sum_k \frac{\delta_{u}^{k} \delta_{v}^{k}}{d_k - 1}

    where `u` and `v` are nodes from the bottom bipartite node set,
    and `k` is a node of the top node set.
    The value `d_k` is the degree of node `k` in the bipartite
    network and `\delta_{u}^{k}` is 1 if node `u` is
    linked to node `k` in the original bipartite graph or 0 otherwise.

    The nodes retain their attributes and are connected in the resulting
    graph if have an edge to a common node in the original bipartite
    graph.

    Parameters
    ----------
    B : NetworkX graph
        The input graph should be bipartite.

    nodes : list or iterable
        Nodes to project onto (the "bottom" nodes).

    Returns
    -------
    Graph : NetworkX graph
        A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> B = nx.path_graph(5)
    >>> B.add_edge(1, 5)
    >>> G = bipartite.collaboration_weighted_projected_graph(B, [0, 2, 4, 5])
    >>> for edge in sorted(G.edges(data=True)):
    ...     print(edge)
    (0, 2, {'weight': 0.5})
    (0, 5, {'weight': 0.5})
    (2, 4, {'weight': 1.0})
    (2, 5, {'weight': 0.5})

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite.
    The graph and node properties are (shallow) copied to the projected
    graph.

    See Also
    --------
    is_bipartite, is_bipartite_node_set, sets, weighted_projected_graph,
    overlap_weighted_projected_graph, generic_weighted_projected_graph,
    projected_graph

    References
    ----------
    .. [1] Scientific collaboration networks: II.
        Shortest paths, weighted networks, and centrality,
        M. E. J. Newman, Phys. Rev. E 64, 016132 (2001).
    """
    if B.is_directed():
        pred = B.pred
        G = nx.DiGraph()
    else:
        pred = B.adj
        G = nx.Graph()
    G.graph.update(B.graph)
    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
    for u in nodes:
        u_nbrs = set(B[u])
        candidates = {w for top in u_nbrs for w in B[top] if w != u}
        for v in candidates:
            # Each shared top node k contributes 1/(deg(k) - 1); tops
            # whose degree would make the denominator zero are skipped.
            shared_tops = u_nbrs & set(pred[v])
            weight = sum(
                1.0 / (len(B[k]) - 1) for k in shared_tops if len(B[k]) > 1
            )
            G.add_edge(u, v, weight=weight)
    return G
|
| 315 |
+
|
| 316 |
+
|
| 317 |
+
@not_implemented_for("multigraph")
@nx._dispatchable(graphs="B", returns_graph=True)
def overlap_weighted_projected_graph(B, nodes, jaccard=True):
    r"""Overlap weighted projection of B onto one of its node sets.

    The overlap weighted projection is the projection of the bipartite
    network B onto the specified nodes with weights representing
    the Jaccard index between the neighborhoods of the two nodes in the
    original bipartite network [1]_:

    .. math::

        w_{v, u} = \frac{|N(u) \cap N(v)|}{|N(u) \cup N(v)|}

    or if the parameter 'jaccard' is False, the fraction of common
    neighbors by minimum of both nodes degree in the original
    bipartite graph [1]_:

    .. math::

        w_{v, u} = \frac{|N(u) \cap N(v)|}{min(|N(u)|, |N(v)|)}

    The nodes retain their attributes and are connected in the resulting
    graph if have an edge to a common node in the original bipartite graph.

    Parameters
    ----------
    B : NetworkX graph
        The input graph should be bipartite.

    nodes : list or iterable
        Nodes to project onto (the "bottom" nodes).

    jaccard : Bool (default=True)

    Returns
    -------
    Graph : NetworkX graph
        A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> B = nx.path_graph(5)
    >>> nodes = [0, 2, 4]
    >>> G = bipartite.overlap_weighted_projected_graph(B, nodes)
    >>> list(G.edges(data=True))
    [(0, 2, {'weight': 0.5}), (2, 4, {'weight': 0.5})]
    >>> G = bipartite.overlap_weighted_projected_graph(B, nodes, jaccard=False)
    >>> list(G.edges(data=True))
    [(0, 2, {'weight': 1.0}), (2, 4, {'weight': 1.0})]

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite.
    The graph and node properties are (shallow) copied to the projected
    graph.

    See Also
    --------
    is_bipartite, is_bipartite_node_set, sets, weighted_projected_graph,
    collaboration_weighted_projected_graph, generic_weighted_projected_graph,
    projected_graph

    References
    ----------
    .. [1] Borgatti, S.P. and Halgin, D. In press. Analyzing Affiliation
        Networks. In Carrington, P. and Scott, J. (eds) The Sage Handbook
        of Social Network Analysis. Sage Publications.
    """
    if B.is_directed():
        pred = B.pred
        G = nx.DiGraph()
    else:
        pred = B.adj
        G = nx.Graph()
    G.graph.update(B.graph)
    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
    for u in nodes:
        u_nbrs = set(B[u])
        candidates = {w for top in u_nbrs for w in B[top]} - {u}
        for v in candidates:
            v_nbrs = set(pred[v])
            # Jaccard normalizes by the union; otherwise by the smaller
            # of the two neighborhoods.
            if jaccard:
                denom = len(u_nbrs | v_nbrs)
            else:
                denom = min(len(u_nbrs), len(v_nbrs))
            G.add_edge(u, v, weight=len(u_nbrs & v_nbrs) / denom)
    return G
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
@not_implemented_for("multigraph")
@nx._dispatchable(graphs="B", preserve_all_attrs=True, returns_graph=True)
def generic_weighted_projected_graph(B, nodes, weight_function=None):
    r"""Weighted projection of B with a user-specified weight function.

    The bipartite network B is projected on to the specified nodes
    with weights computed by a user-specified function. This function
    must accept as a parameter the neighborhood sets of two nodes and
    return an integer or a float.

    The nodes retain their attributes and are connected in the resulting
    graph if they have an edge to a common node in the original graph.

    Parameters
    ----------
    B : NetworkX graph
        The input graph should be bipartite.

    nodes : list or iterable
        Nodes to project onto (the "bottom" nodes).

    weight_function : function
        This function must accept as parameters the same input graph
        that this function, and two nodes; and return an integer or a float.
        The default function computes the number of shared neighbors.

    Returns
    -------
    Graph : NetworkX graph
        A graph that is the projection onto the given nodes.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> def jaccard(G, u, v):
    ...     unbrs = set(G[u])
    ...     vnbrs = set(G[v])
    ...     return float(len(unbrs & vnbrs)) / len(unbrs | vnbrs)
    >>> B = nx.complete_bipartite_graph(2, 2)
    >>> # By default, the weight is the number of shared neighbors
    >>> G = bipartite.generic_weighted_projected_graph(B, [0, 1])
    >>> print(list(G.edges(data=True)))
    [(0, 1, {'weight': 2})]
    >>> # To specify a custom weight function use the weight_function parameter
    >>> G = bipartite.generic_weighted_projected_graph(
    ...     B, [0, 1], weight_function=jaccard
    ... )
    >>> print(list(G.edges(data=True)))
    [(0, 1, {'weight': 1.0})]

    Notes
    -----
    No attempt is made to verify that the input graph B is bipartite.
    The graph and node properties are (shallow) copied to the projected
    graph.

    See Also
    --------
    is_bipartite, is_bipartite_node_set, sets, weighted_projected_graph,
    collaboration_weighted_projected_graph, overlap_weighted_projected_graph,
    projected_graph
    """
    if B.is_directed():
        pred = B.pred
        G = nx.DiGraph()
    else:
        pred = B.adj
        G = nx.Graph()
    if weight_function is None:

        def weight_function(G, u, v):
            # Default weight: number of shared neighbors.  Using
            # set(pred[v]) also handles the directed case.
            return len(set(G[u]) & set(pred[v]))

    G.graph.update(B.graph)
    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
    for u in nodes:
        # Candidate partners are u's second neighbors, excluding u.
        candidates = {w for top in set(B[u]) for w in B[top]} - {u}
        for v in candidates:
            G.add_edge(u, v, weight=weight_function(B, u, v))
    return G
|
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/redundancy.py
ADDED
|
@@ -0,0 +1,112 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Node redundancy for bipartite graphs."""
|
| 2 |
+
|
| 3 |
+
from itertools import combinations
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx import NetworkXError
|
| 7 |
+
|
| 8 |
+
__all__ = ["node_redundancy"]
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@nx._dispatchable
def node_redundancy(G, nodes=None):
    r"""Computes the node redundancy coefficients for the nodes in the bipartite
    graph `G`.

    The redundancy coefficient of a node `v` is the fraction of pairs of
    neighbors of `v` that are both linked to other nodes. In a one-mode
    projection these nodes would be linked together even if `v` were
    not there.

    More formally, for any vertex `v`, the *redundancy coefficient of `v`* is
    defined by

    .. math::

        rc(v) = \frac{|\{\{u, w\} \subseteq N(v),
        \: \exists v' \neq v,\: (v',u) \in E\:
        \mathrm{and}\: (v',w) \in E\}|}{ \frac{|N(v)|(|N(v)|-1)}{2}},

    where `N(v)` is the set of neighbors of `v` in `G`.

    Parameters
    ----------
    G : graph
        A bipartite graph

    nodes : list or iterable (optional)
        Compute redundancy for these nodes. The default is all nodes in G.

    Returns
    -------
    redundancy : dictionary
        A dictionary keyed by node with the node redundancy value.

    Examples
    --------
    Compute the redundancy coefficient of each node in a graph::

        >>> from networkx.algorithms import bipartite
        >>> G = nx.cycle_graph(4)
        >>> rc = bipartite.node_redundancy(G)
        >>> rc[0]
        1.0

    Compute the average redundancy for the graph::

        >>> from networkx.algorithms import bipartite
        >>> G = nx.cycle_graph(4)
        >>> rc = bipartite.node_redundancy(G)
        >>> sum(rc.values()) / len(G)
        1.0

    Compute the average redundancy for a set of nodes::

        >>> from networkx.algorithms import bipartite
        >>> G = nx.cycle_graph(4)
        >>> rc = bipartite.node_redundancy(G)
        >>> nodes = [0, 2]
        >>> sum(rc[n] for n in nodes) / len(nodes)
        1.0

    Raises
    ------
    NetworkXError
        If any of the nodes in the graph (or in `nodes`, if specified) has
        (out-)degree less than two (which would result in division by zero,
        according to the definition of the redundancy coefficient).

    References
    ----------
    .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
       Basic notions for the analysis of large two-mode networks.
       Social Networks 30(1), 31--48.

    """
    if nodes is None:
        nodes = G
    # Materialize `nodes` exactly once: the docstring allows any iterable, and
    # a one-shot iterator (e.g. a generator) would otherwise be consumed by
    # the validation pass below, leaving the final comprehension empty.
    nodes = list(nodes)
    # A node with fewer than two neighbors has no neighbor pairs, so the
    # redundancy coefficient's denominator |N(v)|(|N(v)|-1)/2 would be zero.
    if any(len(G[v]) < 2 for v in nodes):
        raise NetworkXError(
            "Cannot compute redundancy coefficient for a node"
            " that has fewer than two neighbors."
        )
    # TODO This can be trivially parallelized.
    return {v: _node_redundancy(G, v) for v in nodes}
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def _node_redundancy(G, v):
|
| 98 |
+
"""Returns the redundancy of the node `v` in the bipartite graph `G`.
|
| 99 |
+
|
| 100 |
+
If `G` is a graph with `n` nodes, the redundancy of a node is the ratio
|
| 101 |
+
of the "overlap" of `v` to the maximum possible overlap of `v`
|
| 102 |
+
according to its degree. The overlap of `v` is the number of pairs of
|
| 103 |
+
neighbors that have mutual neighbors themselves, other than `v`.
|
| 104 |
+
|
| 105 |
+
`v` must have at least two neighbors in `G`.
|
| 106 |
+
|
| 107 |
+
"""
|
| 108 |
+
n = len(G[v])
|
| 109 |
+
overlap = sum(
|
| 110 |
+
1 for (u, w) in combinations(G[v], 2) if (set(G[u]) & set(G[w])) - {v}
|
| 111 |
+
)
|
| 112 |
+
return (2 * overlap) / (n * (n - 1))
|
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/spectral.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Spectral bipartivity measure.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
__all__ = ["spectral_bipartivity"]
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@nx._dispatchable(edge_attrs="weight")
def spectral_bipartivity(G, nodes=None, weight="weight"):
    """Returns the spectral bipartivity.

    Parameters
    ----------
    G : NetworkX graph

    nodes : list or container optional(default is all nodes)
        Nodes to return value of spectral bipartivity contribution.

    weight : string or None optional (default = 'weight')
        Edge data key to use for edge weights. If None, weights set to 1.

    Returns
    -------
    sb : float or dict
        A single number if the keyword nodes is not specified, or
        a dictionary keyed by node with the spectral bipartivity contribution
        of that node as the value.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> bipartite.spectral_bipartivity(G)
    1.0

    Notes
    -----
    This implementation uses Numpy (dense) matrices which are not efficient
    for storing large sparse graphs.

    See Also
    --------
    color

    References
    ----------
    .. [1] E. Estrada and J. A. Rodríguez-Velázquez, "Spectral measures of
       bipartivity in complex networks", PhysRev E 72, 046105 (2005)
    """
    import scipy as sp

    # Fix an ordering of the nodes so matrix indices are reproducible.
    nodelist = list(G)
    adjacency = nx.to_numpy_array(G, nodelist, weight=weight)
    exp_pos = sp.linalg.expm(adjacency)
    exp_neg = sp.linalg.expm(-adjacency)
    # Matrix cosh(A) = (expm(A) + expm(-A)) / 2.
    cosh_mat = 0.5 * (exp_pos + exp_neg)
    if nodes is None:
        # Single graph-level value: ratio of the two matrix traces.
        return float(cosh_mat.diagonal().sum() / exp_pos.diagonal().sum())
    # Otherwise report the per-node diagonal ratios for the requested nodes.
    position = {node: i for i, node in enumerate(nodelist)}
    contributions = {}
    for node in nodes:
        i = position[node]
        contributions[node] = cosh_mat.item(i, i) / exp_pos.item(i, i)
    return contributions
|
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (185 Bytes). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_basic.py
ADDED
|
@@ -0,0 +1,125 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.algorithms import bipartite
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class TestBipartiteBasic:
    """Unit tests for basic bipartite helpers: bipartiteness checks,
    two-coloring, node-set partitioning, density, degrees, and
    biadjacency matrices."""

    def test_is_bipartite(self):
        # Paths and single directed edges are bipartite; odd cycles
        # (complete_graph(3)) are not.
        assert bipartite.is_bipartite(nx.path_graph(4))
        assert bipartite.is_bipartite(nx.DiGraph([(1, 0)]))
        assert not bipartite.is_bipartite(nx.complete_graph(3))

    def test_bipartite_color(self):
        # Two-coloring of a path alternates 1/0 along the path.
        G = nx.path_graph(4)
        c = bipartite.color(G)
        assert c == {0: 1, 1: 0, 2: 1, 3: 0}

    def test_not_bipartite_color(self):
        # Coloring a non-bipartite graph must raise.
        with pytest.raises(nx.NetworkXError):
            c = bipartite.color(nx.complete_graph(4))

    def test_bipartite_directed(self):
        # Random bipartite generators must produce bipartite graphs,
        # including in the directed case.
        G = bipartite.random_graph(10, 10, 0.1, directed=True)
        assert bipartite.is_bipartite(G)

    def test_bipartite_sets(self):
        G = nx.path_graph(4)
        X, Y = bipartite.sets(G)
        assert X == {0, 2}
        assert Y == {1, 3}

    def test_bipartite_sets_directed(self):
        # The partition is the same for the directed version of the path.
        G = nx.path_graph(4)
        D = G.to_directed()
        X, Y = bipartite.sets(D)
        assert X == {0, 2}
        assert Y == {1, 3}

    def test_bipartite_sets_given_top_nodes(self):
        # Explicitly supplying the top node set fixes the partition.
        G = nx.path_graph(4)
        top_nodes = [0, 2]
        X, Y = bipartite.sets(G, top_nodes)
        assert X == {0, 2}
        assert Y == {1, 3}

    def test_bipartite_sets_disconnected(self):
        # With multiple connected components and no top_nodes given, the
        # partition is ambiguous and must raise.
        with pytest.raises(nx.AmbiguousSolution):
            G = nx.path_graph(4)
            G.add_edges_from([(5, 6), (6, 7)])
            X, Y = bipartite.sets(G)

    def test_is_bipartite_node_set(self):
        G = nx.path_graph(4)

        # A candidate set containing duplicate nodes is ambiguous.
        with pytest.raises(nx.AmbiguousSolution):
            bipartite.is_bipartite_node_set(G, [1, 1, 2, 3])

        assert bipartite.is_bipartite_node_set(G, [0, 2])
        assert bipartite.is_bipartite_node_set(G, [1, 3])
        assert not bipartite.is_bipartite_node_set(G, [1, 2])
        # Isolated components: either endpoint of the extra edge may join
        # either side of the partition.
        G.add_edge(10, 20)
        assert bipartite.is_bipartite_node_set(G, [0, 2, 10])
        assert bipartite.is_bipartite_node_set(G, [0, 2, 20])
        assert bipartite.is_bipartite_node_set(G, [1, 3, 10])
        assert bipartite.is_bipartite_node_set(G, [1, 3, 20])

    def test_bipartite_density(self):
        G = nx.path_graph(5)
        X, Y = bipartite.sets(G)
        # Bipartite density is |E| / (|X| * |Y|).
        density = len(list(G.edges())) / (len(X) * len(Y))
        assert bipartite.density(G, X) == density
        # A directed graph has twice as many possible edges, halving density.
        D = nx.DiGraph(G.edges())
        assert bipartite.density(D, X) == density / 2.0
        assert bipartite.density(nx.Graph(), {}) == 0.0

    def test_bipartite_degrees(self):
        G = nx.path_graph(5)
        X = {1, 3}
        Y = {0, 2, 4}
        # degrees(G, Y) returns (degrees of X-side, degrees of Y-side).
        u, d = bipartite.degrees(G, Y)
        assert dict(u) == {1: 2, 3: 2}
        assert dict(d) == {0: 1, 2: 2, 4: 1}

    def test_bipartite_weighted_degrees(self):
        G = nx.path_graph(5)
        # Override edge (0, 1) with two distinct weight attributes.
        G.add_edge(0, 1, weight=0.1, other=0.2)
        X = {1, 3}
        Y = {0, 2, 4}
        u, d = bipartite.degrees(G, Y, weight="weight")
        assert dict(u) == {1: 1.1, 3: 2}
        assert dict(d) == {0: 0.1, 2: 2, 4: 1}
        # A different attribute key yields different weighted degrees.
        u, d = bipartite.degrees(G, Y, weight="other")
        assert dict(u) == {1: 1.2, 3: 2}
        assert dict(d) == {0: 0.2, 2: 2, 4: 1}

    def test_biadjacency_matrix_weight(self):
        pytest.importorskip("scipy")
        G = nx.path_graph(5)
        G.add_edge(0, 1, weight=2, other=4)
        X = [1, 3]
        Y = [0, 2, 4]
        # Entry (0, 0) corresponds to edge (X[0], Y[0]) == (1, 0).
        M = bipartite.biadjacency_matrix(G, X, weight="weight")
        assert M[0, 0] == 2
        M = bipartite.biadjacency_matrix(G, X, weight="other")
        assert M[0, 0] == 4

    def test_biadjacency_matrix(self):
        pytest.importorskip("scipy")
        # Matrix shape must be (#top nodes, #bottom nodes) for several sizes.
        tops = [2, 5, 10]
        bots = [5, 10, 15]
        for i in range(len(tops)):
            G = bipartite.random_graph(tops[i], bots[i], 0.2)
            top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0]
            M = bipartite.biadjacency_matrix(G, top)
            assert M.shape[0] == tops[i]
            assert M.shape[1] == bots[i]

    def test_biadjacency_matrix_order(self):
        pytest.importorskip("scipy")
        G = nx.path_graph(5)
        G.add_edge(0, 1, weight=2)
        # Rows follow the order of X and columns the order of Y, so entry
        # (1, 2) corresponds to edge (X[1], Y[2]) == (1, 0).
        X = [3, 1]
        Y = [4, 2, 0]
        M = bipartite.biadjacency_matrix(G, X, Y, weight="weight")
        assert M[1, 2] == 2
|
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_edgelist.py
ADDED
|
@@ -0,0 +1,240 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Unit tests for bipartite edgelists.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import io
|
| 6 |
+
|
| 7 |
+
import pytest
|
| 8 |
+
|
| 9 |
+
import networkx as nx
|
| 10 |
+
from networkx.algorithms import bipartite
|
| 11 |
+
from networkx.utils import edges_equal, graphs_equal, nodes_equal
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class TestEdgelist:
    """Round-trip and parsing tests for the bipartite edgelist format."""

    @classmethod
    def setup_class(cls):
        # Fixture graphs shared (read-only) by all tests: a 6-cycle with an
        # explicit bipartition plus one isolated node, its directed copy,
        # and a multigraph with parallel edges.
        cls.G = nx.Graph(name="test")
        e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")]
        cls.G.add_edges_from(e)
        cls.G.add_nodes_from(["a", "c", "e"], bipartite=0)
        cls.G.add_nodes_from(["b", "d", "f"], bipartite=1)
        cls.G.add_node("g", bipartite=0)
        cls.DG = nx.DiGraph(cls.G)
        cls.MG = nx.MultiGraph()
        cls.MG.add_edges_from([(1, 2), (1, 2), (1, 2)])
        cls.MG.add_node(1, bipartite=0)
        cls.MG.add_node(2, bipartite=1)

    def test_read_edgelist_1(self):
        # Comment lines in the input must be ignored.
        s = b"""\
# comment line
1 2
# comment line
2 3
"""
        bytesIO = io.BytesIO(s)
        G = bipartite.read_edgelist(bytesIO, nodetype=int)
        assert edges_equal(G.edges(), [(1, 2), (2, 3)])

    def test_read_edgelist_3(self):
        s = b"""\
# comment line
1 2 {'weight':2.0}
# comment line
2 3 {'weight':3.0}
"""
        # data=False drops the attribute dictionaries ...
        bytesIO = io.BytesIO(s)
        G = bipartite.read_edgelist(bytesIO, nodetype=int, data=False)
        assert edges_equal(G.edges(), [(1, 2), (2, 3)])

        # ... while data=True parses them back into edge data.
        bytesIO = io.BytesIO(s)
        G = bipartite.read_edgelist(bytesIO, nodetype=int, data=True)
        assert edges_equal(
            G.edges(data=True), [(1, 2, {"weight": 2.0}), (2, 3, {"weight": 3.0})]
        )

    def test_write_edgelist_1(self):
        # Without data, each line is "<bipartite-0 node> <bipartite-1 node>".
        fh = io.BytesIO()
        G = nx.Graph()
        G.add_edges_from([(1, 2), (2, 3)])
        G.add_node(1, bipartite=0)
        G.add_node(2, bipartite=1)
        G.add_node(3, bipartite=0)
        bipartite.write_edgelist(G, fh, data=False)
        fh.seek(0)
        assert fh.read() == b"1 2\n3 2\n"

    def test_write_edgelist_2(self):
        # data=True appends the (here empty) attribute dict to each line.
        fh = io.BytesIO()
        G = nx.Graph()
        G.add_edges_from([(1, 2), (2, 3)])
        G.add_node(1, bipartite=0)
        G.add_node(2, bipartite=1)
        G.add_node(3, bipartite=0)
        bipartite.write_edgelist(G, fh, data=True)
        fh.seek(0)
        assert fh.read() == b"1 2 {}\n3 2 {}\n"

    def test_write_edgelist_3(self):
        fh = io.BytesIO()
        G = nx.Graph()
        G.add_edge(1, 2, weight=2.0)
        G.add_edge(2, 3, weight=3.0)
        G.add_node(1, bipartite=0)
        G.add_node(2, bipartite=1)
        G.add_node(3, bipartite=0)
        bipartite.write_edgelist(G, fh, data=True)
        fh.seek(0)
        assert fh.read() == b"1 2 {'weight': 2.0}\n3 2 {'weight': 3.0}\n"

    def test_write_edgelist_4(self):
        fh = io.BytesIO()
        G = nx.Graph()
        G.add_edge(1, 2, weight=2.0)
        G.add_edge(2, 3, weight=3.0)
        G.add_node(1, bipartite=0)
        G.add_node(2, bipartite=1)
        G.add_node(3, bipartite=0)
        # Note: ("weight") is just the string "weight" (no tuple), so only
        # that attribute's bare value is written.
        bipartite.write_edgelist(G, fh, data=[("weight")])
        fh.seek(0)
        assert fh.read() == b"1 2 2.0\n3 2 3.0\n"

    def test_unicode(self, tmp_path):
        # Non-ASCII node names and attribute keys must round-trip.
        G = nx.Graph()
        name1 = chr(2344) + chr(123) + chr(6543)
        name2 = chr(5543) + chr(1543) + chr(324)
        G.add_edge(name1, "Radiohead", **{name2: 3})
        G.add_node(name1, bipartite=0)
        G.add_node("Radiohead", bipartite=1)

        fname = tmp_path / "edgelist.txt"
        bipartite.write_edgelist(G, fname)
        H = bipartite.read_edgelist(fname)
        assert graphs_equal(G, H)

    def test_latin1_issue(self, tmp_path):
        # These code points are not representable in latin-1: writing must
        # fail loudly rather than corrupt the output.
        G = nx.Graph()
        name1 = chr(2344) + chr(123) + chr(6543)
        name2 = chr(5543) + chr(1543) + chr(324)
        G.add_edge(name1, "Radiohead", **{name2: 3})
        G.add_node(name1, bipartite=0)
        G.add_node("Radiohead", bipartite=1)

        fname = tmp_path / "edgelist.txt"
        with pytest.raises(UnicodeEncodeError):
            bipartite.write_edgelist(G, fname, encoding="latin-1")

    def test_latin1(self, tmp_path):
        # Names that do fit in latin-1 must round-trip with that encoding.
        G = nx.Graph()
        name1 = "Bj" + chr(246) + "rk"
        name2 = chr(220) + "ber"
        G.add_edge(name1, "Radiohead", **{name2: 3})
        G.add_node(name1, bipartite=0)
        G.add_node("Radiohead", bipartite=1)

        fname = tmp_path / "edgelist.txt"
        bipartite.write_edgelist(G, fname, encoding="latin-1")
        H = bipartite.read_edgelist(fname, encoding="latin-1")
        assert graphs_equal(G, H)

    def test_edgelist_graph(self, tmp_path):
        G = self.G
        fname = tmp_path / "edgelist.txt"
        bipartite.write_edgelist(G, fname)
        H = bipartite.read_edgelist(fname)
        H2 = bipartite.read_edgelist(fname)
        assert H is not H2  # they should be different graphs
        G.remove_node("g")  # isolated nodes are not written in edgelist
        assert nodes_equal(list(H), list(G))
        assert edges_equal(list(H.edges()), list(G.edges()))

    def test_edgelist_integers(self, tmp_path):
        G = nx.convert_node_labels_to_integers(self.G)
        fname = tmp_path / "edgelist.txt"
        bipartite.write_edgelist(G, fname)
        H = bipartite.read_edgelist(fname, nodetype=int)
        # isolated nodes are not written in edgelist
        G.remove_nodes_from(list(nx.isolates(G)))
        assert nodes_equal(list(H), list(G))
        assert edges_equal(list(H.edges()), list(G.edges()))

    def test_edgelist_multigraph(self, tmp_path):
        # Parallel edges survive a write/read cycle with create_using.
        G = self.MG
        fname = tmp_path / "edgelist.txt"
        bipartite.write_edgelist(G, fname)
        H = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph())
        H2 = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph())
        assert H is not H2  # they should be different graphs
        assert nodes_equal(list(H), list(G))
        assert edges_equal(list(H.edges()), list(G.edges()))

    def test_empty_digraph(self):
        # The bipartite edgelist writer does not support directed graphs.
        with pytest.raises(nx.NetworkXNotImplemented):
            bytesIO = io.BytesIO()
            bipartite.write_edgelist(nx.DiGraph(), bytesIO)

    def test_raise_attribute(self):
        # Nodes without a "bipartite" attribute cannot be written.
        with pytest.raises(AttributeError):
            G = nx.path_graph(4)
            bytesIO = io.BytesIO()
            bipartite.write_edgelist(G, bytesIO)

    def test_parse_edgelist(self):
        """Tests for input-validation conditions specific to the
        ``parse_edgelist`` function."""

        # ignore strings of length less than 2
        lines = ["1 2", "2 3", "3 1", "4", " "]
        G = bipartite.parse_edgelist(lines, nodetype=int)
        assert list(G.nodes) == [1, 2, 3]

        # Exception raised when node is not convertible
        # to specified data type
        with pytest.raises(TypeError, match=".*Failed to convert nodes"):
            lines = ["a b", "b c", "c a"]
            G = bipartite.parse_edgelist(lines, nodetype=int)

        # Exception raised when format of data is not
        # convertible to dictionary object
        with pytest.raises(TypeError, match=".*Failed to convert edge data"):
            lines = ["1 2 3", "2 3 4", "3 1 2"]
            G = bipartite.parse_edgelist(lines, nodetype=int)

        # Exception raised when edge data and data
        # keys are not of same length
        with pytest.raises(IndexError):
            lines = ["1 2 3 4", "2 3 4"]
            G = bipartite.parse_edgelist(
                lines, nodetype=int, data=[("weight", int), ("key", int)]
            )

        # Exception raised when edge data is not
        # convertible to specified data type
        with pytest.raises(TypeError, match=".*Failed to convert key data"):
            lines = ["1 2 3 a", "2 3 4 b"]
            G = bipartite.parse_edgelist(
                lines, nodetype=int, data=[("weight", int), ("key", int)]
            )
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
def test_bipartite_edgelist_consistent_strip_handling():
    """See gh-7462

    Input when printed looks like:

        A       B       interaction     2
        B       C       interaction     4
        C       A       interaction

    Note the trailing \\t in the last line, which indicates the existence of
    an empty data field.
    """
    stream = io.StringIO(
        "A\tB\tinteraction\t2\nB\tC\tinteraction\t4\nC\tA\tinteraction\t"
    )
    field_spec = [("type", str), ("weight", str)]
    # Parsing must tolerate the empty trailing field instead of raising.
    parsed = nx.bipartite.parse_edgelist(stream, delimiter="\t", data=field_spec)
    assert sorted(parsed.edges(data="weight")) == [
        ("A", "B", "2"),
        ("A", "C", ""),
        ("B", "C", "4"),
    ]
|
wemm/lib/python3.10/site-packages/networkx/algorithms/bipartite/tests/test_matching.py
ADDED
|
@@ -0,0 +1,327 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Unit tests for the :mod:`networkx.algorithms.bipartite.matching` module."""
|
| 2 |
+
|
| 3 |
+
import itertools
|
| 4 |
+
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
from networkx.algorithms.bipartite.matching import (
|
| 9 |
+
eppstein_matching,
|
| 10 |
+
hopcroft_karp_matching,
|
| 11 |
+
maximum_matching,
|
| 12 |
+
minimum_weight_full_matching,
|
| 13 |
+
to_vertex_cover,
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class TestMatching:
|
| 18 |
+
"""Tests for bipartite matching algorithms."""
|
| 19 |
+
|
| 20 |
+
    def setup_method(self):
        """Creates a bipartite graph for use in testing matching algorithms.

        The bipartite graph has a maximum cardinality matching that leaves
        vertex 1 and vertex 10 unmatched. The first six numbers are the left
        vertices and the next six numbers are the right vertices.

        """
        # Small complete bipartite graph with a known full matching.
        self.simple_graph = nx.complete_bipartite_graph(2, 3)
        self.simple_solution = {0: 2, 1: 3, 2: 0, 3: 1}

        # Main fixture: left vertices 0-5, right vertices 6-11; the maximum
        # matching has five edges and leaves vertices 1 and 10 unmatched.
        edges = [(0, 7), (0, 8), (2, 6), (2, 9), (3, 8), (4, 8), (4, 9), (5, 11)]
        self.top_nodes = set(range(6))
        self.graph = nx.Graph()
        self.graph.add_nodes_from(range(12))
        self.graph.add_edges_from(edges)

        # Example bipartite graph from issue 2127
        # Nodes are (side, label) tuples; side 0 / side 1 form the partition.
        G = nx.Graph()
        G.add_nodes_from(
            [
                (1, "C"),
                (1, "B"),
                (0, "G"),
                (1, "F"),
                (1, "E"),
                (0, "C"),
                (1, "D"),
                (1, "I"),
                (0, "A"),
                (0, "D"),
                (0, "F"),
                (0, "E"),
                (0, "H"),
                (1, "G"),
                (1, "A"),
                (0, "I"),
                (0, "B"),
                (1, "H"),
            ]
        )
        G.add_edge((1, "C"), (0, "A"))
        G.add_edge((1, "B"), (0, "A"))
        G.add_edge((0, "G"), (1, "I"))
        G.add_edge((0, "G"), (1, "H"))
        G.add_edge((1, "F"), (0, "A"))
        G.add_edge((1, "F"), (0, "C"))
        G.add_edge((1, "F"), (0, "E"))
        G.add_edge((1, "E"), (0, "A"))
        G.add_edge((1, "E"), (0, "C"))
        G.add_edge((0, "C"), (1, "D"))
        G.add_edge((0, "C"), (1, "I"))
        G.add_edge((0, "C"), (1, "G"))
        G.add_edge((0, "C"), (1, "H"))
        G.add_edge((1, "D"), (0, "A"))
        G.add_edge((1, "I"), (0, "A"))
        G.add_edge((1, "I"), (0, "E"))
        G.add_edge((0, "A"), (1, "G"))
        G.add_edge((0, "A"), (1, "H"))
        G.add_edge((0, "E"), (1, "G"))
        G.add_edge((0, "E"), (1, "H"))
        # Some nodes above never receive an edge, so the graph is
        # disconnected — matching without top_nodes is ambiguous.
        self.disconnected_graph = G
|
| 82 |
+
|
| 83 |
+
def check_match(self, matching):
|
| 84 |
+
"""Asserts that the matching is what we expect from the bipartite graph
|
| 85 |
+
constructed in the :meth:`setup` fixture.
|
| 86 |
+
|
| 87 |
+
"""
|
| 88 |
+
# For the sake of brevity, rename `matching` to `M`.
|
| 89 |
+
M = matching
|
| 90 |
+
matched_vertices = frozenset(itertools.chain(*M.items()))
|
| 91 |
+
# Assert that the maximum number of vertices (10) is matched.
|
| 92 |
+
assert matched_vertices == frozenset(range(12)) - {1, 10}
|
| 93 |
+
# Assert that no vertex appears in two edges, or in other words, that
|
| 94 |
+
# the matching (u, v) and (v, u) both appear in the matching
|
| 95 |
+
# dictionary.
|
| 96 |
+
assert all(u == M[M[u]] for u in range(12) if u in M)
|
| 97 |
+
|
| 98 |
+
def check_vertex_cover(self, vertices):
|
| 99 |
+
"""Asserts that the given set of vertices is the vertex cover we
|
| 100 |
+
expected from the bipartite graph constructed in the :meth:`setup`
|
| 101 |
+
fixture.
|
| 102 |
+
|
| 103 |
+
"""
|
| 104 |
+
# By Konig's theorem, the number of edges in a maximum matching equals
|
| 105 |
+
# the number of vertices in a minimum vertex cover.
|
| 106 |
+
assert len(vertices) == 5
|
| 107 |
+
# Assert that the set is truly a vertex cover.
|
| 108 |
+
for u, v in self.graph.edges():
|
| 109 |
+
assert u in vertices or v in vertices
|
| 110 |
+
# TODO Assert that the vertices are the correct ones.
|
| 111 |
+
|
| 112 |
+
    def test_eppstein_matching(self):
        """Tests that David Eppstein's implementation of the Hopcroft--Karp
        algorithm produces a maximum cardinality matching.

        """
        # check_match validates size, covered vertices, and symmetry.
        self.check_match(eppstein_matching(self.graph, self.top_nodes))
|
| 118 |
+
|
| 119 |
+
def test_hopcroft_karp_matching(self):
|
| 120 |
+
"""Tests that the Hopcroft--Karp algorithm produces a maximum
|
| 121 |
+
cardinality matching in a bipartite graph.
|
| 122 |
+
|
| 123 |
+
"""
|
| 124 |
+
self.check_match(hopcroft_karp_matching(self.graph, self.top_nodes))
|
| 125 |
+
|
| 126 |
+
def test_to_vertex_cover(self):
|
| 127 |
+
"""Test for converting a maximum matching to a minimum vertex cover."""
|
| 128 |
+
matching = maximum_matching(self.graph, self.top_nodes)
|
| 129 |
+
vertex_cover = to_vertex_cover(self.graph, matching, self.top_nodes)
|
| 130 |
+
self.check_vertex_cover(vertex_cover)
|
| 131 |
+
|
| 132 |
+
def test_eppstein_matching_simple(self):
|
| 133 |
+
match = eppstein_matching(self.simple_graph)
|
| 134 |
+
assert match == self.simple_solution
|
| 135 |
+
|
| 136 |
+
def test_hopcroft_karp_matching_simple(self):
|
| 137 |
+
match = hopcroft_karp_matching(self.simple_graph)
|
| 138 |
+
assert match == self.simple_solution
|
| 139 |
+
|
| 140 |
+
def test_eppstein_matching_disconnected(self):
|
| 141 |
+
with pytest.raises(nx.AmbiguousSolution):
|
| 142 |
+
match = eppstein_matching(self.disconnected_graph)
|
| 143 |
+
|
| 144 |
+
def test_hopcroft_karp_matching_disconnected(self):
|
| 145 |
+
with pytest.raises(nx.AmbiguousSolution):
|
| 146 |
+
match = hopcroft_karp_matching(self.disconnected_graph)
|
| 147 |
+
|
| 148 |
+
def test_issue_2127(self):
|
| 149 |
+
"""Test from issue 2127"""
|
| 150 |
+
# Build the example DAG
|
| 151 |
+
G = nx.DiGraph()
|
| 152 |
+
G.add_edge("A", "C")
|
| 153 |
+
G.add_edge("A", "B")
|
| 154 |
+
G.add_edge("C", "E")
|
| 155 |
+
G.add_edge("C", "D")
|
| 156 |
+
G.add_edge("E", "G")
|
| 157 |
+
G.add_edge("E", "F")
|
| 158 |
+
G.add_edge("G", "I")
|
| 159 |
+
G.add_edge("G", "H")
|
| 160 |
+
|
| 161 |
+
tc = nx.transitive_closure(G)
|
| 162 |
+
btc = nx.Graph()
|
| 163 |
+
|
| 164 |
+
# Create a bipartite graph based on the transitive closure of G
|
| 165 |
+
for v in tc.nodes():
|
| 166 |
+
btc.add_node((0, v))
|
| 167 |
+
btc.add_node((1, v))
|
| 168 |
+
|
| 169 |
+
for u, v in tc.edges():
|
| 170 |
+
btc.add_edge((0, u), (1, v))
|
| 171 |
+
|
| 172 |
+
top_nodes = {n for n in btc if n[0] == 0}
|
| 173 |
+
matching = hopcroft_karp_matching(btc, top_nodes)
|
| 174 |
+
vertex_cover = to_vertex_cover(btc, matching, top_nodes)
|
| 175 |
+
independent_set = set(G) - {v for _, v in vertex_cover}
|
| 176 |
+
assert {"B", "D", "F", "I", "H"} == independent_set
|
| 177 |
+
|
| 178 |
+
def test_vertex_cover_issue_2384(self):
|
| 179 |
+
G = nx.Graph([(0, 3), (1, 3), (1, 4), (2, 3)])
|
| 180 |
+
matching = maximum_matching(G)
|
| 181 |
+
vertex_cover = to_vertex_cover(G, matching)
|
| 182 |
+
for u, v in G.edges():
|
| 183 |
+
assert u in vertex_cover or v in vertex_cover
|
| 184 |
+
|
| 185 |
+
def test_vertex_cover_issue_3306(self):
|
| 186 |
+
G = nx.Graph()
|
| 187 |
+
edges = [(0, 2), (1, 0), (1, 1), (1, 2), (2, 2)]
|
| 188 |
+
G.add_edges_from([((i, "L"), (j, "R")) for i, j in edges])
|
| 189 |
+
|
| 190 |
+
matching = maximum_matching(G)
|
| 191 |
+
vertex_cover = to_vertex_cover(G, matching)
|
| 192 |
+
for u, v in G.edges():
|
| 193 |
+
assert u in vertex_cover or v in vertex_cover
|
| 194 |
+
|
| 195 |
+
def test_unorderable_nodes(self):
|
| 196 |
+
a = object()
|
| 197 |
+
b = object()
|
| 198 |
+
c = object()
|
| 199 |
+
d = object()
|
| 200 |
+
e = object()
|
| 201 |
+
G = nx.Graph([(a, d), (b, d), (b, e), (c, d)])
|
| 202 |
+
matching = maximum_matching(G)
|
| 203 |
+
vertex_cover = to_vertex_cover(G, matching)
|
| 204 |
+
for u, v in G.edges():
|
| 205 |
+
assert u in vertex_cover or v in vertex_cover
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
def test_eppstein_matching():
    """Test in accordance to issue #1927: Eppstein matching with mixed
    node types must agree in size with the reference maximum matching
    and be a symmetric mapping.
    """
    G = nx.Graph()
    G.add_nodes_from(["a", 2, 3, 4], bipartite=0)
    G.add_nodes_from([1, "b", "c"], bipartite=1)
    G.add_edges_from([("a", 1), ("a", "b"), (2, "b"), (2, "c"), (3, "c"), (4, 1)])
    matching = eppstein_matching(G)
    assert len(matching) == len(maximum_matching(G))
    # Every matched value must itself appear as a key (symmetry).
    assert set(matching.values()) <= set(matching.keys())
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
class TestMinimumWeightFullMatching:
    """Tests for ``minimum_weight_full_matching``.

    The implementation delegates the underlying assignment problem to
    SciPy, so the whole class is skipped when SciPy is unavailable.
    """

    @classmethod
    def setup_class(cls):
        # minimum_weight_full_matching requires scipy.optimize.
        pytest.importorskip("scipy")

    def test_minimum_weight_full_matching_incomplete_graph(self):
        """A minimum-weight full matching is found in an incomplete graph."""
        B = nx.Graph()
        B.add_nodes_from([1, 2], bipartite=0)
        B.add_nodes_from([3, 4], bipartite=1)
        B.add_edge(1, 4, weight=100)
        B.add_edge(2, 3, weight=100)
        B.add_edge(2, 4, weight=50)
        matching = minimum_weight_full_matching(B)
        # Edge 2-4 is cheapest, but using it would leave node 1 unmatched.
        assert matching == {1: 4, 2: 3, 4: 1, 3: 2}

    def test_minimum_weight_full_matching_with_no_full_matching(self):
        """ValueError is raised when no full matching exists at all."""
        B = nx.Graph()
        B.add_nodes_from([1, 2, 3], bipartite=0)
        B.add_nodes_from([4, 5, 6], bipartite=1)
        B.add_edge(1, 4, weight=100)
        B.add_edge(2, 4, weight=100)
        B.add_edge(3, 4, weight=50)
        B.add_edge(3, 5, weight=50)
        B.add_edge(3, 6, weight=50)
        with pytest.raises(ValueError):
            minimum_weight_full_matching(B)

    def test_minimum_weight_full_matching_square(self):
        """Matching on a square (3x3) complete bipartite graph."""
        G = nx.complete_bipartite_graph(3, 3)
        G.add_edge(0, 3, weight=400)
        G.add_edge(0, 4, weight=150)
        G.add_edge(0, 5, weight=400)
        G.add_edge(1, 3, weight=400)
        G.add_edge(1, 4, weight=450)
        G.add_edge(1, 5, weight=600)
        G.add_edge(2, 3, weight=300)
        G.add_edge(2, 4, weight=225)
        G.add_edge(2, 5, weight=300)
        matching = minimum_weight_full_matching(G)
        assert matching == {0: 4, 1: 3, 2: 5, 4: 0, 3: 1, 5: 2}

    def test_minimum_weight_full_matching_smaller_left(self):
        """Left side smaller than right: every left node gets matched."""
        G = nx.complete_bipartite_graph(3, 4)
        G.add_edge(0, 3, weight=400)
        G.add_edge(0, 4, weight=150)
        G.add_edge(0, 5, weight=400)
        G.add_edge(0, 6, weight=1)
        G.add_edge(1, 3, weight=400)
        G.add_edge(1, 4, weight=450)
        G.add_edge(1, 5, weight=600)
        G.add_edge(1, 6, weight=2)
        G.add_edge(2, 3, weight=300)
        G.add_edge(2, 4, weight=225)
        G.add_edge(2, 5, weight=290)
        G.add_edge(2, 6, weight=3)
        matching = minimum_weight_full_matching(G)
        assert matching == {0: 4, 1: 6, 2: 5, 4: 0, 5: 2, 6: 1}

    def test_minimum_weight_full_matching_smaller_top_nodes_right(self):
        """Passing the larger side as top_nodes yields the same matching."""
        G = nx.complete_bipartite_graph(3, 4)
        G.add_edge(0, 3, weight=400)
        G.add_edge(0, 4, weight=150)
        G.add_edge(0, 5, weight=400)
        G.add_edge(0, 6, weight=1)
        G.add_edge(1, 3, weight=400)
        G.add_edge(1, 4, weight=450)
        G.add_edge(1, 5, weight=600)
        G.add_edge(1, 6, weight=2)
        G.add_edge(2, 3, weight=300)
        G.add_edge(2, 4, weight=225)
        G.add_edge(2, 5, weight=290)
        G.add_edge(2, 6, weight=3)
        matching = minimum_weight_full_matching(G, top_nodes=[3, 4, 5, 6])
        assert matching == {0: 4, 1: 6, 2: 5, 4: 0, 5: 2, 6: 1}

    def test_minimum_weight_full_matching_smaller_right(self):
        """Right side smaller than left: every right node gets matched."""
        G = nx.complete_bipartite_graph(4, 3)
        G.add_edge(0, 4, weight=400)
        G.add_edge(0, 5, weight=400)
        G.add_edge(0, 6, weight=300)
        G.add_edge(1, 4, weight=150)
        G.add_edge(1, 5, weight=450)
        G.add_edge(1, 6, weight=225)
        G.add_edge(2, 4, weight=400)
        G.add_edge(2, 5, weight=600)
        G.add_edge(2, 6, weight=290)
        G.add_edge(3, 4, weight=1)
        G.add_edge(3, 5, weight=2)
        G.add_edge(3, 6, weight=3)
        matching = minimum_weight_full_matching(G)
        assert matching == {1: 4, 2: 6, 3: 5, 4: 1, 5: 3, 6: 2}

    def test_minimum_weight_full_matching_negative_weights(self):
        """Negative edge weights are handled correctly."""
        G = nx.complete_bipartite_graph(2, 2)
        G.add_edge(0, 2, weight=-2)
        G.add_edge(0, 3, weight=0.2)
        G.add_edge(1, 2, weight=-2)
        G.add_edge(1, 3, weight=0.3)
        matching = minimum_weight_full_matching(G)
        assert matching == {0: 3, 1: 2, 2: 1, 3: 0}

    def test_minimum_weight_full_matching_different_weight_key(self):
        """The ``weight`` keyword selects an alternative edge attribute."""
        G = nx.complete_bipartite_graph(2, 2)
        G.add_edge(0, 2, mass=2)
        G.add_edge(0, 3, mass=0.2)
        G.add_edge(1, 2, mass=1)
        G.add_edge(1, 3, mass=2)
        matching = minimum_weight_full_matching(G, weight="mass")
        assert matching == {0: 3, 1: 2, 2: 1, 3: 0}
|
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/dispersion.py
ADDED
|
@@ -0,0 +1,107 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from itertools import combinations
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
__all__ = ["dispersion"]
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
@nx._dispatchable
def dispersion(G, u=None, v=None, normalized=True, alpha=1.0, b=0.0, c=0.0):
    r"""Calculate dispersion between `u` and `v` in `G`.

    A link between two actors (`u` and `v`) has a high dispersion when their
    mutual ties (`s` and `t`) are not well connected with each other.

    Parameters
    ----------
    G : graph
        A NetworkX graph.
    u : node, optional
        The source for the dispersion score (e.g. ego node of the network).
    v : node, optional
        The target of the dispersion score if specified.
    normalized : bool
        If True (default) normalize by the embeddedness of the nodes (u and v).
    alpha, b, c : float
        Parameters for the normalization procedure. When `normalized` is True,
        the dispersion value is normalized by::

            result = ((dispersion + b) ** alpha) / (embeddedness + c)

        as long as the denominator is nonzero.

    Returns
    -------
    nodes : dictionary
        If u (v) is specified, returns a dictionary of nodes with dispersion
        score for all "target" ("source") nodes. If neither u nor v is
        specified, returns a dictionary of dictionaries for all nodes 'u' in the
        graph with a dispersion score for each node 'v'.

    Notes
    -----
    This implementation follows Lars Backstrom and Jon Kleinberg [1]_. Typical
    usage would be to run dispersion on the ego network $G_u$ if $u$ were
    specified. Running :func:`dispersion` with neither $u$ nor $v$ specified
    can take some time to complete.

    References
    ----------
    .. [1] Romantic Partnerships and the Dispersion of Social Ties:
        A Network Analysis of Relationship Status on Facebook.
        Lars Backstrom, Jon Kleinberg.
        https://arxiv.org/pdf/1310.6753v1.pdf

    """

    def _pair_dispersion(ego_graph, src, dst):
        """Dispersion of the tie (src, dst) within src's ego network."""
        src_nbrs = set(ego_graph[src])
        # Mutual neighbors of src and dst (their shared ties).
        mutual = {n for n in ego_graph[dst] if n in src_nbrs}
        endpoints = {src, dst}
        score = 0
        # Examine every pair of shared ties.
        for s, t in combinations(mutual, 2):
            # Neighbors of s inside the ego network, excluding src and dst.
            s_ties = src_nbrs.intersection(ego_graph[s]) - endpoints
            # Count the pair when s and t are neither adjacent nor share
            # a common connection inside the ego network.
            if t not in s_ties and s_ties.isdisjoint(ego_graph[t]):
                score += 1
        # Embeddedness is the number of shared neighbors of src and dst.
        embeddedness = len(mutual)

        if not normalized:
            return score
        value = (score + b) ** alpha
        if embeddedness + c != 0:
            value /= embeddedness + c
        return value

    if u is None and v is None:
        # Dispersion of every edge, keyed first by source then by target.
        return {n: {nbr: _pair_dispersion(G, n, nbr) for nbr in G[n]} for n in G}
    if u is None:
        # Only the target v is given: score each of its neighbors as source.
        return {n: _pair_dispersion(G, v, n) for n in G[v]}
    if v is None:
        # Only the source u is given: score each of its neighbors as target.
        return {n: _pair_dispersion(G, u, n) for n in G[u]}
    # Both endpoints specified: a single score.
    return _pair_dispersion(G, u, v)
|
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/flow_matrix.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Helpers for current-flow betweenness and current-flow closeness
|
| 2 |
+
# Lazy computations for inverse Laplacian and flow-matrix rows.
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
@nx._dispatchable(edge_attrs="weight")
def flow_matrix_row(G, weight=None, dtype=float, solver="lu"):
    """Yield one row of the current-flow matrix per edge of `G`.

    Each yielded item is ``(row, (u, v))``.  The inverse Laplacian is
    computed lazily through the chosen ``solver``: ``"full"`` (dense
    inverse), ``"lu"`` (SuperLU factorization, the default) or ``"cg"``
    (preconditioned conjugate gradient).  Nodes are assumed to be the
    integers ``0..n-1`` (callers relabel beforehand — see the
    ``nodelist=range(n)`` below).
    """
    import numpy as np

    # Map solver name to the InverseLaplacian subclass implementing it.
    solvername = {
        "full": FullInverseLaplacian,
        "lu": SuperLUInverseLaplacian,
        "cg": CGInverseLaplacian,
    }
    n = G.number_of_nodes()
    L = nx.laplacian_matrix(G, nodelist=range(n), weight=weight).asformat("csc")
    L = L.astype(dtype)
    C = solvername[solver](L, dtype=dtype)  # initialize solver
    w = C.w  # w is the Laplacian matrix width
    # row-by-row flow matrix, edges visited in sorted (u, v) order
    for u, v in sorted(sorted((u, v)) for u, v in G.edges()):
        B = np.zeros(w, dtype=dtype)
        # Edge weight; unweighted edges count as 1.0.
        c = G[u][v].get(weight, 1.0)
        B[u % w] = c
        B[v % w] = -c
        # get only the rows needed in the inverse laplacian
        # and multiply to get the flow matrix row
        row = B @ C.get_rows(u, v)
        yield row, (u, v)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
# Class to compute the inverse laplacian only for specified rows
|
| 34 |
+
# Allows computation of the current-flow matrix without storing entire
|
| 35 |
+
# inverse laplacian matrix
|
| 36 |
+
# Class to compute the inverse laplacian only for specified rows
# Allows computation of the current-flow matrix without storing entire
# inverse laplacian matrix
class InverseLaplacian:
    """Base class for lazily computed rows of an inverse Laplacian.

    Subclasses provide ``init_solver`` / ``solve`` / ``solve_inverse``;
    this class manages a circular cache ``C`` of ``w`` rows at a time.
    """

    def __init__(self, L, width=None, dtype=None):
        global np
        # Deferred import, bound module-globally so the other methods
        # (which reference the bare name `np`) can use it too.
        import numpy as np

        (n, n) = L.shape
        self.dtype = dtype
        self.n = n
        if width is None:
            # Estimate a bandwidth-like width from the sparsity pattern.
            self.w = self.width(L)
        else:
            self.w = width
        # Circular row cache: row r of the inverse lives at C[r % w].
        self.C = np.zeros((self.w, n), dtype=dtype)
        # "Grounded" Laplacian: dropping row/column 0 makes it invertible.
        self.L1 = L[1:, 1:]
        self.init_solver(L)

    def init_solver(self, L):
        # Hook for subclasses to prepare their linear solver.
        pass

    def solve(self, r):
        raise nx.NetworkXError("Implement solver")

    def solve_inverse(self, r):
        raise nx.NetworkXError("Implement solver")

    def get_rows(self, r1, r2):
        # Populate the cache with rows r1..r2 of the inverse; column 0
        # stays zero because the grounded system omits node 0.
        for r in range(r1, r2 + 1):
            self.C[r % self.w, 1:] = self.solve_inverse(r)
        return self.C

    def get_row(self, r):
        # Single-row variant of get_rows.
        self.C[r % self.w, 1:] = self.solve_inverse(r)
        return self.C[r % self.w]

    def width(self, L):
        # Maximum column span of nonzeros over all rows, plus one —
        # how many consecutive rows must be cached at once.
        m = 0
        for i, row in enumerate(L):
            w = 0
            y = np.nonzero(row)[-1]
            if len(y) > 0:
                v = y - i
                w = v.max() - v.min() + 1
            m = max(w, m)
        return m
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
class FullInverseLaplacian(InverseLaplacian):
    """Solver that materializes the full dense inverse Laplacian."""

    def init_solver(self, L):
        self.IL = np.zeros(L.shape, dtype=self.dtype)
        # Invert the grounded Laplacian; row/column 0 remain zero.
        self.IL[1:, 1:] = np.linalg.inv(self.L1.todense())

    def solve(self, rhs):
        s = np.zeros(rhs.shape, dtype=self.dtype)
        s = self.IL @ rhs
        return s

    def solve_inverse(self, r):
        # Row r of the inverse, excluding the grounded column 0.
        return self.IL[r, 1:]
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
class SuperLUInverseLaplacian(InverseLaplacian):
    """Solver backed by a SuperLU factorization of the grounded Laplacian."""

    def init_solver(self, L):
        import scipy as sp

        # Pre-factorize once; lusolve(b) then solves L1 @ x = b cheaply.
        self.lusolve = sp.sparse.linalg.factorized(self.L1.tocsc())

    def solve_inverse(self, r):
        # Solve against the r-th unit vector to get row r of the inverse.
        rhs = np.zeros(self.n, dtype=self.dtype)
        rhs[r] = 1
        return self.lusolve(rhs[1:])

    def solve(self, rhs):
        s = np.zeros(rhs.shape, dtype=self.dtype)
        s[1:] = self.lusolve(rhs[1:])
        return s
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
class CGInverseLaplacian(InverseLaplacian):
    """Solver using preconditioned conjugate-gradient iterations."""

    def init_solver(self, L):
        global sp
        # Deferred import, bound module-globally so solve()/solve_inverse()
        # can reference the bare name `sp`.
        import scipy as sp

        # Incomplete LU factorization used as the CG preconditioner.
        ilu = sp.sparse.linalg.spilu(self.L1.tocsc())
        n = self.n - 1
        self.M = sp.sparse.linalg.LinearOperator(shape=(n, n), matvec=ilu.solve)

    def solve(self, rhs):
        s = np.zeros(rhs.shape, dtype=self.dtype)
        # atol=0 keeps the legacy pure-rtol convergence behavior.
        s[1:] = sp.sparse.linalg.cg(self.L1, rhs[1:], M=self.M, atol=0)[0]
        return s

    def solve_inverse(self, r):
        # Solve against the r-th unit vector to get row r of the inverse.
        rhs = np.zeros(self.n, self.dtype)
        rhs[r] = 1
        return sp.sparse.linalg.cg(self.L1, rhs[1:], M=self.M, atol=0)[0]
|
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/laplacian.py
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Laplacian centrality measures.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
__all__ = ["laplacian_centrality"]
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@nx._dispatchable(edge_attrs="weight")
def laplacian_centrality(
    G, normalized=True, nodelist=None, weight="weight", walk_type=None, alpha=0.95
):
    r"""Compute the Laplacian centrality for nodes in the graph `G`.

    The Laplacian Centrality of a node ``i`` is measured by the drop in the
    Laplacian Energy after deleting node ``i`` from the graph. The Laplacian Energy
    is the sum of the squared eigenvalues of a graph's Laplacian matrix.

    .. math::

        C_L(u_i,G) = \frac{(\Delta E)_i}{E_L (G)} = \frac{E_L (G)-E_L (G_i)}{E_L (G)}

        E_L (G) = \sum_{i=0}^n \lambda_i^2

    Where $E_L (G)$ is the Laplacian energy of graph `G`,
    E_L (G_i) is the Laplacian energy of graph `G` after deleting node ``i``
    and $\lambda_i$ are the eigenvalues of `G`'s Laplacian matrix.
    This formula shows the normalized value. Without normalization,
    the numerator on the right side is returned.

    Parameters
    ----------
    G : graph
        A networkx graph

    normalized : bool (default = True)
        If True the centrality score is scaled so the sum over all nodes is 1.
        If False the centrality score for each node is the drop in Laplacian
        energy when that node is removed.

    nodelist : list, optional (default = None)
        The rows and columns are ordered according to the nodes in nodelist.
        If nodelist is None, then the ordering is produced by G.nodes().

    weight: string or None, optional (default=`weight`)
        Optional parameter `weight` to compute the Laplacian matrix.
        The edge data key used to compute each value in the matrix.
        If None, then each edge has weight 1.

    walk_type : string or None, optional (default=None)
        Optional parameter `walk_type` used when calling
        :func:`directed_laplacian_matrix <networkx.directed_laplacian_matrix>`.
        One of ``"random"``, ``"lazy"``, or ``"pagerank"``. If ``walk_type=None``
        (the default), then a value is selected according to the properties of `G`:
        - ``walk_type="random"`` if `G` is strongly connected and aperiodic
        - ``walk_type="lazy"`` if `G` is strongly connected but not aperiodic
        - ``walk_type="pagerank"`` for all other cases.

    alpha : real (default = 0.95)
        Optional parameter `alpha` used when calling
        :func:`directed_laplacian_matrix <networkx.directed_laplacian_matrix>`.
        (1 - alpha) is the teleportation probability used with pagerank.

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with Laplacian centrality as the value.

    Examples
    --------
    >>> G = nx.Graph()
    >>> edges = [(0, 1, 4), (0, 2, 2), (2, 1, 1), (1, 3, 2), (1, 4, 2), (4, 5, 1)]
    >>> G.add_weighted_edges_from(edges)
    >>> sorted((v, f"{c:0.2f}") for v, c in laplacian_centrality(G).items())
    [(0, '0.70'), (1, '0.90'), (2, '0.28'), (3, '0.22'), (4, '0.26'), (5, '0.04')]

    Notes
    -----
    The algorithm is implemented based on [1]_ with an extension to directed graphs
    using the ``directed_laplacian_matrix`` function.

    Raises
    ------
    NetworkXPointlessConcept
        If the graph `G` is the null graph.
    ZeroDivisionError
        If the graph `G` has no edges (is empty) and normalization is requested.

    References
    ----------
    .. [1] Qi, X., Fuller, E., Wu, Q., Wu, Y., and Zhang, C.-Q. (2012).
        Laplacian centrality: A new centrality measure for weighted networks.
        Information Sciences, 194:240-253.
        https://math.wvu.edu/~cqzhang/Publication-files/my-paper/INS-2012-Laplacian-W.pdf

    See Also
    --------
    :func:`~networkx.linalg.laplacianmatrix.directed_laplacian_matrix`
    :func:`~networkx.linalg.laplacianmatrix.laplacian_matrix`
    """
    import numpy as np
    import scipy as sp

    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("null graph has no centrality defined")
    if G.size(weight=weight) == 0:
        if normalized:
            raise ZeroDivisionError("graph with no edges has zero full energy")
        return {n: 0 for n in G}

    if nodelist is not None:
        nodeset = set(G.nbunch_iter(nodelist))
        if len(nodeset) != len(nodelist):
            raise nx.NetworkXError("nodelist has duplicate nodes or nodes not in G")
        # Requested nodes first, then the remaining nodes of G; the matrix
        # covers the whole graph but only nodelist entries get a score.
        nodes = nodelist + [n for n in G if n not in nodeset]
    else:
        nodelist = nodes = list(G)

    if G.is_directed():
        # directed_laplacian_matrix returns a dense ndarray directly.
        lap_matrix = nx.directed_laplacian_matrix(G, nodes, weight, walk_type, alpha)
    else:
        lap_matrix = nx.laplacian_matrix(G, nodes, weight).toarray()

    # Laplacian energy of the full graph: sum of squared eigenvalues.
    full_energy = np.power(sp.linalg.eigh(lap_matrix, eigvals_only=True), 2).sum()

    # calculate laplacian centrality
    laplace_centralities_dict = {}
    for i, node in enumerate(nodelist):
        # remove row and col i from lap_matrix
        all_but_i = list(np.arange(lap_matrix.shape[0]))
        all_but_i.remove(i)
        A_2 = lap_matrix[all_but_i, :][:, all_but_i]

        # Adjust diagonal for removed row: deleting node i also removes its
        # edges, reducing the degree term of each remaining node.
        new_diag = lap_matrix.diagonal() - abs(lap_matrix[:, i])
        np.fill_diagonal(A_2, new_diag[all_but_i])

        if len(all_but_i) > 0:  # catches degenerate case of single node
            new_energy = np.power(sp.linalg.eigh(A_2, eigvals_only=True), 2).sum()
        else:
            new_energy = 0.0

        # Centrality is the energy drop caused by deleting node i.
        lapl_cent = full_energy - new_energy
        if normalized:
            lapl_cent = lapl_cent / full_energy

        laplace_centralities_dict[node] = float(lapl_cent)

    return laplace_centralities_dict
|
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/__init__.py
ADDED
|
File without changes
|
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/test_closeness_centrality.py
ADDED
|
@@ -0,0 +1,307 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for closeness centrality.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class TestClosenessCentrality:
|
| 11 |
+
    @classmethod
    def setup_class(cls):
        """Build the shared fixture graphs used by the closeness tests."""
        cls.K = nx.krackhardt_kite_graph()
        cls.P3 = nx.path_graph(3)
        cls.P4 = nx.path_graph(4)
        cls.K5 = nx.complete_graph(5)

        cls.C4 = nx.cycle_graph(4)
        cls.T = nx.balanced_tree(r=2, h=2)
        # Small graph with two distinct shortest-path routes between nodes.
        cls.Gb = nx.Graph()
        cls.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)])

        F = nx.florentine_families_graph()
        cls.F = F

        cls.LM = nx.les_miserables_graph()

        # Create random undirected, unweighted graph for testing incremental version
        cls.undirected_G = nx.fast_gnp_random_graph(n=100, p=0.6, seed=123)
        cls.undirected_G_cc = nx.closeness_centrality(cls.undirected_G)
|
| 31 |
+
|
| 32 |
+
def test_wf_improved(self):
|
| 33 |
+
G = nx.union(self.P4, nx.path_graph([4, 5, 6]))
|
| 34 |
+
c = nx.closeness_centrality(G)
|
| 35 |
+
cwf = nx.closeness_centrality(G, wf_improved=False)
|
| 36 |
+
res = {0: 0.25, 1: 0.375, 2: 0.375, 3: 0.25, 4: 0.222, 5: 0.333, 6: 0.222}
|
| 37 |
+
wf_res = {0: 0.5, 1: 0.75, 2: 0.75, 3: 0.5, 4: 0.667, 5: 1.0, 6: 0.667}
|
| 38 |
+
for n in G:
|
| 39 |
+
assert c[n] == pytest.approx(res[n], abs=1e-3)
|
| 40 |
+
assert cwf[n] == pytest.approx(wf_res[n], abs=1e-3)
|
| 41 |
+
|
| 42 |
+
def test_digraph(self):
|
| 43 |
+
G = nx.path_graph(3, create_using=nx.DiGraph())
|
| 44 |
+
c = nx.closeness_centrality(G)
|
| 45 |
+
cr = nx.closeness_centrality(G.reverse())
|
| 46 |
+
d = {0: 0.0, 1: 0.500, 2: 0.667}
|
| 47 |
+
dr = {0: 0.667, 1: 0.500, 2: 0.0}
|
| 48 |
+
for n in sorted(self.P3):
|
| 49 |
+
assert c[n] == pytest.approx(d[n], abs=1e-3)
|
| 50 |
+
assert cr[n] == pytest.approx(dr[n], abs=1e-3)
|
| 51 |
+
|
| 52 |
+
def test_k5_closeness(self):
|
| 53 |
+
c = nx.closeness_centrality(self.K5)
|
| 54 |
+
d = {0: 1.000, 1: 1.000, 2: 1.000, 3: 1.000, 4: 1.000}
|
| 55 |
+
for n in sorted(self.K5):
|
| 56 |
+
assert c[n] == pytest.approx(d[n], abs=1e-3)
|
| 57 |
+
|
| 58 |
+
def test_p3_closeness(self):
|
| 59 |
+
c = nx.closeness_centrality(self.P3)
|
| 60 |
+
d = {0: 0.667, 1: 1.000, 2: 0.667}
|
| 61 |
+
for n in sorted(self.P3):
|
| 62 |
+
assert c[n] == pytest.approx(d[n], abs=1e-3)
|
| 63 |
+
|
| 64 |
+
    def test_krackhardt_closeness(self):
        """Closeness values on the Krackhardt kite match the literature."""
        c = nx.closeness_centrality(self.K)
        d = {
            0: 0.529,
            1: 0.529,
            2: 0.500,
            3: 0.600,
            4: 0.500,
            5: 0.643,
            6: 0.643,
            7: 0.600,
            8: 0.429,
            9: 0.310,
        }
        for n in sorted(self.K):
            assert c[n] == pytest.approx(d[n], abs=1e-3)
|
| 80 |
+
|
| 81 |
+
def test_florentine_families_closeness(self):
|
| 82 |
+
c = nx.closeness_centrality(self.F)
|
| 83 |
+
d = {
|
| 84 |
+
"Acciaiuoli": 0.368,
|
| 85 |
+
"Albizzi": 0.483,
|
| 86 |
+
"Barbadori": 0.4375,
|
| 87 |
+
"Bischeri": 0.400,
|
| 88 |
+
"Castellani": 0.389,
|
| 89 |
+
"Ginori": 0.333,
|
| 90 |
+
"Guadagni": 0.467,
|
| 91 |
+
"Lamberteschi": 0.326,
|
| 92 |
+
"Medici": 0.560,
|
| 93 |
+
"Pazzi": 0.286,
|
| 94 |
+
"Peruzzi": 0.368,
|
| 95 |
+
"Ridolfi": 0.500,
|
| 96 |
+
"Salviati": 0.389,
|
| 97 |
+
"Strozzi": 0.4375,
|
| 98 |
+
"Tornabuoni": 0.483,
|
| 99 |
+
}
|
| 100 |
+
for n in sorted(self.F):
|
| 101 |
+
assert c[n] == pytest.approx(d[n], abs=1e-3)
|
| 102 |
+
|
| 103 |
+
def test_les_miserables_closeness(self):
|
| 104 |
+
c = nx.closeness_centrality(self.LM)
|
| 105 |
+
d = {
|
| 106 |
+
"Napoleon": 0.302,
|
| 107 |
+
"Myriel": 0.429,
|
| 108 |
+
"MlleBaptistine": 0.413,
|
| 109 |
+
"MmeMagloire": 0.413,
|
| 110 |
+
"CountessDeLo": 0.302,
|
| 111 |
+
"Geborand": 0.302,
|
| 112 |
+
"Champtercier": 0.302,
|
| 113 |
+
"Cravatte": 0.302,
|
| 114 |
+
"Count": 0.302,
|
| 115 |
+
"OldMan": 0.302,
|
| 116 |
+
"Valjean": 0.644,
|
| 117 |
+
"Labarre": 0.394,
|
| 118 |
+
"Marguerite": 0.413,
|
| 119 |
+
"MmeDeR": 0.394,
|
| 120 |
+
"Isabeau": 0.394,
|
| 121 |
+
"Gervais": 0.394,
|
| 122 |
+
"Listolier": 0.341,
|
| 123 |
+
"Tholomyes": 0.392,
|
| 124 |
+
"Fameuil": 0.341,
|
| 125 |
+
"Blacheville": 0.341,
|
| 126 |
+
"Favourite": 0.341,
|
| 127 |
+
"Dahlia": 0.341,
|
| 128 |
+
"Zephine": 0.341,
|
| 129 |
+
"Fantine": 0.461,
|
| 130 |
+
"MmeThenardier": 0.461,
|
| 131 |
+
"Thenardier": 0.517,
|
| 132 |
+
"Cosette": 0.478,
|
| 133 |
+
"Javert": 0.517,
|
| 134 |
+
"Fauchelevent": 0.402,
|
| 135 |
+
"Bamatabois": 0.427,
|
| 136 |
+
"Perpetue": 0.318,
|
| 137 |
+
"Simplice": 0.418,
|
| 138 |
+
"Scaufflaire": 0.394,
|
| 139 |
+
"Woman1": 0.396,
|
| 140 |
+
"Judge": 0.404,
|
| 141 |
+
"Champmathieu": 0.404,
|
| 142 |
+
"Brevet": 0.404,
|
| 143 |
+
"Chenildieu": 0.404,
|
| 144 |
+
"Cochepaille": 0.404,
|
| 145 |
+
"Pontmercy": 0.373,
|
| 146 |
+
"Boulatruelle": 0.342,
|
| 147 |
+
"Eponine": 0.396,
|
| 148 |
+
"Anzelma": 0.352,
|
| 149 |
+
"Woman2": 0.402,
|
| 150 |
+
"MotherInnocent": 0.398,
|
| 151 |
+
"Gribier": 0.288,
|
| 152 |
+
"MmeBurgon": 0.344,
|
| 153 |
+
"Jondrette": 0.257,
|
| 154 |
+
"Gavroche": 0.514,
|
| 155 |
+
"Gillenormand": 0.442,
|
| 156 |
+
"Magnon": 0.335,
|
| 157 |
+
"MlleGillenormand": 0.442,
|
| 158 |
+
"MmePontmercy": 0.315,
|
| 159 |
+
"MlleVaubois": 0.308,
|
| 160 |
+
"LtGillenormand": 0.365,
|
| 161 |
+
"Marius": 0.531,
|
| 162 |
+
"BaronessT": 0.352,
|
| 163 |
+
"Mabeuf": 0.396,
|
| 164 |
+
"Enjolras": 0.481,
|
| 165 |
+
"Combeferre": 0.392,
|
| 166 |
+
"Prouvaire": 0.357,
|
| 167 |
+
"Feuilly": 0.392,
|
| 168 |
+
"Courfeyrac": 0.400,
|
| 169 |
+
"Bahorel": 0.394,
|
| 170 |
+
"Bossuet": 0.475,
|
| 171 |
+
"Joly": 0.394,
|
| 172 |
+
"Grantaire": 0.358,
|
| 173 |
+
"MotherPlutarch": 0.285,
|
| 174 |
+
"Gueulemer": 0.463,
|
| 175 |
+
"Babet": 0.463,
|
| 176 |
+
"Claquesous": 0.452,
|
| 177 |
+
"Montparnasse": 0.458,
|
| 178 |
+
"Toussaint": 0.402,
|
| 179 |
+
"Child1": 0.342,
|
| 180 |
+
"Child2": 0.342,
|
| 181 |
+
"Brujon": 0.380,
|
| 182 |
+
"MmeHucheloup": 0.353,
|
| 183 |
+
}
|
| 184 |
+
for n in sorted(self.LM):
|
| 185 |
+
assert c[n] == pytest.approx(d[n], abs=1e-3)
|
| 186 |
+
|
| 187 |
+
def test_weighted_closeness(self):
|
| 188 |
+
edges = [
|
| 189 |
+
("s", "u", 10),
|
| 190 |
+
("s", "x", 5),
|
| 191 |
+
("u", "v", 1),
|
| 192 |
+
("u", "x", 2),
|
| 193 |
+
("v", "y", 1),
|
| 194 |
+
("x", "u", 3),
|
| 195 |
+
("x", "v", 5),
|
| 196 |
+
("x", "y", 2),
|
| 197 |
+
("y", "s", 7),
|
| 198 |
+
("y", "v", 6),
|
| 199 |
+
]
|
| 200 |
+
XG = nx.Graph()
|
| 201 |
+
XG.add_weighted_edges_from(edges)
|
| 202 |
+
c = nx.closeness_centrality(XG, distance="weight")
|
| 203 |
+
d = {"y": 0.200, "x": 0.286, "s": 0.138, "u": 0.235, "v": 0.200}
|
| 204 |
+
for n in sorted(XG):
|
| 205 |
+
assert c[n] == pytest.approx(d[n], abs=1e-3)
|
| 206 |
+
|
| 207 |
+
#
|
| 208 |
+
# Tests for incremental closeness centrality.
|
| 209 |
+
#
|
| 210 |
+
@staticmethod
|
| 211 |
+
def pick_add_edge(g):
|
| 212 |
+
u = nx.utils.arbitrary_element(g)
|
| 213 |
+
possible_nodes = set(g.nodes())
|
| 214 |
+
neighbors = list(g.neighbors(u)) + [u]
|
| 215 |
+
possible_nodes.difference_update(neighbors)
|
| 216 |
+
v = nx.utils.arbitrary_element(possible_nodes)
|
| 217 |
+
return (u, v)
|
| 218 |
+
|
| 219 |
+
@staticmethod
|
| 220 |
+
def pick_remove_edge(g):
|
| 221 |
+
u = nx.utils.arbitrary_element(g)
|
| 222 |
+
possible_nodes = list(g.neighbors(u))
|
| 223 |
+
v = nx.utils.arbitrary_element(possible_nodes)
|
| 224 |
+
return (u, v)
|
| 225 |
+
|
| 226 |
+
def test_directed_raises(self):
|
| 227 |
+
with pytest.raises(nx.NetworkXNotImplemented):
|
| 228 |
+
dir_G = nx.gn_graph(n=5)
|
| 229 |
+
prev_cc = None
|
| 230 |
+
edge = self.pick_add_edge(dir_G)
|
| 231 |
+
insert = True
|
| 232 |
+
nx.incremental_closeness_centrality(dir_G, edge, prev_cc, insert)
|
| 233 |
+
|
| 234 |
+
def test_wrong_size_prev_cc_raises(self):
|
| 235 |
+
with pytest.raises(nx.NetworkXError):
|
| 236 |
+
G = self.undirected_G.copy()
|
| 237 |
+
edge = self.pick_add_edge(G)
|
| 238 |
+
insert = True
|
| 239 |
+
prev_cc = self.undirected_G_cc.copy()
|
| 240 |
+
prev_cc.pop(0)
|
| 241 |
+
nx.incremental_closeness_centrality(G, edge, prev_cc, insert)
|
| 242 |
+
|
| 243 |
+
def test_wrong_nodes_prev_cc_raises(self):
|
| 244 |
+
with pytest.raises(nx.NetworkXError):
|
| 245 |
+
G = self.undirected_G.copy()
|
| 246 |
+
edge = self.pick_add_edge(G)
|
| 247 |
+
insert = True
|
| 248 |
+
prev_cc = self.undirected_G_cc.copy()
|
| 249 |
+
num_nodes = len(prev_cc)
|
| 250 |
+
prev_cc.pop(0)
|
| 251 |
+
prev_cc[num_nodes] = 0.5
|
| 252 |
+
nx.incremental_closeness_centrality(G, edge, prev_cc, insert)
|
| 253 |
+
|
| 254 |
+
def test_zero_centrality(self):
|
| 255 |
+
G = nx.path_graph(3)
|
| 256 |
+
prev_cc = nx.closeness_centrality(G)
|
| 257 |
+
edge = self.pick_remove_edge(G)
|
| 258 |
+
test_cc = nx.incremental_closeness_centrality(G, edge, prev_cc, insertion=False)
|
| 259 |
+
G.remove_edges_from([edge])
|
| 260 |
+
real_cc = nx.closeness_centrality(G)
|
| 261 |
+
shared_items = set(test_cc.items()) & set(real_cc.items())
|
| 262 |
+
assert len(shared_items) == len(real_cc)
|
| 263 |
+
assert 0 in test_cc.values()
|
| 264 |
+
|
| 265 |
+
def test_incremental(self):
|
| 266 |
+
# Check that incremental and regular give same output
|
| 267 |
+
G = self.undirected_G.copy()
|
| 268 |
+
prev_cc = None
|
| 269 |
+
for i in range(5):
|
| 270 |
+
if i % 2 == 0:
|
| 271 |
+
# Remove an edge
|
| 272 |
+
insert = False
|
| 273 |
+
edge = self.pick_remove_edge(G)
|
| 274 |
+
else:
|
| 275 |
+
# Add an edge
|
| 276 |
+
insert = True
|
| 277 |
+
edge = self.pick_add_edge(G)
|
| 278 |
+
|
| 279 |
+
# start = timeit.default_timer()
|
| 280 |
+
test_cc = nx.incremental_closeness_centrality(G, edge, prev_cc, insert)
|
| 281 |
+
# inc_elapsed = (timeit.default_timer() - start)
|
| 282 |
+
# print(f"incremental time: {inc_elapsed}")
|
| 283 |
+
|
| 284 |
+
if insert:
|
| 285 |
+
G.add_edges_from([edge])
|
| 286 |
+
else:
|
| 287 |
+
G.remove_edges_from([edge])
|
| 288 |
+
|
| 289 |
+
# start = timeit.default_timer()
|
| 290 |
+
real_cc = nx.closeness_centrality(G)
|
| 291 |
+
# reg_elapsed = (timeit.default_timer() - start)
|
| 292 |
+
# print(f"regular time: {reg_elapsed}")
|
| 293 |
+
# Example output:
|
| 294 |
+
# incremental time: 0.208
|
| 295 |
+
# regular time: 0.276
|
| 296 |
+
# incremental time: 0.00683
|
| 297 |
+
# regular time: 0.260
|
| 298 |
+
# incremental time: 0.0224
|
| 299 |
+
# regular time: 0.278
|
| 300 |
+
# incremental time: 0.00804
|
| 301 |
+
# regular time: 0.208
|
| 302 |
+
# incremental time: 0.00947
|
| 303 |
+
# regular time: 0.188
|
| 304 |
+
|
| 305 |
+
assert set(test_cc.items()) == set(real_cc.items())
|
| 306 |
+
|
| 307 |
+
prev_cc = test_cc
|
wemm/lib/python3.10/site-packages/networkx/algorithms/centrality/tests/test_laplacian_centrality.py
ADDED
|
@@ -0,0 +1,221 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
np = pytest.importorskip("numpy")
|
| 6 |
+
sp = pytest.importorskip("scipy")
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def test_laplacian_centrality_null_graph():
|
| 10 |
+
G = nx.Graph()
|
| 11 |
+
with pytest.raises(nx.NetworkXPointlessConcept):
|
| 12 |
+
d = nx.laplacian_centrality(G, normalized=False)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def test_laplacian_centrality_single_node():
|
| 16 |
+
"""See gh-6571"""
|
| 17 |
+
G = nx.empty_graph(1)
|
| 18 |
+
assert nx.laplacian_centrality(G, normalized=False) == {0: 0}
|
| 19 |
+
with pytest.raises(ZeroDivisionError):
|
| 20 |
+
nx.laplacian_centrality(G, normalized=True)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def test_laplacian_centrality_unconnected_nodes():
|
| 24 |
+
"""laplacian_centrality on a unconnected node graph should return 0
|
| 25 |
+
|
| 26 |
+
For graphs without edges, the Laplacian energy is 0 and is unchanged with
|
| 27 |
+
node removal, so::
|
| 28 |
+
|
| 29 |
+
LC(v) = LE(G) - LE(G - v) = 0 - 0 = 0
|
| 30 |
+
"""
|
| 31 |
+
G = nx.empty_graph(3)
|
| 32 |
+
assert nx.laplacian_centrality(G, normalized=False) == {0: 0, 1: 0, 2: 0}
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def test_laplacian_centrality_empty_graph():
|
| 36 |
+
G = nx.empty_graph(3)
|
| 37 |
+
with pytest.raises(ZeroDivisionError):
|
| 38 |
+
d = nx.laplacian_centrality(G, normalized=True)
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def test_laplacian_centrality_E():
|
| 42 |
+
E = nx.Graph()
|
| 43 |
+
E.add_weighted_edges_from(
|
| 44 |
+
[(0, 1, 4), (4, 5, 1), (0, 2, 2), (2, 1, 1), (1, 3, 2), (1, 4, 2)]
|
| 45 |
+
)
|
| 46 |
+
d = nx.laplacian_centrality(E)
|
| 47 |
+
exact = {
|
| 48 |
+
0: 0.700000,
|
| 49 |
+
1: 0.900000,
|
| 50 |
+
2: 0.280000,
|
| 51 |
+
3: 0.220000,
|
| 52 |
+
4: 0.260000,
|
| 53 |
+
5: 0.040000,
|
| 54 |
+
}
|
| 55 |
+
|
| 56 |
+
for n, dc in d.items():
|
| 57 |
+
assert exact[n] == pytest.approx(dc, abs=1e-7)
|
| 58 |
+
|
| 59 |
+
# Check not normalized
|
| 60 |
+
full_energy = 200
|
| 61 |
+
dnn = nx.laplacian_centrality(E, normalized=False)
|
| 62 |
+
for n, dc in dnn.items():
|
| 63 |
+
assert exact[n] * full_energy == pytest.approx(dc, abs=1e-7)
|
| 64 |
+
|
| 65 |
+
# Check unweighted not-normalized version
|
| 66 |
+
duw_nn = nx.laplacian_centrality(E, normalized=False, weight=None)
|
| 67 |
+
print(duw_nn)
|
| 68 |
+
exact_uw_nn = {
|
| 69 |
+
0: 18,
|
| 70 |
+
1: 34,
|
| 71 |
+
2: 18,
|
| 72 |
+
3: 10,
|
| 73 |
+
4: 16,
|
| 74 |
+
5: 6,
|
| 75 |
+
}
|
| 76 |
+
for n, dc in duw_nn.items():
|
| 77 |
+
assert exact_uw_nn[n] == pytest.approx(dc, abs=1e-7)
|
| 78 |
+
|
| 79 |
+
# Check unweighted version
|
| 80 |
+
duw = nx.laplacian_centrality(E, weight=None)
|
| 81 |
+
full_energy = 42
|
| 82 |
+
for n, dc in duw.items():
|
| 83 |
+
assert exact_uw_nn[n] / full_energy == pytest.approx(dc, abs=1e-7)
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def test_laplacian_centrality_KC():
|
| 87 |
+
KC = nx.karate_club_graph()
|
| 88 |
+
d = nx.laplacian_centrality(KC)
|
| 89 |
+
exact = {
|
| 90 |
+
0: 0.2543593,
|
| 91 |
+
1: 0.1724524,
|
| 92 |
+
2: 0.2166053,
|
| 93 |
+
3: 0.0964646,
|
| 94 |
+
4: 0.0350344,
|
| 95 |
+
5: 0.0571109,
|
| 96 |
+
6: 0.0540713,
|
| 97 |
+
7: 0.0788674,
|
| 98 |
+
8: 0.1222204,
|
| 99 |
+
9: 0.0217565,
|
| 100 |
+
10: 0.0308751,
|
| 101 |
+
11: 0.0215965,
|
| 102 |
+
12: 0.0174372,
|
| 103 |
+
13: 0.118861,
|
| 104 |
+
14: 0.0366341,
|
| 105 |
+
15: 0.0548712,
|
| 106 |
+
16: 0.0172772,
|
| 107 |
+
17: 0.0191969,
|
| 108 |
+
18: 0.0225564,
|
| 109 |
+
19: 0.0331147,
|
| 110 |
+
20: 0.0279955,
|
| 111 |
+
21: 0.0246361,
|
| 112 |
+
22: 0.0382339,
|
| 113 |
+
23: 0.1294193,
|
| 114 |
+
24: 0.0227164,
|
| 115 |
+
25: 0.0644697,
|
| 116 |
+
26: 0.0281555,
|
| 117 |
+
27: 0.075188,
|
| 118 |
+
28: 0.0364742,
|
| 119 |
+
29: 0.0707087,
|
| 120 |
+
30: 0.0708687,
|
| 121 |
+
31: 0.131019,
|
| 122 |
+
32: 0.2370821,
|
| 123 |
+
33: 0.3066709,
|
| 124 |
+
}
|
| 125 |
+
for n, dc in d.items():
|
| 126 |
+
assert exact[n] == pytest.approx(dc, abs=1e-7)
|
| 127 |
+
|
| 128 |
+
# Check not normalized
|
| 129 |
+
full_energy = 12502
|
| 130 |
+
dnn = nx.laplacian_centrality(KC, normalized=False)
|
| 131 |
+
for n, dc in dnn.items():
|
| 132 |
+
assert exact[n] * full_energy == pytest.approx(dc, abs=1e-3)
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def test_laplacian_centrality_K():
|
| 136 |
+
K = nx.krackhardt_kite_graph()
|
| 137 |
+
d = nx.laplacian_centrality(K)
|
| 138 |
+
exact = {
|
| 139 |
+
0: 0.3010753,
|
| 140 |
+
1: 0.3010753,
|
| 141 |
+
2: 0.2258065,
|
| 142 |
+
3: 0.483871,
|
| 143 |
+
4: 0.2258065,
|
| 144 |
+
5: 0.3870968,
|
| 145 |
+
6: 0.3870968,
|
| 146 |
+
7: 0.1935484,
|
| 147 |
+
8: 0.0752688,
|
| 148 |
+
9: 0.0322581,
|
| 149 |
+
}
|
| 150 |
+
for n, dc in d.items():
|
| 151 |
+
assert exact[n] == pytest.approx(dc, abs=1e-7)
|
| 152 |
+
|
| 153 |
+
# Check not normalized
|
| 154 |
+
full_energy = 186
|
| 155 |
+
dnn = nx.laplacian_centrality(K, normalized=False)
|
| 156 |
+
for n, dc in dnn.items():
|
| 157 |
+
assert exact[n] * full_energy == pytest.approx(dc, abs=1e-3)
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
def test_laplacian_centrality_P3():
|
| 161 |
+
P3 = nx.path_graph(3)
|
| 162 |
+
d = nx.laplacian_centrality(P3)
|
| 163 |
+
exact = {0: 0.6, 1: 1.0, 2: 0.6}
|
| 164 |
+
for n, dc in d.items():
|
| 165 |
+
assert exact[n] == pytest.approx(dc, abs=1e-7)
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
def test_laplacian_centrality_K5():
|
| 169 |
+
K5 = nx.complete_graph(5)
|
| 170 |
+
d = nx.laplacian_centrality(K5)
|
| 171 |
+
exact = {0: 0.52, 1: 0.52, 2: 0.52, 3: 0.52, 4: 0.52}
|
| 172 |
+
for n, dc in d.items():
|
| 173 |
+
assert exact[n] == pytest.approx(dc, abs=1e-7)
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
def test_laplacian_centrality_FF():
|
| 177 |
+
FF = nx.florentine_families_graph()
|
| 178 |
+
d = nx.laplacian_centrality(FF)
|
| 179 |
+
exact = {
|
| 180 |
+
"Acciaiuoli": 0.0804598,
|
| 181 |
+
"Medici": 0.4022989,
|
| 182 |
+
"Castellani": 0.1724138,
|
| 183 |
+
"Peruzzi": 0.183908,
|
| 184 |
+
"Strozzi": 0.2528736,
|
| 185 |
+
"Barbadori": 0.137931,
|
| 186 |
+
"Ridolfi": 0.2183908,
|
| 187 |
+
"Tornabuoni": 0.2183908,
|
| 188 |
+
"Albizzi": 0.1954023,
|
| 189 |
+
"Salviati": 0.1149425,
|
| 190 |
+
"Pazzi": 0.0344828,
|
| 191 |
+
"Bischeri": 0.1954023,
|
| 192 |
+
"Guadagni": 0.2298851,
|
| 193 |
+
"Ginori": 0.045977,
|
| 194 |
+
"Lamberteschi": 0.0574713,
|
| 195 |
+
}
|
| 196 |
+
for n, dc in d.items():
|
| 197 |
+
assert exact[n] == pytest.approx(dc, abs=1e-7)
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
def test_laplacian_centrality_DG():
|
| 201 |
+
DG = nx.DiGraph([(0, 5), (1, 5), (2, 5), (3, 5), (4, 5), (5, 6), (5, 7), (5, 8)])
|
| 202 |
+
d = nx.laplacian_centrality(DG)
|
| 203 |
+
exact = {
|
| 204 |
+
0: 0.2123352,
|
| 205 |
+
5: 0.515391,
|
| 206 |
+
1: 0.2123352,
|
| 207 |
+
2: 0.2123352,
|
| 208 |
+
3: 0.2123352,
|
| 209 |
+
4: 0.2123352,
|
| 210 |
+
6: 0.2952031,
|
| 211 |
+
7: 0.2952031,
|
| 212 |
+
8: 0.2952031,
|
| 213 |
+
}
|
| 214 |
+
for n, dc in d.items():
|
| 215 |
+
assert exact[n] == pytest.approx(dc, abs=1e-7)
|
| 216 |
+
|
| 217 |
+
# Check not normalized
|
| 218 |
+
full_energy = 9.50704
|
| 219 |
+
dnn = nx.laplacian_centrality(DG, normalized=False)
|
| 220 |
+
for n, dc in dnn.items():
|
| 221 |
+
assert exact[n] * full_energy == pytest.approx(dc, abs=1e-4)
|
wemm/lib/python3.10/site-packages/networkx/algorithms/clique.py
ADDED
|
@@ -0,0 +1,755 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for finding and manipulating cliques.
|
| 2 |
+
|
| 3 |
+
Finding the largest clique in a graph is NP-complete problem, so most of
|
| 4 |
+
these algorithms have an exponential running time; for more information,
|
| 5 |
+
see the Wikipedia article on the clique problem [1]_.
|
| 6 |
+
|
| 7 |
+
.. [1] clique problem:: https://en.wikipedia.org/wiki/Clique_problem
|
| 8 |
+
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from collections import defaultdict, deque
|
| 12 |
+
from itertools import chain, combinations, islice
|
| 13 |
+
|
| 14 |
+
import networkx as nx
|
| 15 |
+
from networkx.utils import not_implemented_for
|
| 16 |
+
|
| 17 |
+
__all__ = [
|
| 18 |
+
"find_cliques",
|
| 19 |
+
"find_cliques_recursive",
|
| 20 |
+
"make_max_clique_graph",
|
| 21 |
+
"make_clique_bipartite",
|
| 22 |
+
"node_clique_number",
|
| 23 |
+
"number_of_cliques",
|
| 24 |
+
"enumerate_all_cliques",
|
| 25 |
+
"max_weight_clique",
|
| 26 |
+
]
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
@not_implemented_for("directed")
|
| 30 |
+
@nx._dispatchable
|
| 31 |
+
def enumerate_all_cliques(G):
|
| 32 |
+
"""Returns all cliques in an undirected graph.
|
| 33 |
+
|
| 34 |
+
This function returns an iterator over cliques, each of which is a
|
| 35 |
+
list of nodes. The iteration is ordered by cardinality of the
|
| 36 |
+
cliques: first all cliques of size one, then all cliques of size
|
| 37 |
+
two, etc.
|
| 38 |
+
|
| 39 |
+
Parameters
|
| 40 |
+
----------
|
| 41 |
+
G : NetworkX graph
|
| 42 |
+
An undirected graph.
|
| 43 |
+
|
| 44 |
+
Returns
|
| 45 |
+
-------
|
| 46 |
+
iterator
|
| 47 |
+
An iterator over cliques, each of which is a list of nodes in
|
| 48 |
+
`G`. The cliques are ordered according to size.
|
| 49 |
+
|
| 50 |
+
Notes
|
| 51 |
+
-----
|
| 52 |
+
To obtain a list of all cliques, use
|
| 53 |
+
`list(enumerate_all_cliques(G))`. However, be aware that in the
|
| 54 |
+
worst-case, the length of this list can be exponential in the number
|
| 55 |
+
of nodes in the graph (for example, when the graph is the complete
|
| 56 |
+
graph). This function avoids storing all cliques in memory by only
|
| 57 |
+
keeping current candidate node lists in memory during its search.
|
| 58 |
+
|
| 59 |
+
The implementation is adapted from the algorithm by Zhang, et
|
| 60 |
+
al. (2005) [1]_ to output all cliques discovered.
|
| 61 |
+
|
| 62 |
+
This algorithm ignores self-loops and parallel edges, since cliques
|
| 63 |
+
are not conventionally defined with such edges.
|
| 64 |
+
|
| 65 |
+
References
|
| 66 |
+
----------
|
| 67 |
+
.. [1] Yun Zhang, Abu-Khzam, F.N., Baldwin, N.E., Chesler, E.J.,
|
| 68 |
+
Langston, M.A., Samatova, N.F.,
|
| 69 |
+
"Genome-Scale Computational Approaches to Memory-Intensive
|
| 70 |
+
Applications in Systems Biology".
|
| 71 |
+
*Supercomputing*, 2005. Proceedings of the ACM/IEEE SC 2005
|
| 72 |
+
Conference, pp. 12, 12--18 Nov. 2005.
|
| 73 |
+
<https://doi.org/10.1109/SC.2005.29>.
|
| 74 |
+
|
| 75 |
+
"""
|
| 76 |
+
index = {}
|
| 77 |
+
nbrs = {}
|
| 78 |
+
for u in G:
|
| 79 |
+
index[u] = len(index)
|
| 80 |
+
# Neighbors of u that appear after u in the iteration order of G.
|
| 81 |
+
nbrs[u] = {v for v in G[u] if v not in index}
|
| 82 |
+
|
| 83 |
+
queue = deque(([u], sorted(nbrs[u], key=index.__getitem__)) for u in G)
|
| 84 |
+
# Loop invariants:
|
| 85 |
+
# 1. len(base) is nondecreasing.
|
| 86 |
+
# 2. (base + cnbrs) is sorted with respect to the iteration order of G.
|
| 87 |
+
# 3. cnbrs is a set of common neighbors of nodes in base.
|
| 88 |
+
while queue:
|
| 89 |
+
base, cnbrs = map(list, queue.popleft())
|
| 90 |
+
yield base
|
| 91 |
+
for i, u in enumerate(cnbrs):
|
| 92 |
+
# Use generators to reduce memory consumption.
|
| 93 |
+
queue.append(
|
| 94 |
+
(
|
| 95 |
+
chain(base, [u]),
|
| 96 |
+
filter(nbrs[u].__contains__, islice(cnbrs, i + 1, None)),
|
| 97 |
+
)
|
| 98 |
+
)
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
@not_implemented_for("directed")
|
| 102 |
+
@nx._dispatchable
|
| 103 |
+
def find_cliques(G, nodes=None):
|
| 104 |
+
"""Returns all maximal cliques in an undirected graph.
|
| 105 |
+
|
| 106 |
+
For each node *n*, a *maximal clique for n* is a largest complete
|
| 107 |
+
subgraph containing *n*. The largest maximal clique is sometimes
|
| 108 |
+
called the *maximum clique*.
|
| 109 |
+
|
| 110 |
+
This function returns an iterator over cliques, each of which is a
|
| 111 |
+
list of nodes. It is an iterative implementation, so should not
|
| 112 |
+
suffer from recursion depth issues.
|
| 113 |
+
|
| 114 |
+
This function accepts a list of `nodes` and only the maximal cliques
|
| 115 |
+
containing all of these `nodes` are returned. It can considerably speed up
|
| 116 |
+
the running time if some specific cliques are desired.
|
| 117 |
+
|
| 118 |
+
Parameters
|
| 119 |
+
----------
|
| 120 |
+
G : NetworkX graph
|
| 121 |
+
An undirected graph.
|
| 122 |
+
|
| 123 |
+
nodes : list, optional (default=None)
|
| 124 |
+
If provided, only yield *maximal cliques* containing all nodes in `nodes`.
|
| 125 |
+
If `nodes` isn't a clique itself, a ValueError is raised.
|
| 126 |
+
|
| 127 |
+
Returns
|
| 128 |
+
-------
|
| 129 |
+
iterator
|
| 130 |
+
An iterator over maximal cliques, each of which is a list of
|
| 131 |
+
nodes in `G`. If `nodes` is provided, only the maximal cliques
|
| 132 |
+
containing all the nodes in `nodes` are returned. The order of
|
| 133 |
+
cliques is arbitrary.
|
| 134 |
+
|
| 135 |
+
Raises
|
| 136 |
+
------
|
| 137 |
+
ValueError
|
| 138 |
+
If `nodes` is not a clique.
|
| 139 |
+
|
| 140 |
+
Examples
|
| 141 |
+
--------
|
| 142 |
+
>>> from pprint import pprint # For nice dict formatting
|
| 143 |
+
>>> G = nx.karate_club_graph()
|
| 144 |
+
>>> sum(1 for c in nx.find_cliques(G)) # The number of maximal cliques in G
|
| 145 |
+
36
|
| 146 |
+
>>> max(nx.find_cliques(G), key=len) # The largest maximal clique in G
|
| 147 |
+
[0, 1, 2, 3, 13]
|
| 148 |
+
|
| 149 |
+
The size of the largest maximal clique is known as the *clique number* of
|
| 150 |
+
the graph, which can be found directly with:
|
| 151 |
+
|
| 152 |
+
>>> max(len(c) for c in nx.find_cliques(G))
|
| 153 |
+
5
|
| 154 |
+
|
| 155 |
+
One can also compute the number of maximal cliques in `G` that contain a given
|
| 156 |
+
node. The following produces a dictionary keyed by node whose
|
| 157 |
+
values are the number of maximal cliques in `G` that contain the node:
|
| 158 |
+
|
| 159 |
+
>>> pprint({n: sum(1 for c in nx.find_cliques(G) if n in c) for n in G})
|
| 160 |
+
{0: 13,
|
| 161 |
+
1: 6,
|
| 162 |
+
2: 7,
|
| 163 |
+
3: 3,
|
| 164 |
+
4: 2,
|
| 165 |
+
5: 3,
|
| 166 |
+
6: 3,
|
| 167 |
+
7: 1,
|
| 168 |
+
8: 3,
|
| 169 |
+
9: 2,
|
| 170 |
+
10: 2,
|
| 171 |
+
11: 1,
|
| 172 |
+
12: 1,
|
| 173 |
+
13: 2,
|
| 174 |
+
14: 1,
|
| 175 |
+
15: 1,
|
| 176 |
+
16: 1,
|
| 177 |
+
17: 1,
|
| 178 |
+
18: 1,
|
| 179 |
+
19: 2,
|
| 180 |
+
20: 1,
|
| 181 |
+
21: 1,
|
| 182 |
+
22: 1,
|
| 183 |
+
23: 3,
|
| 184 |
+
24: 2,
|
| 185 |
+
25: 2,
|
| 186 |
+
26: 1,
|
| 187 |
+
27: 3,
|
| 188 |
+
28: 2,
|
| 189 |
+
29: 2,
|
| 190 |
+
30: 2,
|
| 191 |
+
31: 4,
|
| 192 |
+
32: 9,
|
| 193 |
+
33: 14}
|
| 194 |
+
|
| 195 |
+
Or, similarly, the maximal cliques in `G` that contain a given node.
|
| 196 |
+
For example, the 4 maximal cliques that contain node 31:
|
| 197 |
+
|
| 198 |
+
>>> [c for c in nx.find_cliques(G) if 31 in c]
|
| 199 |
+
[[0, 31], [33, 32, 31], [33, 28, 31], [24, 25, 31]]
|
| 200 |
+
|
| 201 |
+
See Also
|
| 202 |
+
--------
|
| 203 |
+
find_cliques_recursive
|
| 204 |
+
A recursive version of the same algorithm.
|
| 205 |
+
|
| 206 |
+
Notes
|
| 207 |
+
-----
|
| 208 |
+
To obtain a list of all maximal cliques, use
|
| 209 |
+
`list(find_cliques(G))`. However, be aware that in the worst-case,
|
| 210 |
+
the length of this list can be exponential in the number of nodes in
|
| 211 |
+
the graph. This function avoids storing all cliques in memory by
|
| 212 |
+
only keeping current candidate node lists in memory during its search.
|
| 213 |
+
|
| 214 |
+
This implementation is based on the algorithm published by Bron and
|
| 215 |
+
Kerbosch (1973) [1]_, as adapted by Tomita, Tanaka and Takahashi
|
| 216 |
+
(2006) [2]_ and discussed in Cazals and Karande (2008) [3]_. It
|
| 217 |
+
essentially unrolls the recursion used in the references to avoid
|
| 218 |
+
issues of recursion stack depth (for a recursive implementation, see
|
| 219 |
+
:func:`find_cliques_recursive`).
|
| 220 |
+
|
| 221 |
+
This algorithm ignores self-loops and parallel edges, since cliques
|
| 222 |
+
are not conventionally defined with such edges.
|
| 223 |
+
|
| 224 |
+
References
|
| 225 |
+
----------
|
| 226 |
+
.. [1] Bron, C. and Kerbosch, J.
|
| 227 |
+
"Algorithm 457: finding all cliques of an undirected graph".
|
| 228 |
+
*Communications of the ACM* 16, 9 (Sep. 1973), 575--577.
|
| 229 |
+
<http://portal.acm.org/citation.cfm?doid=362342.362367>
|
| 230 |
+
|
| 231 |
+
.. [2] Etsuji Tomita, Akira Tanaka, Haruhisa Takahashi,
|
| 232 |
+
"The worst-case time complexity for generating all maximal
|
| 233 |
+
cliques and computational experiments",
|
| 234 |
+
*Theoretical Computer Science*, Volume 363, Issue 1,
|
| 235 |
+
Computing and Combinatorics,
|
| 236 |
+
10th Annual International Conference on
|
| 237 |
+
Computing and Combinatorics (COCOON 2004), 25 October 2006, Pages 28--42
|
| 238 |
+
<https://doi.org/10.1016/j.tcs.2006.06.015>
|
| 239 |
+
|
| 240 |
+
.. [3] F. Cazals, C. Karande,
|
| 241 |
+
"A note on the problem of reporting maximal cliques",
|
| 242 |
+
*Theoretical Computer Science*,
|
| 243 |
+
Volume 407, Issues 1--3, 6 November 2008, Pages 564--568,
|
| 244 |
+
<https://doi.org/10.1016/j.tcs.2008.05.010>
|
| 245 |
+
|
| 246 |
+
"""
|
| 247 |
+
if len(G) == 0:
|
| 248 |
+
return
|
| 249 |
+
|
| 250 |
+
adj = {u: {v for v in G[u] if v != u} for u in G}
|
| 251 |
+
|
| 252 |
+
# Initialize Q with the given nodes and subg, cand with their nbrs
|
| 253 |
+
Q = nodes[:] if nodes is not None else []
|
| 254 |
+
cand = set(G)
|
| 255 |
+
for node in Q:
|
| 256 |
+
if node not in cand:
|
| 257 |
+
raise ValueError(f"The given `nodes` {nodes} do not form a clique")
|
| 258 |
+
cand &= adj[node]
|
| 259 |
+
|
| 260 |
+
if not cand:
|
| 261 |
+
yield Q[:]
|
| 262 |
+
return
|
| 263 |
+
|
| 264 |
+
subg = cand.copy()
|
| 265 |
+
stack = []
|
| 266 |
+
Q.append(None)
|
| 267 |
+
|
| 268 |
+
u = max(subg, key=lambda u: len(cand & adj[u]))
|
| 269 |
+
ext_u = cand - adj[u]
|
| 270 |
+
|
| 271 |
+
try:
|
| 272 |
+
while True:
|
| 273 |
+
if ext_u:
|
| 274 |
+
q = ext_u.pop()
|
| 275 |
+
cand.remove(q)
|
| 276 |
+
Q[-1] = q
|
| 277 |
+
adj_q = adj[q]
|
| 278 |
+
subg_q = subg & adj_q
|
| 279 |
+
if not subg_q:
|
| 280 |
+
yield Q[:]
|
| 281 |
+
else:
|
| 282 |
+
cand_q = cand & adj_q
|
| 283 |
+
if cand_q:
|
| 284 |
+
stack.append((subg, cand, ext_u))
|
| 285 |
+
Q.append(None)
|
| 286 |
+
subg = subg_q
|
| 287 |
+
cand = cand_q
|
| 288 |
+
u = max(subg, key=lambda u: len(cand & adj[u]))
|
| 289 |
+
ext_u = cand - adj[u]
|
| 290 |
+
else:
|
| 291 |
+
Q.pop()
|
| 292 |
+
subg, cand, ext_u = stack.pop()
|
| 293 |
+
except IndexError:
|
| 294 |
+
pass
|
| 295 |
+
|
| 296 |
+
|
| 297 |
+
# TODO Should this also be not implemented for directed graphs?
@nx._dispatchable
def find_cliques_recursive(G, nodes=None):
    """Returns all maximal cliques in a graph.

    For each node *v*, a *maximal clique for v* is a largest complete
    subgraph containing *v*. This function returns an iterator over
    cliques, each of which is a list of nodes. It is a recursive
    implementation, so may suffer from recursion depth issues, but is
    included for pedagogical reasons. For a non-recursive
    implementation, see :func:`find_cliques`.

    If `nodes` is provided, only the maximal cliques containing all of
    those nodes are yielded, which can considerably speed up the search
    when specific cliques are desired.

    Parameters
    ----------
    G : NetworkX graph

    nodes : list, optional (default=None)
        If provided, only yield *maximal cliques* containing all nodes in `nodes`.
        If `nodes` isn't a clique itself, a ValueError is raised.

    Returns
    -------
    iterator
        An iterator over maximal cliques, each of which is a list of
        nodes in `G`. The order of cliques is arbitrary.

    Raises
    ------
    ValueError
        If `nodes` is not a clique.

    See Also
    --------
    find_cliques
        An iterative version of the same algorithm. See docstring for examples.

    Notes
    -----
    To obtain a list of all maximal cliques, use
    ``list(find_cliques_recursive(G))``; be aware that in the worst case
    the number of maximal cliques is exponential in the number of nodes.

    This implementation is based on the algorithm published by Bron and
    Kerbosch (1973) [1]_, as adapted by Tomita, Tanaka and Takahashi
    (2006) [2]_ and discussed in Cazals and Karande (2008) [3]_.

    Self-loops and parallel edges are ignored, since cliques are not
    conventionally defined with such edges.

    References
    ----------
    .. [1] Bron, C. and Kerbosch, J.
       "Algorithm 457: finding all cliques of an undirected graph".
       *Communications of the ACM* 16, 9 (Sep. 1973), 575--577.
       <http://portal.acm.org/citation.cfm?doid=362342.362367>

    .. [2] Etsuji Tomita, Akira Tanaka, Haruhisa Takahashi,
       "The worst-case time complexity for generating all maximal
       cliques and computational experiments",
       *Theoretical Computer Science*, Volume 363, Issue 1,
       Computing and Combinatorics,
       10th Annual International Conference on
       Computing and Combinatorics (COCOON 2004), 25 October 2006, Pages 28--42
       <https://doi.org/10.1016/j.tcs.2006.06.015>

    .. [3] F. Cazals, C. Karande,
       "A note on the problem of reporting maximal cliques",
       *Theoretical Computer Science*,
       Volume 407, Issues 1--3, 6 November 2008, Pages 564--568,
       <https://doi.org/10.1016/j.tcs.2008.05.010>

    """
    if len(G) == 0:
        return iter([])

    # Adjacency with self loops stripped: cliques ignore self loops.
    nbrs = {u: {v for v in G[u] if v != u} for u in G}

    # Seed the growing clique with the requested nodes (if any) and
    # restrict the initial candidate set to their common neighborhood.
    clique = nodes[:] if nodes is not None else []
    base_cand = set(G)
    for node in clique:
        if node not in base_cand:
            raise ValueError(f"The given `nodes` {nodes} do not form a clique")
        base_cand &= nbrs[node]

    if not base_cand:
        # The seed cannot be extended: it is itself the only answer.
        return iter([clique])

    base_subg = base_cand.copy()

    def grow(subg, cand):
        # Tomita-style pivot: pick the node whose neighborhood covers
        # the most candidates, so the branching set is as small as possible.
        pivot = max(subg, key=lambda w: len(cand & nbrs[w]))
        for v in cand - nbrs[pivot]:
            cand.remove(v)
            clique.append(v)
            v_nbrs = nbrs[v]
            new_subg = subg & v_nbrs
            if new_subg:
                new_cand = cand & v_nbrs
                if new_cand:
                    yield from grow(new_subg, new_cand)
            else:
                # No node extends the current clique: it is maximal.
                yield clique[:]
            clique.pop()

    return grow(base_subg, base_cand)
|
| 414 |
+
|
| 415 |
+
|
| 416 |
+
@nx._dispatchable(returns_graph=True)
def make_max_clique_graph(G, create_using=None):
    """Returns the maximal clique graph of the given graph.

    Each node of the result stands for one maximal clique of `G`
    (numbered in the order produced by :func:`find_cliques`), and two
    clique-nodes are joined by an edge exactly when the corresponding
    cliques share at least one node of `G`.

    Parameters
    ----------
    G : NetworkX graph

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    NetworkX graph
        A graph whose nodes are the cliques of `G` and whose edges
        join two cliques if they are not disjoint.

    Notes
    -----
    This function behaves like the following code::

        import networkx as nx

        G = nx.make_clique_bipartite(G)
        cliques = [v for v in G.nodes() if G.nodes[v]["bipartite"] == 0]
        G = nx.bipartite.projected_graph(G, cliques)
        G = nx.relabel_nodes(G, {-v: v - 1 for v in G})

    It should be faster, though, since it skips all the intermediate
    steps.

    """
    # Output container: same class as G by default, otherwise whatever
    # `create_using` specifies (cleared before use).
    B = G.__class__() if create_using is None else nx.empty_graph(0, create_using)
    # Materialize each maximal clique as a set, paired with its index.
    indexed_cliques = list(enumerate(set(c) for c in find_cliques(G)))
    B.add_nodes_from(idx for idx, _ in indexed_cliques)
    # Two clique-nodes are adjacent iff the cliques overlap.
    B.add_edges_from(
        (i, j)
        for (i, c1), (j, c2) in combinations(indexed_cliques, 2)
        if not c1.isdisjoint(c2)
    )
    return B
|
| 462 |
+
|
| 463 |
+
|
| 464 |
+
@nx._dispatchable(returns_graph=True)
def make_clique_bipartite(G, fpos=None, create_using=None, name=None):
    """Returns the bipartite clique graph corresponding to `G`.

    In the returned bipartite graph, the "bottom" nodes are the nodes of
    `G` and the "top" nodes represent the maximal cliques of `G`.
    There is an edge from node *v* to clique *C* in the returned graph
    if and only if *v* is an element of *C*.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    fpos : bool
        If True or not None, the returned graph will have an
        additional attribute, `pos`, a dictionary mapping node to
        position in the Euclidean plane.

        .. note:: The current implementation never reads `fpos`, so no
           `pos` attribute is actually attached.

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    name : unused
        Accepted for backward compatibility; the value is overwritten
        inside the loop below and has no effect on the result.

    Returns
    -------
    NetworkX graph
        A bipartite graph whose "bottom" set is the nodes of the graph
        `G`, whose "top" set is the cliques of `G`, and whose edges
        join nodes of `G` to the cliques that contain them.

        The nodes of the graph `G` have the node attribute
        'bipartite' set to 1 and the nodes representing cliques
        have the node attribute 'bipartite' set to 0, as is the
        convention for bipartite graphs in NetworkX.

    """
    B = nx.empty_graph(0, create_using)
    # Guarantee an empty starting graph even if `create_using` was a
    # pre-populated graph instance.
    B.clear()
    # The "bottom" nodes in the bipartite graph are the nodes of the
    # original graph, G.
    B.add_nodes_from(G, bipartite=1)
    for i, cl in enumerate(find_cliques(G)):
        # The "top" nodes in the bipartite graph are the cliques. These
        # nodes get negative numbers as labels.
        name = -i - 1
        B.add_node(name, bipartite=0)
        B.add_edges_from((v, name) for v in cl)
    return B
|
| 511 |
+
|
| 512 |
+
|
| 513 |
+
@nx._dispatchable
def node_clique_number(G, nodes=None, cliques=None, separate_nodes=False):
    """Returns the size of the largest maximal clique containing each given node.

    Returns a single value or a dict depending on the `nodes` input.
    An optional list of cliques can be input if already computed.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    nodes : node or iterable of nodes, optional (default=None)
        If a single node, return the size of the largest maximal clique
        containing it. If an iterable of nodes, return a dict keyed by
        node. If None, compute the value for every node of `G`.

    cliques : list, optional (default=None)
        A list of cliques, each of which is itself a list of nodes.
        If not specified, the list of all cliques will be computed
        using :func:`find_cliques`.

    separate_nodes : unused
        Accepted for backward compatibility; the current implementation
        never reads this argument.

    Returns
    -------
    int or dict
        If `nodes` is a single node, returns the size of the
        largest maximal clique in `G` containing that node.
        Otherwise return a dict keyed by node to the size
        of the largest maximal clique containing that node.

    See Also
    --------
    find_cliques
        find_cliques yields the maximal cliques of G.
        It accepts a `nodes` argument which restricts consideration to
        maximal cliques containing all the given `nodes`.
        The search for the cliques is optimized for `nodes`.
    """
    if cliques is None:
        if nodes is not None:
            # Use ego_graph to decrease size of graph
            # check for single node
            if nodes in G:
                return max(len(c) for c in find_cliques(nx.ego_graph(G, nodes)))
            # handle multiple nodes
            return {
                n: max(len(c) for c in find_cliques(nx.ego_graph(G, n))) for n in nodes
            }

        # nodes is None--find all cliques
        cliques = list(find_cliques(G))

    # single node requested
    if nodes in G:
        return max(len(c) for c in cliques if nodes in c)

    # multiple nodes requested
    # preprocess all nodes (faster than one at a time for even 2 nodes)
    size_for_n = defaultdict(int)
    for c in cliques:
        size_of_c = len(c)
        for n in c:
            # Keep the largest clique size seen so far for each member.
            if size_for_n[n] < size_of_c:
                size_for_n[n] = size_of_c
    if nodes is None:
        return size_for_n
    return {n: size_for_n[n] for n in nodes}
|
| 575 |
+
|
| 576 |
+
|
| 577 |
+
def number_of_cliques(G, nodes=None, cliques=None):
    """Returns the number of maximal cliques for each node.

    Returns a single count or a dict depending on the `nodes` input.
    An optional list of cliques can be input if already computed.
    """
    if cliques is None:
        cliques = list(find_cliques(G))

    if nodes is None:
        nodes = list(G.nodes())  # none, get entire graph

    def count_for(v):
        # Number of maximal cliques containing node v.
        return sum(1 for c in cliques if v in c)

    if isinstance(nodes, list):
        return {v: count_for(v) for v in nodes}
    # assume a single node was given
    return count_for(nodes)
|
| 598 |
+
|
| 599 |
+
|
| 600 |
+
class MaxWeightClique:
    """A class for the maximum weight clique algorithm.

    This class is a helper for the `max_weight_clique` function. The class
    should not normally be used directly.

    Parameters
    ----------
    G : NetworkX graph
        The undirected graph for which a maximum weight clique is sought
    weight : string or None, optional (default='weight')
        The node attribute that holds the integer value used as a weight.
        If None, then each node has weight 1.

    Attributes
    ----------
    G : NetworkX graph
        The undirected graph for which a maximum weight clique is sought
    node_weights: dict
        The weight of each node
    incumbent_nodes : list
        The nodes of the incumbent clique (the best clique found so far)
    incumbent_weight: int
        The weight of the incumbent clique
    """

    def __init__(self, G, weight):
        self.G = G
        self.incumbent_nodes = []
        self.incumbent_weight = 0

        if weight is None:
            # Unweighted case: every node counts 1, so a maximum weight
            # clique is simply a maximum clique.
            self.node_weights = {v: 1 for v in G.nodes()}
        else:
            # Validate up front that every node carries an integer weight;
            # the bounding arithmetic below relies on exact integers.
            for v in G.nodes():
                if weight not in G.nodes[v]:
                    errmsg = f"Node {v!r} does not have the requested weight field."
                    raise KeyError(errmsg)
                if not isinstance(G.nodes[v][weight], int):
                    errmsg = f"The {weight!r} field of node {v!r} is not an integer."
                    raise ValueError(errmsg)
            self.node_weights = {v: G.nodes[v][weight] for v in G.nodes()}

    def update_incumbent_if_improved(self, C, C_weight):
        """Update the incumbent if the node set C has greater weight.

        C is assumed to be a clique.
        """
        if C_weight > self.incumbent_weight:
            # Copy C so later mutations of the caller's list don't leak in.
            self.incumbent_nodes = C[:]
            self.incumbent_weight = C_weight

    def greedily_find_independent_set(self, P):
        """Greedily find an independent set of nodes from a set of
        nodes P."""
        independent_set = []
        P = P[:]
        while P:
            # Take the first remaining node, then drop it and all of its
            # neighbors so the set stays independent.
            v = P[0]
            independent_set.append(v)
            P = [w for w in P if v != w and not self.G.has_edge(v, w)]
        return independent_set

    def find_branching_nodes(self, P, target):
        """Find a set of nodes to branch on."""
        # Cover P with independent sets; each set contributes at most the
        # minimum residual weight of its members to any clique, giving an
        # upper bound.  Nodes whose weight is fully "explained" by the
        # cover (residual 0) need not be branched on.
        residual_wt = {v: self.node_weights[v] for v in P}
        total_wt = 0
        P = P[:]
        while P:
            independent_set = self.greedily_find_independent_set(P)
            min_wt_in_class = min(residual_wt[v] for v in independent_set)
            total_wt += min_wt_in_class
            if total_wt > target:
                # Bound exceeded: the remaining nodes of P must be branched on.
                break
            for v in independent_set:
                residual_wt[v] -= min_wt_in_class
            P = [v for v in P if residual_wt[v] != 0]
        return P

    def expand(self, C, C_weight, P):
        """Look for the best clique that contains all the nodes in C and zero or
        more of the nodes in P, backtracking if it can be shown that no such
        clique has greater weight than the incumbent.
        """
        self.update_incumbent_if_improved(C, C_weight)
        branching_nodes = self.find_branching_nodes(P, self.incumbent_weight - C_weight)
        while branching_nodes:
            v = branching_nodes.pop()
            P.remove(v)
            new_C = C + [v]
            new_C_weight = C_weight + self.node_weights[v]
            # Only common neighbors of v can extend the clique further.
            new_P = [w for w in P if self.G.has_edge(v, w)]
            self.expand(new_C, new_C_weight, new_P)

    def find_max_weight_clique(self):
        """Find a maximum weight clique."""
        # Sort nodes in reverse order of degree for speed
        nodes = sorted(self.G.nodes(), key=lambda v: self.G.degree(v), reverse=True)
        # Non-positive-weight nodes can never improve a clique's weight.
        nodes = [v for v in nodes if self.node_weights[v] > 0]
        self.expand([], 0, nodes)
|
| 700 |
+
|
| 701 |
+
|
| 702 |
+
@not_implemented_for("directed")
@nx._dispatchable(node_attrs="weight")
def max_weight_clique(G, weight="weight"):
    """Find a maximum weight clique in G.

    A *clique* in a graph is a set of nodes such that every two distinct nodes
    are adjacent. The *weight* of a clique is the sum of the weights of its
    nodes, and a *maximum weight clique* is a clique whose weight is at least
    as large as that of every other clique in the graph.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph
    weight : string or None, optional (default='weight')
        The node attribute that holds the integer value used as a weight.
        If None, then each node has weight 1.

    Returns
    -------
    clique : list
        the nodes of a maximum weight clique
    weight : int
        the weight of a maximum weight clique

    Notes
    -----
    The implementation is recursive, and therefore it may run into recursion
    depth issues if G contains a clique whose number of nodes is close to the
    recursion depth limit.

    At each search node, the algorithm greedily constructs a weighted
    independent set cover of part of the graph in order to find a small set of
    nodes on which to branch. The algorithm is very similar to the algorithm
    of Tavares et al. [1]_, other than the fact that the NetworkX version does
    not use bitsets. This style of algorithm for maximum weight clique (and
    maximum weight independent set, which is the same problem but on the
    complement graph) has a decades-long history. See Algorithm B of Warren
    and Hicks [2]_ and the references in that paper.

    References
    ----------
    .. [1] Tavares, W.A., Neto, M.B.C., Rodrigues, C.D., Michelon, P.: Um
           algoritmo de branch and bound para o problema da clique máxima
           ponderada. Proceedings of XLVII SBPO 1 (2015).

    .. [2] Warren, Jeffrey S, Hicks, Illya V.: Combinatorial Branch-and-Bound
           for the Maximum Weight Independent Set Problem. Technical Report,
           Texas A&M University (2016).
    """
    # Delegate the branch-and-bound search to the helper class.
    searcher = MaxWeightClique(G, weight)
    searcher.find_max_weight_clique()
    return searcher.incumbent_nodes, searcher.incumbent_weight
|
wemm/lib/python3.10/site-packages/networkx/algorithms/cycles.py
ADDED
|
@@ -0,0 +1,1230 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
========================
|
| 3 |
+
Cycle finding algorithms
|
| 4 |
+
========================
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
from collections import Counter, defaultdict
|
| 8 |
+
from itertools import combinations, product
|
| 9 |
+
from math import inf
|
| 10 |
+
|
| 11 |
+
import networkx as nx
|
| 12 |
+
from networkx.utils import not_implemented_for, pairwise
|
| 13 |
+
|
| 14 |
+
__all__ = [
|
| 15 |
+
"cycle_basis",
|
| 16 |
+
"simple_cycles",
|
| 17 |
+
"recursive_simple_cycles",
|
| 18 |
+
"find_cycle",
|
| 19 |
+
"minimum_cycle_basis",
|
| 20 |
+
"chordless_cycles",
|
| 21 |
+
"girth",
|
| 22 |
+
]
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def cycle_basis(G, root=None):
    """Returns a list of cycles which form a basis for cycles of G.

    A basis for cycles of a network is a minimal collection of
    cycles such that any cycle in the network can be written
    as a sum of cycles in the basis. Here summation of cycles
    is defined as "exclusive or" of the edges. Cycle bases are
    useful, e.g. when deriving equations for electric circuits
    using Kirchhoff's Laws.

    Parameters
    ----------
    G : NetworkX Graph
    root : node, optional
        Specify starting node for basis.

    Returns
    -------
    A list of cycle lists. Each cycle list is a list of nodes
    which forms a cycle (loop) in G.

    Examples
    --------
    >>> G = nx.Graph()
    >>> nx.add_cycle(G, [0, 1, 2, 3])
    >>> nx.add_cycle(G, [0, 3, 4, 5])
    >>> nx.cycle_basis(G, 0)
    [[3, 4, 5, 0], [1, 2, 3, 0]]

    Notes
    -----
    This is adapted from algorithm CACM 491 [1]_.

    References
    ----------
    .. [1] Paton, K. An algorithm for finding a fundamental set of
       cycles of a graph. Comm. ACM 12, 9 (Sept 1969), 514-518.

    See Also
    --------
    simple_cycles
    minimum_cycle_basis
    """
    gnodes = dict.fromkeys(G)  # set-like object that maintains node order
    cycles = []
    while gnodes:  # loop over connected components
        if root is None:
            root = gnodes.popitem()[0]
        # Grow a DFS spanning tree from `root`; every non-tree edge
        # encountered closes exactly one fundamental cycle.
        stack = [root]
        pred = {root: root}  # spanning-tree parent of each visited node
        used = {root: set()}  # tree neighbors already processed per node
        while stack:  # walk the spanning tree finding cycles
            z = stack.pop()  # use last-in so cycles easier to find
            zused = used[z]
            for nbr in G[z]:
                if nbr not in used:  # new node
                    pred[nbr] = z
                    stack.append(nbr)
                    used[nbr] = {z}
                elif nbr == z:  # self loops
                    cycles.append([z])
                elif nbr not in zused:  # found a cycle
                    # Follow tree parents from z up until we reach a node
                    # already linked to nbr, closing the cycle.
                    pn = used[nbr]
                    cycle = [nbr, z]
                    p = pred[z]
                    while p not in pn:
                        cycle.append(p)
                        p = pred[p]
                    cycle.append(p)
                    cycles.append(cycle)
                    used[nbr].add(z)
        # Remove this component's nodes so the outer loop moves on to the
        # next connected component.
        for node in pred:
            gnodes.pop(node, None)
        root = None
    return cycles
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
@nx._dispatchable
def simple_cycles(G, length_bound=None):
    """Find simple cycles (elementary circuits) of a graph.

    A "simple cycle", or "elementary circuit", is a closed path where
    no node appears twice.  In a directed graph, two simple cycles are distinct
    if they are not cyclic permutations of each other.  In an undirected graph,
    two simple cycles are distinct if they are not cyclic permutations of each
    other nor of the other's reversal.

    Optionally, the cycles are bounded in length.  In the unbounded case, we use
    a nonrecursive, iterator/generator version of Johnson's algorithm [1]_.  In
    the bounded case, we use a version of the algorithm of Gupta and
    Suzumura [2]_.  There may be better algorithms for some cases [3]_ [4]_ [5]_.

    The algorithms of Johnson, and Gupta and Suzumura, are enhanced by some
    well-known preprocessing techniques.  When `G` is directed, we restrict our
    attention to strongly connected components of `G`, generate all simple cycles
    containing a certain node, remove that node, and further decompose the
    remainder into strongly connected components.  When `G` is undirected, we
    restrict our attention to biconnected components, generate all simple cycles
    containing a particular edge, remove that edge, and further decompose the
    remainder into biconnected components.

    Note that multigraphs are supported by this function -- and in undirected
    multigraphs, a pair of parallel edges is considered a cycle of length 2.
    Likewise, self-loops are considered to be cycles of length 1.  We define
    cycles as sequences of nodes; so the presence of loops and parallel edges
    does not change the number of simple cycles in a graph.

    Parameters
    ----------
    G : NetworkX Graph
        A networkx graph. Undirected, directed, and multigraphs are all supported.

    length_bound : int or None, optional (default=None)
        If `length_bound` is an int, generate all simple cycles of `G` with length at
        most `length_bound`.  Otherwise, generate all simple cycles of `G`.

    Yields
    ------
    list of nodes
        Each cycle is represented by a list of nodes along the cycle.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)])
    >>> sorted(nx.simple_cycles(G))
    [[0], [0, 1, 2], [0, 2], [1, 2], [2]]

    To filter the cycles so that they don't include certain nodes or edges,
    copy your graph and eliminate those nodes or edges before calling.
    For example, to exclude self-loops from the above example:

    >>> H = G.copy()
    >>> H.remove_edges_from(nx.selfloop_edges(G))
    >>> sorted(nx.simple_cycles(H))
    [[0, 1, 2], [0, 2], [1, 2]]

    Notes
    -----
    When `length_bound` is None, the time complexity is $O((n+e)(c+1))$ for $n$
    nodes, $e$ edges and $c$ simple circuits.  Otherwise, when ``length_bound > 1``,
    the time complexity is $O((c+n)(k-1)d^k)$ where $d$ is the average degree of
    the nodes of `G` and $k$ = `length_bound`.

    Raises
    ------
    ValueError
        when ``length_bound < 0``.

    References
    ----------
    .. [1] Finding all the elementary circuits of a directed graph.
       D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975.
       https://doi.org/10.1137/0204007
    .. [2] Finding All Bounded-Length Simple Cycles in a Directed Graph
       A. Gupta and T. Suzumura https://arxiv.org/abs/2105.10094
    .. [3] Enumerating the cycles of a digraph: a new preprocessing strategy.
       G. Loizou and P. Thanish, Information Sciences, v. 27, 163-182, 1982.
    .. [4] A search strategy for the elementary cycles of a directed graph.
       J.L. Szwarcfiter and P.E. Lauer, BIT NUMERICAL MATHEMATICS,
       v. 16, no. 2, 192-204, 1976.
    .. [5] Optimal Listing of Cycles and st-Paths in Undirected Graphs
       R. Ferreira and R. Grossi and A. Marino and N. Pisanti and R. Rizzi and
       G. Sacomoto https://arxiv.org/abs/1205.2766

    See Also
    --------
    cycle_basis
    chordless_cycles
    """

    # Validate the bound up front; a bound of zero trivially yields nothing.
    if length_bound is not None:
        if length_bound == 0:
            return
        elif length_bound < 0:
            raise ValueError("length bound must be non-negative")

    directed = G.is_directed()
    # Self-loops are length-1 cycles; emit them first, in adjacency order.
    yield from ([v] for v, Gv in G.adj.items() if v in Gv)

    if length_bound is not None and length_bound == 1:
        return

    # In an undirected multigraph, a pair of parallel edges is a 2-cycle.
    # The `visited` set ensures each unordered node pair is reported once.
    if G.is_multigraph() and not directed:
        visited = set()
        for u, Gu in G.adj.items():
            multiplicity = ((v, len(Guv)) for v, Guv in Gu.items() if v in visited)
            yield from ([u, v] for v, m in multiplicity if m > 1)
            visited.add(u)

    # explicitly filter out loops; implicitly filter out parallel edges
    # (rebuilding as a simple (di)graph collapses multiplicities to 1)
    if directed:
        G = nx.DiGraph((u, v) for u, Gu in G.adj.items() for v in Gu if v != u)
    else:
        G = nx.Graph((u, v) for u, Gu in G.adj.items() for v in Gu if v != u)

    # this case is not strictly necessary but improves performance
    if length_bound is not None and length_bound == 2:
        if directed:
            # Directed 2-cycles are digons (u, v) with both arcs present;
            # `visited` avoids reporting both [u, v] and [v, u].
            visited = set()
            for u, Gu in G.adj.items():
                yield from (
                    [v, u] for v in visited.intersection(Gu) if G.has_edge(v, u)
                )
                visited.add(u)
        return

    # Longer cycles are delegated to the component-decomposing searches.
    if directed:
        yield from _directed_cycle_search(G, length_bound)
    else:
        yield from _undirected_cycle_search(G, length_bound)
|
| 238 |
+
|
| 239 |
+
|
| 240 |
+
def _directed_cycle_search(G, length_bound):
    """Generate simple cycles of a directed graph via binary partition.

    Repeatedly pick a strongly connected component, choose one of its
    nodes ``v``, enumerate every simple cycle through ``v``, delete ``v``,
    and re-decompose the remainder into strongly connected components.
    If ``length_bound`` is an int, only cycles of at most that length are
    produced; if None, all simple cycles are produced.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed graph; it is destructively modified during the search.

    length_bound : int or None
        Maximum cycle length, or None for no bound.

    Yields
    ------
    list of nodes
        Each cycle is represented by a list of nodes along the cycle.
    """

    find_sccs = nx.strongly_connected_components
    work = [comp for comp in find_sccs(G) if len(comp) >= 2]
    while work:
        comp = work.pop()
        view = G.subgraph(comp)
        base = next(iter(comp))
        if length_bound is not None:
            yield from _bounded_cycle_search(view, [base], length_bound)
        else:
            yield from _johnson_cycle_search(view, [base])
        # remove the base node only after the search, so that cycles
        # through it could still be found; the subgraph view then
        # automatically reflects the deletion for the re-decomposition
        G.remove_node(base)
        work.extend(comp for comp in find_sccs(view) if len(comp) >= 2)
|
| 288 |
+
|
| 289 |
+
|
| 290 |
+
def _undirected_cycle_search(G, length_bound):
    """Generate simple cycles of an undirected graph via binary partition.

    Repeatedly pick a biconnected component, choose one of its edges
    ``(u, v)``, enumerate every simple cycle through ``(u, v)``, delete
    that edge, and re-decompose the remainder into biconnected
    components.  If ``length_bound`` is an int, only cycles of at most
    that length are produced; if None, all simple cycles are produced.

    Parameters
    ----------
    G : NetworkX Graph
        An undirected graph; it is destructively modified during the search.

    length_bound : int or None
        Maximum cycle length, or None for no bound.

    Yields
    ------
    list of nodes
        Each cycle is represented by a list of nodes along the cycle.
    """

    find_bccs = nx.biconnected_components
    work = [comp for comp in find_bccs(G) if len(comp) >= 3]
    while work:
        comp = work.pop()
        view = G.subgraph(comp)
        edge = list(next(iter(view.edges)))
        # remove (u, v) before searching, so the search cannot report the
        # degenerate 3-cycle [u, v, u]; the subgraph view sees the removal
        G.remove_edge(*edge)
        if length_bound is not None:
            yield from _bounded_cycle_search(view, edge, length_bound)
        else:
            yield from _johnson_cycle_search(view, edge)
        work.extend(comp for comp in find_bccs(view) if len(comp) >= 3)
|
| 338 |
+
|
| 339 |
+
|
| 340 |
+
class _NeighborhoodCache(dict):
|
| 341 |
+
"""Very lightweight graph wrapper which caches neighborhoods as list.
|
| 342 |
+
|
| 343 |
+
This dict subclass uses the __missing__ functionality to query graphs for
|
| 344 |
+
their neighborhoods, and store the result as a list. This is used to avoid
|
| 345 |
+
the performance penalty incurred by subgraph views.
|
| 346 |
+
"""
|
| 347 |
+
|
| 348 |
+
def __init__(self, G):
|
| 349 |
+
self.G = G
|
| 350 |
+
|
| 351 |
+
def __missing__(self, v):
|
| 352 |
+
Gv = self[v] = list(self.G[v])
|
| 353 |
+
return Gv
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
def _johnson_cycle_search(G, path):
    """The main loop of the cycle-enumeration algorithm of Johnson.

    Parameters
    ----------
    G : NetworkX Graph or DiGraph
        A graph

    path : list
        A cycle prefix.  All cycles generated will begin with this prefix.

    Yields
    ------
    list of nodes
        Each cycle is represented by a list of nodes along the cycle.

    References
    ----------
    .. [1] Finding all the elementary circuits of a directed graph.
       D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975.
       https://doi.org/10.1137/0204007

    """

    G = _NeighborhoodCache(G)
    # Nodes excluded from extending the current path.
    blocked = set(path)
    B = defaultdict(set)  # graph portions that yield no elementary circuit
    start = path[0]
    # DFS frontier: one neighbor-iterator per node currently on the path.
    stack = [iter(G[path[-1]])]
    # closed[i] records whether a cycle was found while path[i] was the tip.
    closed = [False]
    while stack:
        nbrs = stack[-1]
        for w in nbrs:
            if w == start:
                # Path closes back to its first node: report a cycle.
                yield path[:]
                closed[-1] = True
            elif w not in blocked:
                # Extend the path by one node and descend.
                path.append(w)
                closed.append(False)
                stack.append(iter(G[w]))
                blocked.add(w)
                break
        else:  # no more nbrs
            stack.pop()
            v = path.pop()
            if closed.pop():
                # A cycle passed through v: propagate the flag to the parent
                # and unblock v (and, transitively, every node recorded
                # against it in B).
                if closed:
                    closed[-1] = True
                unblock_stack = {v}
                while unblock_stack:
                    u = unblock_stack.pop()
                    if u in blocked:
                        blocked.remove(u)
                        unblock_stack.update(B[u])
                        B[u].clear()
            else:
                # No cycle through v: keep it blocked until one of its
                # neighbors is unblocked; record that dependency in B.
                for w in G[v]:
                    B[w].add(v)
|
| 414 |
+
|
| 415 |
+
|
| 416 |
+
def _bounded_cycle_search(G, path, length_bound):
    """The main loop of the cycle-enumeration algorithm of Gupta and Suzumura.

    Parameters
    ----------
    G : NetworkX Graph or DiGraph
        A graph

    path : list
        A cycle prefix.  All cycles generated will begin with this prefix.

    length_bound: int
        A length bound.  All cycles generated will have length at most length_bound.

    Yields
    ------
    list of nodes
        Each cycle is represented by a list of nodes along the cycle.

    References
    ----------
    .. [1] Finding All Bounded-Length Simple Cycles in a Directed Graph
       A. Gupta and T. Suzumura https://arxiv.org/abs/2105.10094

    """
    G = _NeighborhoodCache(G)
    # lock[v]: depth threshold below which the path may be extended onto v;
    # prefix nodes are locked at 0 so they are never revisited.
    lock = {v: 0 for v in path}
    B = defaultdict(set)  # nodes to relax when a lock is loosened
    start = path[0]
    # DFS frontier: one neighbor-iterator per node currently on the path.
    stack = [iter(G[path[-1]])]
    # blen[i]: shortest remaining budget in which a cycle was completed
    # while path[i] was on the path (length_bound means "none found yet").
    blen = [length_bound]
    while stack:
        nbrs = stack[-1]
        for w in nbrs:
            if w == start:
                # Path closes back to its first node: report a cycle.
                yield path[:]
                blen[-1] = 1
            elif len(path) < lock.get(w, length_bound):
                # Budget permits extending onto w: descend.
                path.append(w)
                blen.append(length_bound)
                lock[w] = len(path)
                stack.append(iter(G[w]))
                break
        else:
            stack.pop()
            v = path.pop()
            bl = blen.pop()
            # Propagate the tightest completed-cycle budget to the parent.
            if blen:
                blen[-1] = min(blen[-1], bl)
            if bl < length_bound:
                # A cycle was found through v: relax locks outward so that
                # shorter prefixes may revisit the affected nodes.
                relax_stack = [(bl, v)]
                while relax_stack:
                    bl, u = relax_stack.pop()
                    if lock.get(u, length_bound) < length_bound - bl + 1:
                        lock[u] = length_bound - bl + 1
                        relax_stack.extend((bl + 1, w) for w in B[u].difference(path))
            else:
                # No cycle through v within budget: register v with its
                # neighbors so a future relaxation can reach it.
                for w in G[v]:
                    B[w].add(v)
|
| 475 |
+
|
| 476 |
+
|
| 477 |
+
@nx._dispatchable
def chordless_cycles(G, length_bound=None):
    """Find simple chordless cycles of a graph.

    A `simple cycle` is a closed path where no node appears twice.  In a simple
    cycle, a `chord` is an additional edge between two nodes in the cycle.  A
    `chordless cycle` is a simple cycle without chords.  Said differently, a
    chordless cycle is a cycle C in a graph G where the number of edges in the
    induced graph G[C] is equal to the length of `C`.

    Note that some care must be taken in the case that G is not a simple graph
    nor a simple digraph.  Some authors limit the definition of chordless cycles
    to have a prescribed minimum length; we do not.

    1. We interpret self-loops to be chordless cycles, except in multigraphs
       with multiple loops in parallel.  Likewise, in a chordless cycle of
       length greater than 1, there can be no nodes with self-loops.

    2. We interpret directed two-cycles to be chordless cycles, except in
       multi-digraphs when any edge in a two-cycle has a parallel copy.

    3. We interpret parallel pairs of undirected edges as two-cycles, except
       when a third (or more) parallel edge exists between the two nodes.

    4. Generalizing the above, edges with parallel clones may not occur in
       chordless cycles.

    In a directed graph, two chordless cycles are distinct if they are not
    cyclic permutations of each other.  In an undirected graph, two chordless
    cycles are distinct if they are not cyclic permutations of each other nor of
    the other's reversal.

    Optionally, the cycles are bounded in length.

    We use an algorithm strongly inspired by that of Dias et al [1]_.  It has
    been modified in the following ways:

    1. Recursion is avoided, per Python's limitations

    2. The labeling function is not necessary, because the starting paths
        are chosen (and deleted from the host graph) to prevent multiple
        occurrences of the same path

    3. The search is optionally bounded at a specified length

    4. Support for directed graphs is provided by extending cycles along
        forward edges, and blocking nodes along forward and reverse edges

    5. Support for multigraphs is provided by omitting digons from the set
        of forward edges

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed graph

    length_bound : int or None, optional (default=None)
        If length_bound is an int, generate all simple cycles of G with length at
        most length_bound.  Otherwise, generate all simple cycles of G.

    Yields
    ------
    list of nodes
        Each cycle is represented by a list of nodes along the cycle.

    Examples
    --------
    >>> sorted(list(nx.chordless_cycles(nx.complete_graph(4))))
    [[1, 0, 2], [1, 0, 3], [2, 0, 3], [2, 1, 3]]

    Notes
    -----
    When length_bound is None, and the graph is simple, the time complexity is
    $O((n+e)(c+1))$ for $n$ nodes, $e$ edges and $c$ chordless cycles.

    Raises
    ------
    ValueError
        when length_bound < 0.

    References
    ----------
    .. [1] Efficient enumeration of chordless cycles
       E. Dias and D. Castonguay and H. Longo and W.A.R. Jradi
       https://arxiv.org/abs/1309.1051

    See Also
    --------
    simple_cycles
    """

    # Validate the bound up front; a bound of zero trivially yields nothing.
    if length_bound is not None:
        if length_bound == 0:
            return
        elif length_bound < 0:
            raise ValueError("length bound must be non-negative")

    directed = G.is_directed()
    multigraph = G.is_multigraph()

    # Self-loops are chordless 1-cycles -- but in a multigraph only when
    # the loop is not duplicated (rule 1 above).
    if multigraph:
        yield from ([v] for v, Gv in G.adj.items() if len(Gv.get(v, ())) == 1)
    else:
        yield from ([v] for v, Gv in G.adj.items() if v in Gv)

    if length_bound is not None and length_bound == 1:
        return

    # Nodes with loops cannot belong to longer cycles.  Let's delete them here.
    # also, we implicitly reduce the multiplicity of edges down to 1 in the case
    # of multiedges.
    if directed:
        F = nx.DiGraph((u, v) for u, Gu in G.adj.items() if u not in Gu for v in Gu)
        B = F.to_undirected(as_view=False)
    else:
        F = nx.Graph((u, v) for u, Gu in G.adj.items() if u not in Gu for v in Gu)
        B = None

    # If we're given a multigraph, we have a few cases to consider with parallel
    # edges.
    #
    # 1. If we have 2 or more edges in parallel between the nodes (u, v), we
    #    must not construct longer cycles along (u, v).
    # 2. If G is not directed, then a pair of parallel edges between (u, v) is a
    #    chordless cycle unless there exists a third (or more) parallel edge.
    # 3. If G is directed, then parallel edges do not form cycles, but do
    #    preclude back-edges from forming cycles (handled in the next section),
    #    Thus, if an edge (u, v) is duplicated and the reverse (v, u) is also
    #    present, then we remove both from F.
    #
    # In directed graphs, we need to consider both directions that edges can
    # take, so iterate over all edges (u, v) and possibly (v, u).  In undirected
    # graphs, we need to be a little careful to only consider every edge once,
    # so we use a "visited" set to emulate node-order comparisons.

    if multigraph:
        if not directed:
            B = F.copy()
            visited = set()
        for u, Gu in G.adj.items():
            if directed:
                multiplicity = ((v, len(Guv)) for v, Guv in Gu.items())
                for v, m in multiplicity:
                    if m > 1:
                        F.remove_edges_from(((u, v), (v, u)))
            else:
                multiplicity = ((v, len(Guv)) for v, Guv in Gu.items() if v in visited)
                for v, m in multiplicity:
                    if m == 2:
                        yield [u, v]
                    if m > 1:
                        F.remove_edge(u, v)
                visited.add(u)

    # If we're given a directed graphs, we need to think about digons.  If we
    # have two edges (u, v) and (v, u), then that's a two-cycle.  If either edge
    # was duplicated above, then we removed both from F.  So, any digons we find
    # here are chordless.  After finding digons, we remove their edges from F
    # to avoid traversing them in the search for chordless cycles.
    if directed:
        for u, Fu in F.adj.items():
            digons = [[u, v] for v in Fu if F.has_edge(v, u)]
            yield from digons
            F.remove_edges_from(digons)
            F.remove_edges_from(e[::-1] for e in digons)

    if length_bound is not None and length_bound == 2:
        return

    # Now, we prepare to search for cycles.  We have removed all cycles of
    # lengths 1 and 2, so F is a simple graph or simple digraph.  We repeatedly
    # separate digraphs into their strongly connected components, and undirected
    # graphs into their biconnected components.  For each component, we pick a
    # node v, search for chordless cycles based at each "stem" (u, v, w), and
    # then remove v from that component before separating the graph again.
    if directed:
        separate = nx.strongly_connected_components

        # Directed stems look like (u -> v -> w), so we use the product of
        # predecessors of v with successors of v.
        def stems(C, v):
            for u, w in product(C.pred[v], C.succ[v]):
                if not G.has_edge(u, w):  # omit stems with acyclic chords
                    yield [u, v, w], F.has_edge(w, u)

    else:
        separate = nx.biconnected_components

        # Undirected stems look like (u ~ v ~ w), but we must not also search
        # (w ~ v ~ u), so we use combinations of v's neighbors of length 2.
        def stems(C, v):
            yield from (([u, v, w], F.has_edge(w, u)) for u, w in combinations(C[v], 2))

    components = [c for c in separate(F) if len(c) > 2]
    while components:
        c = components.pop()
        v = next(iter(c))
        Fc = F.subgraph(c)
        # Lazily-built neighborhood caches, shared by all stems of this
        # component that require a full search.
        Fcc = Bcc = None
        for S, is_triangle in stems(Fc, v):
            if is_triangle:
                # The stem already closes into a 3-cycle; by construction
                # it has no chord, so yield it directly.
                yield S
            else:
                if Fcc is None:
                    Fcc = _NeighborhoodCache(Fc)
                    Bcc = Fcc if B is None else _NeighborhoodCache(B.subgraph(c))
                yield from _chordless_cycle_search(Fcc, Bcc, S, length_bound)

        components.extend(c for c in separate(F.subgraph(c - {v})) if len(c) > 2)
|
| 686 |
+
|
| 687 |
+
|
| 688 |
+
def _chordless_cycle_search(F, B, path, length_bound):
    """The main loop for chordless cycle enumeration.

    This algorithm is strongly inspired by that of Dias et al [1]_.  It has been
    modified in the following ways:

    1. Recursion is avoided, per Python's limitations

    2. The labeling function is not necessary, because the starting paths
        are chosen (and deleted from the host graph) to prevent multiple
        occurrences of the same path

    3. The search is optionally bounded at a specified length

    4. Support for directed graphs is provided by extending cycles along
        forward edges, and blocking nodes along forward and reverse edges

    5. Support for multigraphs is provided by omitting digons from the set
        of forward edges

    Parameters
    ----------
    F : _NeighborhoodCache
        A graph of forward edges to follow in constructing cycles

    B : _NeighborhoodCache
        A graph of blocking edges to prevent the production of chordless cycles

    path : list
        A cycle prefix.  All cycles generated will begin with this prefix.

    length_bound : int
        A length bound.  All cycles generated will have length at most length_bound.


    Yields
    ------
    list of nodes
        Each cycle is represented by a list of nodes along the cycle.

    References
    ----------
    .. [1] Efficient enumeration of chordless cycles
       E. Dias and D. Castonguay and H. Longo and W.A.R. Jradi
       https://arxiv.org/abs/1309.1051

    """
    # blocked[v] counts how many path nodes are adjacent (in B) to v; a
    # node may extend the path only while its count is exactly 1 --
    # i.e. only its immediate predecessor on the path touches it.
    blocked = defaultdict(int)
    target = path[0]
    blocked[path[1]] = 1
    for w in path[1:]:
        for v in B[w]:
            blocked[v] += 1

    # DFS frontier starts from the last node of the 3-node stem.
    stack = [iter(F[path[2]])]
    while stack:
        nbrs = stack[-1]
        for w in nbrs:
            if blocked[w] == 1 and (length_bound is None or len(path) < length_bound):
                Fw = F[w]
                if target in Fw:
                    # w closes back to the stem's first node: chordless cycle.
                    yield path + [w]
                else:
                    Bw = B[w]
                    if target in Bw:
                        # A blocking edge back to the target would be a
                        # chord of any cycle through w; prune this branch.
                        continue
                    # Descend: block w's B-neighbors and extend the path.
                    for v in Bw:
                        blocked[v] += 1
                    path.append(w)
                    stack.append(iter(Fw))
                    break
        else:
            # Backtrack: undo the blocking contributed by the popped node.
            stack.pop()
            for v in B[path.pop()]:
                blocked[v] -= 1
|
| 763 |
+
|
| 764 |
+
|
| 765 |
+
@not_implemented_for("undirected")
@nx._dispatchable(mutates_input=True)
def recursive_simple_cycles(G):
    """Find simple cycles (elementary circuits) of a directed graph.

    A `simple cycle`, or `elementary circuit`, is a closed path where
    no node appears twice.  Two elementary circuits are distinct if they
    are not cyclic permutations of each other.

    This version uses a recursive algorithm to build a list of cycles.
    You should probably use the iterator version called simple_cycles().
    Warning: This recursive version uses lots of RAM!
    It appears in NetworkX for pedagogical value.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed graph

    Returns
    -------
    A list of cycles, where each cycle is represented by a list of nodes
    along the cycle.

    Example:

    >>> edges = [(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)]
    >>> G = nx.DiGraph(edges)
    >>> nx.recursive_simple_cycles(G)
    [[0], [2], [0, 1, 2], [0, 2], [1, 2]]

    Notes
    -----
    The implementation follows pp. 79-80 in [1]_.

    The time complexity is $O((n+e)(c+1))$ for $n$ nodes, $e$ edges and $c$
    elementary circuits.

    References
    ----------
    .. [1] Finding all the elementary circuits of a directed graph.
       D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975.
       https://doi.org/10.1137/0204007

    See Also
    --------
    simple_cycles, cycle_basis
    """

    # Jon Olav Vik, 2010-08-09
    def _unblock(thisnode):
        """Recursively unblock and remove nodes from B[thisnode]."""
        if blocked[thisnode]:
            blocked[thisnode] = False
            while B[thisnode]:
                _unblock(B[thisnode].pop())

    def circuit(thisnode, startnode, component):
        # Recursively extend the current path; returns True if any
        # elementary circuit was closed at or below this call.
        closed = False  # set to True if elementary path is closed
        path.append(thisnode)
        blocked[thisnode] = True
        for nextnode in component[thisnode]:  # direct successors of thisnode
            if nextnode == startnode:
                result.append(path[:])
                closed = True
            elif not blocked[nextnode]:
                if circuit(nextnode, startnode, component):
                    closed = True
        if closed:
            _unblock(thisnode)
        else:
            # No circuit through thisnode: keep it blocked until one of
            # its successors is unblocked; record the dependency in B.
            for nextnode in component[thisnode]:
                if thisnode not in B[nextnode]:  # TODO: use set for speedup?
                    B[nextnode].append(thisnode)
        path.pop()  # remove thisnode from path
        return closed

    path = []  # stack of nodes in current path
    blocked = defaultdict(bool)  # vertex: blocked from search?
    B = defaultdict(list)  # graph portions that yield no elementary circuit
    result = []  # list to accumulate the circuits found

    # Johnson's algorithm exclude self cycle edges like (v, v)
    # To be backward compatible, we record those cycles in advance
    # and then remove from subG
    for v in G:
        if G.has_edge(v, v):
            result.append([v])
            G.remove_edge(v, v)  # NOTE: mutates the caller's graph

    # Johnson's algorithm requires some ordering of the nodes.
    # They might not be sortable so we assign an arbitrary ordering.
    ordering = dict(zip(G, range(len(G))))
    for s in ordering:
        # Build the subgraph induced by s and following nodes in the ordering
        subgraph = G.subgraph(node for node in G if ordering[node] >= ordering[s])
        # Find the strongly connected component in the subgraph
        # that contains the least node according to the ordering
        strongcomp = nx.strongly_connected_components(subgraph)
        mincomp = min(strongcomp, key=lambda ns: min(ordering[n] for n in ns))
        component = G.subgraph(mincomp)
        if len(component) > 1:
            # smallest node in the component according to the ordering
            startnode = min(component, key=ordering.__getitem__)
            # Reset the search state for this component before recursing.
            for node in component:
                blocked[node] = False
                B[node][:] = []
            dummy = circuit(startnode, startnode, component)
    return result
|
| 874 |
+
|
| 875 |
+
|
| 876 |
+
@nx._dispatchable
def find_cycle(G, source=None, orientation=None):
    """Returns a cycle found via depth-first traversal.

    The cycle is a list of edges indicating the cyclic path.
    Orientation of directed edges is controlled by `orientation`.

    Parameters
    ----------
    G : graph
        A directed/undirected graph/multigraph.

    source : node, list of nodes
        The node from which the traversal begins. If None, then a source
        is chosen arbitrarily and repeatedly until all edges from each node in
        the graph are searched.

    orientation : None | 'original' | 'reverse' | 'ignore' (default: None)
        For directed graphs and directed multigraphs, edge traversals need not
        respect the original orientation of the edges.
        When set to 'reverse' every edge is traversed in the reverse direction.
        When set to 'ignore', every edge is treated as undirected.
        When set to 'original', every edge is treated as directed.
        In all three cases, the yielded edge tuples add a last entry to
        indicate the direction in which that edge was traversed.
        If orientation is None, the yielded edge has no direction indicated.
        The direction is respected, but not reported.

    Returns
    -------
    edges : directed edges
        A list of directed edges indicating the path taken for the loop.
        If no cycle is found, then an exception is raised.
        For graphs, an edge is of the form `(u, v)` where `u` and `v`
        are the tail and head of the edge as determined by the traversal.
        For multigraphs, an edge is of the form `(u, v, key)`, where `key` is
        the key of the edge. When the graph is directed, then `u` and `v`
        are always in the order of the actual directed edge.
        If orientation is not None then the edge tuple is extended to include
        the direction of traversal ('forward' or 'reverse') on that edge.

    Raises
    ------
    NetworkXNoCycle
        If no cycle was found.

    Examples
    --------
    In this example, we construct a DAG and find, in the first call, that there
    are no directed cycles, and so an exception is raised. In the second call,
    we ignore edge orientations and find that there is an undirected cycle.
    Note that the second call finds a directed cycle while effectively
    traversing an undirected graph, and so, we found an "undirected cycle".
    This means that this DAG structure does not form a directed tree (which
    is also known as a polytree).

    >>> G = nx.DiGraph([(0, 1), (0, 2), (1, 2)])
    >>> nx.find_cycle(G, orientation="original")
    Traceback (most recent call last):
        ...
    networkx.exception.NetworkXNoCycle: No cycle found.
    >>> list(nx.find_cycle(G, orientation="ignore"))
    [(0, 1, 'forward'), (1, 2, 'forward'), (0, 2, 'reverse')]

    See Also
    --------
    simple_cycles
    """
    # `tailhead` maps an edge tuple yielded by `edge_dfs` to the (tail, head)
    # pair as seen by the traversal, accounting for the chosen orientation.
    if not G.is_directed() or orientation in (None, "original"):

        def tailhead(edge):
            return edge[:2]

    elif orientation == "reverse":

        def tailhead(edge):
            return edge[1], edge[0]

    elif orientation == "ignore":

        def tailhead(edge):
            # Under 'ignore', edge_dfs appends the traversal direction; a
            # 'reverse' marker means the edge was walked head-to-tail.
            if edge[-1] == "reverse":
                return edge[1], edge[0]
            return edge[:2]

    explored = set()
    cycle = []
    final_node = None
    for start_node in G.nbunch_iter(source):
        if start_node in explored:
            # No loop is possible.
            continue

        edges = []
        # All nodes seen in this iteration of edge_dfs
        seen = {start_node}
        # Nodes in active path.
        active_nodes = {start_node}
        previous_head = None

        for edge in nx.edge_dfs(G, start_node, orientation):
            # Determine if this edge is a continuation of the active path.
            tail, head = tailhead(edge)
            if head in explored:
                # Then we've already explored it. No loop is possible.
                continue
            if previous_head is not None and tail != previous_head:
                # This edge results from backtracking.
                # Pop until we get a node whose head equals the current tail.
                # So for example, we might have:
                #  (0, 1), (1, 2), (2, 3), (1, 4)
                # which must become:
                #  (0, 1), (1, 4)
                while True:
                    try:
                        popped_edge = edges.pop()
                    except IndexError:
                        # Active path exhausted: restart it at the current tail.
                        edges = []
                        active_nodes = {tail}
                        break
                    else:
                        popped_head = tailhead(popped_edge)[1]
                        active_nodes.remove(popped_head)

                    if edges:
                        last_head = tailhead(edges[-1])[1]
                        if tail == last_head:
                            break
            edges.append(edge)

            if head in active_nodes:
                # We have a loop!
                cycle.extend(edges)
                final_node = head
                break
            else:
                seen.add(head)
                active_nodes.add(head)
                previous_head = head

        if cycle:
            break
        else:
            explored.update(seen)

    else:
        # The for-loop ran to completion without breaking: no start node
        # yielded a cycle, so none exists.
        assert len(cycle) == 0
        raise nx.exception.NetworkXNoCycle("No cycle found.")

    # We now have a list of edges which ends on a cycle.
    # So we need to remove from the beginning edges that are not relevant.

    for i, edge in enumerate(cycle):
        tail, head = tailhead(edge)
        if tail == final_node:
            break

    return cycle[i:]
|
| 1034 |
+
|
| 1035 |
+
|
| 1036 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight")
def minimum_cycle_basis(G, weight=None):
    """Returns a minimum weight cycle basis for G

    Minimum weight means a cycle basis for which the total weight
    (length for unweighted graphs) of all the cycles is minimum.

    Parameters
    ----------
    G : NetworkX Graph
    weight: string
        name of the edge attribute to use for edge weights

    Returns
    -------
    A list of cycle lists. Each cycle list is a list of nodes
    which forms a cycle (loop) in G. Note that the nodes are not
    necessarily returned in a order by which they appear in the cycle

    Examples
    --------
    >>> G = nx.Graph()
    >>> nx.add_cycle(G, [0, 1, 2, 3])
    >>> nx.add_cycle(G, [0, 3, 4, 5])
    >>> nx.minimum_cycle_basis(G)
    [[5, 4, 3, 0], [3, 2, 1, 0]]

    References:
        [1] Kavitha, Telikepalli, et al. "An O(m^2n) Algorithm for
        Minimum Cycle Basis of Graphs."
        http://link.springer.com/article/10.1007/s00453-007-9064-z
        [2] de Pina, J. 1995. Applications of shortest path methods.
        Ph.D. thesis, University of Amsterdam, Netherlands

    See Also
    --------
    simple_cycles, cycle_basis
    """
    # A basis of the whole graph is just the concatenation of the bases
    # of its connected components, computed independently.
    basis = []
    for component_nodes in nx.connected_components(G):
        basis.extend(_min_cycle_basis(G.subgraph(component_nodes), weight))
    return basis
|
| 1081 |
+
|
| 1082 |
+
|
| 1083 |
+
def _min_cycle_basis(G, weight):
    """Compute a minimum weight cycle basis for the connected graph `G`
    using de Pina's method [2] as described in [1]."""
    basis = []
    # Any spanning tree will do here, not necessarily a *minimum* one,
    # hence weight=None (which may also be faster).
    spanning = list(nx.minimum_spanning_edges(G, weight=None, data=False))
    chords = G.edges - spanning - {(head, tail) for tail, head in spanning}

    # Maintain a set of vectors orthogonal to the cycles found so far.
    orth_vectors = [{chord} for chord in chords]
    while orth_vectors:
        vec = orth_vectors.pop()
        # The next basis cycle is "parallel" to vec, as per [p. 336, 1].
        new_cycle = _min_cycle(G, vec, weight)
        basis.append([head for tail, head in new_cycle])

        # Update the remaining vectors so they stay orthogonal to the
        # newly found cycle (symmetric difference with vec when the
        # overlap with the cycle is odd).
        updated = []
        for orth in orth_vectors:
            overlap = sum(e in orth or e[::-1] in orth for e in new_cycle)
            if overlap % 2:
                sym_diff = {e for e in orth if e not in vec and e[::-1] not in vec}
                sym_diff |= {e for e in vec if e not in orth and e[::-1] not in orth}
                updated.append(sym_diff)
            else:
                updated.append(orth)
        orth_vectors = updated
    return basis
|
| 1111 |
+
|
| 1112 |
+
|
| 1113 |
+
def _min_cycle(G, orth, weight):
    """
    Return the minimum weight cycle in G that is orthogonal to the
    vector `orth`, as per [p. 338, 1].

    A node ``u`` of the lifted graph's second copy is represented by the
    tuple ``(u, 1)`` (written u' in the paper).
    """
    lifted = nx.Graph()

    # Insert two copies of every edge of G into the lifted graph. Edges
    # in `orth` cross between the two copies; all other edges stay
    # within their own copy.
    for u, v, wt in G.edges(data=weight, default=1):
        if (u, v) in orth or (v, u) in orth:
            lifted.add_edges_from([(u, (v, 1)), ((u, 1), v)], Gi_weight=wt)
        else:
            lifted.add_edges_from([(u, v), ((u, 1), (v, 1))], Gi_weight=wt)

    # Distance from each node to its lifted twin. Note: the weight
    # attribute of the lifted graph is named "Gi_weight".
    spl = nx.shortest_path_length
    dist = {n: spl(lifted, source=n, target=(n, 1), weight="Gi_weight") for n in G}

    # The cheapest node-to-twin path corresponds to a minimum cycle in G.
    start = min(dist, key=dist.get)
    lifted_path = nx.shortest_path(
        lifted, source=start, target=(start, 1), weight="Gi_weight"
    )

    # Project the lifted nodes back onto nodes of G.
    projected = [n if n in G else n[0] for n in lifted_path]

    # Remove edges traversed an even number of times. Two passes: first
    # flag the surviving edges, then emit them in traversal order.
    traversed = list(pairwise(projected))
    survivors = set()
    for e in traversed:
        if e in survivors:
            survivors.remove(e)
        elif e[::-1] in survivors:
            survivors.remove(e[::-1])
        else:
            survivors.add(e)

    cycle_edges = []
    for e in traversed:
        if e in survivors:
            cycle_edges.append(e)
            survivors.remove(e)
        elif e[::-1] in survivors:
            cycle_edges.append(e[::-1])
            survivors.remove(e[::-1])

    return cycle_edges
|
| 1164 |
+
|
| 1165 |
+
|
| 1166 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def girth(G):
    """Returns the girth of the graph.

    The girth of a graph is the length of its shortest cycle, or infinity if
    the graph is acyclic. The algorithm follows the description given on the
    Wikipedia page [1]_, and runs in time O(mn) on a graph with m edges and n
    nodes.

    Parameters
    ----------
    G : NetworkX Graph

    Returns
    -------
    int or math.inf

    Examples
    --------
    All examples below (except P_5) can easily be checked using Wikipedia,
    which has a page for each of these famous graphs.

    >>> nx.girth(nx.chvatal_graph())
    4
    >>> nx.girth(nx.tutte_graph())
    4
    >>> nx.girth(nx.petersen_graph())
    5
    >>> nx.girth(nx.heawood_graph())
    6
    >>> nx.girth(nx.pappus_graph())
    6
    >>> nx.girth(nx.path_graph(5))
    inf

    References
    ----------
    .. [1] `Wikipedia: Girth <https://en.wikipedia.org/wiki/Girth_(graph_theory)>`_

    """
    best = limit = inf
    TREE = nx.algorithms.traversal.breadth_first_search.TREE_EDGE
    LEVEL = nx.algorithms.traversal.breadth_first_search.LEVEL_EDGE
    for source in G:
        # BFS from `source`, tracking distances. Since we want the
        # shortest cycle, there is no need to search past the depth at
        # which the current best could still be improved.
        dist = {source: 0}
        for u, v, kind in nx.bfs_labeled_edges(G, source):
            d = dist[u]
            if d > limit:
                break
            if kind is TREE:
                dist[v] = d + 1
                continue
            # A level edge closes an odd cycle of length 2d + 1; a
            # forward edge closes an even cycle of length 2d + 2.
            same_level = kind is LEVEL
            cycle_len = d + d + 2 - same_level
            if cycle_len < best:
                best = cycle_len
                limit = d - same_level

    return best
|
wemm/lib/python3.10/site-packages/networkx/algorithms/distance_regular.py
ADDED
|
@@ -0,0 +1,238 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
=======================
|
| 3 |
+
Distance-regular graphs
|
| 4 |
+
=======================
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
from networkx.utils import not_implemented_for
|
| 9 |
+
|
| 10 |
+
from .distance_measures import diameter
|
| 11 |
+
|
| 12 |
+
__all__ = [
|
| 13 |
+
"is_distance_regular",
|
| 14 |
+
"is_strongly_regular",
|
| 15 |
+
"intersection_array",
|
| 16 |
+
"global_parameters",
|
| 17 |
+
]
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
@nx._dispatchable
def is_distance_regular(G):
    """Returns True if the graph is distance regular, False otherwise.

    A connected graph G is distance-regular if for any nodes x,y
    and any integers i,j=0,1,...,d (where d is the graph
    diameter), the number of vertices at distance i from x and
    distance j from y depends only on i,j and the graph distance
    between x and y, independently of the choice of x and y.

    Parameters
    ----------
    G: Networkx graph (undirected)

    Returns
    -------
    bool
        True if the graph is Distance Regular, False otherwise

    Examples
    --------
    >>> G = nx.hypercube_graph(6)
    >>> nx.is_distance_regular(G)
    True

    See Also
    --------
    intersection_array, global_parameters

    Notes
    -----
    For undirected and simple graphs only

    References
    ----------
    .. [1] Brouwer, A. E.; Cohen, A. M.; and Neumaier, A.
       Distance-Regular Graphs. New York: Springer-Verlag, 1989.
    .. [2] Weisstein, Eric W. "Distance-Regular Graph."
       http://mathworld.wolfram.com/Distance-RegularGraph.html

    """
    # A graph is distance regular exactly when an intersection array
    # exists for it; the helper raises NetworkXError otherwise.
    try:
        intersection_array(G)
    except nx.NetworkXError:
        return False
    return True
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def global_parameters(b, c):
    """Returns global parameters for a given intersection array.

    Given a distance-regular graph G with integers b_i, c_i,i = 0,....,d
    such that for any 2 vertices x,y in G at a distance i=d(x,y), there
    are exactly c_i neighbors of y at a distance of i-1 from x and b_i
    neighbors of y at a distance of i+1 from x.

    Thus, a distance regular graph has the global parameters,
    [[c_0,a_0,b_0],[c_1,a_1,b_1],......,[c_d,a_d,b_d]] for the
    intersection array [b_0,b_1,.....b_{d-1};c_1,c_2,.....c_d]
    where a_i+b_i+c_i=k , k= degree of every vertex.

    Parameters
    ----------
    b : list

    c : list

    Returns
    -------
    iterable
        An iterable over three tuples.

    Examples
    --------
    >>> G = nx.dodecahedral_graph()
    >>> b, c = nx.intersection_array(G)
    >>> list(nx.global_parameters(b, c))
    [(0, 0, 3), (1, 0, 2), (1, 1, 1), (1, 1, 1), (2, 0, 1), (3, 0, 0)]

    References
    ----------
    .. [1] Weisstein, Eric W. "Global Parameters."
       From MathWorld--A Wolfram Web Resource.
       http://mathworld.wolfram.com/GlobalParameters.html

    See Also
    --------
    intersection_array
    """
    # Pad the arrays so that b_d = 0 and c_0 = 0, then pair them up.
    # Each yielded triple is (c_i, a_i, b_i) with a_i = k - b_i - c_i,
    # where k = b[0] is the vertex degree.
    shifted_b = b + [0]
    shifted_c = [0] + c
    return ((ci, b[0] - bi - ci, bi) for bi, ci in zip(shifted_b, shifted_c))
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def intersection_array(G):
    """Returns the intersection array of a distance-regular graph.

    Given a distance-regular graph G with integers b_i, c_i,i = 0,....,d
    such that for any 2 vertices x,y in G at a distance i=d(x,y), there
    are exactly c_i neighbors of y at a distance of i-1 from x and b_i
    neighbors of y at a distance of i+1 from x.

    A distance regular graph's intersection array is given by,
    [b_0,b_1,.....b_{d-1};c_1,c_2,.....c_d]

    Parameters
    ----------
    G: Networkx graph (undirected)

    Returns
    -------
    b,c: tuple of lists

    Examples
    --------
    >>> G = nx.icosahedral_graph()
    >>> nx.intersection_array(G)
    ([5, 2, 1], [1, 2, 5])

    References
    ----------
    .. [1] Weisstein, Eric W. "Intersection Array."
       From MathWorld--A Wolfram Web Resource.
       http://mathworld.wolfram.com/IntersectionArray.html

    See Also
    --------
    global_parameters
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("Graph has no nodes.")

    # The graph must be regular: every node has the same degree.
    degrees = iter(G.degree())
    _, k = next(degrees)
    for _, d in degrees:
        if d != k:
            raise nx.NetworkXError("Graph is not distance regular.")
        k = d

    path_length = dict(nx.all_pairs_shortest_path_length(G))
    diameter = max(max(dists.values()) for dists in path_length.values())
    bint = {}  # the 'b' part of the intersection array
    cint = {}  # the 'c' part of the intersection array
    for u in G:
        for v in G:
            try:
                i = path_length[u][v]
            except KeyError as err:
                # A missing entry means the graph is disconnected.
                raise nx.NetworkXError("Graph is not distance regular.") from err
            # number of neighbors of v at a distance of i-1 from u
            c = len([n for n in G[v] if path_length[n][u] == i - 1])
            # number of neighbors of v at a distance of i+1 from u
            b = len([n for n in G[v] if path_length[n][u] == i + 1])
            # b and c must be independent of the choice of u and v
            if cint.get(i, c) != c or bint.get(i, b) != b:
                raise nx.NetworkXError("Graph is not distance regular")
            bint[i] = b
            cint[i] = c

    b_array = [bint.get(j, 0) for j in range(diameter)]
    c_array = [cint.get(j + 1, 0) for j in range(diameter)]
    return (b_array, c_array)
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
# TODO There is a definition for directed strongly regular graphs.
|
| 185 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def is_strongly_regular(G):
    """Returns True if and only if the given graph is strongly
    regular.

    An undirected graph is *strongly regular* if

    * it is regular,
    * each pair of adjacent vertices has the same number of neighbors in
      common,
    * each pair of nonadjacent vertices has the same number of neighbors
      in common.

    Each strongly regular graph is a distance-regular graph.
    Conversely, if a distance-regular graph has diameter two, then it is
    a strongly regular graph. For more information on distance-regular
    graphs, see :func:`is_distance_regular`.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    Returns
    -------
    bool
        Whether `G` is strongly regular.

    Examples
    --------

    The cycle graph on five vertices is strongly regular. It is
    two-regular, each pair of adjacent vertices has no shared neighbors,
    and each pair of nonadjacent vertices has one shared neighbor::

        >>> G = nx.cycle_graph(5)
        >>> nx.is_strongly_regular(G)
        True

    """
    # Rather than checking the definition directly (equal degrees plus
    # equal common-neighbor counts over all edges and non-edges), we use
    # the fact that a distance-regular graph of diameter two is strongly
    # regular.
    if not is_distance_regular(G):
        return False
    return diameter(G) == 2
|
wemm/lib/python3.10/site-packages/networkx/algorithms/dominating.py
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for computing dominating sets in a graph."""
|
| 2 |
+
|
| 3 |
+
from itertools import chain
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.utils import arbitrary_element
|
| 7 |
+
|
| 8 |
+
__all__ = ["dominating_set", "is_dominating_set"]
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@nx._dispatchable
def dominating_set(G, start_with=None):
    r"""Finds a dominating set for the graph G.

    A *dominating set* for a graph with node set *V* is a subset *D* of
    *V* such that every node not in *D* is adjacent to at least one
    member of *D* [1]_.

    Parameters
    ----------
    G : NetworkX graph

    start_with : node (default=None)
        Node to use as a starting point for the algorithm.

    Returns
    -------
    D : set
        A dominating set for G.

    Notes
    -----
    This function is an implementation of algorithm 7 in [2]_ which
    finds some dominating set, not necessarily the smallest one.

    See also
    --------
    is_dominating_set

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Dominating_set

    .. [2] Abdol-Hossein Esfahanian. Connectivity Algorithms.
        http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf

    """
    nodes = set(G)
    if start_with is None:
        start_with = arbitrary_element(nodes)
    if start_with not in G:
        raise nx.NetworkXError(f"node {start_with} is not in G")
    dom = {start_with}
    covered = set(G[start_with])
    remaining = nodes - covered - dom
    while remaining:
        # Pick any uncovered node; its neighbors not already chosen as
        # dominators become covered by it.
        node = remaining.pop()
        newly_covered = set(G[node]) - dom
        # Promote the node to the dominating set, record its coverage,
        # and drop everything it covers from the work set.
        dom.add(node)
        covered |= newly_covered
        remaining -= newly_covered
    return dom
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
@nx._dispatchable
def is_dominating_set(G, nbunch):
    """Checks if `nbunch` is a dominating set for `G`.

    A *dominating set* for a graph with node set *V* is a subset *D* of
    *V* such that every node not in *D* is adjacent to at least one
    member of *D* [1]_.

    Parameters
    ----------
    G : NetworkX graph

    nbunch : iterable
        An iterable of nodes in the graph `G`.

    See also
    --------
    dominating_set

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Dominating_set

    """
    # Restrict the candidate set to nodes that actually belong to G.
    candidates = {node for node in nbunch if node in G}
    covered = set(chain.from_iterable(G[node] for node in candidates))
    # Dominating iff no node lies outside the candidates and their neighbors.
    return len(set(G) - candidates - covered) == 0
|
wemm/lib/python3.10/site-packages/networkx/algorithms/euler.py
ADDED
|
@@ -0,0 +1,470 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Eulerian circuits and graphs.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from itertools import combinations
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
|
| 9 |
+
from ..utils import arbitrary_element, not_implemented_for
|
| 10 |
+
|
| 11 |
+
__all__ = [
|
| 12 |
+
"is_eulerian",
|
| 13 |
+
"eulerian_circuit",
|
| 14 |
+
"eulerize",
|
| 15 |
+
"is_semieulerian",
|
| 16 |
+
"has_eulerian_path",
|
| 17 |
+
"eulerian_path",
|
| 18 |
+
]
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
@nx._dispatchable
def is_eulerian(G):
    """Returns True if and only if `G` is Eulerian.

    A graph is *Eulerian* when it admits an *Eulerian circuit*: a closed
    walk that traverses every edge exactly once.

    Graphs containing isolated vertices (vertices of zero degree) are not
    considered Eulerian, because the connectivity requirement below fails
    for them: an undirected graph must be connected, and a directed graph
    must be strongly connected.

    Parameters
    ----------
    G : NetworkX graph
        A graph, either directed or undirected.

    Examples
    --------
    >>> nx.is_eulerian(nx.DiGraph({0: [3], 1: [2], 2: [3], 3: [0, 1]}))
    True
    >>> nx.is_eulerian(nx.complete_graph(5))
    True
    >>> nx.is_eulerian(nx.petersen_graph())
    False

    To treat a graph with isolated vertices as Eulerian, strip those
    vertices first:

    >>> G = nx.Graph([(0, 1), (1, 2), (0, 2)])
    >>> G.add_node(3)
    >>> nx.is_eulerian(G)
    False

    >>> G.remove_nodes_from(list(nx.isolates(G)))
    >>> nx.is_eulerian(G)
    True


    """
    if G.is_directed():
        # Directed case: every node balanced (in-degree == out-degree),
        # and the graph strongly connected.  Degree balance is checked
        # first so the potentially expensive connectivity test is skipped
        # when it cannot matter.
        balanced = all(G.in_degree(n) == G.out_degree(n) for n in G)
        return balanced and nx.is_strongly_connected(G)
    # Undirected case: no odd-degree vertex, and the graph connected.
    all_even = all(deg % 2 == 0 for _, deg in G.degree())
    return all_even and nx.is_connected(G)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
@nx._dispatchable
def is_semieulerian(G):
    """Return True iff `G` is semi-Eulerian.

    A graph is semi-Eulerian when it has an Eulerian path but no Eulerian
    circuit: every edge can be walked exactly once, but only along an open
    walk that starts and ends at two different vertices.

    See Also
    --------
    has_eulerian_path
    is_eulerian
    """
    # An Eulerian circuit would make the graph fully Eulerian, which is
    # explicitly excluded here.
    path_exists = has_eulerian_path(G)
    return path_exists and not is_eulerian(G)
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def _find_path_start(G):
    """Return a suitable starting vertex for an Eulerian path in `G`.

    Returns None when `G` has no Eulerian path at all.
    """
    if not has_eulerian_path(G):
        return None

    if is_eulerian(G):
        # A full circuit exists, so any vertex can serve as the start.
        return arbitrary_element(G)

    if G.is_directed():
        # Exactly two vertices are unbalanced; the path must begin at the
        # one whose out-degree exceeds its in-degree.
        first, second = (v for v in G if G.in_degree(v) != G.out_degree(v))
        if G.out_degree(first) > G.in_degree(first):
            return first
        return second

    # Undirected: the path must begin at one of the two odd-degree
    # vertices; take the first one encountered.
    return next(v for v in G if G.degree(v) % 2 != 0)
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
def _simplegraph_eulerian_circuit(G, source):
    """Yield the edges of an Eulerian circuit of non-multigraph `G`, starting at `source`.

    Stack-based (Hierholzer-style) walk.  This helper is destructive: each
    traversed edge is removed from `G`, so callers must pass a disposable
    copy.  Edges are emitted while *backtracking* off the stack; for
    directed graphs the caller passes a reversed copy (see
    ``eulerian_circuit``) so that the backwards emission produces edges in
    forward order.
    """
    if G.is_directed():
        degree = G.out_degree
        edges = G.out_edges
    else:
        degree = G.degree
        edges = G.edges
    vertex_stack = [source]
    last_vertex = None
    while vertex_stack:
        current_vertex = vertex_stack[-1]
        if degree(current_vertex) == 0:
            # No unused edges remain here: this vertex is finalized.
            # Emit the edge that led to it (skipped for the very first pop).
            if last_vertex is not None:
                yield (last_vertex, current_vertex)
            last_vertex = current_vertex
            vertex_stack.pop()
        else:
            # Follow — and consume — an arbitrary remaining incident edge.
            _, next_vertex = arbitrary_element(edges(current_vertex))
            vertex_stack.append(next_vertex)
            G.remove_edge(current_vertex, next_vertex)
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
def _multigraph_eulerian_circuit(G, source):
    """Yield the edges of an Eulerian circuit of multigraph `G` as (u, v, key) triples.

    Multigraph variant of ``_simplegraph_eulerian_circuit``: edge keys are
    carried on the stack so parallel edges can be distinguished.  Like its
    sibling, it is destructive (edges are removed from `G` as consumed) and
    emits edges while backtracking, so directed callers pass a reversed
    copy to obtain forward-ordered output.
    """
    if G.is_directed():
        degree = G.out_degree
        edges = G.out_edges
    else:
        degree = G.degree
        edges = G.edges
    vertex_stack = [(source, None)]
    last_vertex = None
    last_key = None
    while vertex_stack:
        current_vertex, current_key = vertex_stack[-1]
        if degree(current_vertex) == 0:
            # Vertex exhausted: emit the edge (with its key) used to reach
            # it, except for the very first vertex popped.
            if last_vertex is not None:
                yield (last_vertex, current_vertex, last_key)
            last_vertex, last_key = current_vertex, current_key
            vertex_stack.pop()
        else:
            # Consume an arbitrary remaining incident edge, remembering its
            # key so the exact parallel edge can be reported and removed.
            triple = arbitrary_element(edges(current_vertex, keys=True))
            _, next_vertex, next_key = triple
            vertex_stack.append((next_vertex, next_key))
            G.remove_edge(current_vertex, next_vertex, next_key)
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
@nx._dispatchable
def eulerian_circuit(G, source=None, keys=False):
    """Returns an iterator over the edges of an Eulerian circuit in `G`.

    An *Eulerian circuit* is a closed walk that includes each edge of a
    graph exactly once.

    Parameters
    ----------
    G : NetworkX graph
        A graph, either directed or undirected.

    source : node, optional
        Starting node for circuit.

    keys : bool
        If False, edges generated by this function will be of the form
        ``(u, v)``. Otherwise, edges will be of the form ``(u, v, k)``.
        This option is ignored unless `G` is a multigraph.

    Returns
    -------
    edges : iterator
        An iterator over edges in the Eulerian circuit.

    Raises
    ------
    NetworkXError
        If the graph is not Eulerian.

    See Also
    --------
    is_eulerian

    Notes
    -----
    Linear-time implementation adapted from [1]_; background on Euler
    tours in [2]_.

    References
    ----------
    .. [1] J. Edmonds, E. L. Johnson.
       Matching, Euler tours and the Chinese postman.
       Mathematical programming, Volume 5, Issue 1 (1973), 111-114.
    .. [2] https://en.wikipedia.org/wiki/Eulerian_path

    Examples
    --------
    To get an Eulerian circuit in an undirected graph::

        >>> G = nx.complete_graph(3)
        >>> list(nx.eulerian_circuit(G))
        [(0, 2), (2, 1), (1, 0)]
        >>> list(nx.eulerian_circuit(G, source=1))
        [(1, 2), (2, 0), (0, 1)]

    To get the sequence of vertices in an Eulerian circuit::

        >>> [u for u, v in nx.eulerian_circuit(G)]
        [0, 2, 1]

    """
    if not is_eulerian(G):
        raise nx.NetworkXError("G is not Eulerian.")
    # Work on a disposable copy: the circuit helpers consume edges.  A
    # directed graph is reversed because the helpers emit edges while
    # backtracking — walking the reverse graph backwards yields the
    # circuit in forward order.
    G = G.reverse() if G.is_directed() else G.copy()
    if source is None:
        source = arbitrary_element(G)
    if not G.is_multigraph():
        yield from _simplegraph_eulerian_circuit(G, source)
        return
    for u, v, k in _multigraph_eulerian_circuit(G, source):
        yield (u, v, k) if keys else (u, v)
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
@nx._dispatchable
def has_eulerian_path(G, source=None):
    """Return True iff `G` has an Eulerian path.

    An Eulerian path is a path in a graph which uses each edge of a graph
    exactly once. If `source` is specified, then this function checks
    whether an Eulerian path that starts at node `source` exists.

    A directed graph has an Eulerian path iff:
        - at most one vertex has out_degree - in_degree = 1,
        - at most one vertex has in_degree - out_degree = 1,
        - every other vertex has equal in_degree and out_degree,
        - and all of its vertices belong to a single connected
          component of the underlying undirected graph.

    If `source` is not None, an Eulerian path starting at `source` exists
    only when either the graph is Eulerian, or `source` itself has
    out_degree - in_degree = 1 (with the conditions above holding).

    An undirected graph has an Eulerian path iff:
        - exactly zero or two vertices have odd degree,
        - and all of its vertices belong to a single connected component.

    If `source` is not None, an Eulerian path starting at `source` exists
    only when either the graph is Eulerian, or `source` has odd degree
    (with the conditions above holding).

    Graphs with isolated vertices (i.e. vertices with zero degree) are
    never reported as having an Eulerian path, since they fail the
    connectivity requirements above.

    Parameters
    ----------
    G : NetworkX Graph
        The graph to find an euler path in.

    source : node, optional
        Starting node for path.

    Returns
    -------
    Bool : True if G has an Eulerian path.

    Examples
    --------
    If you prefer to allow graphs with isolated vertices to have an
    Eulerian path, remove such vertices first:

    >>> G = nx.Graph([(0, 1), (1, 2), (0, 2)])
    >>> G.add_node(3)
    >>> nx.has_eulerian_path(G)
    False

    >>> G.remove_nodes_from(list(nx.isolates(G)))
    >>> nx.has_eulerian_path(G)
    True

    See Also
    --------
    is_eulerian
    eulerian_path
    """
    # An Eulerian circuit is in particular an Eulerian path, from any start.
    if nx.is_eulerian(G):
        return True

    if not G.is_directed():
        # Not Eulerian, so any path must start at an odd-degree vertex.
        if source is not None and G.degree[source] % 2 != 1:
            return False
        # Non-Eulerian + path-capable means exactly two odd vertices
        # (zero odd vertices would have made the graph Eulerian above).
        odd_count = sum(1 for _, deg in G.degree() if deg % 2 == 1)
        return odd_count == 2 and nx.is_connected(G)

    in_deg = G.in_degree
    out_deg = G.out_degree
    # Not Eulerian, so a valid start must have one surplus outgoing edge.
    if source is not None and out_deg[source] - in_deg[source] != 1:
        return False

    surplus_out = 0  # vertices with out_degree - in_degree == 1
    surplus_in = 0  # vertices with in_degree - out_degree == 1
    for node in G:
        diff = out_deg[node] - in_deg[node]
        if diff == 1:
            surplus_out += 1
        elif diff == -1:
            surplus_in += 1
        elif diff != 0:
            # An imbalance of 2 or more rules out any Eulerian path.
            return False

    return surplus_in <= 1 and surplus_out <= 1 and nx.is_weakly_connected(G)
|
| 332 |
+
|
| 333 |
+
|
| 334 |
+
@nx._dispatchable
def eulerian_path(G, source=None, keys=False):
    """Return an iterator over the edges of an Eulerian path in `G`.

    Parameters
    ----------
    G : NetworkX Graph
        The graph in which to look for an eulerian path.
    source : node or None (default: None)
        The node at which to start the search. None means search over all
        starting nodes.
    keys : Bool (default: False)
        Indicates whether to yield edge 3-tuples (u, v, edge_key).
        The default yields edge 2-tuples

    Yields
    ------
    Edge tuples along the eulerian path.

    Warning: If `source` provided is not the start node of an Euler path
    will raise error even if an Euler Path exists.
    """
    if not has_eulerian_path(G, source):
        raise nx.NetworkXError("Graph has no Eulerian paths.")

    if G.is_directed():
        # The destructive circuit helpers emit edges while backtracking,
        # so the directed graph is reversed up front to obtain the path in
        # forward order.
        G = G.reverse()
        if source is None or not nx.is_eulerian(G):
            source = _find_path_start(G)
        if not G.is_multigraph():
            yield from _simplegraph_eulerian_circuit(G, source)
            return
        for u, v, k in _multigraph_eulerian_circuit(G, source):
            yield (u, v, k) if keys else (u, v)
        return

    # Undirected: walk a disposable copy, then emit the recorded walk
    # back-to-front with each edge's endpoints swapped.
    G = G.copy()
    if source is None:
        source = _find_path_start(G)
    if G.is_multigraph():
        walk = list(_multigraph_eulerian_circuit(G, source))
        if keys:
            yield from ((v, u, k) for u, v, k in reversed(walk))
        else:
            yield from ((v, u) for u, v, _ in reversed(walk))
    else:
        walk = list(_simplegraph_eulerian_circuit(G, source))
        yield from ((v, u) for u, v in reversed(walk))
|
| 387 |
+
|
| 388 |
+
|
| 389 |
+
@not_implemented_for("directed")
@nx._dispatchable(returns_graph=True)
def eulerize(G):
    """Transforms a graph into an Eulerian graph.

    If `G` is Eulerian the result is `G` as a MultiGraph, otherwise the
    result is a smallest (in terms of the number of edges) multigraph
    whose underlying simple graph is `G`.

    Parameters
    ----------
    G : NetworkX graph
       An undirected graph

    Returns
    -------
    G : NetworkX multigraph

    Raises
    ------
    NetworkXError
       If the graph is not connected.

    See Also
    --------
    is_eulerian
    eulerian_circuit

    References
    ----------
    .. [1] J. Edmonds, E. L. Johnson.
       Matching, Euler tours and the Chinese postman.
       Mathematical programming, Volume 5, Issue 1 (1973), 111-114.
    .. [2] https://en.wikipedia.org/wiki/Eulerian_path
    .. [3] http://web.math.princeton.edu/math_alive/5/Notes1.pdf

    Examples
    --------
        >>> G = nx.complete_graph(10)
        >>> H = nx.eulerize(G)
        >>> nx.is_eulerian(H)
        True

    """
    if G.order() == 0:
        raise nx.NetworkXPointlessConcept("Cannot Eulerize null graph")
    if not nx.is_connected(G):
        raise nx.NetworkXError("G is not connected")
    odd_nodes = [node for node, deg in G.degree() if deg % 2 == 1]
    G = nx.MultiGraph(G)
    if not odd_nodes:
        # Already Eulerian: nothing to duplicate.
        return G

    # Build an auxiliary complete graph on the odd-degree vertices.  Each
    # pair is weighted by ``len(G) + 1 - len(shortest path)`` — len(G) + 1
    # bounds any path length from above, so a *maximum*-weight matching on
    # these weights selects the pairing with the *smallest* total number
    # of duplicated edges.  The path itself is stored on the edge for
    # later duplication.
    path_length_bound = len(G) + 1
    aux = nx.Graph()
    for u, v in combinations(odd_nodes, 2):
        shortest = nx.shortest_path(G, source=u, target=v)
        aux.add_edge(v, u, weight=path_length_bound - len(shortest), path=shortest)

    # Pair up the odd-degree vertices with minimum total duplication cost.
    matching = nx.Graph(list(nx.max_weight_matching(aux)))

    # Duplicate every edge along each matched pair's shortest path; this
    # makes all previously odd degrees even.
    for u, v in matching.edges():
        G.add_edges_from(nx.utils.pairwise(aux[u][v]["path"]))
    return G
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__init__.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from networkx.algorithms.isomorphism.isomorph import *
|
| 2 |
+
from networkx.algorithms.isomorphism.vf2userfunc import *
|
| 3 |
+
from networkx.algorithms.isomorphism.matchhelpers import *
|
| 4 |
+
from networkx.algorithms.isomorphism.temporalisomorphvf2 import *
|
| 5 |
+
from networkx.algorithms.isomorphism.ismags import *
|
| 6 |
+
from networkx.algorithms.isomorphism.tree_isomorphism import *
|
| 7 |
+
from networkx.algorithms.isomorphism.vf2pp import *
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (571 Bytes). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/ismags.cpython-310.pyc
ADDED
|
Binary file (33 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorph.cpython-310.pyc
ADDED
|
Binary file (7.74 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-310.pyc
ADDED
|
Binary file (28.5 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-310.pyc
ADDED
|
Binary file (10.8 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-310.pyc
ADDED
|
Binary file (7.43 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/ismags.py
ADDED
|
@@ -0,0 +1,1163 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
ISMAGS Algorithm
|
| 3 |
+
================
|
| 4 |
+
|
| 5 |
+
Provides a Python implementation of the ISMAGS algorithm. [1]_
|
| 6 |
+
|
| 7 |
+
It is capable of finding (subgraph) isomorphisms between two graphs, taking the
|
| 8 |
+
symmetry of the subgraph into account. In most cases the VF2 algorithm is
|
| 9 |
+
faster (at least on small graphs) than this implementation, but in some cases
|
| 10 |
+
there is an exponential number of isomorphisms that are symmetrically
|
| 11 |
+
equivalent. In that case, the ISMAGS algorithm will provide only one solution
|
| 12 |
+
per symmetry group.
|
| 13 |
+
|
| 14 |
+
>>> petersen = nx.petersen_graph()
|
| 15 |
+
>>> ismags = nx.isomorphism.ISMAGS(petersen, petersen)
|
| 16 |
+
>>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=False))
|
| 17 |
+
>>> len(isomorphisms)
|
| 18 |
+
120
|
| 19 |
+
>>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=True))
|
| 20 |
+
>>> answer = [{0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9}]
|
| 21 |
+
>>> answer == isomorphisms
|
| 22 |
+
True
|
| 23 |
+
|
| 24 |
+
In addition, this implementation also provides an interface to find the
|
| 25 |
+
largest common induced subgraph [2]_ between any two graphs, again taking
|
| 26 |
+
symmetry into account. Given `graph` and `subgraph` the algorithm will remove
|
| 27 |
+
nodes from the `subgraph` until `subgraph` is isomorphic to a subgraph of
|
| 28 |
+
`graph`. Since only the symmetry of `subgraph` is taken into account it is
|
| 29 |
+
worth thinking about how you provide your graphs:
|
| 30 |
+
|
| 31 |
+
>>> graph1 = nx.path_graph(4)
|
| 32 |
+
>>> graph2 = nx.star_graph(3)
|
| 33 |
+
>>> ismags = nx.isomorphism.ISMAGS(graph1, graph2)
|
| 34 |
+
>>> ismags.is_isomorphic()
|
| 35 |
+
False
|
| 36 |
+
>>> largest_common_subgraph = list(ismags.largest_common_subgraph())
|
| 37 |
+
>>> answer = [{1: 0, 0: 1, 2: 2}, {2: 0, 1: 1, 3: 2}]
|
| 38 |
+
>>> answer == largest_common_subgraph
|
| 39 |
+
True
|
| 40 |
+
>>> ismags2 = nx.isomorphism.ISMAGS(graph2, graph1)
|
| 41 |
+
>>> largest_common_subgraph = list(ismags2.largest_common_subgraph())
|
| 42 |
+
>>> answer = [
|
| 43 |
+
... {1: 0, 0: 1, 2: 2},
|
| 44 |
+
... {1: 0, 0: 1, 3: 2},
|
| 45 |
+
... {2: 0, 0: 1, 1: 2},
|
| 46 |
+
... {2: 0, 0: 1, 3: 2},
|
| 47 |
+
... {3: 0, 0: 1, 1: 2},
|
| 48 |
+
... {3: 0, 0: 1, 2: 2},
|
| 49 |
+
... ]
|
| 50 |
+
>>> answer == largest_common_subgraph
|
| 51 |
+
True
|
| 52 |
+
|
| 53 |
+
However, when not taking symmetry into account, it doesn't matter:
|
| 54 |
+
|
| 55 |
+
>>> largest_common_subgraph = list(ismags.largest_common_subgraph(symmetry=False))
|
| 56 |
+
>>> answer = [
|
| 57 |
+
... {1: 0, 0: 1, 2: 2},
|
| 58 |
+
... {1: 0, 2: 1, 0: 2},
|
| 59 |
+
... {2: 0, 1: 1, 3: 2},
|
| 60 |
+
... {2: 0, 3: 1, 1: 2},
|
| 61 |
+
... {1: 0, 0: 1, 2: 3},
|
| 62 |
+
... {1: 0, 2: 1, 0: 3},
|
| 63 |
+
... {2: 0, 1: 1, 3: 3},
|
| 64 |
+
... {2: 0, 3: 1, 1: 3},
|
| 65 |
+
... {1: 0, 0: 2, 2: 3},
|
| 66 |
+
... {1: 0, 2: 2, 0: 3},
|
| 67 |
+
... {2: 0, 1: 2, 3: 3},
|
| 68 |
+
... {2: 0, 3: 2, 1: 3},
|
| 69 |
+
... ]
|
| 70 |
+
>>> answer == largest_common_subgraph
|
| 71 |
+
True
|
| 72 |
+
>>> largest_common_subgraph = list(ismags2.largest_common_subgraph(symmetry=False))
|
| 73 |
+
>>> answer = [
|
| 74 |
+
... {1: 0, 0: 1, 2: 2},
|
| 75 |
+
... {1: 0, 0: 1, 3: 2},
|
| 76 |
+
... {2: 0, 0: 1, 1: 2},
|
| 77 |
+
... {2: 0, 0: 1, 3: 2},
|
| 78 |
+
... {3: 0, 0: 1, 1: 2},
|
| 79 |
+
... {3: 0, 0: 1, 2: 2},
|
| 80 |
+
... {1: 1, 0: 2, 2: 3},
|
| 81 |
+
... {1: 1, 0: 2, 3: 3},
|
| 82 |
+
... {2: 1, 0: 2, 1: 3},
|
| 83 |
+
... {2: 1, 0: 2, 3: 3},
|
| 84 |
+
... {3: 1, 0: 2, 1: 3},
|
| 85 |
+
... {3: 1, 0: 2, 2: 3},
|
| 86 |
+
... ]
|
| 87 |
+
>>> answer == largest_common_subgraph
|
| 88 |
+
True
|
| 89 |
+
|
| 90 |
+
Notes
|
| 91 |
+
-----
|
| 92 |
+
- The current implementation works for undirected graphs only. The algorithm
|
| 93 |
+
in general should work for directed graphs as well though.
|
| 94 |
+
- Node keys for both provided graphs need to be fully orderable as well as
|
| 95 |
+
hashable.
|
| 96 |
+
- Node and edge equality is assumed to be transitive: if A is equal to B, and
|
| 97 |
+
B is equal to C, then A is equal to C.
|
| 98 |
+
|
| 99 |
+
References
|
| 100 |
+
----------
|
| 101 |
+
.. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
|
| 102 |
+
M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
|
| 103 |
+
Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
|
| 104 |
+
Enumeration", PLoS One 9(5): e97896, 2014.
|
| 105 |
+
https://doi.org/10.1371/journal.pone.0097896
|
| 106 |
+
.. [2] https://en.wikipedia.org/wiki/Maximum_common_induced_subgraph
|
| 107 |
+
"""
|
| 108 |
+
|
| 109 |
+
__all__ = ["ISMAGS"]
|
| 110 |
+
|
| 111 |
+
import itertools
|
| 112 |
+
from collections import Counter, defaultdict
|
| 113 |
+
from functools import reduce, wraps
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
def are_all_equal(iterable):
    """
    Returns ``True`` if and only if all elements in `iterable` are equal; and
    ``False`` otherwise.

    Parameters
    ----------
    iterable: collections.abc.Iterable
        The container whose elements will be checked.

    Returns
    -------
    bool
        ``True`` iff all elements in `iterable` compare equal, ``False``
        otherwise.  An empty iterable yields ``True``.
    """
    # Reject multidimensional array-likes (e.g. numpy arrays), whose `==`
    # is elementwise and would make the all() below misbehave.
    try:
        shape = iterable.shape
    except AttributeError:
        pass
    else:
        if len(shape) > 1:
            # Fixed grammar of the user-facing message ("does not works").
            message = "The function does not work on multidimensional arrays."
            raise NotImplementedError(message) from None

    # Compare every remaining element against the first one; vacuously
    # True for an empty iterable.
    iterator = iter(iterable)
    first = next(iterator, None)
    return all(item == first for item in iterator)
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def make_partitions(items, test):
    """
    Partitions items into sets based on the outcome of ``test(item1, item2)``.
    Pairs of items for which `test` returns `True` end up in the same set.

    Parameters
    ----------
    items : collections.abc.Iterable[collections.abc.Hashable]
        Items to partition
    test : collections.abc.Callable[collections.abc.Hashable, collections.abc.Hashable]
        A function that will be called with 2 arguments, taken from items.
        Should return `True` if those 2 items need to end up in the same
        partition, and `False` otherwise.

    Returns
    -------
    list[set]
        A list of sets, with each set containing part of the items in `items`,
        such that ``all(test(*pair) for pair in itertools.combinations(set, 2))
        == True``

    Notes
    -----
    The function `test` is assumed to be transitive: if ``test(a, b)`` and
    ``test(b, c)`` return ``True``, then ``test(a, c)`` must also be ``True``.
    """
    groups = []
    for element in items:
        # Because `test` is transitive, comparing against one arbitrary
        # representative of each existing group is sufficient.
        for group in groups:
            representative = next(iter(group))
            if test(element, representative):
                group.add(element)
                break
        else:
            # Matched no existing group: start a new one.
            groups.append({element})
    return groups
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
def partition_to_color(partitions):
    """
    Creates a dictionary that maps each item in each partition to the index of
    the partition to which it belongs.

    Parameters
    ----------
    partitions: collections.abc.Sequence[collections.abc.Iterable]
        As returned by :func:`make_partitions`.

    Returns
    -------
    dict
        ``{item: partition_index}`` for every item in every partition.
    """
    # The partition index doubles as the item's "color".
    return {
        item: index
        for index, partition in enumerate(partitions)
        for item in partition
    }
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
def intersect(collection_of_sets):
    """
    Given an collection of sets, returns the intersection of those sets.

    Parameters
    ----------
    collection_of_sets: collections.abc.Collection[set]
        A collection of sets.

    Returns
    -------
    set
        An intersection of all sets in `collection_of_sets`. Will have the same
        type as the item initially taken from `collection_of_sets`.
    """
    remaining = list(collection_of_sets)
    # Take the last set as the starting point; its type determines the
    # type of the result (e.g. set vs frozenset).
    last = remaining.pop()
    common = set(last)
    for other in remaining:
        common.intersection_update(other)
    return type(last)(common)
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
class ISMAGS:
|
| 227 |
+
"""
|
| 228 |
+
Implements the ISMAGS subgraph matching algorithm. [1]_ ISMAGS stands for
|
| 229 |
+
"Index-based Subgraph Matching Algorithm with General Symmetries". As the
|
| 230 |
+
name implies, it is symmetry aware and will only generate non-symmetric
|
| 231 |
+
isomorphisms.
|
| 232 |
+
|
| 233 |
+
Notes
|
| 234 |
+
-----
|
| 235 |
+
The implementation imposes additional conditions compared to the VF2
|
| 236 |
+
algorithm on the graphs provided and the comparison functions
|
| 237 |
+
(:attr:`node_equality` and :attr:`edge_equality`):
|
| 238 |
+
|
| 239 |
+
- Node keys in both graphs must be orderable as well as hashable.
|
| 240 |
+
- Equality must be transitive: if A is equal to B, and B is equal to C,
|
| 241 |
+
then A must be equal to C.
|
| 242 |
+
|
| 243 |
+
Attributes
|
| 244 |
+
----------
|
| 245 |
+
graph: networkx.Graph
|
| 246 |
+
subgraph: networkx.Graph
|
| 247 |
+
node_equality: collections.abc.Callable
|
| 248 |
+
The function called to see if two nodes should be considered equal.
|
| 249 |
+
It's signature looks like this:
|
| 250 |
+
``f(graph1: networkx.Graph, node1, graph2: networkx.Graph, node2) -> bool``.
|
| 251 |
+
`node1` is a node in `graph1`, and `node2` a node in `graph2`.
|
| 252 |
+
Constructed from the argument `node_match`.
|
| 253 |
+
edge_equality: collections.abc.Callable
|
| 254 |
+
The function called to see if two edges should be considered equal.
|
| 255 |
+
It's signature looks like this:
|
| 256 |
+
``f(graph1: networkx.Graph, edge1, graph2: networkx.Graph, edge2) -> bool``.
|
| 257 |
+
`edge1` is an edge in `graph1`, and `edge2` an edge in `graph2`.
|
| 258 |
+
Constructed from the argument `edge_match`.
|
| 259 |
+
|
| 260 |
+
References
|
| 261 |
+
----------
|
| 262 |
+
.. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
|
| 263 |
+
M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
|
| 264 |
+
Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
|
| 265 |
+
Enumeration", PLoS One 9(5): e97896, 2014.
|
| 266 |
+
https://doi.org/10.1371/journal.pone.0097896
|
| 267 |
+
"""
|
| 268 |
+
|
| 269 |
+
def __init__(self, graph, subgraph, node_match=None, edge_match=None, cache=None):
    """
    Set up an ISMAGS matcher for finding `subgraph` inside `graph`.

    Parameters
    ----------
    graph: networkx.Graph
    subgraph: networkx.Graph
    node_match: collections.abc.Callable or None
        Function used to determine whether two nodes are equivalent. Its
        signature should look like ``f(n1: dict, n2: dict) -> bool``, with
        `n1` and `n2` node property dicts. See also
        :func:`~networkx.algorithms.isomorphism.categorical_node_match` and
        friends.
        If `None`, all nodes are considered equal.
    edge_match: collections.abc.Callable or None
        Function used to determine whether two edges are equivalent. Its
        signature should look like ``f(e1: dict, e2: dict) -> bool``, with
        `e1` and `e2` edge property dicts. See also
        :func:`~networkx.algorithms.isomorphism.categorical_edge_match` and
        friends.
        If `None`, all edges are considered equal.
    cache: collections.abc.Mapping
        A cache used for caching graph symmetries.
    """
    # TODO: graph and subgraph setter methods that invalidate the caches.
    # TODO: allow for precomputed partitions and colors
    self.graph = graph
    self.subgraph = subgraph
    self._symmetry_cache = cache
    # Naming conventions are taken from the original paper. For your
    # sanity:
    #   sg: subgraph
    #   g: graph
    #   e: edge(s)
    #   n: node(s)
    # So: sgn means "subgraph nodes".
    # The trailing-underscore attributes below are lazy caches backing the
    # corresponding underscore-prefixed properties; None means "not yet
    # computed".
    self._sgn_partitions_ = None
    self._sge_partitions_ = None

    self._sgn_colors_ = None
    self._sge_colors_ = None

    self._gn_partitions_ = None
    self._ge_partitions_ = None

    self._gn_colors_ = None
    self._ge_colors_ = None

    self._node_compat_ = None
    self._edge_compat_ = None

    if node_match is None:
        # Trivial equality: one partition containing every node, and the
        # single subgraph color (0) is compatible with the single graph
        # color (0). Pre-filling the caches skips the partitioning work.
        self.node_equality = self._node_match_maker(lambda n1, n2: True)
        self._sgn_partitions_ = [set(self.subgraph.nodes)]
        self._gn_partitions_ = [set(self.graph.nodes)]
        self._node_compat_ = {0: 0}
    else:
        self.node_equality = self._node_match_maker(node_match)
    if edge_match is None:
        # Same shortcut for edges.
        self.edge_equality = self._edge_match_maker(lambda e1, e2: True)
        self._sge_partitions_ = [set(self.subgraph.edges)]
        self._ge_partitions_ = [set(self.graph.edges)]
        self._edge_compat_ = {0: 0}
    else:
        self.edge_equality = self._edge_match_maker(edge_match)
|
| 333 |
+
|
| 334 |
+
@property
def _sgn_partitions(self):
    """Equivalence classes of subgraph nodes under node equality (cached)."""
    if self._sgn_partitions_ is not None:
        return self._sgn_partitions_

    def same_node(n1, n2):
        # Compare two subgraph nodes with the user-supplied equality.
        return self.node_equality(self.subgraph, n1, self.subgraph, n2)

    self._sgn_partitions_ = make_partitions(self.subgraph.nodes, same_node)
    return self._sgn_partitions_
|
| 343 |
+
|
| 344 |
+
@property
def _sge_partitions(self):
    """Equivalence classes of subgraph edges under edge equality (cached)."""
    if self._sge_partitions_ is not None:
        return self._sge_partitions_

    def same_edge(e1, e2):
        # Compare two subgraph edges with the user-supplied equality.
        return self.edge_equality(self.subgraph, e1, self.subgraph, e2)

    self._sge_partitions_ = make_partitions(self.subgraph.edges, same_edge)
    return self._sge_partitions_
|
| 353 |
+
|
| 354 |
+
@property
def _gn_partitions(self):
    """Equivalence classes of graph nodes under node equality (cached)."""
    if self._gn_partitions_ is not None:
        return self._gn_partitions_

    def same_node(n1, n2):
        # Compare two graph nodes with the user-supplied equality.
        return self.node_equality(self.graph, n1, self.graph, n2)

    self._gn_partitions_ = make_partitions(self.graph.nodes, same_node)
    return self._gn_partitions_
|
| 363 |
+
|
| 364 |
+
@property
def _ge_partitions(self):
    """Equivalence classes of graph edges under edge equality (cached)."""
    if self._ge_partitions_ is not None:
        return self._ge_partitions_

    def same_edge(e1, e2):
        # Compare two graph edges with the user-supplied equality.
        return self.edge_equality(self.graph, e1, self.graph, e2)

    self._ge_partitions_ = make_partitions(self.graph.edges, same_edge)
    return self._ge_partitions_
|
| 373 |
+
|
| 374 |
+
@property
def _sgn_colors(self):
    """Mapping {subgraph node: partition index} (cached)."""
    if self._sgn_colors_ is not None:
        return self._sgn_colors_
    self._sgn_colors_ = partition_to_color(self._sgn_partitions)
    return self._sgn_colors_
|
| 379 |
+
|
| 380 |
+
@property
def _sge_colors(self):
    """Mapping {subgraph edge: partition index} (cached)."""
    if self._sge_colors_ is not None:
        return self._sge_colors_
    self._sge_colors_ = partition_to_color(self._sge_partitions)
    return self._sge_colors_
|
| 385 |
+
|
| 386 |
+
@property
def _gn_colors(self):
    """Mapping {graph node: partition index} (cached)."""
    if self._gn_colors_ is not None:
        return self._gn_colors_
    self._gn_colors_ = partition_to_color(self._gn_partitions)
    return self._gn_colors_
|
| 391 |
+
|
| 392 |
+
@property
def _ge_colors(self):
    """Mapping {graph edge: partition index} (cached)."""
    if self._ge_colors_ is not None:
        return self._ge_colors_
    self._ge_colors_ = partition_to_color(self._ge_partitions)
    return self._ge_colors_
|
| 397 |
+
|
| 398 |
+
@property
def _node_compatibility(self):
    """Mapping {subgraph node color: compatible graph node color} (cached)."""
    if self._node_compat_ is not None:
        return self._node_compat_
    compat = {}
    # One representative per partition suffices because equality is
    # assumed transitive.
    for (sg_color, sg_part), (g_color, g_part) in itertools.product(
        enumerate(self._sgn_partitions), enumerate(self._gn_partitions)
    ):
        sg_rep = next(iter(sg_part))
        g_rep = next(iter(g_part))
        if self.node_equality(self.subgraph, sg_rep, self.graph, g_rep):
            compat[sg_color] = g_color
    self._node_compat_ = compat
    return self._node_compat_
|
| 411 |
+
|
| 412 |
+
@property
def _edge_compatibility(self):
    """Mapping {subgraph edge color: compatible graph edge color} (cached)."""
    if self._edge_compat_ is not None:
        return self._edge_compat_
    compat = {}
    # One representative per partition suffices because equality is
    # assumed transitive.
    for (sg_color, sg_part), (g_color, g_part) in itertools.product(
        enumerate(self._sge_partitions), enumerate(self._ge_partitions)
    ):
        sg_rep = next(iter(sg_part))
        g_rep = next(iter(g_part))
        if self.edge_equality(self.subgraph, sg_rep, self.graph, g_rep):
            compat[sg_color] = g_color
    self._edge_compat_ = compat
    return self._edge_compat_
|
| 425 |
+
|
| 426 |
+
@staticmethod
|
| 427 |
+
def _node_match_maker(cmp):
|
| 428 |
+
@wraps(cmp)
|
| 429 |
+
def comparer(graph1, node1, graph2, node2):
|
| 430 |
+
return cmp(graph1.nodes[node1], graph2.nodes[node2])
|
| 431 |
+
|
| 432 |
+
return comparer
|
| 433 |
+
|
| 434 |
+
@staticmethod
|
| 435 |
+
def _edge_match_maker(cmp):
|
| 436 |
+
@wraps(cmp)
|
| 437 |
+
def comparer(graph1, edge1, graph2, edge2):
|
| 438 |
+
return cmp(graph1.edges[edge1], graph2.edges[edge2])
|
| 439 |
+
|
| 440 |
+
return comparer
|
| 441 |
+
|
| 442 |
+
def find_isomorphisms(self, symmetry=True):
    """Find all subgraph isomorphisms between subgraph and graph

    Finds isomorphisms where :attr:`subgraph` <= :attr:`graph`.

    Parameters
    ----------
    symmetry: bool
        Whether symmetry should be taken into account. If False, found
        isomorphisms may be symmetrically equivalent.

    Yields
    ------
    dict
        The found isomorphism mappings of {graph_node: subgraph_node}.
    """
    # The networkx VF2 algorithm is slightly funny in when it yields an
    # empty dict and when not. These guards reproduce that behavior.
    if not self.subgraph:
        yield {}
        return
    elif not self.graph:
        return
    elif len(self.graph) < len(self.subgraph):
        # subgraph can never fit in a smaller graph.
        return

    if symmetry:
        # Constraints are node orderings that break the subgraph's own
        # symmetry, so each isomorphism class is produced only once.
        _, cosets = self.analyze_symmetry(
            self.subgraph, self._sgn_partitions, self._sge_colors
        )
        constraints = self._make_constraints(cosets)
    else:
        constraints = []

    # candidates maps each subgraph node to a frozenset of frozensets of
    # graph nodes; the candidate set is the intersection of those inner
    # sets. Lookahead candidates add a further restriction.
    candidates = self._find_nodecolor_candidates()
    la_candidates = self._get_lookahead_candidates()
    for sgn in self.subgraph:
        extra_candidates = la_candidates[sgn]
        if extra_candidates:
            candidates[sgn] = candidates[sgn] | {frozenset(extra_candidates)}

    if any(candidates.values()):
        # Start the search at the subgraph node with the fewest options.
        start_sgn = min(candidates, key=lambda n: min(candidates[n], key=len))
        candidates[start_sgn] = (intersect(candidates[start_sgn]),)
        yield from self._map_nodes(start_sgn, candidates, constraints)
    else:
        return
|
| 489 |
+
|
| 490 |
+
@staticmethod
|
| 491 |
+
def _find_neighbor_color_count(graph, node, node_color, edge_color):
|
| 492 |
+
"""
|
| 493 |
+
For `node` in `graph`, count the number of edges of a specific color
|
| 494 |
+
it has to nodes of a specific color.
|
| 495 |
+
"""
|
| 496 |
+
counts = Counter()
|
| 497 |
+
neighbors = graph[node]
|
| 498 |
+
for neighbor in neighbors:
|
| 499 |
+
n_color = node_color[neighbor]
|
| 500 |
+
if (node, neighbor) in edge_color:
|
| 501 |
+
e_color = edge_color[node, neighbor]
|
| 502 |
+
else:
|
| 503 |
+
e_color = edge_color[neighbor, node]
|
| 504 |
+
counts[e_color, n_color] += 1
|
| 505 |
+
return counts
|
| 506 |
+
|
| 507 |
+
def _get_lookahead_candidates(self):
    """
    Returns a mapping of {subgraph node: collection of graph nodes} for
    which the graph nodes are feasible candidates for the subgraph node, as
    determined by looking ahead one edge.
    """
    # Per graph node: how many edges of each (edge color, neighbor color)
    # combination it has.
    g_counts = {}
    for gn in self.graph:
        g_counts[gn] = self._find_neighbor_color_count(
            self.graph, gn, self._gn_colors, self._ge_colors
        )
    candidates = defaultdict(set)
    for sgn in self.subgraph:
        sg_count = self._find_neighbor_color_count(
            self.subgraph, sgn, self._sgn_colors, self._sge_colors
        )
        # Translate subgraph color pairs to the corresponding graph color
        # pairs; color pairs without a compatible graph color are dropped
        # (they can never be matched anyway).
        new_sg_count = Counter()
        for (sge_color, sgn_color), count in sg_count.items():
            try:
                ge_color = self._edge_compatibility[sge_color]
                gn_color = self._node_compatibility[sgn_color]
            except KeyError:
                pass
            else:
                new_sg_count[ge_color, gn_color] = count

        # gn is feasible for sgn iff gn has at least as many edges of every
        # required color combination as sgn demands.
        for gn, g_count in g_counts.items():
            if all(new_sg_count[x] <= g_count[x] for x in new_sg_count):
                # Valid candidate
                candidates[sgn].add(gn)
    return candidates
|
| 538 |
+
|
| 539 |
+
def largest_common_subgraph(self, symmetry=True):
    """
    Find the largest common induced subgraphs between :attr:`subgraph` and
    :attr:`graph`.

    Parameters
    ----------
    symmetry: bool
        Whether symmetry should be taken into account. If False, found
        largest common subgraphs may be symmetrically equivalent.

    Yields
    ------
    dict
        The found isomorphism mappings of {graph_node: subgraph_node}.
    """
    # The networkx VF2 algorithm is slightly funny in when it yields an
    # empty dict and when not. These guards reproduce that behavior.
    if not self.subgraph:
        yield {}
        return
    elif not self.graph:
        return

    if symmetry:
        # Symmetry-breaking constraints, as in find_isomorphisms.
        _, cosets = self.analyze_symmetry(
            self.subgraph, self._sgn_partitions, self._sge_colors
        )
        constraints = self._make_constraints(cosets)
    else:
        constraints = []

    candidates = self._find_nodecolor_candidates()

    if any(candidates.values()):
        # Delegate to the recursive helper, which shrinks the set of
        # subgraph nodes to be mapped until isomorphisms are found.
        yield from self._largest_common_subgraph(candidates, constraints)
    else:
        return
|
| 577 |
+
|
| 578 |
+
def analyze_symmetry(self, graph, node_partitions, edge_colors):
    """
    Find a minimal set of permutations and corresponding co-sets that
    describe the symmetry of `graph`, given the node and edge equalities
    given by `node_partitions` and `edge_colors`, respectively.

    Parameters
    ----------
    graph : networkx.Graph
        The graph whose symmetry should be analyzed.
    node_partitions : list of sets
        A list of sets containing node keys. Node keys in the same set
        are considered equivalent. Every node key in `graph` should be in
        exactly one of the sets. If all nodes are equivalent, this should
        be ``[set(graph.nodes)]``.
    edge_colors : dict mapping edges to their colors
        A dict mapping every edge in `graph` to its corresponding color.
        Edges with the same color are considered equivalent. If all edges
        are equivalent, this should be ``{e: 0 for e in graph.edges}``.


    Returns
    -------
    set[frozenset]
        The found permutations. This is a set of frozensets of pairs of node
        keys which can be exchanged without changing :attr:`subgraph`.
    dict[collections.abc.Hashable, set[collections.abc.Hashable]]
        The found co-sets. The co-sets is a dictionary of
        ``{node key: set of node keys}``.
        Every key-value pair describes which ``values`` can be interchanged
        without changing nodes less than ``key``.
    """
    if self._symmetry_cache is not None:
        # Cache key covers everything the result depends on. Note that
        # ``key`` is only defined in this branch; the cache-store below is
        # guarded by the same condition, so that is safe.
        key = hash(
            (
                tuple(graph.nodes),
                tuple(graph.edges),
                tuple(map(tuple, node_partitions)),
                tuple(edge_colors.items()),
            )
        )
        if key in self._symmetry_cache:
            return self._symmetry_cache[key]
    # Refine the given partitions so that nodes in the same partition are
    # truly indistinguishable (same color and same colored-degree).
    node_partitions = list(
        self._refine_node_partitions(graph, node_partitions, edge_colors)
    )
    # Without branch=True, refinement is deterministic and yields exactly
    # one refined partition list.
    assert len(node_partitions) == 1
    node_partitions = node_partitions[0]
    permutations, cosets = self._process_ordered_pair_partitions(
        graph, node_partitions, node_partitions, edge_colors
    )
    if self._symmetry_cache is not None:
        self._symmetry_cache[key] = permutations, cosets
    return permutations, cosets
|
| 632 |
+
|
| 633 |
+
def is_isomorphic(self, symmetry=False):
    """
    Returns True if :attr:`graph` is isomorphic to :attr:`subgraph` and
    False otherwise.

    Returns
    -------
    bool
    """
    # A full isomorphism is a subgraph isomorphism between graphs of
    # equal size.
    same_size = len(self.subgraph) == len(self.graph)
    return same_size and self.subgraph_is_isomorphic(symmetry)
|
| 645 |
+
|
| 646 |
+
def subgraph_is_isomorphic(self, symmetry=False):
    """
    Returns True if a subgraph of :attr:`graph` is isomorphic to
    :attr:`subgraph` and False otherwise.

    Returns
    -------
    bool
    """
    # symmetry=False, since we only need to know whether there is any
    # example; figuring out all symmetry elements probably costs more time
    # than it gains.
    # A sentinel distinguishes "no isomorphism" from a falsy (empty-dict)
    # isomorphism.
    sentinel = object()
    found = next(self.subgraph_isomorphisms_iter(symmetry=symmetry), sentinel)
    return found is not sentinel
|
| 660 |
+
|
| 661 |
+
def isomorphisms_iter(self, symmetry=True):
    """
    Does the same as :meth:`find_isomorphisms` if :attr:`graph` and
    :attr:`subgraph` have the same number of nodes.
    """
    # Differently-sized graphs can never be fully isomorphic.
    if len(self.graph) != len(self.subgraph):
        return
    yield from self.subgraph_isomorphisms_iter(symmetry=symmetry)
|
| 668 |
+
|
| 669 |
+
def subgraph_isomorphisms_iter(self, symmetry=True):
    """Alternative name for :meth:`find_isomorphisms`."""
    # Thin alias kept for API parity with the VF2 matcher naming.
    return self.find_isomorphisms(symmetry)
|
| 672 |
+
|
| 673 |
+
def _find_nodecolor_candidates(self):
    """
    Per node in subgraph find all nodes in graph that have the same color.

    Returns ``{subgraph node: frozenset of frozensets of graph nodes}``;
    the inner frozenset holds the color-compatible graph nodes (empty if
    the subgraph node's color has no compatible graph color).
    """
    result = {}
    for sgn in self.subgraph.nodes:
        sgn_color = self._sgn_colors[sgn]
        gn_color = self._node_compatibility.get(sgn_color)
        if gn_color is None:
            options = frozenset()
        else:
            options = frozenset(self._gn_partitions[gn_color])
        result[sgn] = frozenset([options])
    return result
|
| 689 |
+
|
| 690 |
+
@staticmethod
|
| 691 |
+
def _make_constraints(cosets):
|
| 692 |
+
"""
|
| 693 |
+
Turn cosets into constraints.
|
| 694 |
+
"""
|
| 695 |
+
constraints = []
|
| 696 |
+
for node_i, node_ts in cosets.items():
|
| 697 |
+
for node_t in node_ts:
|
| 698 |
+
if node_i != node_t:
|
| 699 |
+
# Node i must be smaller than node t.
|
| 700 |
+
constraints.append((node_i, node_t))
|
| 701 |
+
return constraints
|
| 702 |
+
|
| 703 |
+
@staticmethod
|
| 704 |
+
def _find_node_edge_color(graph, node_colors, edge_colors):
|
| 705 |
+
"""
|
| 706 |
+
For every node in graph, come up with a color that combines 1) the
|
| 707 |
+
color of the node, and 2) the number of edges of a color to each type
|
| 708 |
+
of node.
|
| 709 |
+
"""
|
| 710 |
+
counts = defaultdict(lambda: defaultdict(int))
|
| 711 |
+
for node1, node2 in graph.edges:
|
| 712 |
+
if (node1, node2) in edge_colors:
|
| 713 |
+
# FIXME directed graphs
|
| 714 |
+
ecolor = edge_colors[node1, node2]
|
| 715 |
+
else:
|
| 716 |
+
ecolor = edge_colors[node2, node1]
|
| 717 |
+
# Count per node how many edges it has of what color to nodes of
|
| 718 |
+
# what color
|
| 719 |
+
counts[node1][ecolor, node_colors[node2]] += 1
|
| 720 |
+
counts[node2][ecolor, node_colors[node1]] += 1
|
| 721 |
+
|
| 722 |
+
node_edge_colors = {}
|
| 723 |
+
for node in graph.nodes:
|
| 724 |
+
node_edge_colors[node] = node_colors[node], set(counts[node].items())
|
| 725 |
+
|
| 726 |
+
return node_edge_colors
|
| 727 |
+
|
| 728 |
+
@staticmethod
|
| 729 |
+
def _get_permutations_by_length(items):
|
| 730 |
+
"""
|
| 731 |
+
Get all permutations of items, but only permute items with the same
|
| 732 |
+
length.
|
| 733 |
+
|
| 734 |
+
>>> found = list(ISMAGS._get_permutations_by_length([[1], [2], [3, 4], [4, 5]]))
|
| 735 |
+
>>> answer = [
|
| 736 |
+
... (([1], [2]), ([3, 4], [4, 5])),
|
| 737 |
+
... (([1], [2]), ([4, 5], [3, 4])),
|
| 738 |
+
... (([2], [1]), ([3, 4], [4, 5])),
|
| 739 |
+
... (([2], [1]), ([4, 5], [3, 4])),
|
| 740 |
+
... ]
|
| 741 |
+
>>> found == answer
|
| 742 |
+
True
|
| 743 |
+
"""
|
| 744 |
+
by_len = defaultdict(list)
|
| 745 |
+
for item in items:
|
| 746 |
+
by_len[len(item)].append(item)
|
| 747 |
+
|
| 748 |
+
yield from itertools.product(
|
| 749 |
+
*(itertools.permutations(by_len[l]) for l in sorted(by_len))
|
| 750 |
+
)
|
| 751 |
+
|
| 752 |
+
@classmethod
def _refine_node_partitions(cls, graph, node_partitions, edge_colors, branch=False):
    """
    Given a partition of nodes in graph, make the partitions smaller such
    that all nodes in a partition have 1) the same color, and 2) the same
    number of edges to specific other partitions.

    Yields refined partition lists; without `branch` exactly one list is
    yielded, with `branch` every qualifying ordering of equal-sized cells
    is explored.
    """

    def equal_color(node1, node2):
        # Nodes are equivalent when their combined (node color,
        # colored-degree) signatures match.
        return node_edge_colors[node1] == node_edge_colors[node2]

    node_partitions = list(node_partitions)
    node_colors = partition_to_color(node_partitions)
    node_edge_colors = cls._find_node_edge_color(graph, node_colors, edge_colors)
    # Fixed point reached: every cell is already homogeneous.
    if all(
        are_all_equal(node_edge_colors[node] for node in partition)
        for partition in node_partitions
    ):
        yield node_partitions
        return

    new_partitions = []
    output = [new_partitions]
    for partition in node_partitions:
        if not are_all_equal(node_edge_colors[node] for node in partition):
            refined = make_partitions(partition, equal_color)
            if (
                branch
                and len(refined) != 1
                and len({len(r) for r in refined}) != len([len(r) for r in refined])
            ):
                # This is where it breaks. There are multiple new cells
                # in refined with the same length, and their order
                # matters.
                # So option 1) Hit it with a big hammer and simply make all
                # orderings.
                permutations = cls._get_permutations_by_length(refined)
                new_output = []
                # NOTE(review): `permutations` is a generator, yet it is
                # iterated once per n_p; for len(output) > 1 later n_p's
                # would see it exhausted. Also `permutation[0]` uses only
                # the first length-group of each ordering. Both look
                # suspicious — confirm against upstream networkx before
                # changing; left byte-identical here.
                for n_p in output:
                    for permutation in permutations:
                        new_output.append(n_p + list(permutation[0]))
                output = new_output
            else:
                # Deterministic case: append refined cells sorted by size.
                for n_p in output:
                    n_p.extend(sorted(refined, key=len))
        else:
            # Cell already homogeneous; keep it as-is in every ordering.
            for n_p in output:
                n_p.append(partition)
    # Recurse until the fixed point is reached for every ordering.
    for n_p in output:
        yield from cls._refine_node_partitions(graph, n_p, edge_colors, branch)
|
| 802 |
+
|
| 803 |
+
def _edges_of_same_color(self, sgn1, sgn2):
    """
    Returns all edges in :attr:`graph` that have the same colour as the
    edge between sgn1 and sgn2 in :attr:`subgraph`.

    Returns an empty list when the subgraph edge color has no compatible
    graph edge color.
    """
    # Edge colors are stored under one orientation only; try both.
    # FIXME directed graphs
    try:
        sge_color = self._sge_colors[sgn1, sgn2]
    except KeyError:
        sge_color = self._sge_colors[sgn2, sgn1]
    try:
        ge_color = self._edge_compatibility[sge_color]
    except KeyError:
        return []
    return self._ge_partitions[ge_color]
|
| 819 |
+
|
| 820 |
+
def _map_nodes(self, sgn, candidates, constraints, mapping=None, to_be_mapped=None):
    """
    Find all subgraph isomorphisms honoring constraints.

    Recursively extends `mapping` ({subgraph node: graph node}) by trying
    every candidate graph node for `sgn`; yields completed mappings
    inverted to {graph node: subgraph node}.
    """
    if mapping is None:
        mapping = {}
    else:
        # Copy so sibling branches of the recursion don't see our choices.
        mapping = mapping.copy()
    if to_be_mapped is None:
        to_be_mapped = set(self.subgraph.nodes)

    # Note, we modify candidates here. Doesn't seem to affect results, but
    # remember this.
    # candidates = candidates.copy()
    sgn_candidates = intersect(candidates[sgn])
    candidates[sgn] = frozenset([sgn_candidates])
    for gn in sgn_candidates:
        # We're going to try to map sgn to gn.
        if gn in mapping.values() or sgn not in to_be_mapped:
            # gn is already mapped to something
            continue  # pragma: no cover

        # REDUCTION and COMBINATION
        mapping[sgn] = gn
        # BASECASE
        if to_be_mapped == set(mapping.keys()):
            # All requested subgraph nodes are mapped: emit the inverse.
            yield {v: k for k, v in mapping.items()}
            continue
        left_to_map = to_be_mapped - set(mapping.keys())

        new_candidates = candidates.copy()
        sgn_nbrs = set(self.subgraph[sgn])
        # Graph nodes NOT adjacent to gn; valid targets for subgraph nodes
        # not adjacent to sgn (induced-subgraph condition).
        not_gn_nbrs = set(self.graph.nodes) - set(self.graph[gn])
        for sgn2 in left_to_map:
            if sgn2 not in sgn_nbrs:
                gn2_options = not_gn_nbrs
            else:
                # Get all edges to gn of the right color:
                g_edges = self._edges_of_same_color(sgn, sgn2)
                # FIXME directed graphs
                # And all nodes involved in those which are connected to gn
                gn2_options = {n for e in g_edges for n in e if gn in e}
            # Node color compatibility should be taken care of by the
            # initial candidate lists made by find_subgraphs

            # Add gn2_options to the right collection. Since new_candidates
            # is a dict of frozensets of frozensets of node indices it's
            # a bit clunky. We can't do .add, and + also doesn't work. We
            # could do |, but I deem union to be clearer.
            new_candidates[sgn2] = new_candidates[sgn2].union(
                [frozenset(gn2_options)]
            )

            # Symmetry-breaking: constrained pairs must respect the node
            # ordering of the graph nodes they map to.
            if (sgn, sgn2) in constraints:
                gn2_options = {gn2 for gn2 in self.graph if gn2 > gn}
            elif (sgn2, sgn) in constraints:
                gn2_options = {gn2 for gn2 in self.graph if gn2 < gn}
            else:
                continue  # pragma: no cover
            new_candidates[sgn2] = new_candidates[sgn2].union(
                [frozenset(gn2_options)]
            )

        # The next node is the one that is unmapped and has fewest
        # candidates
        next_sgn = min(left_to_map, key=lambda n: min(new_candidates[n], key=len))
        yield from self._map_nodes(
            next_sgn,
            new_candidates,
            constraints,
            mapping=mapping,
            to_be_mapped=to_be_mapped,
        )
        # Unmap sgn-gn. Strictly not necessary since it'd get overwritten
        # when making a new mapping for sgn.
        # del mapping[sgn]
|
| 896 |
+
|
| 897 |
+
def _largest_common_subgraph(self, candidates, constraints, to_be_mapped=None):
    """
    Find all largest common subgraphs honoring constraints.

    Recursive generator: yields node mappings (subgraph node -> graph node)
    for the largest subsets of ``self.subgraph``'s nodes that can be embedded
    in ``self.graph``. ``to_be_mapped`` is a set of frozensets of subgraph
    nodes still considered "part of" the subgraph at this recursion depth.
    """
    if to_be_mapped is None:
        # Start from the full subgraph node set.
        to_be_mapped = {frozenset(self.subgraph.nodes)}

    # The LCS problem is basically a repeated subgraph isomorphism problem
    # with smaller and smaller subgraphs. We store the nodes that are
    # "part of" the subgraph in to_be_mapped, and we make it a little
    # smaller every iteration.

    # All members of to_be_mapped have the same size at a given depth, so
    # any one of them tells us the current candidate-subgraph size.
    current_size = len(next(iter(to_be_mapped), []))

    found_iso = False
    if current_size <= len(self.graph):
        # There's no point in trying to find isomorphisms of
        # graph >= subgraph if subgraph has more nodes than graph.

        # Try the isomorphism first with the nodes with lowest ID. So sort
        # them. Those are more likely to be part of the final
        # correspondence. This makes finding the first answer(s) faster. In
        # theory.
        for nodes in sorted(to_be_mapped, key=sorted):
            # Find the isomorphism between subgraph[to_be_mapped] <= graph
            next_sgn = min(nodes, key=lambda n: min(candidates[n], key=len))
            isomorphs = self._map_nodes(
                next_sgn, candidates, constraints, to_be_mapped=nodes
            )

            # This is effectively `yield from isomorphs`, except that we look
            # whether an item was yielded.
            try:
                item = next(isomorphs)
            except StopIteration:
                pass
            else:
                yield item
                yield from isomorphs
                found_iso = True

    # BASECASE
    if found_iso or current_size == 1:
        # Shrinking has no point because either 1) we end up with a smaller
        # common subgraph (and we want the largest), or 2) there'll be no
        # more subgraph.
        return

    left_to_be_mapped = set()
    for nodes in to_be_mapped:
        for sgn in nodes:
            # We're going to remove sgn from to_be_mapped, but subject to
            # symmetry constraints. We know that for every constraint we
            # have those subgraph nodes are equal. So whenever we would
            # remove the lower part of a constraint, remove the higher
            # instead. This is all dealth with by _remove_node. And because
            # left_to_be_mapped is a set, we don't do double work.

            # And finally, make the subgraph one node smaller.
            # REDUCTION
            new_nodes = self._remove_node(sgn, nodes, constraints)
            left_to_be_mapped.add(new_nodes)
    # COMBINATION
    yield from self._largest_common_subgraph(
        candidates, constraints, to_be_mapped=left_to_be_mapped
    )
|
| 964 |
+
@staticmethod
|
| 965 |
+
def _remove_node(node, nodes, constraints):
|
| 966 |
+
"""
|
| 967 |
+
Returns a new set where node has been removed from nodes, subject to
|
| 968 |
+
symmetry constraints. We know, that for every constraint we have
|
| 969 |
+
those subgraph nodes are equal. So whenever we would remove the
|
| 970 |
+
lower part of a constraint, remove the higher instead.
|
| 971 |
+
"""
|
| 972 |
+
while True:
|
| 973 |
+
for low, high in constraints:
|
| 974 |
+
if low == node and high in nodes:
|
| 975 |
+
node = high
|
| 976 |
+
break
|
| 977 |
+
else: # no break, couldn't find node in constraints
|
| 978 |
+
break
|
| 979 |
+
return frozenset(nodes - {node})
|
| 980 |
+
|
| 981 |
+
@staticmethod
|
| 982 |
+
def _find_permutations(top_partitions, bottom_partitions):
|
| 983 |
+
"""
|
| 984 |
+
Return the pairs of top/bottom partitions where the partitions are
|
| 985 |
+
different. Ensures that all partitions in both top and bottom
|
| 986 |
+
partitions have size 1.
|
| 987 |
+
"""
|
| 988 |
+
# Find permutations
|
| 989 |
+
permutations = set()
|
| 990 |
+
for top, bot in zip(top_partitions, bottom_partitions):
|
| 991 |
+
# top and bot have only one element
|
| 992 |
+
if len(top) != 1 or len(bot) != 1:
|
| 993 |
+
raise IndexError(
|
| 994 |
+
"Not all nodes are coupled. This is"
|
| 995 |
+
f" impossible: {top_partitions}, {bottom_partitions}"
|
| 996 |
+
)
|
| 997 |
+
if top != bot:
|
| 998 |
+
permutations.add(frozenset((next(iter(top)), next(iter(bot)))))
|
| 999 |
+
return permutations
|
| 1000 |
+
|
| 1001 |
+
@staticmethod
|
| 1002 |
+
def _update_orbits(orbits, permutations):
|
| 1003 |
+
"""
|
| 1004 |
+
Update orbits based on permutations. Orbits is modified in place.
|
| 1005 |
+
For every pair of items in permutations their respective orbits are
|
| 1006 |
+
merged.
|
| 1007 |
+
"""
|
| 1008 |
+
for permutation in permutations:
|
| 1009 |
+
node, node2 = permutation
|
| 1010 |
+
# Find the orbits that contain node and node2, and replace the
|
| 1011 |
+
# orbit containing node with the union
|
| 1012 |
+
first = second = None
|
| 1013 |
+
for idx, orbit in enumerate(orbits):
|
| 1014 |
+
if first is not None and second is not None:
|
| 1015 |
+
break
|
| 1016 |
+
if node in orbit:
|
| 1017 |
+
first = idx
|
| 1018 |
+
if node2 in orbit:
|
| 1019 |
+
second = idx
|
| 1020 |
+
if first != second:
|
| 1021 |
+
orbits[first].update(orbits[second])
|
| 1022 |
+
del orbits[second]
|
| 1023 |
+
|
| 1024 |
+
def _couple_nodes(
    self,
    top_partitions,
    bottom_partitions,
    pair_idx,
    t_node,
    b_node,
    graph,
    edge_colors,
):
    """
    Generate new partitions from top and bottom_partitions where t_node is
    coupled to b_node. pair_idx is the index of the partitions where t_ and
    b_node can be found.

    Yields ``(new_top_partitions, new_bottom_partition)`` pairs, one per
    refined bottom partitioning produced by ``_refine_node_partitions``.
    """
    t_partition = top_partitions[pair_idx]
    b_partition = bottom_partitions[pair_idx]
    assert t_node in t_partition and b_node in b_partition
    # Couple node to node2. This means they get their own partition
    new_top_partitions = [top.copy() for top in top_partitions]
    new_bottom_partitions = [bot.copy() for bot in bottom_partitions]
    # Split the pair_idx partition into the singled-out node and the rest.
    new_t_groups = {t_node}, t_partition - {t_node}
    new_b_groups = {b_node}, b_partition - {b_node}
    # Replace the old partitions with the coupled ones
    del new_top_partitions[pair_idx]
    del new_bottom_partitions[pair_idx]
    new_top_partitions[pair_idx:pair_idx] = new_t_groups
    new_bottom_partitions[pair_idx:pair_idx] = new_b_groups

    # Re-refine both sides; branch=True lets the bottom refinement fork
    # into multiple candidate partitionings. (NOTE(review): semantics of
    # _refine_node_partitions are not visible here — confirm in ismags.py.)
    new_top_partitions = self._refine_node_partitions(
        graph, new_top_partitions, edge_colors
    )
    new_bottom_partitions = self._refine_node_partitions(
        graph, new_bottom_partitions, edge_colors, branch=True
    )
    # The top refinement must be deterministic: exactly one result.
    new_top_partitions = list(new_top_partitions)
    assert len(new_top_partitions) == 1
    new_top_partitions = new_top_partitions[0]
    for bot in new_bottom_partitions:
        # Copy the top partitioning per yield so callers may mutate freely.
        yield list(new_top_partitions), bot
| 1065 |
+
def _process_ordered_pair_partitions(
    self,
    graph,
    top_partitions,
    bottom_partitions,
    edge_colors,
    orbits=None,
    cosets=None,
):
    """
    Processes ordered pair partitions as per the reference paper. Finds and
    returns all permutations and cosets that leave the graph unchanged.

    Returns a ``(permutations, cosets)`` tuple, where ``permutations`` is a
    list of sets of frozenset node pairs and ``cosets`` maps a node to a
    copy of its orbit at the moment all lower-ID nodes were mapped.
    """
    if orbits is None:
        orbits = [{node} for node in graph.nodes]
    else:
        # Note that we don't copy orbits when we are given one. This means
        # we leak information between the recursive branches. This is
        # intentional!
        orbits = orbits
    if cosets is None:
        cosets = {}
    else:
        # Cosets, by contrast, must NOT leak across branches: copy them.
        cosets = cosets.copy()

    # Sanity check: top/bottom partitions must pair up size-for-size.
    assert all(
        len(t_p) == len(b_p) for t_p, b_p in zip(top_partitions, bottom_partitions)
    )

    # BASECASE
    if all(len(top) == 1 for top in top_partitions):
        # All nodes are mapped
        permutations = self._find_permutations(top_partitions, bottom_partitions)
        self._update_orbits(orbits, permutations)
        if permutations:
            return [permutations], cosets
        else:
            return [], cosets

    permutations = []
    unmapped_nodes = {
        (node, idx)
        for idx, t_partition in enumerate(top_partitions)
        for node in t_partition
        if len(t_partition) > 1
    }
    # Pick the lowest-ID unmapped node (min over (node, idx) tuples).
    node, pair_idx = min(unmapped_nodes)
    b_partition = bottom_partitions[pair_idx]

    for node2 in sorted(b_partition):
        if len(b_partition) == 1:
            # Can never result in symmetry
            continue
        if node != node2 and any(
            node in orbit and node2 in orbit for orbit in orbits
        ):
            # Orbit prune branch
            continue
        # REDUCTION
        # Couple node to node2
        partitions = self._couple_nodes(
            top_partitions,
            bottom_partitions,
            pair_idx,
            node,
            node2,
            graph,
            edge_colors,
        )
        for opp in partitions:
            new_top_partitions, new_bottom_partitions = opp

            new_perms, new_cosets = self._process_ordered_pair_partitions(
                graph,
                new_top_partitions,
                new_bottom_partitions,
                edge_colors,
                orbits,
                cosets,
            )
            # COMBINATION
            permutations += new_perms
            cosets.update(new_cosets)

    # Nodes whose partition is a matched singleton are considered mapped.
    mapped = {
        k
        for top, bottom in zip(top_partitions, bottom_partitions)
        for k in top
        if len(top) == 1 and top == bottom
    }
    ks = {k for k in graph.nodes if k < node}
    # Have all nodes with ID < node been mapped?
    find_coset = ks <= mapped and node not in cosets
    if find_coset:
        # Find the orbit that contains node
        for orbit in orbits:
            if node in orbit:
                cosets[node] = orbit.copy()
    return permutations, cosets
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorph.py
ADDED
|
@@ -0,0 +1,249 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Graph isomorphism functions.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.exception import NetworkXError
|
| 7 |
+
|
| 8 |
+
__all__ = [
|
| 9 |
+
"could_be_isomorphic",
|
| 10 |
+
"fast_could_be_isomorphic",
|
| 11 |
+
"faster_could_be_isomorphic",
|
| 12 |
+
"is_isomorphic",
|
| 13 |
+
]
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@nx._dispatchable(graphs={"G1": 0, "G2": 1})
def could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.
    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
        The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Checks for matching degree, triangle, and number of cliques sequences.
    The triangle sequence contains the number of triangles each node is part of.
    The clique sequence contains for each node the number of maximal cliques
    involving that node.

    """
    # Local import so the module's import block stays untouched.
    from collections import Counter

    # Check global properties
    if G1.order() != G2.order():
        return False

    # Check local properties. Per node: degree, triangle count, and number
    # of maximal cliques the node belongs to.
    t1 = nx.triangles(G1)
    # One pass over all clique members instead of a per-node scan of every
    # clique (was O(nodes * cliques)). Counter returns 0 for absent keys,
    # matching the previous sum-based count.
    c1 = Counter(n for clique in nx.find_cliques(G1) for n in clique)
    props1 = sorted([d, t1[v], c1[v]] for v, d in G1.degree())

    t2 = nx.triangles(G2)
    c2 = Counter(n for clique in nx.find_cliques(G2) for n in clique)
    props2 = sorted([d, t2[v], c2[v]] for v, d in G2.degree())

    if props1 != props2:
        return False

    # OK...
    return True
|
| 61 |
+
graph_could_be_isomorphic = could_be_isomorphic
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
@nx._dispatchable(graphs={"G1": 0, "G2": 1})
def fast_could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.

    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
        The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Checks for matching degree and triangle sequences. The triangle
    sequence contains the number of triangles each node is part of.
    """
    # Graphs of different order can never be isomorphic.
    if G1.order() != G2.order():
        return False

    # Compare the sorted per-node (degree, triangle-count) profiles.
    tri1 = nx.triangles(G1)
    profile1 = sorted([deg, tri1[node]] for node, deg in G1.degree())

    tri2 = nx.triangles(G2)
    profile2 = sorted([deg, tri2[node]] for node, deg in G2.degree())

    return profile1 == profile2
|
| 102 |
+
fast_graph_could_be_isomorphic = fast_could_be_isomorphic
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
@nx._dispatchable(graphs={"G1": 0, "G2": 1})
def faster_could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.

    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
        The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Checks for matching degree sequences.
    """
    # Graphs of different order can never be isomorphic.
    if G1.order() != G2.order():
        return False

    # Identical sorted degree sequences are necessary (not sufficient).
    degrees1 = sorted(deg for _, deg in G1.degree())
    degrees2 = sorted(deg for _, deg in G2.degree())
    return degrees1 == degrees2
| 134 |
+
|
| 135 |
+
faster_graph_could_be_isomorphic = faster_could_be_isomorphic
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
@nx._dispatchable(
    graphs={"G1": 0, "G2": 1},
    preserve_edge_attrs="edge_match",
    preserve_node_attrs="node_match",
)
def is_isomorphic(G1, G2, node_match=None, edge_match=None):
    """Returns True if the graphs G1 and G2 are isomorphic and False otherwise.

    Parameters
    ----------
    G1, G2: graphs
        The two graphs G1 and G2 must be the same type.

    node_match : callable
        A function that returns True if node n1 in G1 and n2 in G2 should
        be considered equal during the isomorphism test.
        If node_match is not specified then node attributes are not considered.

        The function will be called like

           node_match(G1.nodes[n1], G2.nodes[n2]).

        That is, the function will receive the node attribute dictionaries
        for n1 and n2 as inputs.

    edge_match : callable
        A function that returns True if the edge attribute dictionary
        for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should
        be considered equal during the isomorphism test.  If edge_match is
        not specified then edge attributes are not considered.

        The function will be called like

           edge_match(G1[u1][v1], G2[u2][v2]).

        That is, the function will receive the edge attribute dictionaries
        of the edges under consideration.

    Notes
    -----
    Uses the vf2 algorithm [1]_.

    Examples
    --------
    >>> import networkx.algorithms.isomorphism as iso

    For digraphs G1 and G2, using 'weight' edge attribute (default: 1)

    >>> G1 = nx.DiGraph()
    >>> G2 = nx.DiGraph()
    >>> nx.add_path(G1, [1, 2, 3, 4], weight=1)
    >>> nx.add_path(G2, [10, 20, 30, 40], weight=2)
    >>> em = iso.numerical_edge_match("weight", 1)
    >>> nx.is_isomorphic(G1, G2)  # no weights considered
    True
    >>> nx.is_isomorphic(G1, G2, edge_match=em)  # match weights
    False

    For multidigraphs G1 and G2, using 'fill' node attribute (default: '')

    >>> G1 = nx.MultiDiGraph()
    >>> G2 = nx.MultiDiGraph()
    >>> G1.add_nodes_from([1, 2, 3], fill="red")
    >>> G2.add_nodes_from([10, 20, 30, 40], fill="red")
    >>> nx.add_path(G1, [1, 2, 3, 4], weight=3, linewidth=2.5)
    >>> nx.add_path(G2, [10, 20, 30, 40], weight=3)
    >>> nm = iso.categorical_node_match("fill", "red")
    >>> nx.is_isomorphic(G1, G2, node_match=nm)
    True

    For multidigraphs G1 and G2, using 'weight' edge attribute (default: 7)

    >>> G1.add_edge(1, 2, weight=7)
    1
    >>> G2.add_edge(10, 20)
    1
    >>> em = iso.numerical_multiedge_match("weight", 7, rtol=1e-6)
    >>> nx.is_isomorphic(G1, G2, edge_match=em)
    True

    For multigraphs G1 and G2, using 'weight' and 'linewidth' edge attributes
    with default values 7 and 2.5. Also using 'fill' node attribute with
    default value 'red'.

    >>> em = iso.numerical_multiedge_match(["weight", "linewidth"], [7, 2.5])
    >>> nm = iso.categorical_node_match("fill", "red")
    >>> nx.is_isomorphic(G1, G2, edge_match=em, node_match=nm)
    True

    See Also
    --------
    numerical_node_match, numerical_edge_match, numerical_multiedge_match
    categorical_node_match, categorical_edge_match, categorical_multiedge_match

    References
    ----------
    .. [1] L. P. Cordella, P. Foggia, C. Sansone, M. Vento,
       "An Improved Algorithm for Matching Large Graphs",
       3rd IAPR-TC15 Workshop  on Graph-based Representations in
       Pattern Recognition, Cuen, pp. 149-159, 2001.
       https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs
    """
    # Select the matcher class by directedness; mixing directed and
    # undirected inputs is an error.
    if G1.is_directed() and G2.is_directed():
        GM = nx.algorithms.isomorphism.DiGraphMatcher
    elif (not G1.is_directed()) and (not G2.is_directed()):
        GM = nx.algorithms.isomorphism.GraphMatcher
    else:
        raise NetworkXError("Graphs G1 and G2 are not of the same type.")

    gm = GM(G1, G2, node_match=node_match, edge_match=edge_match)

    # Delegate the actual VF2 search to the matcher instance.
    return gm.is_isomorphic()
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (187 Bytes). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphism.cpython-310.pyc
ADDED
|
Binary file (2.26 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphvf2.cpython-310.pyc
ADDED
|
Binary file (8.84 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_tree_isomorphism.cpython-310.pyc
ADDED
|
Binary file (4.62 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp_helpers.cpython-310.pyc
ADDED
|
Binary file (50.3 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2userfunc.cpython-310.pyc
ADDED
|
Binary file (7.13 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99
ADDED
|
Binary file (1.44 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99
ADDED
|
Binary file (1.44 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99
ADDED
|
Binary file (1.6 kB). View file
|
|
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py
ADDED
|
@@ -0,0 +1,327 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for ISMAGS isomorphism algorithm.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
from networkx.algorithms import isomorphism as iso
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def _matches_to_sets(matches):
|
| 12 |
+
"""
|
| 13 |
+
Helper function to facilitate comparing collections of dictionaries in
|
| 14 |
+
which order does not matter.
|
| 15 |
+
"""
|
| 16 |
+
return {frozenset(m.items()) for m in matches}
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class TestSelfIsomorphism:
    # Each entry is (node_data, edge_data) for a small symmetric graph.
    data = [
        (
            [
                (0, {"name": "a"}),
                (1, {"name": "a"}),
                (2, {"name": "b"}),
                (3, {"name": "b"}),
                (4, {"name": "a"}),
                (5, {"name": "a"}),
            ],
            [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5)],
        ),
        (range(1, 5), [(1, 2), (2, 4), (4, 3), (3, 1)]),
        (
            [],
            [
                (0, 1),
                (1, 2),
                (2, 3),
                (3, 4),
                (4, 5),
                (5, 0),
                (0, 6),
                (6, 7),
                (2, 8),
                (8, 9),
                (4, 10),
                (10, 11),
            ],
        ),
        ([], [(0, 1), (1, 2), (1, 4), (2, 3), (3, 5), (3, 6)]),
    ]

    def test_self_isomorphism(self):
        """
        For some small, symmetric graphs, make sure that 1) they are isomorphic
        to themselves, and 2) that only the identity mapping is found.
        """
        for node_data, edge_data in self.data:
            graph = nx.Graph()
            graph.add_nodes_from(node_data)
            graph.add_edges_from(edge_data)

            ismags = iso.ISMAGS(
                graph, graph, node_match=iso.categorical_node_match("name", None)
            )
            assert ismags.is_isomorphic()
            assert ismags.subgraph_is_isomorphic()
            # With symmetry handling on, only the identity survives.
            assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [
                {n: n for n in graph.nodes}
            ]

    def test_edgecase_self_isomorphism(self):
        """
        This edgecase is one of the cases in which it is hard to find all
        symmetry elements.
        """
        graph = nx.Graph()
        nx.add_path(graph, range(5))
        graph.add_edges_from([(2, 5), (5, 6)])

        ismags = iso.ISMAGS(graph, graph)
        ismags_answer = list(ismags.find_isomorphisms(True))
        assert ismags_answer == [{n: n for n in graph.nodes}]

        # Relabeling must not change the answer (still identity-only).
        graph = nx.relabel_nodes(graph, {0: 0, 1: 1, 2: 2, 3: 3, 4: 6, 5: 4, 6: 5})
        ismags = iso.ISMAGS(graph, graph)
        ismags_answer = list(ismags.find_isomorphisms(True))
        assert ismags_answer == [{n: n for n in graph.nodes}]

    def test_directed_self_isomorphism(self):
        """
        For some small, directed, symmetric graphs, make sure that 1) they are
        isomorphic to themselves, and 2) that only the identity mapping is
        found.
        """
        # NOTE(review): despite the name/docstring, this builds nx.Graph
        # (undirected), same as test_self_isomorphism — confirm upstream intent.
        for node_data, edge_data in self.data:
            graph = nx.Graph()
            graph.add_nodes_from(node_data)
            graph.add_edges_from(edge_data)

            ismags = iso.ISMAGS(
                graph, graph, node_match=iso.categorical_node_match("name", None)
            )
            assert ismags.is_isomorphic()
            assert ismags.subgraph_is_isomorphic()
            assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [
                {n: n for n in graph.nodes}
            ]
| 110 |
+
|
| 111 |
+
class TestSubgraphIsomorphism:
    def test_isomorphism(self):
        # g1: a 4-cycle; g2: the same 4-cycle with a pendant node on each
        # cycle node. With symmetry, only the identity embedding remains.
        g1 = nx.Graph()
        nx.add_cycle(g1, range(4))

        g2 = nx.Graph()
        nx.add_cycle(g2, range(4))
        g2.add_edges_from(list(zip(g2, range(4, 8))))
        ismags = iso.ISMAGS(g2, g1)
        assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [
            {n: n for n in g1.nodes}
        ]

    def test_isomorphism2(self):
        # g1: a 3-path; g2: the same path plus edge (1, 3) (a star on 1).
        g1 = nx.Graph()
        nx.add_path(g1, range(3))

        g2 = g1.copy()
        g2.add_edge(1, 3)

        ismags = iso.ISMAGS(g2, g1)
        matches = ismags.subgraph_isomorphisms_iter(symmetry=True)
        expected_symmetric = [
            {0: 0, 1: 1, 2: 2},
            {0: 0, 1: 1, 3: 2},
            {2: 0, 1: 1, 3: 2},
        ]
        assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric)

        # Without symmetry handling, each embedding also appears reversed.
        matches = ismags.subgraph_isomorphisms_iter(symmetry=False)
        expected_asymmetric = [
            {0: 2, 1: 1, 2: 0},
            {0: 2, 1: 1, 3: 0},
            {2: 2, 1: 1, 3: 0},
        ]
        assert _matches_to_sets(matches) == _matches_to_sets(
            expected_symmetric + expected_asymmetric
        )

    def test_labeled_nodes(self):
        # A node attribute on node 1 restricts the admissible embeddings.
        g1 = nx.Graph()
        nx.add_cycle(g1, range(3))
        g1.nodes[1]["attr"] = True

        g2 = g1.copy()
        g2.add_edge(1, 3)
        ismags = iso.ISMAGS(g2, g1, node_match=lambda x, y: x == y)
        matches = ismags.subgraph_isomorphisms_iter(symmetry=True)
        expected_symmetric = [{0: 0, 1: 1, 2: 2}]
        assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric)

        matches = ismags.subgraph_isomorphisms_iter(symmetry=False)
        expected_asymmetric = [{0: 2, 1: 1, 2: 0}]
        assert _matches_to_sets(matches) == _matches_to_sets(
            expected_symmetric + expected_asymmetric
        )

    def test_labeled_edges(self):
        # An edge attribute on (1, 2) restricts the admissible embeddings.
        g1 = nx.Graph()
        nx.add_cycle(g1, range(3))
        g1.edges[1, 2]["attr"] = True

        g2 = g1.copy()
        g2.add_edge(1, 3)
        ismags = iso.ISMAGS(g2, g1, edge_match=lambda x, y: x == y)
        matches = ismags.subgraph_isomorphisms_iter(symmetry=True)
        expected_symmetric = [{0: 0, 1: 1, 2: 2}]
        assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric)

        matches = ismags.subgraph_isomorphisms_iter(symmetry=False)
        expected_asymmetric = [{1: 2, 0: 0, 2: 1}]
        assert _matches_to_sets(matches) == _matches_to_sets(
            expected_symmetric + expected_asymmetric
        )
+
|
| 187 |
+
class TestWikipediaExample:
|
| 188 |
+
# Nodes 'a', 'b', 'c' and 'd' form a column.
|
| 189 |
+
# Nodes 'g', 'h', 'i' and 'j' form a column.
|
| 190 |
+
g1edges = [
|
| 191 |
+
["a", "g"],
|
| 192 |
+
["a", "h"],
|
| 193 |
+
["a", "i"],
|
| 194 |
+
["b", "g"],
|
| 195 |
+
["b", "h"],
|
| 196 |
+
["b", "j"],
|
| 197 |
+
["c", "g"],
|
| 198 |
+
["c", "i"],
|
| 199 |
+
["c", "j"],
|
| 200 |
+
["d", "h"],
|
| 201 |
+
["d", "i"],
|
| 202 |
+
["d", "j"],
|
| 203 |
+
]
|
| 204 |
+
|
| 205 |
+
# Nodes 1,2,3,4 form the clockwise corners of a large square.
|
| 206 |
+
# Nodes 5,6,7,8 form the clockwise corners of a small square
|
| 207 |
+
g2edges = [
|
| 208 |
+
[1, 2],
|
| 209 |
+
[2, 3],
|
| 210 |
+
[3, 4],
|
| 211 |
+
[4, 1],
|
| 212 |
+
[5, 6],
|
| 213 |
+
[6, 7],
|
| 214 |
+
[7, 8],
|
| 215 |
+
[8, 5],
|
| 216 |
+
[1, 5],
|
| 217 |
+
[2, 6],
|
| 218 |
+
[3, 7],
|
| 219 |
+
[4, 8],
|
| 220 |
+
]
|
| 221 |
+
|
| 222 |
+
def test_graph(self):
|
| 223 |
+
g1 = nx.Graph()
|
| 224 |
+
g2 = nx.Graph()
|
| 225 |
+
g1.add_edges_from(self.g1edges)
|
| 226 |
+
g2.add_edges_from(self.g2edges)
|
| 227 |
+
gm = iso.ISMAGS(g1, g2)
|
| 228 |
+
assert gm.is_isomorphic()
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
class TestLargestCommonSubgraph:
|
| 232 |
+
def test_mcis(self):
|
| 233 |
+
# Example graphs from DOI: 10.1002/spe.588
|
| 234 |
+
graph1 = nx.Graph()
|
| 235 |
+
graph1.add_edges_from([(1, 2), (2, 3), (2, 4), (3, 4), (4, 5)])
|
| 236 |
+
graph1.nodes[1]["color"] = 0
|
| 237 |
+
|
| 238 |
+
graph2 = nx.Graph()
|
| 239 |
+
graph2.add_edges_from(
|
| 240 |
+
[(1, 2), (2, 3), (2, 4), (3, 4), (3, 5), (5, 6), (5, 7), (6, 7)]
|
| 241 |
+
)
|
| 242 |
+
graph2.nodes[1]["color"] = 1
|
| 243 |
+
graph2.nodes[6]["color"] = 2
|
| 244 |
+
graph2.nodes[7]["color"] = 2
|
| 245 |
+
|
| 246 |
+
ismags = iso.ISMAGS(
|
| 247 |
+
graph1, graph2, node_match=iso.categorical_node_match("color", None)
|
| 248 |
+
)
|
| 249 |
+
assert list(ismags.subgraph_isomorphisms_iter(True)) == []
|
| 250 |
+
assert list(ismags.subgraph_isomorphisms_iter(False)) == []
|
| 251 |
+
found_mcis = _matches_to_sets(ismags.largest_common_subgraph())
|
| 252 |
+
expected = _matches_to_sets(
|
| 253 |
+
[{2: 2, 3: 4, 4: 3, 5: 5}, {2: 4, 3: 2, 4: 3, 5: 5}]
|
| 254 |
+
)
|
| 255 |
+
assert expected == found_mcis
|
| 256 |
+
|
| 257 |
+
ismags = iso.ISMAGS(
|
| 258 |
+
graph2, graph1, node_match=iso.categorical_node_match("color", None)
|
| 259 |
+
)
|
| 260 |
+
assert list(ismags.subgraph_isomorphisms_iter(True)) == []
|
| 261 |
+
assert list(ismags.subgraph_isomorphisms_iter(False)) == []
|
| 262 |
+
found_mcis = _matches_to_sets(ismags.largest_common_subgraph())
|
| 263 |
+
# Same answer, but reversed.
|
| 264 |
+
expected = _matches_to_sets(
|
| 265 |
+
[{2: 2, 3: 4, 4: 3, 5: 5}, {4: 2, 2: 3, 3: 4, 5: 5}]
|
| 266 |
+
)
|
| 267 |
+
assert expected == found_mcis
|
| 268 |
+
|
| 269 |
+
def test_symmetry_mcis(self):
|
| 270 |
+
graph1 = nx.Graph()
|
| 271 |
+
nx.add_path(graph1, range(4))
|
| 272 |
+
|
| 273 |
+
graph2 = nx.Graph()
|
| 274 |
+
nx.add_path(graph2, range(3))
|
| 275 |
+
graph2.add_edge(1, 3)
|
| 276 |
+
|
| 277 |
+
# Only the symmetry of graph2 is taken into account here.
|
| 278 |
+
ismags1 = iso.ISMAGS(
|
| 279 |
+
graph1, graph2, node_match=iso.categorical_node_match("color", None)
|
| 280 |
+
)
|
| 281 |
+
assert list(ismags1.subgraph_isomorphisms_iter(True)) == []
|
| 282 |
+
found_mcis = _matches_to_sets(ismags1.largest_common_subgraph())
|
| 283 |
+
expected = _matches_to_sets([{0: 0, 1: 1, 2: 2}, {1: 0, 3: 2, 2: 1}])
|
| 284 |
+
assert expected == found_mcis
|
| 285 |
+
|
| 286 |
+
# Only the symmetry of graph1 is taken into account here.
|
| 287 |
+
ismags2 = iso.ISMAGS(
|
| 288 |
+
graph2, graph1, node_match=iso.categorical_node_match("color", None)
|
| 289 |
+
)
|
| 290 |
+
assert list(ismags2.subgraph_isomorphisms_iter(True)) == []
|
| 291 |
+
found_mcis = _matches_to_sets(ismags2.largest_common_subgraph())
|
| 292 |
+
expected = _matches_to_sets(
|
| 293 |
+
[
|
| 294 |
+
{3: 2, 0: 0, 1: 1},
|
| 295 |
+
{2: 0, 0: 2, 1: 1},
|
| 296 |
+
{3: 0, 0: 2, 1: 1},
|
| 297 |
+
{3: 0, 1: 1, 2: 2},
|
| 298 |
+
{0: 0, 1: 1, 2: 2},
|
| 299 |
+
{2: 0, 3: 2, 1: 1},
|
| 300 |
+
]
|
| 301 |
+
)
|
| 302 |
+
|
| 303 |
+
assert expected == found_mcis
|
| 304 |
+
|
| 305 |
+
found_mcis1 = _matches_to_sets(ismags1.largest_common_subgraph(False))
|
| 306 |
+
found_mcis2 = ismags2.largest_common_subgraph(False)
|
| 307 |
+
found_mcis2 = [{v: k for k, v in d.items()} for d in found_mcis2]
|
| 308 |
+
found_mcis2 = _matches_to_sets(found_mcis2)
|
| 309 |
+
|
| 310 |
+
expected = _matches_to_sets(
|
| 311 |
+
[
|
| 312 |
+
{3: 2, 1: 3, 2: 1},
|
| 313 |
+
{2: 0, 0: 2, 1: 1},
|
| 314 |
+
{1: 2, 3: 3, 2: 1},
|
| 315 |
+
{3: 0, 1: 3, 2: 1},
|
| 316 |
+
{0: 2, 2: 3, 1: 1},
|
| 317 |
+
{3: 0, 1: 2, 2: 1},
|
| 318 |
+
{2: 0, 0: 3, 1: 1},
|
| 319 |
+
{0: 0, 2: 3, 1: 1},
|
| 320 |
+
{1: 0, 3: 3, 2: 1},
|
| 321 |
+
{1: 0, 3: 2, 2: 1},
|
| 322 |
+
{0: 3, 1: 1, 2: 2},
|
| 323 |
+
{0: 0, 1: 1, 2: 2},
|
| 324 |
+
]
|
| 325 |
+
)
|
| 326 |
+
assert expected == found_mcis1
|
| 327 |
+
assert expected == found_mcis2
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.algorithms import isomorphism as iso
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class TestIsomorph:
|
| 8 |
+
@classmethod
|
| 9 |
+
def setup_class(cls):
|
| 10 |
+
cls.G1 = nx.Graph()
|
| 11 |
+
cls.G2 = nx.Graph()
|
| 12 |
+
cls.G3 = nx.Graph()
|
| 13 |
+
cls.G4 = nx.Graph()
|
| 14 |
+
cls.G5 = nx.Graph()
|
| 15 |
+
cls.G6 = nx.Graph()
|
| 16 |
+
cls.G1.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 3]])
|
| 17 |
+
cls.G2.add_edges_from([[10, 20], [20, 30], [10, 30], [10, 50]])
|
| 18 |
+
cls.G3.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 5]])
|
| 19 |
+
cls.G4.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 4]])
|
| 20 |
+
cls.G5.add_edges_from([[1, 2], [1, 3]])
|
| 21 |
+
cls.G6.add_edges_from([[10, 20], [20, 30], [10, 30], [10, 50], [20, 50]])
|
| 22 |
+
|
| 23 |
+
def test_could_be_isomorphic(self):
|
| 24 |
+
assert iso.could_be_isomorphic(self.G1, self.G2)
|
| 25 |
+
assert iso.could_be_isomorphic(self.G1, self.G3)
|
| 26 |
+
assert not iso.could_be_isomorphic(self.G1, self.G4)
|
| 27 |
+
assert iso.could_be_isomorphic(self.G3, self.G2)
|
| 28 |
+
assert not iso.could_be_isomorphic(self.G1, self.G6)
|
| 29 |
+
|
| 30 |
+
def test_fast_could_be_isomorphic(self):
|
| 31 |
+
assert iso.fast_could_be_isomorphic(self.G3, self.G2)
|
| 32 |
+
assert not iso.fast_could_be_isomorphic(self.G3, self.G5)
|
| 33 |
+
assert not iso.fast_could_be_isomorphic(self.G1, self.G6)
|
| 34 |
+
|
| 35 |
+
def test_faster_could_be_isomorphic(self):
|
| 36 |
+
assert iso.faster_could_be_isomorphic(self.G3, self.G2)
|
| 37 |
+
assert not iso.faster_could_be_isomorphic(self.G3, self.G5)
|
| 38 |
+
assert not iso.faster_could_be_isomorphic(self.G1, self.G6)
|
| 39 |
+
|
| 40 |
+
def test_is_isomorphic(self):
|
| 41 |
+
assert iso.is_isomorphic(self.G1, self.G2)
|
| 42 |
+
assert not iso.is_isomorphic(self.G1, self.G4)
|
| 43 |
+
assert iso.is_isomorphic(self.G1.to_directed(), self.G2.to_directed())
|
| 44 |
+
assert not iso.is_isomorphic(self.G1.to_directed(), self.G4.to_directed())
|
| 45 |
+
with pytest.raises(
|
| 46 |
+
nx.NetworkXError, match="Graphs G1 and G2 are not of the same type."
|
| 47 |
+
):
|
| 48 |
+
iso.is_isomorphic(self.G1.to_directed(), self.G1)
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py
ADDED
|
@@ -0,0 +1,212 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for the temporal aspect of the Temporal VF2 isomorphism algorithm.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from datetime import date, datetime, timedelta
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
from networkx.algorithms import isomorphism as iso
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def provide_g1_edgelist():
|
| 12 |
+
return [(0, 1), (0, 2), (1, 2), (2, 4), (1, 3), (3, 4), (4, 5)]
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def put_same_time(G, att_name):
|
| 16 |
+
for e in G.edges(data=True):
|
| 17 |
+
e[2][att_name] = date(2015, 1, 1)
|
| 18 |
+
return G
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def put_same_datetime(G, att_name):
|
| 22 |
+
for e in G.edges(data=True):
|
| 23 |
+
e[2][att_name] = datetime(2015, 1, 1)
|
| 24 |
+
return G
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def put_sequence_time(G, att_name):
|
| 28 |
+
current_date = date(2015, 1, 1)
|
| 29 |
+
for e in G.edges(data=True):
|
| 30 |
+
current_date += timedelta(days=1)
|
| 31 |
+
e[2][att_name] = current_date
|
| 32 |
+
return G
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def put_time_config_0(G, att_name):
|
| 36 |
+
G[0][1][att_name] = date(2015, 1, 2)
|
| 37 |
+
G[0][2][att_name] = date(2015, 1, 2)
|
| 38 |
+
G[1][2][att_name] = date(2015, 1, 3)
|
| 39 |
+
G[1][3][att_name] = date(2015, 1, 1)
|
| 40 |
+
G[2][4][att_name] = date(2015, 1, 1)
|
| 41 |
+
G[3][4][att_name] = date(2015, 1, 3)
|
| 42 |
+
G[4][5][att_name] = date(2015, 1, 3)
|
| 43 |
+
return G
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def put_time_config_1(G, att_name):
|
| 47 |
+
G[0][1][att_name] = date(2015, 1, 2)
|
| 48 |
+
G[0][2][att_name] = date(2015, 1, 1)
|
| 49 |
+
G[1][2][att_name] = date(2015, 1, 3)
|
| 50 |
+
G[1][3][att_name] = date(2015, 1, 1)
|
| 51 |
+
G[2][4][att_name] = date(2015, 1, 2)
|
| 52 |
+
G[3][4][att_name] = date(2015, 1, 4)
|
| 53 |
+
G[4][5][att_name] = date(2015, 1, 3)
|
| 54 |
+
return G
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def put_time_config_2(G, att_name):
|
| 58 |
+
G[0][1][att_name] = date(2015, 1, 1)
|
| 59 |
+
G[0][2][att_name] = date(2015, 1, 1)
|
| 60 |
+
G[1][2][att_name] = date(2015, 1, 3)
|
| 61 |
+
G[1][3][att_name] = date(2015, 1, 2)
|
| 62 |
+
G[2][4][att_name] = date(2015, 1, 2)
|
| 63 |
+
G[3][4][att_name] = date(2015, 1, 3)
|
| 64 |
+
G[4][5][att_name] = date(2015, 1, 2)
|
| 65 |
+
return G
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
class TestTimeRespectingGraphMatcher:
|
| 69 |
+
"""
|
| 70 |
+
A test class for the undirected temporal graph matcher.
|
| 71 |
+
"""
|
| 72 |
+
|
| 73 |
+
def provide_g1_topology(self):
|
| 74 |
+
G1 = nx.Graph()
|
| 75 |
+
G1.add_edges_from(provide_g1_edgelist())
|
| 76 |
+
return G1
|
| 77 |
+
|
| 78 |
+
def provide_g2_path_3edges(self):
|
| 79 |
+
G2 = nx.Graph()
|
| 80 |
+
G2.add_edges_from([(0, 1), (1, 2), (2, 3)])
|
| 81 |
+
return G2
|
| 82 |
+
|
| 83 |
+
def test_timdelta_zero_timeRespecting_returnsTrue(self):
|
| 84 |
+
G1 = self.provide_g1_topology()
|
| 85 |
+
temporal_name = "date"
|
| 86 |
+
G1 = put_same_time(G1, temporal_name)
|
| 87 |
+
G2 = self.provide_g2_path_3edges()
|
| 88 |
+
d = timedelta()
|
| 89 |
+
gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
|
| 90 |
+
assert gm.subgraph_is_isomorphic()
|
| 91 |
+
|
| 92 |
+
def test_timdelta_zero_datetime_timeRespecting_returnsTrue(self):
|
| 93 |
+
G1 = self.provide_g1_topology()
|
| 94 |
+
temporal_name = "date"
|
| 95 |
+
G1 = put_same_datetime(G1, temporal_name)
|
| 96 |
+
G2 = self.provide_g2_path_3edges()
|
| 97 |
+
d = timedelta()
|
| 98 |
+
gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
|
| 99 |
+
assert gm.subgraph_is_isomorphic()
|
| 100 |
+
|
| 101 |
+
def test_attNameStrange_timdelta_zero_timeRespecting_returnsTrue(self):
|
| 102 |
+
G1 = self.provide_g1_topology()
|
| 103 |
+
temporal_name = "strange_name"
|
| 104 |
+
G1 = put_same_time(G1, temporal_name)
|
| 105 |
+
G2 = self.provide_g2_path_3edges()
|
| 106 |
+
d = timedelta()
|
| 107 |
+
gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
|
| 108 |
+
assert gm.subgraph_is_isomorphic()
|
| 109 |
+
|
| 110 |
+
def test_notTimeRespecting_returnsFalse(self):
|
| 111 |
+
G1 = self.provide_g1_topology()
|
| 112 |
+
temporal_name = "date"
|
| 113 |
+
G1 = put_sequence_time(G1, temporal_name)
|
| 114 |
+
G2 = self.provide_g2_path_3edges()
|
| 115 |
+
d = timedelta()
|
| 116 |
+
gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
|
| 117 |
+
assert not gm.subgraph_is_isomorphic()
|
| 118 |
+
|
| 119 |
+
def test_timdelta_one_config0_returns_no_embeddings(self):
|
| 120 |
+
G1 = self.provide_g1_topology()
|
| 121 |
+
temporal_name = "date"
|
| 122 |
+
G1 = put_time_config_0(G1, temporal_name)
|
| 123 |
+
G2 = self.provide_g2_path_3edges()
|
| 124 |
+
d = timedelta(days=1)
|
| 125 |
+
gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
|
| 126 |
+
count_match = len(list(gm.subgraph_isomorphisms_iter()))
|
| 127 |
+
assert count_match == 0
|
| 128 |
+
|
| 129 |
+
def test_timdelta_one_config1_returns_four_embedding(self):
|
| 130 |
+
G1 = self.provide_g1_topology()
|
| 131 |
+
temporal_name = "date"
|
| 132 |
+
G1 = put_time_config_1(G1, temporal_name)
|
| 133 |
+
G2 = self.provide_g2_path_3edges()
|
| 134 |
+
d = timedelta(days=1)
|
| 135 |
+
gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
|
| 136 |
+
count_match = len(list(gm.subgraph_isomorphisms_iter()))
|
| 137 |
+
assert count_match == 4
|
| 138 |
+
|
| 139 |
+
def test_timdelta_one_config2_returns_ten_embeddings(self):
|
| 140 |
+
G1 = self.provide_g1_topology()
|
| 141 |
+
temporal_name = "date"
|
| 142 |
+
G1 = put_time_config_2(G1, temporal_name)
|
| 143 |
+
G2 = self.provide_g2_path_3edges()
|
| 144 |
+
d = timedelta(days=1)
|
| 145 |
+
gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
|
| 146 |
+
L = list(gm.subgraph_isomorphisms_iter())
|
| 147 |
+
count_match = len(list(gm.subgraph_isomorphisms_iter()))
|
| 148 |
+
assert count_match == 10
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
class TestDiTimeRespectingGraphMatcher:
|
| 152 |
+
"""
|
| 153 |
+
A test class for the directed time-respecting graph matcher.
|
| 154 |
+
"""
|
| 155 |
+
|
| 156 |
+
def provide_g1_topology(self):
|
| 157 |
+
G1 = nx.DiGraph()
|
| 158 |
+
G1.add_edges_from(provide_g1_edgelist())
|
| 159 |
+
return G1
|
| 160 |
+
|
| 161 |
+
def provide_g2_path_3edges(self):
|
| 162 |
+
G2 = nx.DiGraph()
|
| 163 |
+
G2.add_edges_from([(0, 1), (1, 2), (2, 3)])
|
| 164 |
+
return G2
|
| 165 |
+
|
| 166 |
+
def test_timdelta_zero_same_dates_returns_true(self):
|
| 167 |
+
G1 = self.provide_g1_topology()
|
| 168 |
+
temporal_name = "date"
|
| 169 |
+
G1 = put_same_time(G1, temporal_name)
|
| 170 |
+
G2 = self.provide_g2_path_3edges()
|
| 171 |
+
d = timedelta()
|
| 172 |
+
gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
|
| 173 |
+
assert gm.subgraph_is_isomorphic()
|
| 174 |
+
|
| 175 |
+
def test_attNameStrange_timdelta_zero_same_dates_returns_true(self):
|
| 176 |
+
G1 = self.provide_g1_topology()
|
| 177 |
+
temporal_name = "strange"
|
| 178 |
+
G1 = put_same_time(G1, temporal_name)
|
| 179 |
+
G2 = self.provide_g2_path_3edges()
|
| 180 |
+
d = timedelta()
|
| 181 |
+
gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
|
| 182 |
+
assert gm.subgraph_is_isomorphic()
|
| 183 |
+
|
| 184 |
+
def test_timdelta_one_config0_returns_no_embeddings(self):
|
| 185 |
+
G1 = self.provide_g1_topology()
|
| 186 |
+
temporal_name = "date"
|
| 187 |
+
G1 = put_time_config_0(G1, temporal_name)
|
| 188 |
+
G2 = self.provide_g2_path_3edges()
|
| 189 |
+
d = timedelta(days=1)
|
| 190 |
+
gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
|
| 191 |
+
count_match = len(list(gm.subgraph_isomorphisms_iter()))
|
| 192 |
+
assert count_match == 0
|
| 193 |
+
|
| 194 |
+
def test_timdelta_one_config1_returns_one_embedding(self):
|
| 195 |
+
G1 = self.provide_g1_topology()
|
| 196 |
+
temporal_name = "date"
|
| 197 |
+
G1 = put_time_config_1(G1, temporal_name)
|
| 198 |
+
G2 = self.provide_g2_path_3edges()
|
| 199 |
+
d = timedelta(days=1)
|
| 200 |
+
gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
|
| 201 |
+
count_match = len(list(gm.subgraph_isomorphisms_iter()))
|
| 202 |
+
assert count_match == 1
|
| 203 |
+
|
| 204 |
+
def test_timdelta_one_config2_returns_two_embeddings(self):
|
| 205 |
+
G1 = self.provide_g1_topology()
|
| 206 |
+
temporal_name = "date"
|
| 207 |
+
G1 = put_time_config_2(G1, temporal_name)
|
| 208 |
+
G2 = self.provide_g2_path_3edges()
|
| 209 |
+
d = timedelta(days=1)
|
| 210 |
+
gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
|
| 211 |
+
count_match = len(list(gm.subgraph_isomorphisms_iter()))
|
| 212 |
+
assert count_match == 2
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py
ADDED
|
@@ -0,0 +1,292 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import random
|
| 2 |
+
import time
|
| 3 |
+
|
| 4 |
+
import pytest
|
| 5 |
+
|
| 6 |
+
import networkx as nx
|
| 7 |
+
from networkx.algorithms.isomorphism.tree_isomorphism import (
|
| 8 |
+
rooted_tree_isomorphism,
|
| 9 |
+
tree_isomorphism,
|
| 10 |
+
)
|
| 11 |
+
from networkx.classes.function import is_directed
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph))
|
| 15 |
+
def test_tree_isomorphism_raises_on_directed_and_multigraphs(graph_constructor):
|
| 16 |
+
t1 = graph_constructor([(0, 1)])
|
| 17 |
+
t2 = graph_constructor([(1, 2)])
|
| 18 |
+
with pytest.raises(nx.NetworkXNotImplemented):
|
| 19 |
+
nx.isomorphism.tree_isomorphism(t1, t2)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
# have this work for graph
|
| 23 |
+
# given two trees (either the directed or undirected)
|
| 24 |
+
# transform t2 according to the isomorphism
|
| 25 |
+
# and confirm it is identical to t1
|
| 26 |
+
# randomize the order of the edges when constructing
|
| 27 |
+
def check_isomorphism(t1, t2, isomorphism):
|
| 28 |
+
# get the name of t1, given the name in t2
|
| 29 |
+
mapping = {v2: v1 for (v1, v2) in isomorphism}
|
| 30 |
+
|
| 31 |
+
# these should be the same
|
| 32 |
+
d1 = is_directed(t1)
|
| 33 |
+
d2 = is_directed(t2)
|
| 34 |
+
assert d1 == d2
|
| 35 |
+
|
| 36 |
+
edges_1 = []
|
| 37 |
+
for u, v in t1.edges():
|
| 38 |
+
if d1:
|
| 39 |
+
edges_1.append((u, v))
|
| 40 |
+
else:
|
| 41 |
+
# if not directed, then need to
|
| 42 |
+
# put the edge in a consistent direction
|
| 43 |
+
if u < v:
|
| 44 |
+
edges_1.append((u, v))
|
| 45 |
+
else:
|
| 46 |
+
edges_1.append((v, u))
|
| 47 |
+
|
| 48 |
+
edges_2 = []
|
| 49 |
+
for u, v in t2.edges():
|
| 50 |
+
# translate to names for t1
|
| 51 |
+
u = mapping[u]
|
| 52 |
+
v = mapping[v]
|
| 53 |
+
if d2:
|
| 54 |
+
edges_2.append((u, v))
|
| 55 |
+
else:
|
| 56 |
+
if u < v:
|
| 57 |
+
edges_2.append((u, v))
|
| 58 |
+
else:
|
| 59 |
+
edges_2.append((v, u))
|
| 60 |
+
|
| 61 |
+
return sorted(edges_1) == sorted(edges_2)
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def test_hardcoded():
|
| 65 |
+
print("hardcoded test")
|
| 66 |
+
|
| 67 |
+
# define a test problem
|
| 68 |
+
edges_1 = [
|
| 69 |
+
("a", "b"),
|
| 70 |
+
("a", "c"),
|
| 71 |
+
("a", "d"),
|
| 72 |
+
("b", "e"),
|
| 73 |
+
("b", "f"),
|
| 74 |
+
("e", "j"),
|
| 75 |
+
("e", "k"),
|
| 76 |
+
("c", "g"),
|
| 77 |
+
("c", "h"),
|
| 78 |
+
("g", "m"),
|
| 79 |
+
("d", "i"),
|
| 80 |
+
("f", "l"),
|
| 81 |
+
]
|
| 82 |
+
|
| 83 |
+
edges_2 = [
|
| 84 |
+
("v", "y"),
|
| 85 |
+
("v", "z"),
|
| 86 |
+
("u", "x"),
|
| 87 |
+
("q", "u"),
|
| 88 |
+
("q", "v"),
|
| 89 |
+
("p", "t"),
|
| 90 |
+
("n", "p"),
|
| 91 |
+
("n", "q"),
|
| 92 |
+
("n", "o"),
|
| 93 |
+
("o", "r"),
|
| 94 |
+
("o", "s"),
|
| 95 |
+
("s", "w"),
|
| 96 |
+
]
|
| 97 |
+
|
| 98 |
+
# there are two possible correct isomorphisms
|
| 99 |
+
# it currently returns isomorphism1
|
| 100 |
+
# but the second is also correct
|
| 101 |
+
isomorphism1 = [
|
| 102 |
+
("a", "n"),
|
| 103 |
+
("b", "q"),
|
| 104 |
+
("c", "o"),
|
| 105 |
+
("d", "p"),
|
| 106 |
+
("e", "v"),
|
| 107 |
+
("f", "u"),
|
| 108 |
+
("g", "s"),
|
| 109 |
+
("h", "r"),
|
| 110 |
+
("i", "t"),
|
| 111 |
+
("j", "y"),
|
| 112 |
+
("k", "z"),
|
| 113 |
+
("l", "x"),
|
| 114 |
+
("m", "w"),
|
| 115 |
+
]
|
| 116 |
+
|
| 117 |
+
# could swap y and z
|
| 118 |
+
isomorphism2 = [
|
| 119 |
+
("a", "n"),
|
| 120 |
+
("b", "q"),
|
| 121 |
+
("c", "o"),
|
| 122 |
+
("d", "p"),
|
| 123 |
+
("e", "v"),
|
| 124 |
+
("f", "u"),
|
| 125 |
+
("g", "s"),
|
| 126 |
+
("h", "r"),
|
| 127 |
+
("i", "t"),
|
| 128 |
+
("j", "z"),
|
| 129 |
+
("k", "y"),
|
| 130 |
+
("l", "x"),
|
| 131 |
+
("m", "w"),
|
| 132 |
+
]
|
| 133 |
+
|
| 134 |
+
t1 = nx.Graph()
|
| 135 |
+
t1.add_edges_from(edges_1)
|
| 136 |
+
root1 = "a"
|
| 137 |
+
|
| 138 |
+
t2 = nx.Graph()
|
| 139 |
+
t2.add_edges_from(edges_2)
|
| 140 |
+
root2 = "n"
|
| 141 |
+
|
| 142 |
+
isomorphism = sorted(rooted_tree_isomorphism(t1, root1, t2, root2))
|
| 143 |
+
|
| 144 |
+
# is correct by hand
|
| 145 |
+
assert isomorphism in (isomorphism1, isomorphism2)
|
| 146 |
+
|
| 147 |
+
# check algorithmically
|
| 148 |
+
assert check_isomorphism(t1, t2, isomorphism)
|
| 149 |
+
|
| 150 |
+
# try again as digraph
|
| 151 |
+
t1 = nx.DiGraph()
|
| 152 |
+
t1.add_edges_from(edges_1)
|
| 153 |
+
root1 = "a"
|
| 154 |
+
|
| 155 |
+
t2 = nx.DiGraph()
|
| 156 |
+
t2.add_edges_from(edges_2)
|
| 157 |
+
root2 = "n"
|
| 158 |
+
|
| 159 |
+
isomorphism = sorted(rooted_tree_isomorphism(t1, root1, t2, root2))
|
| 160 |
+
|
| 161 |
+
# is correct by hand
|
| 162 |
+
assert isomorphism in (isomorphism1, isomorphism2)
|
| 163 |
+
|
| 164 |
+
# check algorithmically
|
| 165 |
+
assert check_isomorphism(t1, t2, isomorphism)
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
# randomly swap a tuple (a,b)
|
| 169 |
+
def random_swap(t):
|
| 170 |
+
(a, b) = t
|
| 171 |
+
if random.randint(0, 1) == 1:
|
| 172 |
+
return (a, b)
|
| 173 |
+
else:
|
| 174 |
+
return (b, a)
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
# given a tree t1, create a new tree t2
|
| 178 |
+
# that is isomorphic to t1, with a known isomorphism
|
| 179 |
+
# and test that our algorithm found the right one
|
| 180 |
+
def positive_single_tree(t1):
|
| 181 |
+
assert nx.is_tree(t1)
|
| 182 |
+
|
| 183 |
+
nodes1 = list(t1.nodes())
|
| 184 |
+
# get a random permutation of this
|
| 185 |
+
nodes2 = nodes1.copy()
|
| 186 |
+
random.shuffle(nodes2)
|
| 187 |
+
|
| 188 |
+
# this is one isomorphism, however they may be multiple
|
| 189 |
+
# so we don't necessarily get this one back
|
| 190 |
+
someisomorphism = list(zip(nodes1, nodes2))
|
| 191 |
+
|
| 192 |
+
# map from old to new
|
| 193 |
+
map1to2 = dict(someisomorphism)
|
| 194 |
+
|
| 195 |
+
# get the edges with the transformed names
|
| 196 |
+
edges2 = [random_swap((map1to2[u], map1to2[v])) for (u, v) in t1.edges()]
|
| 197 |
+
# randomly permute, to ensure we're not relying on edge order somehow
|
| 198 |
+
random.shuffle(edges2)
|
| 199 |
+
|
| 200 |
+
# so t2 is isomorphic to t1
|
| 201 |
+
t2 = nx.Graph()
|
| 202 |
+
t2.add_edges_from(edges2)
|
| 203 |
+
|
| 204 |
+
# lets call our code to see if t1 and t2 are isomorphic
|
| 205 |
+
isomorphism = tree_isomorphism(t1, t2)
|
| 206 |
+
|
| 207 |
+
# make sure we got a correct solution
|
| 208 |
+
# although not necessarily someisomorphism
|
| 209 |
+
assert len(isomorphism) > 0
|
| 210 |
+
assert check_isomorphism(t1, t2, isomorphism)
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
# run positive_single_tree over all the
|
| 214 |
+
# non-isomorphic trees for k from 4 to maxk
|
| 215 |
+
# k = 4 is the first level that has more than 1 non-isomorphic tree
|
| 216 |
+
# k = 13 takes about 2.86 seconds to run on my laptop
|
| 217 |
+
# larger values run slow down significantly
|
| 218 |
+
# as the number of trees grows rapidly
|
| 219 |
+
def test_positive(maxk=14):
|
| 220 |
+
print("positive test")
|
| 221 |
+
|
| 222 |
+
for k in range(2, maxk + 1):
|
| 223 |
+
start_time = time.time()
|
| 224 |
+
trial = 0
|
| 225 |
+
for t in nx.nonisomorphic_trees(k):
|
| 226 |
+
positive_single_tree(t)
|
| 227 |
+
trial += 1
|
| 228 |
+
print(k, trial, time.time() - start_time)
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
# test the trivial case of a single node in each tree
|
| 232 |
+
# note that nonisomorphic_trees doesn't work for k = 1
|
| 233 |
+
def test_trivial():
|
| 234 |
+
print("trivial test")
|
| 235 |
+
|
| 236 |
+
# back to an undirected graph
|
| 237 |
+
t1 = nx.Graph()
|
| 238 |
+
t1.add_node("a")
|
| 239 |
+
root1 = "a"
|
| 240 |
+
|
| 241 |
+
t2 = nx.Graph()
|
| 242 |
+
t2.add_node("n")
|
| 243 |
+
root2 = "n"
|
| 244 |
+
|
| 245 |
+
isomorphism = rooted_tree_isomorphism(t1, root1, t2, root2)
|
| 246 |
+
|
| 247 |
+
assert isomorphism == [("a", "n")]
|
| 248 |
+
|
| 249 |
+
assert check_isomorphism(t1, t2, isomorphism)
|
| 250 |
+
|
| 251 |
+
|
| 252 |
+
# test another trivial case where the two graphs have
|
| 253 |
+
# different numbers of nodes
|
| 254 |
+
def test_trivial_2():
|
| 255 |
+
print("trivial test 2")
|
| 256 |
+
|
| 257 |
+
edges_1 = [("a", "b"), ("a", "c")]
|
| 258 |
+
|
| 259 |
+
edges_2 = [("v", "y")]
|
| 260 |
+
|
| 261 |
+
t1 = nx.Graph()
|
| 262 |
+
t1.add_edges_from(edges_1)
|
| 263 |
+
|
| 264 |
+
t2 = nx.Graph()
|
| 265 |
+
t2.add_edges_from(edges_2)
|
| 266 |
+
|
| 267 |
+
isomorphism = tree_isomorphism(t1, t2)
|
| 268 |
+
|
| 269 |
+
# they cannot be isomorphic,
|
| 270 |
+
# since they have different numbers of nodes
|
| 271 |
+
assert isomorphism == []
|
| 272 |
+
|
| 273 |
+
|
| 274 |
+
# the function nonisomorphic_trees generates all the non-isomorphic
|
| 275 |
+
# trees of a given size. Take each pair of these and verify that
|
| 276 |
+
# they are not isomorphic
|
| 277 |
+
# k = 4 is the first level that has more than 1 non-isomorphic tree
|
| 278 |
+
# k = 11 takes about 4.76 seconds to run on my laptop
|
| 279 |
+
# larger values run slow down significantly
|
| 280 |
+
# as the number of trees grows rapidly
|
| 281 |
+
def test_negative(maxk=11):
|
| 282 |
+
print("negative test")
|
| 283 |
+
|
| 284 |
+
for k in range(4, maxk + 1):
|
| 285 |
+
test_trees = list(nx.nonisomorphic_trees(k))
|
| 286 |
+
start_time = time.time()
|
| 287 |
+
trial = 0
|
| 288 |
+
for i in range(len(test_trees) - 1):
|
| 289 |
+
for j in range(i + 1, len(test_trees)):
|
| 290 |
+
trial += 1
|
| 291 |
+
assert tree_isomorphism(test_trees[i], test_trees[j]) == []
|
| 292 |
+
print(k, trial, time.time() - start_time)
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp.py
ADDED
|
@@ -0,0 +1,1608 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import itertools as it
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx import vf2pp_is_isomorphic, vf2pp_isomorphism
|
| 7 |
+
|
| 8 |
+
# Label palettes shared by the test classes below: one single-color palette
# (every node gets the same label) and one many-color palette.
labels_same = ["blue"]

labels_many = (
    "white red blue green orange black purple yellow brown cyan "
    "solarized pink none"
).split()
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class TestPreCheck:
    """Inputs that the VF2++ feasibility pre-checks should reject (or accept)
    before the full matching search runs: empty graphs, differing orders,
    differing degree sequences, and differing label distributions."""

    @staticmethod
    def _label_uniformly(graph, value):
        # Attach the same "label" attribute to every node of ``graph``.
        nx.set_node_attributes(graph, dict.fromkeys(graph, value), "label")

    def test_first_graph_empty(self):
        empty = nx.Graph()
        path = nx.Graph([(0, 1), (1, 2)])
        assert not vf2pp_is_isomorphic(empty, path)

    def test_second_graph_empty(self):
        path = nx.Graph([(0, 1), (1, 2)])
        assert not vf2pp_is_isomorphic(path, nx.Graph())

    def test_different_order1(self):
        # Paths of different lengths have different node counts.
        assert not vf2pp_is_isomorphic(nx.path_graph(5), nx.path_graph(6))

    def test_different_order2(self):
        # Barbell graphs with different bell sizes have different orders.
        assert not vf2pp_is_isomorphic(
            nx.barbell_graph(100, 20), nx.barbell_graph(101, 20)
        )

    def test_different_order3(self):
        # Complete graphs on different node counts cannot match.
        assert not vf2pp_is_isomorphic(nx.complete_graph(7), nx.complete_graph(8))

    def test_different_degree_sequences1(self):
        g = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (0, 4)])
        h = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (0, 4), (2, 5)])
        assert not vf2pp_is_isomorphic(g, h)

        # Removing node 3 from h realigns the two degree sequences.
        h.remove_node(3)
        self._label_uniformly(g, "a")
        self._label_uniformly(h, "a")

        assert vf2pp_is_isomorphic(g, h)

    def test_different_degree_sequences2(self):
        g = nx.Graph(
            [
                (0, 1),
                (1, 2),
                (0, 2),
                (2, 3),
                (3, 4),
                (4, 5),
                (5, 6),
                (6, 3),
                (4, 7),
                (7, 8),
                (8, 3),
            ]
        )
        h = g.copy()
        h.add_edge(8, 0)
        assert not vf2pp_is_isomorphic(g, h)

        # A compensating edge in g restores the isomorphism.
        g.add_edge(6, 1)
        self._label_uniformly(g, "a")
        self._label_uniformly(h, "a")

        assert vf2pp_is_isomorphic(g, h)

    def test_different_degree_sequences3(self):
        g = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)])
        h = nx.Graph(
            [(0, 1), (0, 6), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)]
        )
        assert not vf2pp_is_isomorphic(g, h)

        g.add_edge(3, 5)
        self._label_uniformly(g, "a")
        self._label_uniformly(h, "a")

        assert vf2pp_is_isomorphic(g, h)

    def test_label_distribution(self):
        edges = [(0, 1), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)]
        g = nx.Graph(edges)
        h = nx.Graph(edges)

        palette_g = ["blue", "blue", "blue", "yellow", "black", "purple", "purple"]
        palette_h = ["blue", "blue", "yellow", "yellow", "black", "purple", "purple"]

        nx.set_node_attributes(g, dict(zip(g, it.cycle(palette_g[::-1]))), "label")
        nx.set_node_attributes(h, dict(zip(h, it.cycle(palette_h[::-1]))), "label")

        assert not vf2pp_is_isomorphic(g, h, node_label="label")
        # Repainting the single mismatched node equalizes the label multisets.
        h.nodes[3]["label"] = "blue"
        assert vf2pp_is_isomorphic(g, h, node_label="label")
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
class TestAllGraphTypesEdgeCases:
    """Empty- and near-empty-graph corner cases, exercised for every
    supported graph class (Graph, MultiGraph, DiGraph)."""

    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph))
    def test_both_graphs_empty(self, graph_type):
        first, second = graph_type(), graph_type()
        assert vf2pp_isomorphism(first, second) is None

        # One node vs. empty: no mapping exists in either direction.
        first.add_node(0)
        assert vf2pp_isomorphism(first, second) is None
        assert vf2pp_isomorphism(second, first) is None

        # Matching single-node graphs map trivially onto each other.
        second.add_node(0)
        assert vf2pp_isomorphism(first, second) == {0: 0}

    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph))
    def test_first_graph_empty(self, graph_type):
        assert vf2pp_isomorphism(graph_type(), graph_type([(0, 1)])) is None

    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph))
    def test_second_graph_empty(self, graph_type):
        assert vf2pp_isomorphism(graph_type([(0, 1)]), graph_type()) is None
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
class TestGraphISOVF2pp:
    """Isomorphism tests for simple undirected graphs with VF2++.

    Each test builds G1, derives G2 via ``nx.relabel_nodes`` (so ``mapped``
    is a known isomorphism), then incrementally mutates one or both graphs,
    asserting after every step whether the isomorphism survives.
    """

    def test_custom_graph1_same_labels(self):
        """Small graph, uniform labels: mutate both sides and re-check."""
        G1 = nx.Graph()

        mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
        edges1 = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 6), (3, 4), (5, 1), (5, 2)]

        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Add edge making G1 symmetrical
        G1.add_edge(3, 7)
        G1.nodes[7]["label"] = "blue"
        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # Make G2 isomorphic to G1
        G2.add_edges_from([(mapped[3], "X"), (mapped[6], mapped[5])])
        G1.add_edge(4, 7)
        G2.nodes["X"]["label"] = "blue"
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Re-structure maintaining isomorphism
        G1.remove_edges_from([(1, 4), (1, 3)])
        G2.remove_edges_from([(mapped[1], mapped[5]), (mapped[1], mapped[2])])
        assert vf2pp_isomorphism(G1, G2, node_label="label")

    def test_custom_graph1_different_labels(self):
        """With distinct labels the returned mapping must equal ``mapped``."""
        G1 = nx.Graph()

        mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
        edges1 = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 6), (3, 4), (5, 1), (5, 2)]

        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
        nx.set_node_attributes(
            G2,
            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
            "label",
        )
        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

    def test_custom_graph2_same_labels(self):
        """Uniform labels: check isomorphism of derived subgraphs too."""
        G1 = nx.Graph()

        mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
        edges1 = [(1, 2), (1, 5), (5, 6), (2, 3), (2, 4), (3, 4), (4, 5), (2, 7)]

        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")

        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Obtain two isomorphic subgraphs from the graph
        G2.remove_edge(mapped[1], mapped[2])
        G2.add_edge(mapped[1], mapped[4])
        H1 = nx.Graph(G1.subgraph([2, 3, 4, 7]))
        H2 = nx.Graph(G2.subgraph([mapped[1], mapped[4], mapped[5], mapped[6]]))
        assert vf2pp_isomorphism(H1, H2, node_label="label")

        # Add edges maintaining isomorphism
        H1.add_edges_from([(3, 7), (4, 7)])
        H2.add_edges_from([(mapped[1], mapped[6]), (mapped[4], mapped[6])])
        assert vf2pp_isomorphism(H1, H2, node_label="label")

    def test_custom_graph2_different_labels(self):
        """Distinct labels: node additions, recolorings and edge changes."""
        G1 = nx.Graph()

        mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
        edges1 = [(1, 2), (1, 5), (5, 6), (2, 3), (2, 4), (3, 4), (4, 5), (2, 7)]

        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
        nx.set_node_attributes(
            G2,
            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
            "label",
        )

        # Adding new nodes
        G1.add_node(0)
        G2.add_node("Z")
        G1.nodes[0]["label"] = G1.nodes[1]["label"]
        G2.nodes["Z"]["label"] = G1.nodes[1]["label"]
        mapped.update({0: "Z"})

        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

        # Change the color of one of the nodes
        G2.nodes["Z"]["label"] = G1.nodes[2]["label"]
        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # Add an extra edge
        G1.nodes[0]["label"] = "blue"
        G2.nodes["Z"]["label"] = "blue"
        G1.add_edge(0, 1)

        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # Add extra edge to both
        G2.add_edge("Z", "A")
        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

    def test_custom_graph3_same_labels(self):
        """Uniform labels on a larger graph: symmetry-breaking edge edits."""
        G1 = nx.Graph()

        mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
        edges1 = [
            (1, 2),
            (1, 3),
            (2, 3),
            (3, 4),
            (4, 5),
            (4, 7),
            (4, 9),
            (5, 8),
            (8, 9),
            (5, 6),
            (6, 7),
            (5, 2),
        ]
        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Connect nodes maintaining symmetry
        G1.add_edges_from([(6, 9), (7, 8)])
        G2.add_edges_from([(mapped[6], mapped[8]), (mapped[7], mapped[9])])
        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # Make isomorphic
        G1.add_edges_from([(6, 8), (7, 9)])
        G2.add_edges_from([(mapped[6], mapped[9]), (mapped[7], mapped[8])])
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Connect more nodes
        G1.add_edges_from([(2, 7), (3, 6)])
        G2.add_edges_from([(mapped[2], mapped[7]), (mapped[3], mapped[6])])
        G1.add_node(10)
        G2.add_node("Z")
        G1.nodes[10]["label"] = "blue"
        G2.nodes["Z"]["label"] = "blue"

        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Connect the newly added node, to opposite sides of the graph
        G1.add_edges_from([(10, 1), (10, 5), (10, 8)])
        G2.add_edges_from([("Z", mapped[1]), ("Z", mapped[4]), ("Z", mapped[9])])
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Get two subgraphs that are not isomorphic but are easy to make
        H1 = nx.Graph(G1.subgraph([2, 3, 4, 5, 6, 7, 10]))
        H2 = nx.Graph(
            G2.subgraph(
                [mapped[4], mapped[5], mapped[6], mapped[7], mapped[8], mapped[9], "Z"]
            )
        )
        assert vf2pp_isomorphism(H1, H2, node_label="label") is None

        # Restructure both to make them isomorphic
        H1.add_edges_from([(10, 2), (10, 6), (3, 6), (2, 7), (2, 6), (3, 7)])
        H2.add_edges_from(
            [("Z", mapped[7]), (mapped[6], mapped[9]), (mapped[7], mapped[8])]
        )
        assert vf2pp_isomorphism(H1, H2, node_label="label")

        # Add edges with opposite direction in each Graph
        H1.add_edge(3, 5)
        H2.add_edge(mapped[5], mapped[7])
        assert vf2pp_isomorphism(H1, H2, node_label="label") is None

    def test_custom_graph3_different_labels(self):
        """Distinct labels on a larger graph, then repaint all nodes red."""
        G1 = nx.Graph()

        mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
        edges1 = [
            (1, 2),
            (1, 3),
            (2, 3),
            (3, 4),
            (4, 5),
            (4, 7),
            (4, 9),
            (5, 8),
            (8, 9),
            (5, 6),
            (6, 7),
            (5, 2),
        ]
        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
        nx.set_node_attributes(
            G2,
            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
            "label",
        )
        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

        # Add extra edge to G1
        G1.add_edge(1, 7)
        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # Compensate in G2
        G2.add_edge(9, 1)
        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

        # Add extra node
        G1.add_node("A")
        G2.add_node("K")
        G1.nodes["A"]["label"] = "green"
        G2.nodes["K"]["label"] = "green"
        mapped.update({"A": "K"})

        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

        # Connect A to one side of G1 and K to the opposite
        G1.add_edge("A", 6)
        G2.add_edge("K", 5)
        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # Make the graphs symmetrical
        G1.add_edge(1, 5)
        G1.add_edge(2, 9)
        G2.add_edge(9, 3)
        G2.add_edge(8, 4)
        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # Assign same colors so the two opposite sides are identical
        for node in G1.nodes():
            G1.nodes[node]["label"] = "red"
            G2.nodes[mapped[node]]["label"] = "red"

        assert vf2pp_isomorphism(G1, G2, node_label="label")

    def test_custom_graph4_different_labels(self):
        """Disconnected graph with distinct labels: mapping must be exact."""
        G1 = nx.Graph()
        edges1 = [
            (1, 2),
            (2, 3),
            (3, 8),
            (3, 4),
            (4, 5),
            (4, 6),
            (3, 6),
            (8, 7),
            (8, 9),
            (5, 9),
            (10, 11),
            (11, 12),
            (12, 13),
            (11, 13),
        ]

        mapped = {
            1: "n",
            2: "m",
            3: "l",
            4: "j",
            5: "k",
            6: "i",
            7: "g",
            8: "h",
            9: "f",
            10: "b",
            11: "a",
            12: "d",
            13: "e",
        }

        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
        nx.set_node_attributes(
            G2,
            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
            "label",
        )
        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

    def test_custom_graph4_same_labels(self):
        """Disconnected graph, uniform labels: merge components step by step."""
        G1 = nx.Graph()
        edges1 = [
            (1, 2),
            (2, 3),
            (3, 8),
            (3, 4),
            (4, 5),
            (4, 6),
            (3, 6),
            (8, 7),
            (8, 9),
            (5, 9),
            (10, 11),
            (11, 12),
            (12, 13),
            (11, 13),
        ]

        mapped = {
            1: "n",
            2: "m",
            3: "l",
            4: "j",
            5: "k",
            6: "i",
            7: "g",
            8: "h",
            9: "f",
            10: "b",
            11: "a",
            12: "d",
            13: "e",
        }

        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Add nodes of different label
        G1.add_node(0)
        G2.add_node("z")
        G1.nodes[0]["label"] = "green"
        G2.nodes["z"]["label"] = "blue"

        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # Make the labels identical
        G2.nodes["z"]["label"] = "green"
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Change the structure of the graphs, keeping them isomorphic
        G1.add_edge(2, 5)
        G2.remove_edge("i", "l")
        G2.add_edge("g", "l")
        G2.add_edge("m", "f")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Change the structure of the disconnected sub-graph, keeping it isomorphic
        G1.remove_node(13)
        G2.remove_node("d")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Connect the newly added node to the disconnected graph, which now is just a path of size 3
        G1.add_edge(0, 10)
        G2.add_edge("e", "z")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Connect the two disconnected sub-graphs, forming a single graph
        G1.add_edge(11, 3)
        G1.add_edge(0, 8)
        G2.add_edge("a", "l")
        G2.add_edge("z", "j")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

    def test_custom_graph5_same_labels(self):
        """Dense symmetric graph, uniform labels: subgraph comparisons."""
        G1 = nx.Graph()
        edges1 = [
            (1, 5),
            (1, 2),
            (1, 4),
            (2, 3),
            (2, 6),
            (3, 4),
            (3, 7),
            (4, 8),
            (5, 8),
            (5, 6),
            (6, 7),
            (7, 8),
        ]
        mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}

        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Add different edges in each graph, maintaining symmetry
        G1.add_edges_from([(3, 6), (2, 7), (2, 5), (1, 3), (4, 7), (6, 8)])
        G2.add_edges_from(
            [
                (mapped[6], mapped[3]),
                (mapped[2], mapped[7]),
                (mapped[1], mapped[6]),
                (mapped[5], mapped[7]),
                (mapped[3], mapped[8]),
                (mapped[2], mapped[4]),
            ]
        )
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Obtain two different but isomorphic subgraphs from G1 and G2
        H1 = nx.Graph(G1.subgraph([1, 5, 8, 6, 7, 3]))
        H2 = nx.Graph(
            G2.subgraph(
                [mapped[1], mapped[4], mapped[8], mapped[7], mapped[3], mapped[5]]
            )
        )
        assert vf2pp_isomorphism(H1, H2, node_label="label")

        # Delete corresponding node from the two graphs
        H1.remove_node(8)
        H2.remove_node(mapped[7])
        assert vf2pp_isomorphism(H1, H2, node_label="label")

        # Re-orient, maintaining isomorphism
        H1.add_edge(1, 6)
        H1.remove_edge(3, 6)
        assert vf2pp_isomorphism(H1, H2, node_label="label")

    def test_custom_graph5_different_labels(self):
        """Dense graph: mismatched colorings must break the isomorphism."""
        G1 = nx.Graph()
        edges1 = [
            (1, 5),
            (1, 2),
            (1, 4),
            (2, 3),
            (2, 6),
            (3, 4),
            (3, 7),
            (4, 8),
            (5, 8),
            (5, 6),
            (6, 7),
            (7, 8),
        ]
        mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}

        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)

        colors = ["red", "blue", "grey", "none", "brown", "solarized", "yellow", "pink"]
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
        nx.set_node_attributes(
            G2,
            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
            "label",
        )
        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

        # Assign different colors to matching nodes
        # (enumerate replaces the previous manual `c = 0 ... c += 1` counter)
        for c, node in enumerate(G1.nodes()):
            G1.nodes[node]["label"] = colors[c]
            G2.nodes[mapped[node]]["label"] = colors[(c + 3) % len(colors)]

        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # Get symmetrical sub-graphs of G1,G2 and compare them
        # (the unused counter that previously shadowed this loop was removed)
        H1 = G1.subgraph([1, 5])
        H2 = G2.subgraph(["i", "c"])
        for node1, node2 in zip(H1.nodes(), H2.nodes()):
            H1.nodes[node1]["label"] = "red"
            H2.nodes[node2]["label"] = "red"

        assert vf2pp_isomorphism(H1, H2, node_label="label")

    def test_disconnected_graph_all_same_labels(self):
        """Ten isolated nodes, one color: any bijection is an isomorphism."""
        G1 = nx.Graph()
        G1.add_nodes_from(list(range(10)))

        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

    def test_disconnected_graph_all_different_labels(self):
        """Ten isolated nodes, all distinct colors: mapping is forced."""
        G1 = nx.Graph()
        G1.add_nodes_from(list(range(10)))

        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
        G2 = nx.relabel_nodes(G1, mapped)

        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
        nx.set_node_attributes(
            G2,
            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
            "label",
        )
        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

    def test_disconnected_graph_some_same_labels(self):
        """Ten isolated nodes with repeated colors: any color-preserving
        bijection works."""
        G1 = nx.Graph()
        G1.add_nodes_from(list(range(10)))

        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
        G2 = nx.relabel_nodes(G1, mapped)

        colors = [
            "white",
            "white",
            "white",
            "purple",
            "purple",
            "red",
            "red",
            "pink",
            "pink",
            "pink",
        ]

        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(colors))), "label")
        nx.set_node_attributes(
            G2, dict(zip([mapped[n] for n in G1], it.cycle(colors))), "label"
        )

        assert vf2pp_isomorphism(G1, G2, node_label="label")
|
| 672 |
+
|
| 673 |
+
|
| 674 |
+
class TestMultiGraphISOVF2pp:
|
| 675 |
+
def test_custom_multigraph1_same_labels(self):
|
| 676 |
+
G1 = nx.MultiGraph()
|
| 677 |
+
|
| 678 |
+
mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
|
| 679 |
+
edges1 = [
|
| 680 |
+
(1, 2),
|
| 681 |
+
(1, 3),
|
| 682 |
+
(1, 4),
|
| 683 |
+
(1, 4),
|
| 684 |
+
(1, 4),
|
| 685 |
+
(2, 3),
|
| 686 |
+
(2, 6),
|
| 687 |
+
(2, 6),
|
| 688 |
+
(3, 4),
|
| 689 |
+
(3, 4),
|
| 690 |
+
(5, 1),
|
| 691 |
+
(5, 1),
|
| 692 |
+
(5, 2),
|
| 693 |
+
(5, 2),
|
| 694 |
+
]
|
| 695 |
+
|
| 696 |
+
G1.add_edges_from(edges1)
|
| 697 |
+
G2 = nx.relabel_nodes(G1, mapped)
|
| 698 |
+
|
| 699 |
+
nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
|
| 700 |
+
nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
|
| 701 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 702 |
+
assert m
|
| 703 |
+
|
| 704 |
+
# Transfer the 2-clique to the right side of G1
|
| 705 |
+
G1.remove_edges_from([(2, 6), (2, 6)])
|
| 706 |
+
G1.add_edges_from([(3, 6), (3, 6)])
|
| 707 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 708 |
+
assert not m
|
| 709 |
+
|
| 710 |
+
# Delete an edges, making them symmetrical, so the position of the 2-clique doesn't matter
|
| 711 |
+
G2.remove_edge(mapped[1], mapped[4])
|
| 712 |
+
G1.remove_edge(1, 4)
|
| 713 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 714 |
+
assert m
|
| 715 |
+
|
| 716 |
+
# Add self-loops
|
| 717 |
+
G1.add_edges_from([(5, 5), (5, 5), (1, 1)])
|
| 718 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 719 |
+
assert not m
|
| 720 |
+
|
| 721 |
+
# Compensate in G2
|
| 722 |
+
G2.add_edges_from(
|
| 723 |
+
[(mapped[1], mapped[1]), (mapped[4], mapped[4]), (mapped[4], mapped[4])]
|
| 724 |
+
)
|
| 725 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 726 |
+
assert m
|
| 727 |
+
|
| 728 |
+
def test_custom_multigraph1_different_labels(self):
|
| 729 |
+
G1 = nx.MultiGraph()
|
| 730 |
+
|
| 731 |
+
mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
|
| 732 |
+
edges1 = [
|
| 733 |
+
(1, 2),
|
| 734 |
+
(1, 3),
|
| 735 |
+
(1, 4),
|
| 736 |
+
(1, 4),
|
| 737 |
+
(1, 4),
|
| 738 |
+
(2, 3),
|
| 739 |
+
(2, 6),
|
| 740 |
+
(2, 6),
|
| 741 |
+
(3, 4),
|
| 742 |
+
(3, 4),
|
| 743 |
+
(5, 1),
|
| 744 |
+
(5, 1),
|
| 745 |
+
(5, 2),
|
| 746 |
+
(5, 2),
|
| 747 |
+
]
|
| 748 |
+
|
| 749 |
+
G1.add_edges_from(edges1)
|
| 750 |
+
G2 = nx.relabel_nodes(G1, mapped)
|
| 751 |
+
|
| 752 |
+
nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
|
| 753 |
+
nx.set_node_attributes(
|
| 754 |
+
G2,
|
| 755 |
+
dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
|
| 756 |
+
"label",
|
| 757 |
+
)
|
| 758 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 759 |
+
assert m
|
| 760 |
+
assert m == mapped
|
| 761 |
+
|
| 762 |
+
# Re-structure G1, maintaining the degree sequence
|
| 763 |
+
G1.remove_edge(1, 4)
|
| 764 |
+
G1.add_edge(1, 5)
|
| 765 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 766 |
+
assert not m
|
| 767 |
+
|
| 768 |
+
# Restructure G2, making it isomorphic to G1
|
| 769 |
+
G2.remove_edge("A", "D")
|
| 770 |
+
G2.add_edge("A", "Z")
|
| 771 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 772 |
+
assert m
|
| 773 |
+
assert m == mapped
|
| 774 |
+
|
| 775 |
+
# Add edge from node to itself
|
| 776 |
+
G1.add_edges_from([(6, 6), (6, 6), (6, 6)])
|
| 777 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 778 |
+
assert not m
|
| 779 |
+
|
| 780 |
+
# Same for G2
|
| 781 |
+
G2.add_edges_from([("E", "E"), ("E", "E"), ("E", "E")])
|
| 782 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 783 |
+
assert m
|
| 784 |
+
assert m == mapped
|
| 785 |
+
|
| 786 |
+
def test_custom_multigraph2_same_labels(self):
|
| 787 |
+
G1 = nx.MultiGraph()
|
| 788 |
+
|
| 789 |
+
mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
|
| 790 |
+
edges1 = [
|
| 791 |
+
(1, 2),
|
| 792 |
+
(1, 2),
|
| 793 |
+
(1, 5),
|
| 794 |
+
(1, 5),
|
| 795 |
+
(1, 5),
|
| 796 |
+
(5, 6),
|
| 797 |
+
(2, 3),
|
| 798 |
+
(2, 3),
|
| 799 |
+
(2, 4),
|
| 800 |
+
(3, 4),
|
| 801 |
+
(3, 4),
|
| 802 |
+
(4, 5),
|
| 803 |
+
(4, 5),
|
| 804 |
+
(4, 5),
|
| 805 |
+
(2, 7),
|
| 806 |
+
(2, 7),
|
| 807 |
+
(2, 7),
|
| 808 |
+
]
|
| 809 |
+
|
| 810 |
+
G1.add_edges_from(edges1)
|
| 811 |
+
G2 = nx.relabel_nodes(G1, mapped)
|
| 812 |
+
|
| 813 |
+
nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
|
| 814 |
+
nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
|
| 815 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 816 |
+
assert m
|
| 817 |
+
|
| 818 |
+
# Obtain two non-isomorphic subgraphs from the graph
|
| 819 |
+
G2.remove_edges_from([(mapped[1], mapped[2]), (mapped[1], mapped[2])])
|
| 820 |
+
G2.add_edge(mapped[1], mapped[4])
|
| 821 |
+
H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 7]))
|
| 822 |
+
H2 = nx.MultiGraph(G2.subgraph([mapped[1], mapped[4], mapped[5], mapped[6]]))
|
| 823 |
+
|
| 824 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 825 |
+
assert not m
|
| 826 |
+
|
| 827 |
+
# Make them isomorphic
|
| 828 |
+
H1.remove_edge(3, 4)
|
| 829 |
+
H1.add_edges_from([(2, 3), (2, 4), (2, 4)])
|
| 830 |
+
H2.add_edges_from([(mapped[5], mapped[6]), (mapped[5], mapped[6])])
|
| 831 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 832 |
+
assert m
|
| 833 |
+
|
| 834 |
+
# Remove triangle edge
|
| 835 |
+
H1.remove_edges_from([(2, 3), (2, 3), (2, 3)])
|
| 836 |
+
H2.remove_edges_from([(mapped[5], mapped[4])] * 3)
|
| 837 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 838 |
+
assert m
|
| 839 |
+
|
| 840 |
+
# Change the edge orientation such that H1 is rotated H2
|
| 841 |
+
H1.remove_edges_from([(2, 7), (2, 7)])
|
| 842 |
+
H1.add_edges_from([(3, 4), (3, 4)])
|
| 843 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 844 |
+
assert m
|
| 845 |
+
|
| 846 |
+
# Add extra edges maintaining degree sequence, but in a non-symmetrical manner
|
| 847 |
+
H2.add_edge(mapped[5], mapped[1])
|
| 848 |
+
H1.add_edge(3, 4)
|
| 849 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 850 |
+
assert not m
|
| 851 |
+
|
| 852 |
+
def test_custom_multigraph2_different_labels(self):
|
| 853 |
+
G1 = nx.MultiGraph()
|
| 854 |
+
|
| 855 |
+
mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
|
| 856 |
+
edges1 = [
|
| 857 |
+
(1, 2),
|
| 858 |
+
(1, 2),
|
| 859 |
+
(1, 5),
|
| 860 |
+
(1, 5),
|
| 861 |
+
(1, 5),
|
| 862 |
+
(5, 6),
|
| 863 |
+
(2, 3),
|
| 864 |
+
(2, 3),
|
| 865 |
+
(2, 4),
|
| 866 |
+
(3, 4),
|
| 867 |
+
(3, 4),
|
| 868 |
+
(4, 5),
|
| 869 |
+
(4, 5),
|
| 870 |
+
(4, 5),
|
| 871 |
+
(2, 7),
|
| 872 |
+
(2, 7),
|
| 873 |
+
(2, 7),
|
| 874 |
+
]
|
| 875 |
+
|
| 876 |
+
G1.add_edges_from(edges1)
|
| 877 |
+
G2 = nx.relabel_nodes(G1, mapped)
|
| 878 |
+
|
| 879 |
+
nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
|
| 880 |
+
nx.set_node_attributes(
|
| 881 |
+
G2,
|
| 882 |
+
dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
|
| 883 |
+
"label",
|
| 884 |
+
)
|
| 885 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 886 |
+
assert m
|
| 887 |
+
assert m == mapped
|
| 888 |
+
|
| 889 |
+
# Re-structure G1
|
| 890 |
+
G1.remove_edge(2, 7)
|
| 891 |
+
G1.add_edge(5, 6)
|
| 892 |
+
|
| 893 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 894 |
+
assert not m
|
| 895 |
+
|
| 896 |
+
# Same for G2
|
| 897 |
+
G2.remove_edge("B", "C")
|
| 898 |
+
G2.add_edge("G", "F")
|
| 899 |
+
|
| 900 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 901 |
+
assert m
|
| 902 |
+
assert m == mapped
|
| 903 |
+
|
| 904 |
+
# Delete node from G1 and G2, keeping them isomorphic
|
| 905 |
+
G1.remove_node(3)
|
| 906 |
+
G2.remove_node("D")
|
| 907 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 908 |
+
assert m
|
| 909 |
+
|
| 910 |
+
# Change G1 edges
|
| 911 |
+
G1.remove_edge(1, 2)
|
| 912 |
+
G1.remove_edge(2, 7)
|
| 913 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 914 |
+
assert not m
|
| 915 |
+
|
| 916 |
+
# Make G2 identical to G1, but with different edge orientation and different labels
|
| 917 |
+
G2.add_edges_from([("A", "C"), ("C", "E"), ("C", "E")])
|
| 918 |
+
G2.remove_edges_from(
|
| 919 |
+
[("A", "G"), ("A", "G"), ("F", "G"), ("E", "G"), ("E", "G")]
|
| 920 |
+
)
|
| 921 |
+
|
| 922 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 923 |
+
assert not m
|
| 924 |
+
|
| 925 |
+
# Make all labels the same, so G1 and G2 are also isomorphic
|
| 926 |
+
for n1, n2 in zip(G1.nodes(), G2.nodes()):
|
| 927 |
+
G1.nodes[n1]["label"] = "blue"
|
| 928 |
+
G2.nodes[n2]["label"] = "blue"
|
| 929 |
+
|
| 930 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 931 |
+
assert m
|
| 932 |
+
|
| 933 |
+
def test_custom_multigraph3_same_labels(self):
|
| 934 |
+
G1 = nx.MultiGraph()
|
| 935 |
+
|
| 936 |
+
mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
|
| 937 |
+
edges1 = [
|
| 938 |
+
(1, 2),
|
| 939 |
+
(1, 3),
|
| 940 |
+
(1, 3),
|
| 941 |
+
(2, 3),
|
| 942 |
+
(2, 3),
|
| 943 |
+
(3, 4),
|
| 944 |
+
(4, 5),
|
| 945 |
+
(4, 7),
|
| 946 |
+
(4, 9),
|
| 947 |
+
(4, 9),
|
| 948 |
+
(4, 9),
|
| 949 |
+
(5, 8),
|
| 950 |
+
(5, 8),
|
| 951 |
+
(8, 9),
|
| 952 |
+
(8, 9),
|
| 953 |
+
(5, 6),
|
| 954 |
+
(6, 7),
|
| 955 |
+
(6, 7),
|
| 956 |
+
(6, 7),
|
| 957 |
+
(5, 2),
|
| 958 |
+
]
|
| 959 |
+
G1.add_edges_from(edges1)
|
| 960 |
+
G2 = nx.relabel_nodes(G1, mapped)
|
| 961 |
+
|
| 962 |
+
nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
|
| 963 |
+
nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
|
| 964 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 965 |
+
assert m
|
| 966 |
+
|
| 967 |
+
# Connect nodes maintaining symmetry
|
| 968 |
+
G1.add_edges_from([(6, 9), (7, 8), (5, 8), (4, 9), (4, 9)])
|
| 969 |
+
G2.add_edges_from(
|
| 970 |
+
[
|
| 971 |
+
(mapped[6], mapped[8]),
|
| 972 |
+
(mapped[7], mapped[9]),
|
| 973 |
+
(mapped[5], mapped[8]),
|
| 974 |
+
(mapped[4], mapped[9]),
|
| 975 |
+
(mapped[4], mapped[9]),
|
| 976 |
+
]
|
| 977 |
+
)
|
| 978 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 979 |
+
assert not m
|
| 980 |
+
|
| 981 |
+
# Make isomorphic
|
| 982 |
+
G1.add_edges_from([(6, 8), (6, 8), (7, 9), (7, 9), (7, 9)])
|
| 983 |
+
G2.add_edges_from(
|
| 984 |
+
[
|
| 985 |
+
(mapped[6], mapped[8]),
|
| 986 |
+
(mapped[6], mapped[9]),
|
| 987 |
+
(mapped[7], mapped[8]),
|
| 988 |
+
(mapped[7], mapped[9]),
|
| 989 |
+
(mapped[7], mapped[9]),
|
| 990 |
+
]
|
| 991 |
+
)
|
| 992 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 993 |
+
assert m
|
| 994 |
+
|
| 995 |
+
# Connect more nodes
|
| 996 |
+
G1.add_edges_from([(2, 7), (2, 7), (3, 6), (3, 6)])
|
| 997 |
+
G2.add_edges_from(
|
| 998 |
+
[
|
| 999 |
+
(mapped[2], mapped[7]),
|
| 1000 |
+
(mapped[2], mapped[7]),
|
| 1001 |
+
(mapped[3], mapped[6]),
|
| 1002 |
+
(mapped[3], mapped[6]),
|
| 1003 |
+
]
|
| 1004 |
+
)
|
| 1005 |
+
G1.add_node(10)
|
| 1006 |
+
G2.add_node("Z")
|
| 1007 |
+
G1.nodes[10]["label"] = "blue"
|
| 1008 |
+
G2.nodes["Z"]["label"] = "blue"
|
| 1009 |
+
|
| 1010 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1011 |
+
assert m
|
| 1012 |
+
|
| 1013 |
+
# Connect the newly added node, to opposite sides of the graph
|
| 1014 |
+
G1.add_edges_from([(10, 1), (10, 5), (10, 8), (10, 10), (10, 10)])
|
| 1015 |
+
G2.add_edges_from(
|
| 1016 |
+
[
|
| 1017 |
+
("Z", mapped[1]),
|
| 1018 |
+
("Z", mapped[4]),
|
| 1019 |
+
("Z", mapped[9]),
|
| 1020 |
+
("Z", "Z"),
|
| 1021 |
+
("Z", "Z"),
|
| 1022 |
+
]
|
| 1023 |
+
)
|
| 1024 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1025 |
+
assert not m
|
| 1026 |
+
|
| 1027 |
+
# We connected the new node to opposite sides, so G1 must be symmetrical to G2. Re-structure them to be so
|
| 1028 |
+
G1.remove_edges_from([(1, 3), (4, 9), (4, 9), (7, 9)])
|
| 1029 |
+
G2.remove_edges_from(
|
| 1030 |
+
[
|
| 1031 |
+
(mapped[1], mapped[3]),
|
| 1032 |
+
(mapped[4], mapped[9]),
|
| 1033 |
+
(mapped[4], mapped[9]),
|
| 1034 |
+
(mapped[7], mapped[9]),
|
| 1035 |
+
]
|
| 1036 |
+
)
|
| 1037 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1038 |
+
assert m
|
| 1039 |
+
|
| 1040 |
+
# Get two subgraphs that are not isomorphic but are easy to make
|
| 1041 |
+
H1 = nx.Graph(G1.subgraph([2, 3, 4, 5, 6, 7, 10]))
|
| 1042 |
+
H2 = nx.Graph(
|
| 1043 |
+
G2.subgraph(
|
| 1044 |
+
[mapped[4], mapped[5], mapped[6], mapped[7], mapped[8], mapped[9], "Z"]
|
| 1045 |
+
)
|
| 1046 |
+
)
|
| 1047 |
+
|
| 1048 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 1049 |
+
assert not m
|
| 1050 |
+
|
| 1051 |
+
# Restructure both to make them isomorphic
|
| 1052 |
+
H1.add_edges_from([(10, 2), (10, 6), (3, 6), (2, 7), (2, 6), (3, 7)])
|
| 1053 |
+
H2.add_edges_from(
|
| 1054 |
+
[("Z", mapped[7]), (mapped[6], mapped[9]), (mapped[7], mapped[8])]
|
| 1055 |
+
)
|
| 1056 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 1057 |
+
assert m
|
| 1058 |
+
|
| 1059 |
+
# Remove one self-loop in H2
|
| 1060 |
+
H2.remove_edge("Z", "Z")
|
| 1061 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 1062 |
+
assert not m
|
| 1063 |
+
|
| 1064 |
+
# Compensate in H1
|
| 1065 |
+
H1.remove_edge(10, 10)
|
| 1066 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 1067 |
+
assert m
|
| 1068 |
+
|
| 1069 |
+
def test_custom_multigraph3_different_labels(self):
|
| 1070 |
+
G1 = nx.MultiGraph()
|
| 1071 |
+
|
| 1072 |
+
mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
|
| 1073 |
+
edges1 = [
|
| 1074 |
+
(1, 2),
|
| 1075 |
+
(1, 3),
|
| 1076 |
+
(1, 3),
|
| 1077 |
+
(2, 3),
|
| 1078 |
+
(2, 3),
|
| 1079 |
+
(3, 4),
|
| 1080 |
+
(4, 5),
|
| 1081 |
+
(4, 7),
|
| 1082 |
+
(4, 9),
|
| 1083 |
+
(4, 9),
|
| 1084 |
+
(4, 9),
|
| 1085 |
+
(5, 8),
|
| 1086 |
+
(5, 8),
|
| 1087 |
+
(8, 9),
|
| 1088 |
+
(8, 9),
|
| 1089 |
+
(5, 6),
|
| 1090 |
+
(6, 7),
|
| 1091 |
+
(6, 7),
|
| 1092 |
+
(6, 7),
|
| 1093 |
+
(5, 2),
|
| 1094 |
+
]
|
| 1095 |
+
|
| 1096 |
+
G1.add_edges_from(edges1)
|
| 1097 |
+
G2 = nx.relabel_nodes(G1, mapped)
|
| 1098 |
+
|
| 1099 |
+
nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
|
| 1100 |
+
nx.set_node_attributes(
|
| 1101 |
+
G2,
|
| 1102 |
+
dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
|
| 1103 |
+
"label",
|
| 1104 |
+
)
|
| 1105 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1106 |
+
assert m
|
| 1107 |
+
assert m == mapped
|
| 1108 |
+
|
| 1109 |
+
# Delete edge maintaining isomorphism
|
| 1110 |
+
G1.remove_edge(4, 9)
|
| 1111 |
+
G2.remove_edge(4, 6)
|
| 1112 |
+
|
| 1113 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1114 |
+
assert m
|
| 1115 |
+
assert m == mapped
|
| 1116 |
+
|
| 1117 |
+
# Change edge orientation such that G1 mirrors G2
|
| 1118 |
+
G1.add_edges_from([(4, 9), (1, 2), (1, 2)])
|
| 1119 |
+
G1.remove_edges_from([(1, 3), (1, 3)])
|
| 1120 |
+
G2.add_edges_from([(3, 5), (7, 9)])
|
| 1121 |
+
G2.remove_edge(8, 9)
|
| 1122 |
+
|
| 1123 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1124 |
+
assert not m
|
| 1125 |
+
|
| 1126 |
+
# Make all labels the same, so G1 and G2 are also isomorphic
|
| 1127 |
+
for n1, n2 in zip(G1.nodes(), G2.nodes()):
|
| 1128 |
+
G1.nodes[n1]["label"] = "blue"
|
| 1129 |
+
G2.nodes[n2]["label"] = "blue"
|
| 1130 |
+
|
| 1131 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1132 |
+
assert m
|
| 1133 |
+
|
| 1134 |
+
G1.add_node(10)
|
| 1135 |
+
G2.add_node("Z")
|
| 1136 |
+
G1.nodes[10]["label"] = "green"
|
| 1137 |
+
G2.nodes["Z"]["label"] = "green"
|
| 1138 |
+
|
| 1139 |
+
# Add different number of edges between the new nodes and themselves
|
| 1140 |
+
G1.add_edges_from([(10, 10), (10, 10)])
|
| 1141 |
+
G2.add_edges_from([("Z", "Z")])
|
| 1142 |
+
|
| 1143 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1144 |
+
assert not m
|
| 1145 |
+
|
| 1146 |
+
# Make the number of self-edges equal
|
| 1147 |
+
G1.remove_edge(10, 10)
|
| 1148 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1149 |
+
assert m
|
| 1150 |
+
|
| 1151 |
+
# Connect the new node to the graph
|
| 1152 |
+
G1.add_edges_from([(10, 3), (10, 4)])
|
| 1153 |
+
G2.add_edges_from([("Z", 8), ("Z", 3)])
|
| 1154 |
+
|
| 1155 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1156 |
+
assert m
|
| 1157 |
+
|
| 1158 |
+
# Remove central node
|
| 1159 |
+
G1.remove_node(4)
|
| 1160 |
+
G2.remove_node(3)
|
| 1161 |
+
G1.add_edges_from([(5, 6), (5, 6), (5, 7)])
|
| 1162 |
+
G2.add_edges_from([(1, 6), (1, 6), (6, 2)])
|
| 1163 |
+
|
| 1164 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1165 |
+
assert m
|
| 1166 |
+
|
| 1167 |
+
def test_custom_multigraph4_same_labels(self):
|
| 1168 |
+
G1 = nx.MultiGraph()
|
| 1169 |
+
edges1 = [
|
| 1170 |
+
(1, 2),
|
| 1171 |
+
(1, 2),
|
| 1172 |
+
(2, 2),
|
| 1173 |
+
(2, 3),
|
| 1174 |
+
(3, 8),
|
| 1175 |
+
(3, 8),
|
| 1176 |
+
(3, 4),
|
| 1177 |
+
(4, 5),
|
| 1178 |
+
(4, 5),
|
| 1179 |
+
(4, 5),
|
| 1180 |
+
(4, 6),
|
| 1181 |
+
(3, 6),
|
| 1182 |
+
(3, 6),
|
| 1183 |
+
(6, 6),
|
| 1184 |
+
(8, 7),
|
| 1185 |
+
(7, 7),
|
| 1186 |
+
(8, 9),
|
| 1187 |
+
(9, 9),
|
| 1188 |
+
(8, 9),
|
| 1189 |
+
(8, 9),
|
| 1190 |
+
(5, 9),
|
| 1191 |
+
(10, 11),
|
| 1192 |
+
(11, 12),
|
| 1193 |
+
(12, 13),
|
| 1194 |
+
(11, 13),
|
| 1195 |
+
(10, 10),
|
| 1196 |
+
(10, 11),
|
| 1197 |
+
(11, 13),
|
| 1198 |
+
]
|
| 1199 |
+
|
| 1200 |
+
mapped = {
|
| 1201 |
+
1: "n",
|
| 1202 |
+
2: "m",
|
| 1203 |
+
3: "l",
|
| 1204 |
+
4: "j",
|
| 1205 |
+
5: "k",
|
| 1206 |
+
6: "i",
|
| 1207 |
+
7: "g",
|
| 1208 |
+
8: "h",
|
| 1209 |
+
9: "f",
|
| 1210 |
+
10: "b",
|
| 1211 |
+
11: "a",
|
| 1212 |
+
12: "d",
|
| 1213 |
+
13: "e",
|
| 1214 |
+
}
|
| 1215 |
+
|
| 1216 |
+
G1.add_edges_from(edges1)
|
| 1217 |
+
G2 = nx.relabel_nodes(G1, mapped)
|
| 1218 |
+
|
| 1219 |
+
nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
|
| 1220 |
+
nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
|
| 1221 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1222 |
+
assert m
|
| 1223 |
+
|
| 1224 |
+
# Add extra but corresponding edges to both graphs
|
| 1225 |
+
G1.add_edges_from([(2, 2), (2, 3), (2, 8), (3, 4)])
|
| 1226 |
+
G2.add_edges_from([("m", "m"), ("m", "l"), ("m", "h"), ("l", "j")])
|
| 1227 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1228 |
+
assert m
|
| 1229 |
+
|
| 1230 |
+
# Obtain subgraphs
|
| 1231 |
+
H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 6, 10, 11, 12, 13]))
|
| 1232 |
+
H2 = nx.MultiGraph(
|
| 1233 |
+
G2.subgraph(
|
| 1234 |
+
[
|
| 1235 |
+
mapped[2],
|
| 1236 |
+
mapped[3],
|
| 1237 |
+
mapped[8],
|
| 1238 |
+
mapped[9],
|
| 1239 |
+
mapped[10],
|
| 1240 |
+
mapped[11],
|
| 1241 |
+
mapped[12],
|
| 1242 |
+
mapped[13],
|
| 1243 |
+
]
|
| 1244 |
+
)
|
| 1245 |
+
)
|
| 1246 |
+
|
| 1247 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 1248 |
+
assert not m
|
| 1249 |
+
|
| 1250 |
+
# Make them isomorphic
|
| 1251 |
+
H2.remove_edges_from(
|
| 1252 |
+
[(mapped[3], mapped[2]), (mapped[9], mapped[8]), (mapped[2], mapped[2])]
|
| 1253 |
+
)
|
| 1254 |
+
H2.add_edges_from([(mapped[9], mapped[9]), (mapped[2], mapped[8])])
|
| 1255 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 1256 |
+
assert m
|
| 1257 |
+
|
| 1258 |
+
# Re-structure the disconnected sub-graph
|
| 1259 |
+
H1.remove_node(12)
|
| 1260 |
+
H2.remove_node(mapped[12])
|
| 1261 |
+
H1.add_edge(13, 13)
|
| 1262 |
+
H2.add_edge(mapped[13], mapped[13])
|
| 1263 |
+
|
| 1264 |
+
# Connect the two disconnected components, forming a single graph
|
| 1265 |
+
H1.add_edges_from([(3, 13), (6, 11)])
|
| 1266 |
+
H2.add_edges_from([(mapped[8], mapped[10]), (mapped[2], mapped[11])])
|
| 1267 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 1268 |
+
assert m
|
| 1269 |
+
|
| 1270 |
+
# Change orientation of self-loops in one graph, maintaining the degree sequence
|
| 1271 |
+
H1.remove_edges_from([(2, 2), (3, 6)])
|
| 1272 |
+
H1.add_edges_from([(6, 6), (2, 3)])
|
| 1273 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 1274 |
+
assert not m
|
| 1275 |
+
|
| 1276 |
+
def test_custom_multigraph4_different_labels(self):
|
| 1277 |
+
G1 = nx.MultiGraph()
|
| 1278 |
+
edges1 = [
|
| 1279 |
+
(1, 2),
|
| 1280 |
+
(1, 2),
|
| 1281 |
+
(2, 2),
|
| 1282 |
+
(2, 3),
|
| 1283 |
+
(3, 8),
|
| 1284 |
+
(3, 8),
|
| 1285 |
+
(3, 4),
|
| 1286 |
+
(4, 5),
|
| 1287 |
+
(4, 5),
|
| 1288 |
+
(4, 5),
|
| 1289 |
+
(4, 6),
|
| 1290 |
+
(3, 6),
|
| 1291 |
+
(3, 6),
|
| 1292 |
+
(6, 6),
|
| 1293 |
+
(8, 7),
|
| 1294 |
+
(7, 7),
|
| 1295 |
+
(8, 9),
|
| 1296 |
+
(9, 9),
|
| 1297 |
+
(8, 9),
|
| 1298 |
+
(8, 9),
|
| 1299 |
+
(5, 9),
|
| 1300 |
+
(10, 11),
|
| 1301 |
+
(11, 12),
|
| 1302 |
+
(12, 13),
|
| 1303 |
+
(11, 13),
|
| 1304 |
+
]
|
| 1305 |
+
|
| 1306 |
+
mapped = {
|
| 1307 |
+
1: "n",
|
| 1308 |
+
2: "m",
|
| 1309 |
+
3: "l",
|
| 1310 |
+
4: "j",
|
| 1311 |
+
5: "k",
|
| 1312 |
+
6: "i",
|
| 1313 |
+
7: "g",
|
| 1314 |
+
8: "h",
|
| 1315 |
+
9: "f",
|
| 1316 |
+
10: "b",
|
| 1317 |
+
11: "a",
|
| 1318 |
+
12: "d",
|
| 1319 |
+
13: "e",
|
| 1320 |
+
}
|
| 1321 |
+
|
| 1322 |
+
G1.add_edges_from(edges1)
|
| 1323 |
+
G2 = nx.relabel_nodes(G1, mapped)
|
| 1324 |
+
|
| 1325 |
+
nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
|
| 1326 |
+
nx.set_node_attributes(
|
| 1327 |
+
G2,
|
| 1328 |
+
dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
|
| 1329 |
+
"label",
|
| 1330 |
+
)
|
| 1331 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1332 |
+
assert m == mapped
|
| 1333 |
+
|
| 1334 |
+
# Add extra but corresponding edges to both graphs
|
| 1335 |
+
G1.add_edges_from([(2, 2), (2, 3), (2, 8), (3, 4)])
|
| 1336 |
+
G2.add_edges_from([("m", "m"), ("m", "l"), ("m", "h"), ("l", "j")])
|
| 1337 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1338 |
+
assert m == mapped
|
| 1339 |
+
|
| 1340 |
+
# Obtain isomorphic subgraphs
|
| 1341 |
+
H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 6]))
|
| 1342 |
+
H2 = nx.MultiGraph(G2.subgraph(["m", "l", "j", "i"]))
|
| 1343 |
+
|
| 1344 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 1345 |
+
assert m
|
| 1346 |
+
|
| 1347 |
+
# Delete the 3-clique, keeping only the path-graph. Also, H1 mirrors H2
|
| 1348 |
+
H1.remove_node(4)
|
| 1349 |
+
H2.remove_node("j")
|
| 1350 |
+
H1.remove_edges_from([(2, 2), (2, 3), (6, 6)])
|
| 1351 |
+
H2.remove_edges_from([("l", "i"), ("m", "m"), ("m", "m")])
|
| 1352 |
+
|
| 1353 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 1354 |
+
assert not m
|
| 1355 |
+
|
| 1356 |
+
# Assign the same labels so that mirroring means isomorphic
|
| 1357 |
+
for n1, n2 in zip(H1.nodes(), H2.nodes()):
|
| 1358 |
+
H1.nodes[n1]["label"] = "red"
|
| 1359 |
+
H2.nodes[n2]["label"] = "red"
|
| 1360 |
+
|
| 1361 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 1362 |
+
assert m
|
| 1363 |
+
|
| 1364 |
+
# Leave only one node with self-loop
|
| 1365 |
+
H1.remove_nodes_from([3, 6])
|
| 1366 |
+
H2.remove_nodes_from(["m", "l"])
|
| 1367 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 1368 |
+
assert m
|
| 1369 |
+
|
| 1370 |
+
# Remove one self-loop from H1
|
| 1371 |
+
H1.remove_edge(2, 2)
|
| 1372 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 1373 |
+
assert not m
|
| 1374 |
+
|
| 1375 |
+
# Same for H2
|
| 1376 |
+
H2.remove_edge("i", "i")
|
| 1377 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 1378 |
+
assert m
|
| 1379 |
+
|
| 1380 |
+
# Compose H1 with the disconnected sub-graph of G1. Same for H2
|
| 1381 |
+
S1 = nx.compose(H1, nx.MultiGraph(G1.subgraph([10, 11, 12, 13])))
|
| 1382 |
+
S2 = nx.compose(H2, nx.MultiGraph(G2.subgraph(["a", "b", "d", "e"])))
|
| 1383 |
+
|
| 1384 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 1385 |
+
assert m
|
| 1386 |
+
|
| 1387 |
+
# Connect the two components
|
| 1388 |
+
S1.add_edges_from([(13, 13), (13, 13), (2, 13)])
|
| 1389 |
+
S2.add_edges_from([("a", "a"), ("a", "a"), ("i", "e")])
|
| 1390 |
+
m = vf2pp_isomorphism(H1, H2, node_label="label")
|
| 1391 |
+
assert m
|
| 1392 |
+
|
| 1393 |
+
def test_custom_multigraph5_same_labels(self):
|
| 1394 |
+
G1 = nx.MultiGraph()
|
| 1395 |
+
|
| 1396 |
+
edges1 = [
|
| 1397 |
+
(1, 5),
|
| 1398 |
+
(1, 2),
|
| 1399 |
+
(1, 4),
|
| 1400 |
+
(2, 3),
|
| 1401 |
+
(2, 6),
|
| 1402 |
+
(3, 4),
|
| 1403 |
+
(3, 7),
|
| 1404 |
+
(4, 8),
|
| 1405 |
+
(5, 8),
|
| 1406 |
+
(5, 6),
|
| 1407 |
+
(6, 7),
|
| 1408 |
+
(7, 8),
|
| 1409 |
+
]
|
| 1410 |
+
mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}
|
| 1411 |
+
|
| 1412 |
+
G1.add_edges_from(edges1)
|
| 1413 |
+
G2 = nx.relabel_nodes(G1, mapped)
|
| 1414 |
+
nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
|
| 1415 |
+
nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
|
| 1416 |
+
|
| 1417 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1418 |
+
assert m
|
| 1419 |
+
|
| 1420 |
+
# Add multiple edges and self-loops, maintaining isomorphism
|
| 1421 |
+
G1.add_edges_from(
|
| 1422 |
+
[(1, 2), (1, 2), (3, 7), (8, 8), (8, 8), (7, 8), (2, 3), (5, 6)]
|
| 1423 |
+
)
|
| 1424 |
+
G2.add_edges_from(
|
| 1425 |
+
[
|
| 1426 |
+
("a", "h"),
|
| 1427 |
+
("a", "h"),
|
| 1428 |
+
("d", "j"),
|
| 1429 |
+
("c", "c"),
|
| 1430 |
+
("c", "c"),
|
| 1431 |
+
("j", "c"),
|
| 1432 |
+
("d", "h"),
|
| 1433 |
+
("g", "b"),
|
| 1434 |
+
]
|
| 1435 |
+
)
|
| 1436 |
+
|
| 1437 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1438 |
+
assert m
|
| 1439 |
+
|
| 1440 |
+
# Make G2 to be the rotated G1
|
| 1441 |
+
G2.remove_edges_from(
|
| 1442 |
+
[
|
| 1443 |
+
("a", "h"),
|
| 1444 |
+
("a", "h"),
|
| 1445 |
+
("d", "j"),
|
| 1446 |
+
("c", "c"),
|
| 1447 |
+
("c", "c"),
|
| 1448 |
+
("j", "c"),
|
| 1449 |
+
("d", "h"),
|
| 1450 |
+
("g", "b"),
|
| 1451 |
+
]
|
| 1452 |
+
)
|
| 1453 |
+
G2.add_edges_from(
|
| 1454 |
+
[
|
| 1455 |
+
("d", "i"),
|
| 1456 |
+
("a", "h"),
|
| 1457 |
+
("g", "b"),
|
| 1458 |
+
("g", "b"),
|
| 1459 |
+
("i", "i"),
|
| 1460 |
+
("i", "i"),
|
| 1461 |
+
("b", "j"),
|
| 1462 |
+
("d", "j"),
|
| 1463 |
+
]
|
| 1464 |
+
)
|
| 1465 |
+
|
| 1466 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1467 |
+
assert m
|
| 1468 |
+
|
| 1469 |
+
def test_disconnected_multigraph_all_same_labels(self):
|
| 1470 |
+
G1 = nx.MultiGraph()
|
| 1471 |
+
G1.add_nodes_from(list(range(10)))
|
| 1472 |
+
G1.add_edges_from([(i, i) for i in range(10)])
|
| 1473 |
+
|
| 1474 |
+
mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
|
| 1475 |
+
G2 = nx.relabel_nodes(G1, mapped)
|
| 1476 |
+
|
| 1477 |
+
nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
|
| 1478 |
+
nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
|
| 1479 |
+
|
| 1480 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1481 |
+
assert m
|
| 1482 |
+
|
| 1483 |
+
# Add self-loops to non-mapped nodes. Should be the same, as the graph is disconnected.
|
| 1484 |
+
G1.add_edges_from([(i, i) for i in range(5, 8)] * 3)
|
| 1485 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1486 |
+
assert not m
|
| 1487 |
+
|
| 1488 |
+
# Compensate in G2
|
| 1489 |
+
G2.add_edges_from([(i, i) for i in range(3)] * 3)
|
| 1490 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1491 |
+
assert m
|
| 1492 |
+
|
| 1493 |
+
# Add one more self-loop in G2
|
| 1494 |
+
G2.add_edges_from([(0, 0), (1, 1), (1, 1)])
|
| 1495 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1496 |
+
assert not m
|
| 1497 |
+
|
| 1498 |
+
# Compensate in G1
|
| 1499 |
+
G1.add_edges_from([(5, 5), (7, 7), (7, 7)])
|
| 1500 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1501 |
+
assert m
|
| 1502 |
+
|
| 1503 |
+
def test_disconnected_multigraph_all_different_labels(self):
|
| 1504 |
+
G1 = nx.MultiGraph()
|
| 1505 |
+
G1.add_nodes_from(list(range(10)))
|
| 1506 |
+
G1.add_edges_from([(i, i) for i in range(10)])
|
| 1507 |
+
|
| 1508 |
+
mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
|
| 1509 |
+
G2 = nx.relabel_nodes(G1, mapped)
|
| 1510 |
+
|
| 1511 |
+
nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
|
| 1512 |
+
nx.set_node_attributes(
|
| 1513 |
+
G2,
|
| 1514 |
+
dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
|
| 1515 |
+
"label",
|
| 1516 |
+
)
|
| 1517 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1518 |
+
assert m
|
| 1519 |
+
assert m == mapped
|
| 1520 |
+
|
| 1521 |
+
# Add self-loops to non-mapped nodes. Now it is not the same, as there are different labels
|
| 1522 |
+
G1.add_edges_from([(i, i) for i in range(5, 8)] * 3)
|
| 1523 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1524 |
+
assert not m
|
| 1525 |
+
|
| 1526 |
+
# Add self-loops to non mapped nodes in G2 as well
|
| 1527 |
+
G2.add_edges_from([(mapped[i], mapped[i]) for i in range(3)] * 7)
|
| 1528 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1529 |
+
assert not m
|
| 1530 |
+
|
| 1531 |
+
# Add self-loops to mapped nodes in G2
|
| 1532 |
+
G2.add_edges_from([(mapped[i], mapped[i]) for i in range(5, 8)] * 3)
|
| 1533 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1534 |
+
assert not m
|
| 1535 |
+
|
| 1536 |
+
# Add self-loops to G1 so that they are even in both graphs
|
| 1537 |
+
G1.add_edges_from([(i, i) for i in range(3)] * 7)
|
| 1538 |
+
m = vf2pp_isomorphism(G1, G2, node_label="label")
|
| 1539 |
+
assert m
|
| 1540 |
+
|
| 1541 |
+
|
| 1542 |
+
class TestDiGraphISOVF2pp:
|
| 1543 |
+
def test_wikipedia_graph(self):
|
| 1544 |
+
edges1 = [
|
| 1545 |
+
(1, 5),
|
| 1546 |
+
(1, 2),
|
| 1547 |
+
(1, 4),
|
| 1548 |
+
(3, 2),
|
| 1549 |
+
(6, 2),
|
| 1550 |
+
(3, 4),
|
| 1551 |
+
(7, 3),
|
| 1552 |
+
(4, 8),
|
| 1553 |
+
(5, 8),
|
| 1554 |
+
(6, 5),
|
| 1555 |
+
(6, 7),
|
| 1556 |
+
(7, 8),
|
| 1557 |
+
]
|
| 1558 |
+
mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}
|
| 1559 |
+
|
| 1560 |
+
G1 = nx.DiGraph(edges1)
|
| 1561 |
+
G2 = nx.relabel_nodes(G1, mapped)
|
| 1562 |
+
|
| 1563 |
+
assert vf2pp_isomorphism(G1, G2) == mapped
|
| 1564 |
+
|
| 1565 |
+
# Change the direction of an edge
|
| 1566 |
+
G1.remove_edge(1, 5)
|
| 1567 |
+
G1.add_edge(5, 1)
|
| 1568 |
+
assert vf2pp_isomorphism(G1, G2) is None
|
| 1569 |
+
|
| 1570 |
+
def test_non_isomorphic_same_degree_sequence(self):
|
| 1571 |
+
r"""
|
| 1572 |
+
G1 G2
|
| 1573 |
+
x--------------x x--------------x
|
| 1574 |
+
| \ | | \ |
|
| 1575 |
+
| x-------x | | x-------x |
|
| 1576 |
+
| | | | | | | |
|
| 1577 |
+
| x-------x | | x-------x |
|
| 1578 |
+
| / | | \ |
|
| 1579 |
+
x--------------x x--------------x
|
| 1580 |
+
"""
|
| 1581 |
+
edges1 = [
|
| 1582 |
+
(1, 5),
|
| 1583 |
+
(1, 2),
|
| 1584 |
+
(4, 1),
|
| 1585 |
+
(3, 2),
|
| 1586 |
+
(3, 4),
|
| 1587 |
+
(4, 8),
|
| 1588 |
+
(5, 8),
|
| 1589 |
+
(6, 5),
|
| 1590 |
+
(6, 7),
|
| 1591 |
+
(7, 8),
|
| 1592 |
+
]
|
| 1593 |
+
edges2 = [
|
| 1594 |
+
(1, 5),
|
| 1595 |
+
(1, 2),
|
| 1596 |
+
(4, 1),
|
| 1597 |
+
(3, 2),
|
| 1598 |
+
(4, 3),
|
| 1599 |
+
(5, 8),
|
| 1600 |
+
(6, 5),
|
| 1601 |
+
(6, 7),
|
| 1602 |
+
(3, 7),
|
| 1603 |
+
(8, 7),
|
| 1604 |
+
]
|
| 1605 |
+
|
| 1606 |
+
G1 = nx.DiGraph(edges1)
|
| 1607 |
+
G2 = nx.DiGraph(edges2)
|
| 1608 |
+
assert vf2pp_isomorphism(G1, G2) is None
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py
ADDED
|
@@ -0,0 +1,200 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for VF2 isomorphism algorithm for weighted graphs.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import math
|
| 6 |
+
from operator import eq
|
| 7 |
+
|
| 8 |
+
import networkx as nx
|
| 9 |
+
import networkx.algorithms.isomorphism as iso
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def test_simple():
    """Numeric edge matching across all four graph types.

    For each graph type, first verify a graph matches an identical view of
    itself, then perturb edge weights (a regular edge, a selfloop, or both)
    directly through the graph's internal adjacency dicts and verify the
    match now fails.
    """
    # 16 simple tests
    # NOTE(review): `w` is assigned but never used below.
    w = "weight"
    # (u, v, weight) triples; (0, 0, ...) entries are selfloops, and the
    # duplicated (0, 0) edge only survives as a parallel edge in multigraphs.
    edges = [(0, 0, 1), (0, 0, 1.5), (0, 1, 2), (1, 0, 3)]
    for g1 in [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]:
        g1.add_weighted_edges_from(edges)
        g2 = g1.subgraph(g1.nodes())
        if g1.is_multigraph():
            em = iso.numerical_multiedge_match("weight", 1)
        else:
            em = iso.numerical_edge_match("weight", 1)
        # Sanity check: a graph is isomorphic to a full-subgraph view of itself.
        assert nx.is_isomorphic(g1, g2, edge_match=em)

        for mod1, mod2 in [(False, True), (True, False), (True, True)]:
            # mod1 tests a regular edge
            # mod2 tests a selfloop
            # Multigraph edge data is keyed by edge key, hence the extra dict level.
            if g2.is_multigraph():
                if mod1:
                    data1 = {0: {"weight": 10}}
                if mod2:
                    data2 = {0: {"weight": 1}, 1: {"weight": 2.5}}
            else:
                if mod1:
                    data1 = {"weight": 10}
                if mod2:
                    data2 = {"weight": 2.5}

            # Fresh mutable copy; the perturbations below write straight into
            # the internal adjacency structures (both directions must be kept
            # consistent by hand).
            g2 = g1.subgraph(g1.nodes()).copy()
            if mod1:
                if not g1.is_directed():
                    g2._adj[1][0] = data1
                    g2._adj[0][1] = data1
                else:
                    g2._succ[1][0] = data1
                    g2._pred[0][1] = data1
            if mod2:
                if not g1.is_directed():
                    g2._adj[0][0] = data2
                else:
                    g2._succ[0][0] = data2
                    g2._pred[0][0] = data2

            # After any perturbation the numeric edge match must fail.
            assert not nx.is_isomorphic(g1, g2, edge_match=em)
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def test_weightkey():
    """Edge matching keyed on a specific attribute name."""
    g1 = nx.DiGraph()
    g1.add_edge("A", "B", weight=1)
    g2 = nx.DiGraph()
    g2.add_edge("C", "D", weight=0)

    # Without an edge matcher, attributes are ignored entirely.
    assert nx.is_isomorphic(g1, g2)

    # A matcher keyed on an attribute no edge has compares defaults only.
    matcher = iso.numerical_edge_match("nonexistent attribute", 1)
    assert nx.is_isomorphic(g1, g2, edge_match=matcher)

    # Keyed on "weight": 1 != 0, so the graphs no longer match.
    matcher = iso.numerical_edge_match("weight", 1)
    assert not nx.is_isomorphic(g1, g2, edge_match=matcher)

    # A missing "weight" falls back to the matcher default (1), which
    # agrees with g1's weight, so the match succeeds again.
    g2 = nx.DiGraph()
    g2.add_edge("C", "D")
    assert nx.is_isomorphic(g1, g2, edge_match=matcher)
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class TestNodeMatch_Graph:
    """Node- and edge-match tests on simple undirected graphs.

    Subclasses may replace ``g1``/``g2`` with other graph types and call
    ``build`` again, so both the attribute names and ``build`` are part of
    this class's interface.
    """

    def setup_method(self):
        self.g1 = nx.Graph()
        self.g2 = nx.Graph()
        self.build()

    def build(self):
        # g1: red "A" connected to "B"; g2: blue "C" connected to "D".
        self.g1.add_node("A", color="red")
        self.g1.add_edge("A", "B", weight=1)
        self.g2.add_node("C", color="blue")
        self.g2.add_edge("C", "D", weight=1)

        # Matchers shared by all tests below.
        self.nm = iso.categorical_node_match("color", "")
        self.em = iso.numerical_edge_match("weight", 1)

    def test_noweight_nocolor(self):
        assert nx.is_isomorphic(self.g1, self.g2)

    def test_color1(self):
        # "red" vs "blue": the categorical node match must fail.
        assert not nx.is_isomorphic(self.g1, self.g2, node_match=self.nm)

    def test_color2(self):
        self.g1.nodes["A"]["color"] = "blue"
        assert nx.is_isomorphic(self.g1, self.g2, node_match=self.nm)

    def test_weight1(self):
        assert nx.is_isomorphic(self.g1, self.g2, edge_match=self.em)

    def test_weight2(self):
        self.g1.add_edge("A", "B", weight=2)
        assert not nx.is_isomorphic(self.g1, self.g2, edge_match=self.em)

    def test_colorsandweights1(self):
        result = nx.is_isomorphic(
            self.g1, self.g2, node_match=self.nm, edge_match=self.em
        )
        assert not result

    def test_colorsandweights2(self):
        self.g1.nodes["A"]["color"] = "blue"
        result = nx.is_isomorphic(
            self.g1, self.g2, node_match=self.nm, edge_match=self.em
        )
        assert result

    def test_colorsandweights3(self):
        # Align the colors implicitly left different, and make weights disagree.
        self.g1.add_edge("A", "B", weight=2)
        assert not nx.is_isomorphic(
            self.g1, self.g2, node_match=self.nm, edge_match=self.em
        )
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
class TestEdgeMatch_MultiGraph:
    """Edge-match tests on multigraphs with parallel edges.

    ``build`` branches on ``is_multigraph`` so subclasses can reuse it with
    other graph types; the matchers are stored on ``self`` for the tests.
    """

    def setup_method(self):
        self.g1 = nx.MultiGraph()
        self.g2 = nx.MultiGraph()
        self.GM = iso.MultiGraphMatcher
        self.build()

    def build(self):
        g1 = self.g1
        g2 = self.g2

        # We will assume integer weights only.
        g1.add_edge("A", "B", color="green", weight=0, size=0.5)
        g1.add_edge("A", "B", color="red", weight=1, size=0.35)
        g1.add_edge("A", "B", color="red", weight=2, size=0.65)

        g2.add_edge("C", "D", color="green", weight=1, size=0.5)
        g2.add_edge("C", "D", color="red", weight=0, size=0.45)
        g2.add_edge("C", "D", color="red", weight=2, size=0.65)

        if g1.is_multigraph():
            self.em = iso.numerical_multiedge_match("weight", 1)
            self.emc = iso.categorical_multiedge_match("color", "")
            self.emcm = iso.categorical_multiedge_match(["color", "weight"], ["", 1])
            self.emg1 = iso.generic_multiedge_match("color", "red", eq)
            self.emg2 = iso.generic_multiedge_match(
                ["color", "weight", "size"],
                ["red", 1, 0.5],
                [eq, eq, math.isclose],
            )
        else:
            self.em = iso.numerical_edge_match("weight", 1)
            self.emc = iso.categorical_edge_match("color", "")
            self.emcm = iso.categorical_edge_match(["color", "weight"], ["", 1])
            # Fix: this branch handles non-multigraphs, so the plain edge
            # matcher applies (the original used generic_multiedge_match here,
            # inconsistent with every other matcher in this branch).
            self.emg1 = iso.generic_edge_match("color", "red", eq)
            self.emg2 = iso.generic_edge_match(
                ["color", "weight", "size"],
                ["red", 1, 0.5],
                [eq, eq, math.isclose],
            )

    def test_weights_only(self):
        # The weight multisets per edge-bundle agree: {0, 1, 2} on both sides.
        assert nx.is_isomorphic(self.g1, self.g2, edge_match=self.em)

    def test_colors_only(self):
        gm = self.GM(self.g1, self.g2, edge_match=self.emc)
        assert gm.is_isomorphic()

    def test_colorsandweights(self):
        # Jointly, (color, weight) pairs differ between the bundles.
        gm = self.GM(self.g1, self.g2, edge_match=self.emcm)
        assert not gm.is_isomorphic()

    def test_generic1(self):
        gm = self.GM(self.g1, self.g2, edge_match=self.emg1)
        assert gm.is_isomorphic()

    def test_generic2(self):
        gm = self.GM(self.g1, self.g2, edge_match=self.emg2)
        assert not gm.is_isomorphic()
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
class TestEdgeMatch_DiGraph(TestNodeMatch_Graph):
    """Re-run the node/edge match suite on directed graphs."""

    def setup_method(self):
        # Let the base class initialize, then swap in digraphs and rebuild.
        super().setup_method()
        self.g1 = nx.DiGraph()
        self.g2 = nx.DiGraph()
        self.build()
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
class TestEdgeMatch_MultiDiGraph(TestEdgeMatch_MultiGraph):
    """Re-run the multigraph edge-match suite on directed multigraphs."""

    def setup_method(self):
        # Let the base class initialize, then swap in directed multigraphs,
        # switch to the matching matcher class, and rebuild.
        super().setup_method()
        self.g1 = nx.MultiDiGraph()
        self.g2 = nx.MultiDiGraph()
        self.GM = iso.MultiDiGraphMatcher
        self.build()
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/vf2pp.py
ADDED
|
@@ -0,0 +1,1075 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
***************
|
| 3 |
+
VF2++ Algorithm
|
| 4 |
+
***************
|
| 5 |
+
|
| 6 |
+
An implementation of the VF2++ algorithm [1]_ for Graph Isomorphism testing.
|
| 7 |
+
|
| 8 |
+
The simplest interface to use this module is to call:
|
| 9 |
+
|
| 10 |
+
`vf2pp_is_isomorphic`: to check whether two graphs are isomorphic.
|
| 11 |
+
`vf2pp_isomorphism`: to obtain the node mapping between two graphs,
|
| 12 |
+
in case they are isomorphic.
|
| 13 |
+
`vf2pp_all_isomorphisms`: to generate all possible mappings between two graphs,
|
| 14 |
+
if isomorphic.
|
| 15 |
+
|
| 16 |
+
Introduction
|
| 17 |
+
------------
|
| 18 |
+
The VF2++ algorithm, follows a similar logic to that of VF2, while also
|
| 19 |
+
introducing new easy-to-check cutting rules and determining the optimal access
|
| 20 |
+
order of nodes. It is also implemented in a non-recursive manner, which saves
|
| 21 |
+
both time and space, when compared to its previous counterpart.
|
| 22 |
+
|
| 23 |
+
The optimal node ordering is obtained after taking into consideration both the
|
| 24 |
+
degree but also the label rarity of each node.
|
| 25 |
+
This way we place the nodes that are more likely to match, first in the order,
|
| 26 |
+
thus examining the most promising branches in the beginning.
|
| 27 |
+
The rules also consider node labels, making it easier to prune unfruitful
|
| 28 |
+
branches early in the process.
|
| 29 |
+
|
| 30 |
+
Examples
|
| 31 |
+
--------
|
| 32 |
+
|
| 33 |
+
Suppose G1 and G2 are Isomorphic Graphs. Verification is as follows:
|
| 34 |
+
|
| 35 |
+
Without node labels:
|
| 36 |
+
|
| 37 |
+
>>> import networkx as nx
|
| 38 |
+
>>> G1 = nx.path_graph(4)
|
| 39 |
+
>>> G2 = nx.path_graph(4)
|
| 40 |
+
>>> nx.vf2pp_is_isomorphic(G1, G2, node_label=None)
|
| 41 |
+
True
|
| 42 |
+
>>> nx.vf2pp_isomorphism(G1, G2, node_label=None)
|
| 43 |
+
{1: 1, 2: 2, 0: 0, 3: 3}
|
| 44 |
+
|
| 45 |
+
With node labels:
|
| 46 |
+
|
| 47 |
+
>>> G1 = nx.path_graph(4)
|
| 48 |
+
>>> G2 = nx.path_graph(4)
|
| 49 |
+
>>> mapped = {1: 1, 2: 2, 3: 3, 0: 0}
|
| 50 |
+
>>> nx.set_node_attributes(
|
| 51 |
+
... G1, dict(zip(G1, ["blue", "red", "green", "yellow"])), "label"
|
| 52 |
+
... )
|
| 53 |
+
>>> nx.set_node_attributes(
|
| 54 |
+
... G2,
|
| 55 |
+
... dict(zip([mapped[u] for u in G1], ["blue", "red", "green", "yellow"])),
|
| 56 |
+
... "label",
|
| 57 |
+
... )
|
| 58 |
+
>>> nx.vf2pp_is_isomorphic(G1, G2, node_label="label")
|
| 59 |
+
True
|
| 60 |
+
>>> nx.vf2pp_isomorphism(G1, G2, node_label="label")
|
| 61 |
+
{1: 1, 2: 2, 0: 0, 3: 3}
|
| 62 |
+
|
| 63 |
+
References
|
| 64 |
+
----------
|
| 65 |
+
.. [1] Jüttner, Alpár & Madarasi, Péter. (2018). "VF2++—An improved subgraph
|
| 66 |
+
isomorphism algorithm". Discrete Applied Mathematics. 242.
|
| 67 |
+
https://doi.org/10.1016/j.dam.2018.02.018
|
| 68 |
+
|
| 69 |
+
"""
|
| 70 |
+
|
| 71 |
+
import collections
|
| 72 |
+
|
| 73 |
+
import networkx as nx
|
| 74 |
+
|
| 75 |
+
__all__ = ["vf2pp_isomorphism", "vf2pp_is_isomorphic", "vf2pp_all_isomorphisms"]
|
| 76 |
+
|
| 77 |
+
# Read-only bundle of everything fixed for the whole search: the two graphs,
# each node's label, the label -> nodes groupings for both graphs, and G2's
# degree -> nodes grouping (used when filtering candidate nodes).
_GraphParameters = collections.namedtuple(
    "_GraphParameters",
    [
        "G1",
        "G2",
        "G1_labels",
        "G2_labels",
        "nodes_of_G1Labels",
        "nodes_of_G2Labels",
        "G2_nodes_of_degree",
    ],
)

# Mutable search state: the partial mapping G1->G2 and its inverse, plus the
# frontier sets.  Ti holds uncovered neighbors of covered nodes of Gi;
# Ti_tilde holds nodes of Gi in neither the mapping nor Ti.  The *_in
# variants appear intended for directed graphs — see the update/restore
# helpers elsewhere in this module (TODO confirm; they start empty here).
_StateParameters = collections.namedtuple(
    "_StateParameters",
    [
        "mapping",
        "reverse_mapping",
        "T1",
        "T1_in",
        "T1_tilde",
        "T1_tilde_in",
        "T2",
        "T2_in",
        "T2_tilde",
        "T2_tilde_in",
    ],
)
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"})
def vf2pp_isomorphism(G1, G2, node_label=None, default_label=None):
    """Return an isomorphic mapping between `G1` and `G2` if it exists.

    Parameters
    ----------
    G1, G2 : NetworkX Graph or MultiGraph instances.
        The two graphs to check for isomorphism.

    node_label : str, optional
        The name of the node attribute to be used when comparing nodes.
        The default is `None`, meaning node attributes are not considered
        in the comparison. Any node that doesn't have the `node_label`
        attribute uses `default_label` instead.

    default_label : scalar
        Default value to use when a node doesn't have an attribute
        named `node_label`. Default is `None`.

    Returns
    -------
    dict or None
        Node mapping if the two graphs are isomorphic. None otherwise.
    """
    # `next(iterator, default)` replaces the try/next/except-StopIteration
    # pattern: yield the first mapping if one exists, otherwise None.
    return next(vf2pp_all_isomorphisms(G1, G2, node_label, default_label), None)
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"})
def vf2pp_is_isomorphic(G1, G2, node_label=None, default_label=None):
    """Examines whether G1 and G2 are isomorphic.

    Parameters
    ----------
    G1, G2 : NetworkX Graph or MultiGraph instances.
        The two graphs to check for isomorphism.

    node_label : str, optional
        The name of the node attribute to be used when comparing nodes.
        The default is `None`, meaning node attributes are not considered
        in the comparison. Any node that doesn't have the `node_label`
        attribute uses `default_label` instead.

    default_label : scalar
        Default value to use when a node doesn't have an attribute
        named `node_label`. Default is `None`.

    Returns
    -------
    bool
        True if the two graphs are isomorphic, False otherwise.
    """
    # A mapping exists iff the graphs are isomorphic; return that directly
    # instead of the `if ...: return True / return False` pattern.
    return vf2pp_isomorphism(G1, G2, node_label, default_label) is not None
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"})
def vf2pp_all_isomorphisms(G1, G2, node_label=None, default_label=None):
    """Yields all the possible mappings between G1 and G2.

    Parameters
    ----------
    G1, G2 : NetworkX Graph or MultiGraph instances.
        The two graphs to check for isomorphism.

    node_label : str, optional
        The name of the node attribute to be used when comparing nodes.
        The default is `None`, meaning node attributes are not considered
        in the comparison. Any node that doesn't have the `node_label`
        attribute uses `default_label` instead.

    default_label : scalar
        Default value to use when a node doesn't have an attribute
        named `node_label`. Default is `None`.

    Yields
    ------
    dict
        Isomorphic mapping between the nodes in `G1` and `G2`.
    """
    # NOTE: inside a generator, `return False` simply terminates iteration;
    # the value is discarded (callers receive an empty generator).
    if G1.number_of_nodes() == 0 or G2.number_of_nodes() == 0:
        return False

    # Create the degree dicts based on graph type
    if G1.is_directed():
        # Directed degrees are (in, out) pairs so the sequence check below
        # compares both directions at once.
        G1_degree = {
            n: (in_degree, out_degree)
            for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
        }
        G2_degree = {
            n: (in_degree, out_degree)
            for (n, in_degree), (_, out_degree) in zip(G2.in_degree, G2.out_degree)
        }
    else:
        G1_degree = dict(G1.degree)
        G2_degree = dict(G2.degree)

    # Select the directed/undirected variants of the helpers once, up front.
    if not G1.is_directed():
        find_candidates = _find_candidates
        restore_Tinout = _restore_Tinout
    else:
        find_candidates = _find_candidates_Di
        restore_Tinout = _restore_Tinout_Di

    # Check that both graphs have the same number of nodes and degree sequence
    if G1.order() != G2.order():
        return False
    if sorted(G1_degree.values()) != sorted(G2_degree.values()):
        return False

    # Initialize parameters and cache necessary information about degree and labels
    graph_params, state_params = _initialize_parameters(
        G1, G2, G2_degree, node_label, default_label
    )

    # Check if G1 and G2 have the same labels, and that number of nodes per label is equal between the two graphs
    if not _precheck_label_properties(graph_params):
        return False

    # Calculate the optimal node ordering
    node_order = _matching_order(graph_params)

    # Initialize the stack
    stack = []
    candidates = iter(
        find_candidates(node_order[0], graph_params, state_params, G1_degree)
    )
    stack.append((node_order[0], candidates))

    mapping = state_params.mapping
    reverse_mapping = state_params.reverse_mapping

    # Index of the node from the order, currently being examined
    matching_node = 1

    # Depth-first search over (node, candidate-iterator) frames.  Each frame
    # owns the candidates for one node of the ordering; exhausting a frame
    # triggers backtracking, which must undo the mapping entry made when the
    # frame below it advanced.
    while stack:
        current_node, candidate_nodes = stack[-1]

        try:
            candidate = next(candidate_nodes)
        except StopIteration:
            # If no remaining candidates, return to a previous state, and follow another branch
            stack.pop()
            matching_node -= 1
            if stack:
                # Pop the previously added u-v pair, and look for a different candidate _v for u
                popped_node1, _ = stack[-1]
                popped_node2 = mapping[popped_node1]
                mapping.pop(popped_node1)
                reverse_mapping.pop(popped_node2)
                restore_Tinout(popped_node1, popped_node2, graph_params, state_params)
            continue

        if _feasibility(current_node, candidate, graph_params, state_params):
            # Terminate if mapping is extended to its full
            if len(mapping) == G2.number_of_nodes() - 1:
                # Yield a copy so the caller's dict is not mutated by further
                # search; the live `mapping` is left untouched so the next
                # candidate for `current_node` can be tried on `continue`.
                cp_mapping = mapping.copy()
                cp_mapping[current_node] = candidate
                yield cp_mapping
                continue

            # Feasibility rules pass, so extend the mapping and update the parameters
            mapping[current_node] = candidate
            reverse_mapping[candidate] = current_node
            _update_Tinout(current_node, candidate, graph_params, state_params)
            # Append the next node and its candidates to the stack
            candidates = iter(
                find_candidates(
                    node_order[matching_node], graph_params, state_params, G1_degree
                )
            )
            stack.append((node_order[matching_node], candidates))
            matching_node += 1
|
| 284 |
+
|
| 285 |
+
|
| 286 |
+
def _precheck_label_properties(graph_params):
|
| 287 |
+
G1, G2, G1_labels, G2_labels, nodes_of_G1Labels, nodes_of_G2Labels, _ = graph_params
|
| 288 |
+
if any(
|
| 289 |
+
label not in nodes_of_G1Labels or len(nodes_of_G1Labels[label]) != len(nodes)
|
| 290 |
+
for label, nodes in nodes_of_G2Labels.items()
|
| 291 |
+
):
|
| 292 |
+
return False
|
| 293 |
+
return True
|
| 294 |
+
|
| 295 |
+
|
| 296 |
+
def _initialize_parameters(G1, G2, G2_degree, node_label=None, default_label=-1):
    """Initializes all the necessary parameters for VF2++

    Parameters
    ----------
    G1,G2: NetworkX Graph or MultiGraph instances.
        The two graphs to check for isomorphism or monomorphism

    G2_degree : dict
        Degree (or (in, out) degree pair for digraphs) of every node of G2.

    node_label : str or None
        Node attribute name used for labels; None means unlabeled.

    default_label : scalar
        Label used for nodes missing the `node_label` attribute.

    Returns
    -------
    graph_params: namedtuple
        Contains all the Graph-related parameters:

        G1,G2
        G1_labels,G2_labels: dict

    state_params: namedtuple
        Contains all the State-related parameters:

        mapping: dict
            The mapping as extended so far. Maps nodes of G1 to nodes of G2

        reverse_mapping: dict
            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed

        T1, T2: set
            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
            neighbors of nodes that are.

        T1_out, T2_out: set
            Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti
    """
    # Label of every node, falling back to `default_label` when missing.
    G1_labels = dict(G1.nodes(data=node_label, default=default_label))
    G2_labels = dict(G2.nodes(data=node_label, default=default_label))

    graph_params = _GraphParameters(
        G1,
        G2,
        G1_labels,
        G2_labels,
        # Invert the label dicts into label -> {nodes} groupings.
        nx.utils.groups(G1_labels),
        nx.utils.groups(G2_labels),
        nx.utils.groups(G2_degree),
    )

    # Nothing is mapped yet: frontiers Ti are empty and every node starts
    # in Ti_tilde ("unseen").
    T1, T1_in = set(), set()
    T2, T2_in = set(), set()
    # NOTE(review): the directed and undirected branches below build
    # identical values — the split looks like a leftover from development.
    if G1.is_directed():
        T1_tilde, T1_tilde_in = (
            set(G1.nodes()),
            set(),
        )  # todo: do we need Ti_tilde_in? What nodes does it have?
        T2_tilde, T2_tilde_in = set(G2.nodes()), set()
    else:
        T1_tilde, T1_tilde_in = set(G1.nodes()), set()
        T2_tilde, T2_tilde_in = set(G2.nodes()), set()

    state_params = _StateParameters(
        {},
        {},
        T1,
        T1_in,
        T1_tilde,
        T1_tilde_in,
        T2,
        T2_in,
        T2_tilde,
        T2_tilde_in,
    )

    return graph_params, state_params
|
| 370 |
+
|
| 371 |
+
|
| 372 |
+
def _matching_order(graph_params):
    """The node ordering as introduced in VF2++.

    Notes
    -----
    Taking into account the structure of the Graph and the node labeling, the nodes are placed in an order such that,
    most of the unfruitful/infeasible branches of the search space can be pruned on high levels, significantly
    decreasing the number of visited states. The premise is that, the algorithm will be able to recognize
    inconsistencies early, proceeding to go deep into the search tree only if it's needed.

    Parameters
    ----------
    graph_params: namedtuple
        Contains:

            G1,G2: NetworkX Graph or MultiGraph instances.
                The two graphs to check for isomorphism or monomorphism.

            G1_labels,G2_labels: dict
                The label of every node in G1 and G2 respectively.

    Returns
    -------
    node_order: list
        The ordering of the nodes.
    """
    G1, G2, G1_labels, _, _, nodes_of_G2Labels, _ = graph_params
    # NOTE(review): returns {} here but a list everywhere else; both are
    # empty iterables, but the documented return type is a list.
    if not G1 and not G2:
        return {}

    # BFS layering below is on the undirected structure even for digraphs.
    if G1.is_directed():
        G1 = G1.to_undirected(as_view=True)

    V1_unordered = set(G1.nodes())
    # How many G2 nodes carry each label; rarer labels are more constraining.
    label_rarity = {label: len(nodes) for label, nodes in nodes_of_G2Labels.items()}
    # Number of already-ordered neighbors of each node ("connectivity").
    used_degrees = {node: 0 for node in G1}
    node_order = []

    # One BFS per connected component, rooted at the highest-degree node
    # among those with the rarest label.
    while V1_unordered:
        max_rarity = min(label_rarity[G1_labels[x]] for x in V1_unordered)
        rarest_nodes = [
            n for n in V1_unordered if label_rarity[G1_labels[n]] == max_rarity
        ]
        max_node = max(rarest_nodes, key=G1.degree)

        for dlevel_nodes in nx.bfs_layers(G1, max_node):
            # Within each BFS layer, repeatedly pick the next node by:
            # (1) most already-ordered neighbors, (2) highest degree,
            # (3) rarest label — in that priority order.
            nodes_to_add = dlevel_nodes.copy()
            while nodes_to_add:
                max_used_degree = max(used_degrees[n] for n in nodes_to_add)
                max_used_degree_nodes = [
                    n for n in nodes_to_add if used_degrees[n] == max_used_degree
                ]
                max_degree = max(G1.degree[n] for n in max_used_degree_nodes)
                max_degree_nodes = [
                    n for n in max_used_degree_nodes if G1.degree[n] == max_degree
                ]
                next_node = min(
                    max_degree_nodes, key=lambda x: label_rarity[G1_labels[x]]
                )

                node_order.append(next_node)
                # Ordering `next_node` raises its neighbors' connectivity.
                for node in G1.neighbors(next_node):
                    used_degrees[node] += 1

                nodes_to_add.remove(next_node)
                # Its label is now "used up" once more in G2's budget.
                label_rarity[G1_labels[next_node]] -= 1
                V1_unordered.discard(next_node)

    return node_order
|
| 441 |
+
|
| 442 |
+
|
| 443 |
+
def _find_candidates(
|
| 444 |
+
u, graph_params, state_params, G1_degree
|
| 445 |
+
): # todo: make the 4th argument the degree of u
|
| 446 |
+
"""Given node u of G1, finds the candidates of u from G2.
|
| 447 |
+
|
| 448 |
+
Parameters
|
| 449 |
+
----------
|
| 450 |
+
u: Graph node
|
| 451 |
+
The node from G1 for which to find the candidates from G2.
|
| 452 |
+
|
| 453 |
+
graph_params: namedtuple
|
| 454 |
+
Contains all the Graph-related parameters:
|
| 455 |
+
|
| 456 |
+
G1,G2: NetworkX Graph or MultiGraph instances.
|
| 457 |
+
The two graphs to check for isomorphism or monomorphism
|
| 458 |
+
|
| 459 |
+
G1_labels,G2_labels: dict
|
| 460 |
+
The label of every node in G1 and G2 respectively
|
| 461 |
+
|
| 462 |
+
state_params: namedtuple
|
| 463 |
+
Contains all the State-related parameters:
|
| 464 |
+
|
| 465 |
+
mapping: dict
|
| 466 |
+
The mapping as extended so far. Maps nodes of G1 to nodes of G2
|
| 467 |
+
|
| 468 |
+
reverse_mapping: dict
|
| 469 |
+
The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
|
| 470 |
+
|
| 471 |
+
T1, T2: set
|
| 472 |
+
Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
|
| 473 |
+
neighbors of nodes that are.
|
| 474 |
+
|
| 475 |
+
T1_tilde, T2_tilde: set
|
| 476 |
+
Ti_tilde contains all the nodes from Gi, that are neither in the mapping nor in Ti
|
| 477 |
+
|
| 478 |
+
Returns
|
| 479 |
+
-------
|
| 480 |
+
candidates: set
|
| 481 |
+
The nodes from G2 which are candidates for u.
|
| 482 |
+
"""
|
| 483 |
+
G1, G2, G1_labels, _, _, nodes_of_G2Labels, G2_nodes_of_degree = graph_params
|
| 484 |
+
mapping, reverse_mapping, _, _, _, _, _, _, T2_tilde, _ = state_params
|
| 485 |
+
|
| 486 |
+
covered_nbrs = [nbr for nbr in G1[u] if nbr in mapping]
|
| 487 |
+
if not covered_nbrs:
|
| 488 |
+
candidates = set(nodes_of_G2Labels[G1_labels[u]])
|
| 489 |
+
candidates.intersection_update(G2_nodes_of_degree[G1_degree[u]])
|
| 490 |
+
candidates.intersection_update(T2_tilde)
|
| 491 |
+
candidates.difference_update(reverse_mapping)
|
| 492 |
+
if G1.is_multigraph():
|
| 493 |
+
candidates.difference_update(
|
| 494 |
+
{
|
| 495 |
+
node
|
| 496 |
+
for node in candidates
|
| 497 |
+
if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
|
| 498 |
+
}
|
| 499 |
+
)
|
| 500 |
+
return candidates
|
| 501 |
+
|
| 502 |
+
nbr1 = covered_nbrs[0]
|
| 503 |
+
common_nodes = set(G2[mapping[nbr1]])
|
| 504 |
+
|
| 505 |
+
for nbr1 in covered_nbrs[1:]:
|
| 506 |
+
common_nodes.intersection_update(G2[mapping[nbr1]])
|
| 507 |
+
|
| 508 |
+
common_nodes.difference_update(reverse_mapping)
|
| 509 |
+
common_nodes.intersection_update(G2_nodes_of_degree[G1_degree[u]])
|
| 510 |
+
common_nodes.intersection_update(nodes_of_G2Labels[G1_labels[u]])
|
| 511 |
+
if G1.is_multigraph():
|
| 512 |
+
common_nodes.difference_update(
|
| 513 |
+
{
|
| 514 |
+
node
|
| 515 |
+
for node in common_nodes
|
| 516 |
+
if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
|
| 517 |
+
}
|
| 518 |
+
)
|
| 519 |
+
return common_nodes
|
| 520 |
+
|
| 521 |
+
|
| 522 |
+
def _find_candidates_Di(u, graph_params, state_params, G1_degree):
|
| 523 |
+
G1, G2, G1_labels, _, _, nodes_of_G2Labels, G2_nodes_of_degree = graph_params
|
| 524 |
+
mapping, reverse_mapping, _, _, _, _, _, _, T2_tilde, _ = state_params
|
| 525 |
+
|
| 526 |
+
covered_successors = [succ for succ in G1[u] if succ in mapping]
|
| 527 |
+
covered_predecessors = [pred for pred in G1.pred[u] if pred in mapping]
|
| 528 |
+
|
| 529 |
+
if not (covered_successors or covered_predecessors):
|
| 530 |
+
candidates = set(nodes_of_G2Labels[G1_labels[u]])
|
| 531 |
+
candidates.intersection_update(G2_nodes_of_degree[G1_degree[u]])
|
| 532 |
+
candidates.intersection_update(T2_tilde)
|
| 533 |
+
candidates.difference_update(reverse_mapping)
|
| 534 |
+
if G1.is_multigraph():
|
| 535 |
+
candidates.difference_update(
|
| 536 |
+
{
|
| 537 |
+
node
|
| 538 |
+
for node in candidates
|
| 539 |
+
if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
|
| 540 |
+
}
|
| 541 |
+
)
|
| 542 |
+
return candidates
|
| 543 |
+
|
| 544 |
+
if covered_successors:
|
| 545 |
+
succ1 = covered_successors[0]
|
| 546 |
+
common_nodes = set(G2.pred[mapping[succ1]])
|
| 547 |
+
|
| 548 |
+
for succ1 in covered_successors[1:]:
|
| 549 |
+
common_nodes.intersection_update(G2.pred[mapping[succ1]])
|
| 550 |
+
else:
|
| 551 |
+
pred1 = covered_predecessors.pop()
|
| 552 |
+
common_nodes = set(G2[mapping[pred1]])
|
| 553 |
+
|
| 554 |
+
for pred1 in covered_predecessors:
|
| 555 |
+
common_nodes.intersection_update(G2[mapping[pred1]])
|
| 556 |
+
|
| 557 |
+
common_nodes.difference_update(reverse_mapping)
|
| 558 |
+
common_nodes.intersection_update(G2_nodes_of_degree[G1_degree[u]])
|
| 559 |
+
common_nodes.intersection_update(nodes_of_G2Labels[G1_labels[u]])
|
| 560 |
+
if G1.is_multigraph():
|
| 561 |
+
common_nodes.difference_update(
|
| 562 |
+
{
|
| 563 |
+
node
|
| 564 |
+
for node in common_nodes
|
| 565 |
+
if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
|
| 566 |
+
}
|
| 567 |
+
)
|
| 568 |
+
return common_nodes
|
| 569 |
+
|
| 570 |
+
|
| 571 |
+
def _feasibility(node1, node2, graph_params, state_params):
|
| 572 |
+
"""Given a candidate pair of nodes u and v from G1 and G2 respectively, checks if it's feasible to extend the
|
| 573 |
+
mapping, i.e. if u and v can be matched.
|
| 574 |
+
|
| 575 |
+
Notes
|
| 576 |
+
-----
|
| 577 |
+
This function performs all the necessary checking by applying both consistency and cutting rules.
|
| 578 |
+
|
| 579 |
+
Parameters
|
| 580 |
+
----------
|
| 581 |
+
node1, node2: Graph node
|
| 582 |
+
The candidate pair of nodes being checked for matching
|
| 583 |
+
|
| 584 |
+
graph_params: namedtuple
|
| 585 |
+
Contains all the Graph-related parameters:
|
| 586 |
+
|
| 587 |
+
G1,G2: NetworkX Graph or MultiGraph instances.
|
| 588 |
+
The two graphs to check for isomorphism or monomorphism
|
| 589 |
+
|
| 590 |
+
G1_labels,G2_labels: dict
|
| 591 |
+
The label of every node in G1 and G2 respectively
|
| 592 |
+
|
| 593 |
+
state_params: namedtuple
|
| 594 |
+
Contains all the State-related parameters:
|
| 595 |
+
|
| 596 |
+
mapping: dict
|
| 597 |
+
The mapping as extended so far. Maps nodes of G1 to nodes of G2
|
| 598 |
+
|
| 599 |
+
reverse_mapping: dict
|
| 600 |
+
The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
|
| 601 |
+
|
| 602 |
+
T1, T2: set
|
| 603 |
+
Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
|
| 604 |
+
neighbors of nodes that are.
|
| 605 |
+
|
| 606 |
+
T1_out, T2_out: set
|
| 607 |
+
Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti
|
| 608 |
+
|
| 609 |
+
Returns
|
| 610 |
+
-------
|
| 611 |
+
True if all checks are successful, False otherwise.
|
| 612 |
+
"""
|
| 613 |
+
G1 = graph_params.G1
|
| 614 |
+
|
| 615 |
+
if _cut_PT(node1, node2, graph_params, state_params):
|
| 616 |
+
return False
|
| 617 |
+
|
| 618 |
+
if G1.is_multigraph():
|
| 619 |
+
if not _consistent_PT(node1, node2, graph_params, state_params):
|
| 620 |
+
return False
|
| 621 |
+
|
| 622 |
+
return True
|
| 623 |
+
|
| 624 |
+
|
| 625 |
+
def _cut_PT(u, v, graph_params, state_params):
|
| 626 |
+
"""Implements the cutting rules for the ISO problem.
|
| 627 |
+
|
| 628 |
+
Parameters
|
| 629 |
+
----------
|
| 630 |
+
u, v: Graph node
|
| 631 |
+
The two candidate nodes being examined.
|
| 632 |
+
|
| 633 |
+
graph_params: namedtuple
|
| 634 |
+
Contains all the Graph-related parameters:
|
| 635 |
+
|
| 636 |
+
G1,G2: NetworkX Graph or MultiGraph instances.
|
| 637 |
+
The two graphs to check for isomorphism or monomorphism
|
| 638 |
+
|
| 639 |
+
G1_labels,G2_labels: dict
|
| 640 |
+
The label of every node in G1 and G2 respectively
|
| 641 |
+
|
| 642 |
+
state_params: namedtuple
|
| 643 |
+
Contains all the State-related parameters:
|
| 644 |
+
|
| 645 |
+
mapping: dict
|
| 646 |
+
The mapping as extended so far. Maps nodes of G1 to nodes of G2
|
| 647 |
+
|
| 648 |
+
reverse_mapping: dict
|
| 649 |
+
The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
|
| 650 |
+
|
| 651 |
+
T1, T2: set
|
| 652 |
+
Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
|
| 653 |
+
neighbors of nodes that are.
|
| 654 |
+
|
| 655 |
+
T1_tilde, T2_tilde: set
|
| 656 |
+
Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti
|
| 657 |
+
|
| 658 |
+
Returns
|
| 659 |
+
-------
|
| 660 |
+
True if we should prune this branch, i.e. the node pair failed the cutting checks. False otherwise.
|
| 661 |
+
"""
|
| 662 |
+
G1, G2, G1_labels, G2_labels, _, _, _ = graph_params
|
| 663 |
+
(
|
| 664 |
+
_,
|
| 665 |
+
_,
|
| 666 |
+
T1,
|
| 667 |
+
T1_in,
|
| 668 |
+
T1_tilde,
|
| 669 |
+
_,
|
| 670 |
+
T2,
|
| 671 |
+
T2_in,
|
| 672 |
+
T2_tilde,
|
| 673 |
+
_,
|
| 674 |
+
) = state_params
|
| 675 |
+
|
| 676 |
+
u_labels_predecessors, v_labels_predecessors = {}, {}
|
| 677 |
+
if G1.is_directed():
|
| 678 |
+
u_labels_predecessors = nx.utils.groups(
|
| 679 |
+
{n1: G1_labels[n1] for n1 in G1.pred[u]}
|
| 680 |
+
)
|
| 681 |
+
v_labels_predecessors = nx.utils.groups(
|
| 682 |
+
{n2: G2_labels[n2] for n2 in G2.pred[v]}
|
| 683 |
+
)
|
| 684 |
+
|
| 685 |
+
if set(u_labels_predecessors.keys()) != set(v_labels_predecessors.keys()):
|
| 686 |
+
return True
|
| 687 |
+
|
| 688 |
+
u_labels_successors = nx.utils.groups({n1: G1_labels[n1] for n1 in G1[u]})
|
| 689 |
+
v_labels_successors = nx.utils.groups({n2: G2_labels[n2] for n2 in G2[v]})
|
| 690 |
+
|
| 691 |
+
# if the neighbors of u, do not have the same labels as those of v, NOT feasible.
|
| 692 |
+
if set(u_labels_successors.keys()) != set(v_labels_successors.keys()):
|
| 693 |
+
return True
|
| 694 |
+
|
| 695 |
+
for label, G1_nbh in u_labels_successors.items():
|
| 696 |
+
G2_nbh = v_labels_successors[label]
|
| 697 |
+
|
| 698 |
+
if G1.is_multigraph():
|
| 699 |
+
# Check for every neighbor in the neighborhood, if u-nbr1 has same edges as v-nbr2
|
| 700 |
+
u_nbrs_edges = sorted(G1.number_of_edges(u, x) for x in G1_nbh)
|
| 701 |
+
v_nbrs_edges = sorted(G2.number_of_edges(v, x) for x in G2_nbh)
|
| 702 |
+
if any(
|
| 703 |
+
u_nbr_edges != v_nbr_edges
|
| 704 |
+
for u_nbr_edges, v_nbr_edges in zip(u_nbrs_edges, v_nbrs_edges)
|
| 705 |
+
):
|
| 706 |
+
return True
|
| 707 |
+
|
| 708 |
+
if len(T1.intersection(G1_nbh)) != len(T2.intersection(G2_nbh)):
|
| 709 |
+
return True
|
| 710 |
+
if len(T1_tilde.intersection(G1_nbh)) != len(T2_tilde.intersection(G2_nbh)):
|
| 711 |
+
return True
|
| 712 |
+
if G1.is_directed() and len(T1_in.intersection(G1_nbh)) != len(
|
| 713 |
+
T2_in.intersection(G2_nbh)
|
| 714 |
+
):
|
| 715 |
+
return True
|
| 716 |
+
|
| 717 |
+
if not G1.is_directed():
|
| 718 |
+
return False
|
| 719 |
+
|
| 720 |
+
for label, G1_pred in u_labels_predecessors.items():
|
| 721 |
+
G2_pred = v_labels_predecessors[label]
|
| 722 |
+
|
| 723 |
+
if G1.is_multigraph():
|
| 724 |
+
# Check for every neighbor in the neighborhood, if u-nbr1 has same edges as v-nbr2
|
| 725 |
+
u_pred_edges = sorted(G1.number_of_edges(u, x) for x in G1_pred)
|
| 726 |
+
v_pred_edges = sorted(G2.number_of_edges(v, x) for x in G2_pred)
|
| 727 |
+
if any(
|
| 728 |
+
u_nbr_edges != v_nbr_edges
|
| 729 |
+
for u_nbr_edges, v_nbr_edges in zip(u_pred_edges, v_pred_edges)
|
| 730 |
+
):
|
| 731 |
+
return True
|
| 732 |
+
|
| 733 |
+
if len(T1.intersection(G1_pred)) != len(T2.intersection(G2_pred)):
|
| 734 |
+
return True
|
| 735 |
+
if len(T1_tilde.intersection(G1_pred)) != len(T2_tilde.intersection(G2_pred)):
|
| 736 |
+
return True
|
| 737 |
+
if len(T1_in.intersection(G1_pred)) != len(T2_in.intersection(G2_pred)):
|
| 738 |
+
return True
|
| 739 |
+
|
| 740 |
+
return False
|
| 741 |
+
|
| 742 |
+
|
| 743 |
+
def _consistent_PT(u, v, graph_params, state_params):
|
| 744 |
+
"""Checks the consistency of extending the mapping using the current node pair.
|
| 745 |
+
|
| 746 |
+
Parameters
|
| 747 |
+
----------
|
| 748 |
+
u, v: Graph node
|
| 749 |
+
The two candidate nodes being examined.
|
| 750 |
+
|
| 751 |
+
graph_params: namedtuple
|
| 752 |
+
Contains all the Graph-related parameters:
|
| 753 |
+
|
| 754 |
+
G1,G2: NetworkX Graph or MultiGraph instances.
|
| 755 |
+
The two graphs to check for isomorphism or monomorphism
|
| 756 |
+
|
| 757 |
+
G1_labels,G2_labels: dict
|
| 758 |
+
The label of every node in G1 and G2 respectively
|
| 759 |
+
|
| 760 |
+
state_params: namedtuple
|
| 761 |
+
Contains all the State-related parameters:
|
| 762 |
+
|
| 763 |
+
mapping: dict
|
| 764 |
+
The mapping as extended so far. Maps nodes of G1 to nodes of G2
|
| 765 |
+
|
| 766 |
+
reverse_mapping: dict
|
| 767 |
+
The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
|
| 768 |
+
|
| 769 |
+
T1, T2: set
|
| 770 |
+
Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
|
| 771 |
+
neighbors of nodes that are.
|
| 772 |
+
|
| 773 |
+
T1_out, T2_out: set
|
| 774 |
+
Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti
|
| 775 |
+
|
| 776 |
+
Returns
|
| 777 |
+
-------
|
| 778 |
+
True if the pair passes all the consistency checks successfully. False otherwise.
|
| 779 |
+
"""
|
| 780 |
+
G1, G2 = graph_params.G1, graph_params.G2
|
| 781 |
+
mapping, reverse_mapping = state_params.mapping, state_params.reverse_mapping
|
| 782 |
+
|
| 783 |
+
for neighbor in G1[u]:
|
| 784 |
+
if neighbor in mapping:
|
| 785 |
+
if G1.number_of_edges(u, neighbor) != G2.number_of_edges(
|
| 786 |
+
v, mapping[neighbor]
|
| 787 |
+
):
|
| 788 |
+
return False
|
| 789 |
+
|
| 790 |
+
for neighbor in G2[v]:
|
| 791 |
+
if neighbor in reverse_mapping:
|
| 792 |
+
if G1.number_of_edges(u, reverse_mapping[neighbor]) != G2.number_of_edges(
|
| 793 |
+
v, neighbor
|
| 794 |
+
):
|
| 795 |
+
return False
|
| 796 |
+
|
| 797 |
+
if not G1.is_directed():
|
| 798 |
+
return True
|
| 799 |
+
|
| 800 |
+
for predecessor in G1.pred[u]:
|
| 801 |
+
if predecessor in mapping:
|
| 802 |
+
if G1.number_of_edges(predecessor, u) != G2.number_of_edges(
|
| 803 |
+
mapping[predecessor], v
|
| 804 |
+
):
|
| 805 |
+
return False
|
| 806 |
+
|
| 807 |
+
for predecessor in G2.pred[v]:
|
| 808 |
+
if predecessor in reverse_mapping:
|
| 809 |
+
if G1.number_of_edges(
|
| 810 |
+
reverse_mapping[predecessor], u
|
| 811 |
+
) != G2.number_of_edges(predecessor, v):
|
| 812 |
+
return False
|
| 813 |
+
|
| 814 |
+
return True
|
| 815 |
+
|
| 816 |
+
|
| 817 |
+
def _update_Tinout(new_node1, new_node2, graph_params, state_params):
|
| 818 |
+
"""Updates the Ti/Ti_out (i=1,2) when a new node pair u-v is added to the mapping.
|
| 819 |
+
|
| 820 |
+
Notes
|
| 821 |
+
-----
|
| 822 |
+
This function should be called right after the feasibility checks are passed, and node1 is mapped to node2. The
|
| 823 |
+
purpose of this function is to avoid brute force computing of Ti/Ti_out by iterating over all nodes of the graph
|
| 824 |
+
and checking which nodes satisfy the necessary conditions. Instead, in every step of the algorithm we focus
|
| 825 |
+
exclusively on the two nodes that are being added to the mapping, incrementally updating Ti/Ti_out.
|
| 826 |
+
|
| 827 |
+
Parameters
|
| 828 |
+
----------
|
| 829 |
+
new_node1, new_node2: Graph node
|
| 830 |
+
The two new nodes, added to the mapping.
|
| 831 |
+
|
| 832 |
+
graph_params: namedtuple
|
| 833 |
+
Contains all the Graph-related parameters:
|
| 834 |
+
|
| 835 |
+
G1,G2: NetworkX Graph or MultiGraph instances.
|
| 836 |
+
The two graphs to check for isomorphism or monomorphism
|
| 837 |
+
|
| 838 |
+
G1_labels,G2_labels: dict
|
| 839 |
+
The label of every node in G1 and G2 respectively
|
| 840 |
+
|
| 841 |
+
state_params: namedtuple
|
| 842 |
+
Contains all the State-related parameters:
|
| 843 |
+
|
| 844 |
+
mapping: dict
|
| 845 |
+
The mapping as extended so far. Maps nodes of G1 to nodes of G2
|
| 846 |
+
|
| 847 |
+
reverse_mapping: dict
|
| 848 |
+
The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
|
| 849 |
+
|
| 850 |
+
T1, T2: set
|
| 851 |
+
Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
|
| 852 |
+
neighbors of nodes that are.
|
| 853 |
+
|
| 854 |
+
T1_tilde, T2_tilde: set
|
| 855 |
+
Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti
|
| 856 |
+
"""
|
| 857 |
+
G1, G2, _, _, _, _, _ = graph_params
|
| 858 |
+
(
|
| 859 |
+
mapping,
|
| 860 |
+
reverse_mapping,
|
| 861 |
+
T1,
|
| 862 |
+
T1_in,
|
| 863 |
+
T1_tilde,
|
| 864 |
+
T1_tilde_in,
|
| 865 |
+
T2,
|
| 866 |
+
T2_in,
|
| 867 |
+
T2_tilde,
|
| 868 |
+
T2_tilde_in,
|
| 869 |
+
) = state_params
|
| 870 |
+
|
| 871 |
+
uncovered_successors_G1 = {succ for succ in G1[new_node1] if succ not in mapping}
|
| 872 |
+
uncovered_successors_G2 = {
|
| 873 |
+
succ for succ in G2[new_node2] if succ not in reverse_mapping
|
| 874 |
+
}
|
| 875 |
+
|
| 876 |
+
# Add the uncovered neighbors of node1 and node2 in T1 and T2 respectively
|
| 877 |
+
T1.update(uncovered_successors_G1)
|
| 878 |
+
T2.update(uncovered_successors_G2)
|
| 879 |
+
T1.discard(new_node1)
|
| 880 |
+
T2.discard(new_node2)
|
| 881 |
+
|
| 882 |
+
T1_tilde.difference_update(uncovered_successors_G1)
|
| 883 |
+
T2_tilde.difference_update(uncovered_successors_G2)
|
| 884 |
+
T1_tilde.discard(new_node1)
|
| 885 |
+
T2_tilde.discard(new_node2)
|
| 886 |
+
|
| 887 |
+
if not G1.is_directed():
|
| 888 |
+
return
|
| 889 |
+
|
| 890 |
+
uncovered_predecessors_G1 = {
|
| 891 |
+
pred for pred in G1.pred[new_node1] if pred not in mapping
|
| 892 |
+
}
|
| 893 |
+
uncovered_predecessors_G2 = {
|
| 894 |
+
pred for pred in G2.pred[new_node2] if pred not in reverse_mapping
|
| 895 |
+
}
|
| 896 |
+
|
| 897 |
+
T1_in.update(uncovered_predecessors_G1)
|
| 898 |
+
T2_in.update(uncovered_predecessors_G2)
|
| 899 |
+
T1_in.discard(new_node1)
|
| 900 |
+
T2_in.discard(new_node2)
|
| 901 |
+
|
| 902 |
+
T1_tilde.difference_update(uncovered_predecessors_G1)
|
| 903 |
+
T2_tilde.difference_update(uncovered_predecessors_G2)
|
| 904 |
+
T1_tilde.discard(new_node1)
|
| 905 |
+
T2_tilde.discard(new_node2)
|
| 906 |
+
|
| 907 |
+
|
| 908 |
+
def _restore_Tinout(popped_node1, popped_node2, graph_params, state_params):
|
| 909 |
+
"""Restores the previous version of Ti/Ti_out when a node pair is deleted from the mapping.
|
| 910 |
+
|
| 911 |
+
Parameters
|
| 912 |
+
----------
|
| 913 |
+
popped_node1, popped_node2: Graph node
|
| 914 |
+
The two nodes deleted from the mapping.
|
| 915 |
+
|
| 916 |
+
graph_params: namedtuple
|
| 917 |
+
Contains all the Graph-related parameters:
|
| 918 |
+
|
| 919 |
+
G1,G2: NetworkX Graph or MultiGraph instances.
|
| 920 |
+
The two graphs to check for isomorphism or monomorphism
|
| 921 |
+
|
| 922 |
+
G1_labels,G2_labels: dict
|
| 923 |
+
The label of every node in G1 and G2 respectively
|
| 924 |
+
|
| 925 |
+
state_params: namedtuple
|
| 926 |
+
Contains all the State-related parameters:
|
| 927 |
+
|
| 928 |
+
mapping: dict
|
| 929 |
+
The mapping as extended so far. Maps nodes of G1 to nodes of G2
|
| 930 |
+
|
| 931 |
+
reverse_mapping: dict
|
| 932 |
+
The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
|
| 933 |
+
|
| 934 |
+
T1, T2: set
|
| 935 |
+
Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
|
| 936 |
+
neighbors of nodes that are.
|
| 937 |
+
|
| 938 |
+
T1_tilde, T2_tilde: set
|
| 939 |
+
Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti
|
| 940 |
+
"""
|
| 941 |
+
# If the node we want to remove from the mapping, has at least one covered neighbor, add it to T1.
|
| 942 |
+
G1, G2, _, _, _, _, _ = graph_params
|
| 943 |
+
(
|
| 944 |
+
mapping,
|
| 945 |
+
reverse_mapping,
|
| 946 |
+
T1,
|
| 947 |
+
T1_in,
|
| 948 |
+
T1_tilde,
|
| 949 |
+
T1_tilde_in,
|
| 950 |
+
T2,
|
| 951 |
+
T2_in,
|
| 952 |
+
T2_tilde,
|
| 953 |
+
T2_tilde_in,
|
| 954 |
+
) = state_params
|
| 955 |
+
|
| 956 |
+
is_added = False
|
| 957 |
+
for neighbor in G1[popped_node1]:
|
| 958 |
+
if neighbor in mapping:
|
| 959 |
+
# if a neighbor of the excluded node1 is in the mapping, keep node1 in T1
|
| 960 |
+
is_added = True
|
| 961 |
+
T1.add(popped_node1)
|
| 962 |
+
else:
|
| 963 |
+
# check if its neighbor has another connection with a covered node. If not, only then exclude it from T1
|
| 964 |
+
if any(nbr in mapping for nbr in G1[neighbor]):
|
| 965 |
+
continue
|
| 966 |
+
T1.discard(neighbor)
|
| 967 |
+
T1_tilde.add(neighbor)
|
| 968 |
+
|
| 969 |
+
# Case where the node is not present in neither the mapping nor T1. By definition, it should belong to T1_tilde
|
| 970 |
+
if not is_added:
|
| 971 |
+
T1_tilde.add(popped_node1)
|
| 972 |
+
|
| 973 |
+
is_added = False
|
| 974 |
+
for neighbor in G2[popped_node2]:
|
| 975 |
+
if neighbor in reverse_mapping:
|
| 976 |
+
is_added = True
|
| 977 |
+
T2.add(popped_node2)
|
| 978 |
+
else:
|
| 979 |
+
if any(nbr in reverse_mapping for nbr in G2[neighbor]):
|
| 980 |
+
continue
|
| 981 |
+
T2.discard(neighbor)
|
| 982 |
+
T2_tilde.add(neighbor)
|
| 983 |
+
|
| 984 |
+
if not is_added:
|
| 985 |
+
T2_tilde.add(popped_node2)
|
| 986 |
+
|
| 987 |
+
|
| 988 |
+
def _restore_Tinout_Di(popped_node1, popped_node2, graph_params, state_params):
|
| 989 |
+
# If the node we want to remove from the mapping, has at least one covered neighbor, add it to T1.
|
| 990 |
+
G1, G2, _, _, _, _, _ = graph_params
|
| 991 |
+
(
|
| 992 |
+
mapping,
|
| 993 |
+
reverse_mapping,
|
| 994 |
+
T1,
|
| 995 |
+
T1_in,
|
| 996 |
+
T1_tilde,
|
| 997 |
+
T1_tilde_in,
|
| 998 |
+
T2,
|
| 999 |
+
T2_in,
|
| 1000 |
+
T2_tilde,
|
| 1001 |
+
T2_tilde_in,
|
| 1002 |
+
) = state_params
|
| 1003 |
+
|
| 1004 |
+
is_added = False
|
| 1005 |
+
for successor in G1[popped_node1]:
|
| 1006 |
+
if successor in mapping:
|
| 1007 |
+
# if a neighbor of the excluded node1 is in the mapping, keep node1 in T1
|
| 1008 |
+
is_added = True
|
| 1009 |
+
T1_in.add(popped_node1)
|
| 1010 |
+
else:
|
| 1011 |
+
# check if its neighbor has another connection with a covered node. If not, only then exclude it from T1
|
| 1012 |
+
if not any(pred in mapping for pred in G1.pred[successor]):
|
| 1013 |
+
T1.discard(successor)
|
| 1014 |
+
|
| 1015 |
+
if not any(succ in mapping for succ in G1[successor]):
|
| 1016 |
+
T1_in.discard(successor)
|
| 1017 |
+
|
| 1018 |
+
if successor not in T1:
|
| 1019 |
+
if successor not in T1_in:
|
| 1020 |
+
T1_tilde.add(successor)
|
| 1021 |
+
|
| 1022 |
+
for predecessor in G1.pred[popped_node1]:
|
| 1023 |
+
if predecessor in mapping:
|
| 1024 |
+
# if a neighbor of the excluded node1 is in the mapping, keep node1 in T1
|
| 1025 |
+
is_added = True
|
| 1026 |
+
T1.add(popped_node1)
|
| 1027 |
+
else:
|
| 1028 |
+
# check if its neighbor has another connection with a covered node. If not, only then exclude it from T1
|
| 1029 |
+
if not any(pred in mapping for pred in G1.pred[predecessor]):
|
| 1030 |
+
T1.discard(predecessor)
|
| 1031 |
+
|
| 1032 |
+
if not any(succ in mapping for succ in G1[predecessor]):
|
| 1033 |
+
T1_in.discard(predecessor)
|
| 1034 |
+
|
| 1035 |
+
if not (predecessor in T1 or predecessor in T1_in):
|
| 1036 |
+
T1_tilde.add(predecessor)
|
| 1037 |
+
|
| 1038 |
+
# Case where the node is not present in neither the mapping nor T1. By definition it should belong to T1_tilde
|
| 1039 |
+
if not is_added:
|
| 1040 |
+
T1_tilde.add(popped_node1)
|
| 1041 |
+
|
| 1042 |
+
is_added = False
|
| 1043 |
+
for successor in G2[popped_node2]:
|
| 1044 |
+
if successor in reverse_mapping:
|
| 1045 |
+
is_added = True
|
| 1046 |
+
T2_in.add(popped_node2)
|
| 1047 |
+
else:
|
| 1048 |
+
if not any(pred in reverse_mapping for pred in G2.pred[successor]):
|
| 1049 |
+
T2.discard(successor)
|
| 1050 |
+
|
| 1051 |
+
if not any(succ in reverse_mapping for succ in G2[successor]):
|
| 1052 |
+
T2_in.discard(successor)
|
| 1053 |
+
|
| 1054 |
+
if successor not in T2:
|
| 1055 |
+
if successor not in T2_in:
|
| 1056 |
+
T2_tilde.add(successor)
|
| 1057 |
+
|
| 1058 |
+
for predecessor in G2.pred[popped_node2]:
|
| 1059 |
+
if predecessor in reverse_mapping:
|
| 1060 |
+
# if a neighbor of the excluded node1 is in the mapping, keep node1 in T1
|
| 1061 |
+
is_added = True
|
| 1062 |
+
T2.add(popped_node2)
|
| 1063 |
+
else:
|
| 1064 |
+
# check if its neighbor has another connection with a covered node. If not, only then exclude it from T1
|
| 1065 |
+
if not any(pred in reverse_mapping for pred in G2.pred[predecessor]):
|
| 1066 |
+
T2.discard(predecessor)
|
| 1067 |
+
|
| 1068 |
+
if not any(succ in reverse_mapping for succ in G2[predecessor]):
|
| 1069 |
+
T2_in.discard(predecessor)
|
| 1070 |
+
|
| 1071 |
+
if not (predecessor in T2 or predecessor in T2_in):
|
| 1072 |
+
T2_tilde.add(predecessor)
|
| 1073 |
+
|
| 1074 |
+
if not is_added:
|
| 1075 |
+
T2_tilde.add(popped_node2)
|
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py
ADDED
|
@@ -0,0 +1,192 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Module to simplify the specification of user-defined equality functions for
|
| 3 |
+
node and edge attributes during isomorphism checks.
|
| 4 |
+
|
| 5 |
+
During the construction of an isomorphism, the algorithm considers two
|
| 6 |
+
candidate nodes n1 in G1 and n2 in G2. The graphs G1 and G2 are then
|
| 7 |
+
compared with respect to properties involving n1 and n2, and if the outcome
|
| 8 |
+
is good, then the candidate nodes are considered isomorphic. NetworkX
|
| 9 |
+
provides a simple mechanism for users to extend the comparisons to include
|
| 10 |
+
node and edge attributes.
|
| 11 |
+
|
| 12 |
+
Node attributes are handled by the node_match keyword. When considering
|
| 13 |
+
n1 and n2, the algorithm passes their node attribute dictionaries to
|
| 14 |
+
node_match, and if it returns False, then n1 and n2 cannot be
|
| 15 |
+
considered to be isomorphic.
|
| 16 |
+
|
| 17 |
+
Edge attributes are handled by the edge_match keyword. When considering
|
| 18 |
+
n1 and n2, the algorithm must verify that outgoing edges from n1 are
|
| 19 |
+
commensurate with the outgoing edges for n2. If the graph is directed,
|
| 20 |
+
then a similar check is also performed for incoming edges.
|
| 21 |
+
|
| 22 |
+
Focusing only on outgoing edges, we consider pairs of nodes (n1, v1) from
|
| 23 |
+
G1 and (n2, v2) from G2. For graphs and digraphs, there is only one edge
|
| 24 |
+
between (n1, v1) and only one edge between (n2, v2). Those edge attribute
|
| 25 |
+
dictionaries are passed to edge_match, and if it returns False, then
|
| 26 |
+
n1 and n2 cannot be considered isomorphic. For multigraphs and
|
| 27 |
+
multidigraphs, there can be multiple edges between (n1, v1) and also
|
| 28 |
+
multiple edges between (n2, v2). Now, there must exist an isomorphism
|
| 29 |
+
from "all the edges between (n1, v1)" to "all the edges between (n2, v2)".
|
| 30 |
+
So, all of the edge attribute dictionaries are passed to edge_match, and
|
| 31 |
+
it must determine if there is an isomorphism between the two sets of edges.
|
| 32 |
+
"""
|
| 33 |
+
|
| 34 |
+
from . import isomorphvf2 as vf2
|
| 35 |
+
|
| 36 |
+
__all__ = ["GraphMatcher", "DiGraphMatcher", "MultiGraphMatcher", "MultiDiGraphMatcher"]
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def _semantic_feasibility(self, G1_node, G2_node):
|
| 40 |
+
"""Returns True if mapping G1_node to G2_node is semantically feasible."""
|
| 41 |
+
# Make sure the nodes match
|
| 42 |
+
if self.node_match is not None:
|
| 43 |
+
nm = self.node_match(self.G1.nodes[G1_node], self.G2.nodes[G2_node])
|
| 44 |
+
if not nm:
|
| 45 |
+
return False
|
| 46 |
+
|
| 47 |
+
# Make sure the edges match
|
| 48 |
+
if self.edge_match is not None:
|
| 49 |
+
# Cached lookups
|
| 50 |
+
G1nbrs = self.G1_adj[G1_node]
|
| 51 |
+
G2nbrs = self.G2_adj[G2_node]
|
| 52 |
+
core_1 = self.core_1
|
| 53 |
+
edge_match = self.edge_match
|
| 54 |
+
|
| 55 |
+
for neighbor in G1nbrs:
|
| 56 |
+
# G1_node is not in core_1, so we must handle R_self separately
|
| 57 |
+
if neighbor == G1_node:
|
| 58 |
+
if G2_node in G2nbrs and not edge_match(
|
| 59 |
+
G1nbrs[G1_node], G2nbrs[G2_node]
|
| 60 |
+
):
|
| 61 |
+
return False
|
| 62 |
+
elif neighbor in core_1:
|
| 63 |
+
G2_nbr = core_1[neighbor]
|
| 64 |
+
if G2_nbr in G2nbrs and not edge_match(
|
| 65 |
+
G1nbrs[neighbor], G2nbrs[G2_nbr]
|
| 66 |
+
):
|
| 67 |
+
return False
|
| 68 |
+
# syntactic check has already verified that neighbors are symmetric
|
| 69 |
+
|
| 70 |
+
return True
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class GraphMatcher(vf2.GraphMatcher):
|
| 74 |
+
"""VF2 isomorphism checker for undirected graphs."""
|
| 75 |
+
|
| 76 |
+
def __init__(self, G1, G2, node_match=None, edge_match=None):
|
| 77 |
+
"""Initialize graph matcher.
|
| 78 |
+
|
| 79 |
+
Parameters
|
| 80 |
+
----------
|
| 81 |
+
G1, G2: graph
|
| 82 |
+
The graphs to be tested.
|
| 83 |
+
|
| 84 |
+
node_match: callable
|
| 85 |
+
A function that returns True iff node n1 in G1 and n2 in G2
|
| 86 |
+
should be considered equal during the isomorphism test. The
|
| 87 |
+
function will be called like::
|
| 88 |
+
|
| 89 |
+
node_match(G1.nodes[n1], G2.nodes[n2])
|
| 90 |
+
|
| 91 |
+
That is, the function will receive the node attribute dictionaries
|
| 92 |
+
of the nodes under consideration. If None, then no attributes are
|
| 93 |
+
considered when testing for an isomorphism.
|
| 94 |
+
|
| 95 |
+
edge_match: callable
|
| 96 |
+
A function that returns True iff the edge attribute dictionary for
|
| 97 |
+
the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should be
|
| 98 |
+
considered equal during the isomorphism test. The function will be
|
| 99 |
+
called like::
|
| 100 |
+
|
| 101 |
+
edge_match(G1[u1][v1], G2[u2][v2])
|
| 102 |
+
|
| 103 |
+
That is, the function will receive the edge attribute dictionaries
|
| 104 |
+
of the edges under consideration. If None, then no attributes are
|
| 105 |
+
considered when testing for an isomorphism.
|
| 106 |
+
|
| 107 |
+
"""
|
| 108 |
+
vf2.GraphMatcher.__init__(self, G1, G2)
|
| 109 |
+
|
| 110 |
+
self.node_match = node_match
|
| 111 |
+
self.edge_match = edge_match
|
| 112 |
+
|
| 113 |
+
# These will be modified during checks to minimize code repeat.
|
| 114 |
+
self.G1_adj = self.G1.adj
|
| 115 |
+
self.G2_adj = self.G2.adj
|
| 116 |
+
|
| 117 |
+
semantic_feasibility = _semantic_feasibility
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
class DiGraphMatcher(vf2.DiGraphMatcher):
    """VF2 isomorphism checker for directed graphs."""

    def __init__(self, G1, G2, node_match=None, edge_match=None):
        """Initialize the matcher for the pair of digraphs ``G1`` and ``G2``.

        Parameters
        ----------
        G1, G2 : graph
            The graphs to be tested.

        node_match : callable
            A function returning True iff node n1 in G1 and node n2 in G2
            may be considered equal during the isomorphism test.  It is
            invoked as::

                node_match(G1.nodes[n1], G2.nodes[n2])

            i.e. it receives the attribute dictionaries of the two nodes
            under consideration.  When None, node attributes play no role
            in the isomorphism test.

        edge_match : callable
            A function returning True iff the edge attribute dictionary of
            the pair (u1, v1) in G1 and of (u2, v2) in G2 may be considered
            equal during the isomorphism test.  It is invoked as::

                edge_match(G1[u1][v1], G2[u2][v2])

            i.e. it receives the attribute dictionaries of the two edges
            under consideration.  When None, edge attributes play no role
            in the isomorphism test.

        """
        super().__init__(G1, G2)

        self.node_match = node_match
        self.edge_match = edge_match

        # These attributes are temporarily repointed at the predecessor
        # views inside semantic_feasibility so the shared helper can test
        # both edge directions without duplicated code.
        self.G1_adj = self.G1.adj
        self.G2_adj = self.G2.adj

    def semantic_feasibility(self, G1_node, G2_node):
        """Returns True if mapping G1_node to G2_node is semantically feasible."""

        # First pass: node_match plus edge_match over successors (G.adj).
        if not _semantic_feasibility(self, G1_node, G2_node):
            return False

        # Second pass: edge_match over predecessors.  Swap the adjacency
        # attributes to the predecessor views, run the same helper, then
        # restore the successor views before returning.
        self.G1_adj, self.G2_adj = self.G1.pred, self.G2.pred
        pred_feasible = _semantic_feasibility(self, G1_node, G2_node)
        self.G1_adj, self.G2_adj = self.G1.adj, self.G2.adj

        return pred_feasible
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
# The "semantics" of edge_match are different for multi(di)graphs, but
|
| 183 |
+
# the implementation is the same. So, technically we do not need to
|
| 184 |
+
# provide "multi" versions, but we do so to match NetworkX's base classes.
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
class MultiGraphMatcher(GraphMatcher):
    """VF2 isomorphism checker for undirected multigraphs.

    The edge_match semantics differ for multigraphs, but the implementation
    is identical, so this class adds nothing beyond the GraphMatcher base;
    it exists to mirror NetworkX's graph class hierarchy.
    """
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
class MultiDiGraphMatcher(DiGraphMatcher):
    """VF2 isomorphism checker for directed multigraphs.

    The edge_match semantics differ for multigraphs, but the implementation
    is identical, so this class adds nothing beyond the DiGraphMatcher base;
    it exists to mirror NetworkX's graph class hierarchy.
    """
|