ZTWHHH committed on
Commit
1786a07
·
verified ·
1 Parent(s): 2fffc6e

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +3 -0
  2. minigpt2/lib/python3.10/site-packages/idna/__pycache__/idnadata.cpython-310.pyc +3 -0
  3. minigpt2/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc +3 -0
  4. minigpt2/lib/python3.10/site-packages/networkx/algorithms/asteroidal.py +171 -0
  5. minigpt2/lib/python3.10/site-packages/networkx/algorithms/boundary.py +168 -0
  6. minigpt2/lib/python3.10/site-packages/networkx/algorithms/cluster.py +609 -0
  7. minigpt2/lib/python3.10/site-packages/networkx/algorithms/communicability_alg.py +163 -0
  8. minigpt2/lib/python3.10/site-packages/networkx/algorithms/efficiency_measures.py +167 -0
  9. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__init__.py +7 -0
  10. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/__init__.cpython-310.pyc +0 -0
  11. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/ismags.cpython-310.pyc +0 -0
  12. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorph.cpython-310.pyc +0 -0
  13. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-310.pyc +0 -0
  14. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-310.pyc +0 -0
  15. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-310.pyc +0 -0
  16. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-310.pyc +0 -0
  17. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2pp.cpython-310.pyc +0 -0
  18. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2userfunc.cpython-310.pyc +0 -0
  19. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/ismags.py +1163 -0
  20. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorph.py +249 -0
  21. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py +1238 -0
  22. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/matchhelpers.py +352 -0
  23. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py +308 -0
  24. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__init__.py +0 -0
  25. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/__init__.cpython-310.pyc +0 -0
  26. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_ismags.cpython-310.pyc +0 -0
  27. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphism.cpython-310.pyc +0 -0
  28. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphvf2.cpython-310.pyc +0 -0
  29. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_match_helpers.cpython-310.pyc +0 -0
  30. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_temporalisomorphvf2.cpython-310.pyc +0 -0
  31. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_tree_isomorphism.cpython-310.pyc +0 -0
  32. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp.cpython-310.pyc +0 -0
  33. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp_helpers.cpython-310.pyc +0 -0
  34. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2userfunc.cpython-310.pyc +0 -0
  35. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99 +0 -0
  36. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99 +0 -0
  37. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99 +0 -0
  38. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99 +0 -0
  39. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py +327 -0
  40. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py +48 -0
  41. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py +410 -0
  42. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_match_helpers.py +64 -0
  43. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py +212 -0
  44. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py +292 -0
  45. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp.py +1608 -0
  46. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py +3106 -0
  47. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py +200 -0
  48. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tree_isomorphism.py +284 -0
  49. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/vf2pp.py +1075 -0
  50. minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py +192 -0
.gitattributes CHANGED
@@ -1374,3 +1374,6 @@ minigpt2/lib/python3.10/site-packages/pillow.libs/libopenjp2-05423b53.so filter=
1374
  minigpt2/lib/python3.10/site-packages/pillow.libs/liblzma-13fa198c.so.5.4.5 filter=lfs diff=lfs merge=lfs -text
1375
  minigpt2/lib/python3.10/site-packages/pillow.libs/libjpeg-77ae51ab.so.62.4.0 filter=lfs diff=lfs merge=lfs -text
1376
  minigpt2/lib/python3.10/site-packages/pillow.libs/liblcms2-e69eef39.so.2.0.16 filter=lfs diff=lfs merge=lfs -text
 
 
 
 
1374
  minigpt2/lib/python3.10/site-packages/pillow.libs/liblzma-13fa198c.so.5.4.5 filter=lfs diff=lfs merge=lfs -text
1375
  minigpt2/lib/python3.10/site-packages/pillow.libs/libjpeg-77ae51ab.so.62.4.0 filter=lfs diff=lfs merge=lfs -text
1376
  minigpt2/lib/python3.10/site-packages/pillow.libs/liblcms2-e69eef39.so.2.0.16 filter=lfs diff=lfs merge=lfs -text
1377
+ minigpt2/lib/python3.10/site-packages/idna/__pycache__/idnadata.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
1378
+ minigpt2/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
1379
+ minigpt2/lib/python3.10/site-packages/pillow.libs/libbrotlicommon-3ecfe81c.so.1 filter=lfs diff=lfs merge=lfs -text
minigpt2/lib/python3.10/site-packages/idna/__pycache__/idnadata.cpython-310.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:053d2be7ff186942e253ba0956fac839831a75276a51defe9b97cf2a19718ade
3
+ size 194418
minigpt2/lib/python3.10/site-packages/idna/__pycache__/uts46data.cpython-310.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ddb07983c8ac1d76df51414a4c42740ddf03b814d0ff1e9b5d745a911e711ce3
3
+ size 152346
minigpt2/lib/python3.10/site-packages/networkx/algorithms/asteroidal.py ADDED
@@ -0,0 +1,171 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Algorithms for asteroidal triples and asteroidal numbers in graphs.
3
+
4
+ An asteroidal triple in a graph G is a set of three non-adjacent vertices
5
+ u, v and w such that there exist a path between any two of them that avoids
6
+ closed neighborhood of the third. More formally, v_j, v_k belongs to the same
7
+ connected component of G - N[v_i], where N[v_i] denotes the closed neighborhood
8
+ of v_i. A graph which does not contain any asteroidal triples is called
9
+ an AT-free graph. The class of AT-free graphs is a graph class for which
10
+ many NP-complete problems are solvable in polynomial time. Amongst them,
11
+ independent set and coloring.
12
+ """
13
+
14
+ import networkx as nx
15
+ from networkx.utils import not_implemented_for
16
+
17
+ __all__ = ["is_at_free", "find_asteroidal_triple"]
18
+
19
+
20
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def find_asteroidal_triple(G):
    r"""Find an asteroidal triple in the given graph.

    An asteroidal triple is a triple of non-adjacent vertices such that
    there exists a path between any two of them which avoids the closed
    neighborhood of the third. It checks all independent triples of vertices
    and whether they are an asteroidal triple or not. This is done with the
    help of a data structure called a component structure.
    A component structure encodes information about which vertices belongs to
    the same connected component when the closed neighborhood of a given vertex
    is removed from the graph. The algorithm used to check is the trivial
    one, outlined in [1]_, which has a runtime of
    :math:`O(|V||\overline{E}| + |V||E|)`, where the second term is the
    creation of the component structure.

    Parameters
    ----------
    G : NetworkX Graph
        The graph to check whether is AT-free or not

    Returns
    -------
    list or None
        An asteroidal triple is returned as a list of nodes. If no asteroidal
        triple exists, i.e. the graph is AT-free, then None is returned.

    Notes
    -----
    The component structure and the algorithm is described in [1]_. The current
    implementation implements the trivial algorithm for simple graphs.

    References
    ----------
    .. [1] Ekkehard Köhler,
       "Recognizing Graphs without asteroidal triples",
       Journal of Discrete Algorithms 2, pages 439-452, 2004.
       https://www.sciencedirect.com/science/article/pii/S157086670400019X
    """
    V = set(G.nodes)

    if len(V) < 6:
        # An asteroidal triple cannot exist in a graph with 5 or less vertices.
        return None

    component_structure = create_component_structure(G)
    # Candidate pairs are exactly the non-adjacent pairs, i.e. the edges
    # of the complement graph.
    E_complement = set(nx.complement(G).edges)

    for u, v in E_complement:
        u_neighborhood = set(G[u]).union([u])
        v_neighborhood = set(G[v]).union([v])
        union_of_neighborhoods = u_neighborhood.union(v_neighborhood)
        # Only vertices non-adjacent to both u and v can complete the triple.
        for w in V - union_of_neighborhoods:
            # Check for each pair of vertices whether they belong to the
            # same connected component when the closed neighborhood of the
            # third is removed.
            if (
                component_structure[u][v] == component_structure[u][w]
                and component_structure[v][u] == component_structure[v][w]
                and component_structure[w][u] == component_structure[w][v]
            ):
                return [u, v, w]
    return None
91
+
92
+
93
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def is_at_free(G):
    """Check if a graph is AT-free.

    A graph is AT-free when it contains no asteroidal triple. This
    predicate simply delegates to `find_asteroidal_triple`: if that
    search comes back empty the graph is AT-free.

    Parameters
    ----------
    G : NetworkX Graph
        The graph to check whether is AT-free or not.

    Returns
    -------
    bool
        True if G is AT-free and False otherwise.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
    >>> nx.is_at_free(G)
    True

    >>> G = nx.cycle_graph(6)
    >>> nx.is_at_free(G)
    False
    """
    # No triple found <=> AT-free.
    return find_asteroidal_triple(G) is None
125
+
126
+
127
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def create_component_structure(G):
    r"""Create component structure for G.

    A *component structure* is an `nxn` array, denoted `c`, where `n` is
    the number of vertices, where each row and column corresponds to a vertex.

    .. math::
        c_{uv} = \begin{cases} 0, if v \in N[u] \\
            k, if v \in component k of G \setminus N[u] \end{cases}

    Where `k` is an arbitrary label for each component. The structure is used
    to simplify the detection of asteroidal triples.

    Parameters
    ----------
    G : NetworkX Graph
        Undirected, simple graph.

    Returns
    -------
    component_structure : dictionary
        A dictionary of dictionaries, keyed by pairs of vertices.

    """
    all_nodes = set(G.nodes)
    component_structure = {}
    for v in all_nodes:
        closed_neighborhood = set(G[v]).union({v})
        # Every vertex inside N[v] gets label 0.
        row = dict.fromkeys(closed_neighborhood, 0)
        # Each connected component of G - N[v] receives its own
        # positive label, numbered from 1 upward.
        reduced = G.subgraph(all_nodes - closed_neighborhood)
        for label, component in enumerate(nx.connected_components(reduced), start=1):
            for u in component:
                row[u] = label
        component_structure[v] = row
    return component_structure
minigpt2/lib/python3.10/site-packages/networkx/algorithms/boundary.py ADDED
@@ -0,0 +1,168 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Routines to find the boundary of a set of nodes.
2
+
3
+ An edge boundary is a set of edges, each of which has exactly one
4
+ endpoint in a given set of nodes (or, in the case of directed graphs,
5
+ the set of edges whose source node is in the set).
6
+
7
+ A node boundary of a set *S* of nodes is the set of (out-)neighbors of
8
+ nodes in *S* that are outside *S*.
9
+
10
+ """
11
+
12
+ from itertools import chain
13
+
14
+ import networkx as nx
15
+
16
+ __all__ = ["edge_boundary", "node_boundary"]
17
+
18
+
19
@nx._dispatchable(edge_attrs={"data": "default"}, preserve_edge_attrs="data")
def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, default=None):
    """Returns the edge boundary of `nbunch1`.

    The *edge boundary* of a set *S* with respect to a set *T* is the
    set of edges (*u*, *v*) such that *u* is in *S* and *v* is in *T*.
    If *T* is not specified, it is assumed to be the set of all nodes
    not in *S*.

    Parameters
    ----------
    G : NetworkX graph

    nbunch1 : iterable
        Iterable of nodes in the graph representing the set of nodes
        whose edge boundary will be returned. (This is the set *S* from
        the definition above.)

    nbunch2 : iterable
        Iterable of nodes representing the target (or "exterior") set of
        nodes. (This is the set *T* from the definition above.) If not
        specified, this is assumed to be the set of all nodes in `G`
        not in `nbunch1`.

    keys : bool
        This parameter has the same meaning as in
        :meth:`MultiGraph.edges`.

    data : bool or object
        This parameter has the same meaning as in
        :meth:`MultiGraph.edges`.

    default : object
        This parameter has the same meaning as in
        :meth:`MultiGraph.edges`.

    Returns
    -------
    iterator
        An iterator over the edges in the boundary of `nbunch1` with
        respect to `nbunch2`. If `keys`, `data`, or `default`
        are specified and `G` is a multigraph, then edges are returned
        with keys and/or data, as in :meth:`MultiGraph.edges`.

    Examples
    --------
    >>> G = nx.wheel_graph(6)

    When nbunch2=None:

    >>> list(nx.edge_boundary(G, (1, 3)))
    [(1, 0), (1, 2), (1, 5), (3, 0), (3, 2), (3, 4)]

    When nbunch2 is given:

    >>> list(nx.edge_boundary(G, (1, 3), (2, 0)))
    [(1, 0), (1, 2), (3, 0), (3, 2)]

    Notes
    -----
    Any element of `nbunch` that is not in the graph `G` will be
    ignored.

    `nbunch1` and `nbunch2` are usually meant to be disjoint, but in
    the interest of speed and generality, that is not required here.

    """
    interior = {n for n in nbunch1 if n in G}
    # Edges incident to nodes in `interior`. `Graph.edges()` makes no
    # guarantee about edge orientation, so the filters below must accept
    # an edge whichever of its two endpoints appears first.
    if G.is_multigraph():
        candidates = G.edges(interior, data=data, keys=keys, default=default)
    else:
        candidates = G.edges(interior, data=data, default=default)
    if nbunch2 is None:
        # The exterior defaults to the complement of `interior`; testing
        # "exactly one endpoint is interior" avoids materializing that
        # complement as a set.
        return (e for e in candidates if (e[0] in interior) != (e[1] in interior))
    exterior = set(nbunch2)
    return (
        e
        for e in candidates
        if (e[0] in interior and e[1] in exterior)
        or (e[0] in exterior and e[1] in interior)
    )
108
+
109
+
110
@nx._dispatchable
def node_boundary(G, nbunch1, nbunch2=None):
    """Returns the node boundary of `nbunch1`.

    The *node boundary* of a set *S* with respect to a set *T* is the
    set of nodes *v* in *T* such that for some *u* in *S*, there is an
    edge joining *u* to *v*. If *T* is not specified, it is assumed to
    be the set of all nodes not in *S*.

    Parameters
    ----------
    G : NetworkX graph

    nbunch1 : iterable
        Iterable of nodes in the graph representing the set of nodes
        whose node boundary will be returned. (This is the set *S* from
        the definition above.)

    nbunch2 : iterable
        Iterable of nodes representing the target (or "exterior") set of
        nodes. (This is the set *T* from the definition above.) If not
        specified, this is assumed to be the set of all nodes in `G`
        not in `nbunch1`.

    Returns
    -------
    set
        The node boundary of `nbunch1` with respect to `nbunch2`.

    Examples
    --------
    >>> G = nx.wheel_graph(6)

    When nbunch2=None:

    >>> list(nx.node_boundary(G, (3, 4)))
    [0, 2, 5]

    When nbunch2 is given:

    >>> list(nx.node_boundary(G, (3, 4), (0, 1, 5)))
    [0, 5]

    Notes
    -----
    Any element of `nbunch` that is not in the graph `G` will be
    ignored.

    `nbunch1` and `nbunch2` are usually meant to be disjoint, but in
    the interest of speed and generality, that is not required here.

    """
    interior = {n for n in nbunch1 if n in G}
    # All neighbors of interior nodes, minus the interior itself.
    neighborhood = set(chain.from_iterable(G[v] for v in interior))
    boundary = neighborhood - interior
    # When `nbunch2` is omitted, the exterior is implicitly the
    # complement of `interior`, so no further filtering is needed.
    if nbunch2 is not None:
        boundary &= set(nbunch2)
    return boundary
minigpt2/lib/python3.10/site-packages/networkx/algorithms/cluster.py ADDED
@@ -0,0 +1,609 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Algorithms to characterize the number of triangles in a graph."""
2
+
3
+ from collections import Counter
4
+ from itertools import chain, combinations
5
+
6
+ import networkx as nx
7
+ from networkx.utils import not_implemented_for
8
+
9
+ __all__ = [
10
+ "triangles",
11
+ "average_clustering",
12
+ "clustering",
13
+ "transitivity",
14
+ "square_clustering",
15
+ "generalized_degree",
16
+ ]
17
+
18
+
19
@not_implemented_for("directed")
@nx._dispatchable
def triangles(G, nodes=None):
    """Compute the number of triangles.

    Finds the number of triangles that include a node as one vertex.

    Parameters
    ----------
    G : graph
        A networkx graph

    nodes : node, iterable of nodes, or None (default=None)
        If a singleton node, return the number of triangles for that node.
        If an iterable, compute the number of triangles for each of those nodes.
        If `None` (the default) compute the number of triangles for all nodes in `G`.

    Returns
    -------
    out : dict or int
        If `nodes` is a container of nodes, returns number of triangles keyed by node (dict).
        If `nodes` is a specific node, returns number of triangles for the node (int).

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.triangles(G, 0))
    6
    >>> print(nx.triangles(G))
    {0: 6, 1: 6, 2: 6, 3: 6, 4: 6}
    >>> print(list(nx.triangles(G, [0, 1]).values()))
    [6, 6]

    Notes
    -----
    Self loops are ignored.

    """
    if nodes is not None:
        # Singleton node: the iterator double counts, so halve its value.
        if nodes in G:
            return next(_triangles_and_degree_iter(G, nodes))[2] // 2
        # Container of nodes: map each node to its (halved) triangle count.
        return {v: t // 2 for v, d, t, _ in _triangles_and_degree_iter(G, nodes)}

    # Whole-graph case: restrict each node to neighbors that appear later
    # in iteration order so every triangle is visited exactly once.
    later_nbrs = {}
    for node, neighbors in G.adjacency():
        later_nbrs[node] = {n for n in neighbors if n not in later_nbrs and n != node}

    # Seed the counter with every node so isolated nodes report 0.
    counts = Counter(dict.fromkeys(G, 0))
    for first, first_later in later_nbrs.items():
        for second in first_later:
            # Common later-neighbors close a triangle with (first, second).
            thirds = first_later & later_nbrs[second]
            found = len(thirds)
            counts[first] += found
            counts[second] += found
            counts.update(thirds)

    return dict(counts)
88
+
89
+
90
@not_implemented_for("multigraph")
def _triangles_and_degree_iter(G, nodes=None):
    """Return an iterator of (node, degree, triangles, generalized degree).

    Triangles are double counted here, so callers may want to divide by 2.
    See degree(), triangles() and generalized_degree() for definitions
    and details.

    """
    if nodes is None:
        node_nbr_pairs = G.adj.items()
    else:
        node_nbr_pairs = ((n, G[n]) for n in G.nbunch_iter(nodes))

    for node, nbr_dict in node_nbr_pairs:
        # Drop a possible self loop before counting.
        nbrs = set(nbr_dict) - {node}
        # For each neighbor w, count how many of node's neighbors are
        # also neighbors of w; the histogram is the generalized degree.
        gen_degree = Counter(len(nbrs & (set(G[w]) - {w})) for w in nbrs)
        # Summing k * multiplicity gives twice the triangle count.
        twice_triangles = sum(k * mult for k, mult in gen_degree.items())
        yield (node, len(nbrs), twice_triangles, gen_degree)
109
+
110
+
111
@not_implemented_for("multigraph")
def _weighted_triangles_and_degree_iter(G, nodes=None, weight="weight"):
    """Return an iterator of (node, degree, weighted_triangles).

    Used for weighted clustering.
    Note: this returns the geometric average weight of edges in the triangle.
    Also, each triangle is counted twice (each direction).
    So you may want to divide by 2.

    """
    import numpy as np

    # Normalize edge weights by the maximum weight in the graph.
    if weight is None or G.number_of_edges() == 0:
        max_weight = 1
    else:
        max_weight = max(d.get(weight, 1) for u, v, d in G.edges(data=True))

    if nodes is None:
        node_nbr_pairs = G.adj.items()
    else:
        node_nbr_pairs = ((n, G[n]) for n in G.nbunch_iter(nodes))

    def wt(u, v):
        return G[u][v].get(weight, 1) / max_weight

    for i, nbr_dict in node_nbr_pairs:
        inbrs = set(nbr_dict) - {i}
        total = 0
        done = set()
        for j in inbrs:
            # Mark j as handled so each unordered pair is visited once;
            # the result is doubled below to compensate.
            done.add(j)
            jnbrs = set(G[j]) - done
            # Hoist the (i, j) edge weight out of the innermost loop.
            wij = wt(i, j)
            total += np.cbrt(
                [(wij * wt(j, k) * wt(k, i)) for k in inbrs & jnbrs]
            ).sum()
        yield (i, len(inbrs), 2 * float(total))
150
+
151
+
152
@not_implemented_for("multigraph")
def _directed_triangles_and_degree_iter(G, nodes=None):
    """Return an iterator of
    (node, total_degree, reciprocal_degree, directed_triangles).

    Used for directed clustering.
    Note that unlike `_triangles_and_degree_iter()`, this function counts
    directed triangles so does not count triangles twice.

    """
    node_pred_succ = ((n, G._pred[n], G._succ[n]) for n in G.nbunch_iter(nodes))

    for i, preds, succs in node_pred_succ:
        # Exclude self loops from both direction sets.
        ipreds = set(preds) - {i}
        isuccs = set(succs) - {i}

        num_triangles = 0
        for j in chain(ipreds, isuccs):
            jpreds = set(G._pred[j]) - {j}
            jsuccs = set(G._succ[j]) - {j}
            # Every common neighbor of i and j, in any of the four
            # direction combinations, closes one directed triangle.
            num_triangles += (
                len(ipreds & jpreds)
                + len(ipreds & jsuccs)
                + len(isuccs & jpreds)
                + len(isuccs & jsuccs)
            )
        total_degree = len(ipreds) + len(isuccs)
        reciprocal_degree = len(ipreds & isuccs)
        yield (i, total_degree, reciprocal_degree, num_triangles)
184
+
185
+
186
@not_implemented_for("multigraph")
def _directed_weighted_triangles_and_degree_iter(G, nodes=None, weight="weight"):
    """Return an iterator of
    (node, total_degree, reciprocal_degree, directed_weighted_triangles).

    Used for directed weighted clustering.
    Note that unlike `_weighted_triangles_and_degree_iter()`, this function counts
    directed triangles so does not count triangles twice.

    """
    import numpy as np

    # Edge weights are normalized by the maximum weight in the graph;
    # with no weight key (or an empty graph) every edge counts as 1.
    if weight is None or G.number_of_edges() == 0:
        max_weight = 1
    else:
        max_weight = max(d.get(weight, 1) for u, v, d in G.edges(data=True))

    nodes_nbrs = ((n, G._pred[n], G._succ[n]) for n in G.nbunch_iter(nodes))

    def wt(u, v):
        # Normalized weight of the directed edge (u, v); defaults to 1.
        return G[u][v].get(weight, 1) / max_weight

    for i, preds, succs in nodes_nbrs:
        # Exclude self loops from both direction sets.
        ipreds = set(preds) - {i}
        isuccs = set(succs) - {i}

        # Accumulate the geometric mean (cube root of the product) of the
        # three edge weights for every directed triangle through i. The
        # eight terms below enumerate the direction combinations of the
        # second and third edges; j ranges over predecessors first, then
        # successors, of i.
        directed_triangles = 0
        for j in ipreds:
            jpreds = set(G._pred[j]) - {j}
            jsuccs = set(G._succ[j]) - {j}
            directed_triangles += np.cbrt(
                [(wt(j, i) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(j, i) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(j, i) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(j, i) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs]
            ).sum()

        for j in isuccs:
            jpreds = set(G._pred[j]) - {j}
            jsuccs = set(G._succ[j]) - {j}
            directed_triangles += np.cbrt(
                [(wt(i, j) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(i, j) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(i, j) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(i, j) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs]
            ).sum()

        # Total degree counts both directions; reciprocal degree counts
        # mutually connected neighbors.
        dtotal = len(ipreds) + len(isuccs)
        dbidirectional = len(ipreds & isuccs)
        yield (i, dtotal, dbidirectional, float(directed_triangles))
248
+
249
+
250
@nx._dispatchable(edge_attrs="weight")
def average_clustering(G, nodes=None, weight=None, count_zeros=True):
    r"""Compute the average clustering coefficient for the graph G.

    The clustering coefficient for the graph is the average,

    .. math::

       C = \frac{1}{n}\sum_{v \in G} c_v,

    where :math:`n` is the number of nodes in `G`.

    Parameters
    ----------
    G : graph

    nodes : container of nodes, optional (default=all nodes in G)
        Compute average clustering for nodes in this container.

    weight : string or None, optional (default=None)
        The edge attribute that holds the numerical value used as a weight.
        If None, then each edge has weight 1.

    count_zeros : bool
        If False include only the nodes with nonzero clustering in the average.

    Returns
    -------
    avg : float
        Average clustering

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.average_clustering(G))
    1.0

    Notes
    -----
    This is a space saving routine; it might be faster
    to use the clustering function to get a list and then take the average.

    Self loops are ignored.

    References
    ----------
    .. [1] Generalizations of the clustering coefficient to weighted
       complex networks by J. Saramäki, M. Kivelä, J.-P. Onnela,
       K. Kaski, and J. Kertész, Physical Review E, 75 027105 (2007).
       http://jponnela.com/web_documents/a9.pdf
    .. [2] Marcus Kaiser, Mean clustering coefficients: the role of isolated
       nodes and leafs on clustering measures for small-world networks.
       https://arxiv.org/abs/0802.2512
    """
    coefficients = clustering(G, nodes, weight=weight).values()
    if not count_zeros:
        # Drop nodes whose clustering coefficient is exactly zero before
        # averaging.
        coefficients = [value for value in coefficients if abs(value) > 0]
    return sum(coefficients) / len(coefficients)
308
+
309
+
310
@nx._dispatchable(edge_attrs="weight")
def clustering(G, nodes=None, weight=None):
    r"""Compute the clustering coefficient for nodes.

    For an unweighted graph the clustering of node :math:`u` is the
    fraction of possible triangles through that node that exist,

    .. math::

      c_u = \frac{2 T(u)}{deg(u)(deg(u)-1)},

    where :math:`T(u)` is the number of triangles through node :math:`u`
    and :math:`deg(u)` is the degree of :math:`u`.

    For weighted graphs the triangle count is replaced by the geometric
    average of the subgraph edge weights [2]_, with each weight normalized
    by the maximum weight in the network, :math:`\hat{w}_{uv} = w_{uv}/\max(w)`.
    This weighted definition has also been generalized to support negative
    edge weights [3]_.

    For directed graphs the clustering is the fraction of all possible
    directed triangles (or the geometric average of subgraph edge weights
    in the weighted case) [4]_,

    .. math::

       c_u = \frac{T(u)}{2(deg^{tot}(u)(deg^{tot}(u)-1) - 2deg^{\leftrightarrow}(u))},

    where :math:`deg^{tot}(u)` is the sum of in and out degree of :math:`u`
    and :math:`deg^{\leftrightarrow}(u)` is its reciprocal degree.

    The value of :math:`c_u` is assigned to 0 if :math:`deg(u) < 2`.

    Parameters
    ----------
    G : graph

    nodes : node, iterable of nodes, or None (default=None)
        If a singleton node, return the clustering of that node.
        If an iterable, compute the clustering for each of those nodes.
        If `None` (the default) compute the clustering for all nodes in `G`.

    weight : string or None, optional (default=None)
        The edge attribute that holds the numerical value used as a weight.
        If None, then each edge has weight 1.

    Returns
    -------
    out : float, or dictionary
        Clustering coefficient at specified nodes

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.clustering(G, 0))
    1.0
    >>> print(nx.clustering(G))
    {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}

    Notes
    -----
    Self loops are ignored.

    References
    ----------
    .. [1] Generalizations of the clustering coefficient to weighted
       complex networks by J. Saramäki, M. Kivelä, J.-P. Onnela,
       K. Kaski, and J. Kertész, Physical Review E, 75 027105 (2007).
       http://jponnela.com/web_documents/a9.pdf
    .. [2] Intensity and coherence of motifs in weighted complex
       networks by J. P. Onnela, J. Saramäki, J. Kertész, and K. Kaski,
       Physical Review E, 71(6), 065103 (2005).
    .. [3] Generalization of Clustering Coefficients to Signed Correlation Networks
       by G. Costantini and M. Perugini, PloS one, 9(2), e88669 (2014).
    .. [4] Clustering in complex directed networks by G. Fagiolo,
       Physical Review E, 76(2), 026107 (2007).
    """
    if G.is_directed():
        # Fagiolo's directed clustering: t is the directed-triangle count,
        # dt the total (in + out) degree, db the reciprocal degree.
        if weight is None:
            triangle_data = _directed_triangles_and_degree_iter(G, nodes)
        else:
            triangle_data = _directed_weighted_triangles_and_degree_iter(
                G, nodes, weight
            )
        clusterc = {
            node: t / ((dt * (dt - 1) - 2 * db) * 2) if t != 0 else 0
            for node, dt, db, t in triangle_data
        }
    else:
        # Undirected case: the iterators report t == 2*T, so the textbook
        # formula 2*T/(d*(d-1)) reduces to t/(d*(d-1)) here.
        if weight is None:
            clusterc = {
                node: t / (d * (d - 1)) if t != 0 else 0
                for node, d, t, _ in _triangles_and_degree_iter(G, nodes)
            }
        else:
            clusterc = {
                node: t / (d * (d - 1)) if t != 0 else 0
                for node, d, t in _weighted_triangles_and_degree_iter(G, nodes, weight)
            }
    if nodes in G:
        # A single node was requested; unwrap the sole dictionary entry.
        return clusterc[nodes]
    return clusterc
423
+
424
+
425
@nx._dispatchable
def transitivity(G):
    r"""Compute graph transitivity, the fraction of all possible triangles
    present in G.

    Possible triangles are identified by the number of "triads"
    (two edges with a shared vertex).

    The transitivity is

    .. math::

        T = 3\frac{\#triangles}{\#triads}.

    Parameters
    ----------
    G : graph

    Returns
    -------
    out : float
        Transitivity

    Notes
    -----
    Self loops are ignored.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.transitivity(G))
    1.0
    """
    triangles = 0
    triads = 0
    # Each node contributes t (twice its triangle count) and d*(d-1) (twice
    # its triad count); the factors of two cancel in the final ratio.
    for _, degree, t, _ in _triangles_and_degree_iter(G):
        triangles += t
        triads += degree * (degree - 1)
    if triangles == 0:
        # Covers both the empty graph and triangle-free graphs.
        return 0
    return triangles / triads
466
+
467
+
468
@nx._dispatchable
def square_clustering(G, nodes=None):
    r"""Compute the squares clustering coefficient for nodes.

    For each node return the fraction of possible squares that exist at
    the node [1]_

    .. math::
       C_4(v) = \frac{ \sum_{u=1}^{k_v}
       \sum_{w=u+1}^{k_v} q_v(u,w) }{ \sum_{u=1}^{k_v}
       \sum_{w=u+1}^{k_v} [a_v(u,w) + q_v(u,w)]},

    where :math:`q_v(u,w)` are the number of common neighbors of :math:`u` and
    :math:`w` other than :math:`v` (ie squares), and :math:`a_v(u,w) = (k_u -
    (1+q_v(u,w)+\theta_{uv})) + (k_w - (1+q_v(u,w)+\theta_{uw}))`, where
    :math:`\theta_{uw} = 1` if :math:`u` and :math:`w` are connected and 0
    otherwise. [2]_

    Parameters
    ----------
    G : graph

    nodes : container of nodes, optional (default=all nodes in G)
        Compute clustering for nodes in this container.

    Returns
    -------
    c4 : dictionary
        A dictionary keyed by node with the square clustering coefficient value.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.square_clustering(G, 0))
    1.0
    >>> print(nx.square_clustering(G))
    {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}

    Notes
    -----
    While :math:`C_3(v)` (triangle clustering) gives the probability that
    two neighbors of node v are connected with each other, :math:`C_4(v)` is
    the probability that two neighbors of node v share a common
    neighbor different from v. This algorithm can be applied to both
    bipartite and unipartite networks.

    References
    ----------
    .. [1] Pedro G. Lind, Marta C. González, and Hans J. Herrmann. 2005
       Cycles and clustering in bipartite networks.
       Physical Review E (72) 056127.
    .. [2] Zhang, Peng et al. Clustering Coefficient and Community Structure of
       Bipartite Networks. Physica A: Statistical Mechanics and its Applications 387.27 (2008): 6869–6875.
       https://arxiv.org/abs/0710.0117v1
    """
    node_iter = G if nodes is None else G.nbunch_iter(nodes)
    clustering = {}
    for v in node_iter:
        square_count = 0
        potential = 0
        # Examine every unordered pair of v's neighbors.
        for u, w in combinations(G[v], 2):
            # Common neighbors of u and w other than v each close a square.
            q = len((set(G[u]) & set(G[w])) - {v})
            square_count += q
            # Degree adjustment: subtract v itself, the shared square
            # partners, and (if present) the u-w edge.
            adjust = q + 1 + (1 if w in G[u] else 0)
            potential += (len(G[u]) - adjust) + (len(G[w]) - adjust) + q
        clustering[v] = square_count / potential if potential > 0 else 0
    if nodes in G:
        # Return the value of the sole entry in the dictionary.
        return clustering[nodes]
    return clustering
544
+
545
+
546
@not_implemented_for("directed")
@nx._dispatchable
def generalized_degree(G, nodes=None):
    r"""Compute the generalized degree for nodes.

    For each node, the generalized degree shows how many edges of given
    triangle multiplicity the node is connected to. The triangle multiplicity
    of an edge is the number of triangles an edge participates in. The
    generalized degree of node :math:`i` can be written as a vector
    :math:`\mathbf{k}_i=(k_i^{(0)}, \dotsc, k_i^{(N-2)})` where
    :math:`k_i^{(j)}` is the number of edges attached to node :math:`i` that
    participate in :math:`j` triangles.

    Parameters
    ----------
    G : graph

    nodes : container of nodes, optional (default=all nodes in G)
        Compute the generalized degree for nodes in this container.

    Returns
    -------
    out : Counter, or dictionary of Counters
        Generalized degree of specified nodes. The Counter is keyed by edge
        triangle multiplicity.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.generalized_degree(G, 0))
    Counter({3: 4})
    >>> print(nx.generalized_degree(G))
    {0: Counter({3: 4}), 1: Counter({3: 4}), 2: Counter({3: 4}), 3: Counter({3: 4}), 4: Counter({3: 4})}

    To recover the number of triangles attached to a node:

    >>> k1 = nx.generalized_degree(G, 0)
    >>> sum([k * v for k, v in k1.items()]) / 2 == nx.triangles(G, 0)
    True

    Notes
    -----
    Self loops are ignored.

    In a network of N nodes, the highest triangle multiplicity an edge can have
    is N-2.

    The return value does not include a `zero` entry if no edges of a
    particular triangle multiplicity are present.

    The number of triangles node :math:`i` is attached to can be recovered from
    the generalized degree :math:`\mathbf{k}_i=(k_i^{(0)}, \dotsc,
    k_i^{(N-2)})` by :math:`(k_i^{(1)}+2k_i^{(2)}+\dotsc +(N-2)k_i^{(N-2)})/2`.

    References
    ----------
    .. [1] Networks with arbitrary edge multiplicities by V. Zlatić,
        D. Garlaschelli and G. Caldarelli, EPL (Europhysics Letters),
        Volume 97, Number 2 (2012).
        https://iopscience.iop.org/article/10.1209/0295-5075/97/28005
    """
    if nodes in G:
        # Single-node request: the generalized-degree Counter is the fourth
        # item yielded by the triangle iterator.
        _, _, _, gen_deg = next(_triangles_and_degree_iter(G, nodes))
        return gen_deg
    return {
        node: gen_deg
        for node, _, _, gen_deg in _triangles_and_degree_iter(G, nodes)
    }
minigpt2/lib/python3.10/site-packages/networkx/algorithms/communicability_alg.py ADDED
@@ -0,0 +1,163 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Communicability.
3
+ """
4
+
5
+ import networkx as nx
6
+ from networkx.utils import not_implemented_for
7
+
8
+ __all__ = ["communicability", "communicability_exp"]
9
+
10
+
11
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def communicability(G):
    r"""Returns communicability between all pairs of nodes in G.

    The communicability between pairs of nodes in G is the sum of
    walks of different lengths starting at node u and ending at node v.

    Parameters
    ----------
    G: graph

    Returns
    -------
    comm: dictionary of dictionaries
        Dictionary of dictionaries keyed by nodes with communicability
        as the value.

    Raises
    ------
    NetworkXError
       If the graph is not undirected and simple.

    See Also
    --------
    communicability_exp:
       Communicability between all pairs of nodes in G using spectral
       decomposition.
    communicability_betweenness_centrality:
       Communicability betweenness centrality for each node in G.

    Notes
    -----
    This algorithm uses a spectral decomposition of the adjacency matrix.
    Let G=(V,E) be a simple undirected graph.  Using the connection between
    the powers  of the adjacency matrix and the number of walks in the graph,
    the communicability  between nodes `u` and `v` based on the graph spectrum
    is [1]_

    .. math::
        C(u,v)=\sum_{j=1}^{n}\phi_{j}(u)\phi_{j}(v)e^{\lambda_{j}},

    where `\phi_{j}(u)` is the `u\rm{th}` element of the `j\rm{th}` orthonormal
    eigenvector of the adjacency matrix associated with the eigenvalue
    `\lambda_{j}`.

    References
    ----------
    .. [1] Ernesto Estrada, Naomichi Hatano,
       "Communicability in complex networks",
       Phys. Rev. E 77, 036111 (2008).
       https://arxiv.org/abs/0707.0756

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
    >>> c = nx.communicability(G)
    """
    import numpy as np

    nodelist = list(G)  # ordering of nodes in matrix
    A = nx.to_numpy_array(G, nodelist)
    # convert to 0-1 matrix
    A[A != 0.0] = 1
    w, vec = np.linalg.eigh(A)
    expw = np.exp(w)
    # Spectral reconstruction of expm(A): sum_j e^{lambda_j} phi_j phi_j^T.
    # One broadcasted scale plus a matrix product replaces the former
    # per-pair Python loop over all eigenvectors (O(n^4) -> O(n^3)).
    expA = (vec * expw) @ vec.T
    mapping = dict(zip(nodelist, range(len(nodelist))))
    c = {}
    for u in G:
        c[u] = {}
        for v in G:
            c[u][v] = float(expA[mapping[u], mapping[v]])
    return c
91
+
92
+
93
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def communicability_exp(G):
    r"""Returns communicability between all pairs of nodes in G.

    Communicability between pair of node (u,v) of node in G is the sum of
    walks of different lengths starting at node u and ending at node v.

    Parameters
    ----------
    G: graph

    Returns
    -------
    comm: dictionary of dictionaries
        Dictionary of dictionaries keyed by nodes with communicability
        as the value.

    Raises
    ------
    NetworkXError
        If the graph is not undirected and simple.

    See Also
    --------
    communicability:
       Communicability between pairs of nodes in G.
    communicability_betweenness_centrality:
       Communicability betweenness centrality for each node in G.

    Notes
    -----
    This algorithm uses matrix exponentiation of the adjacency matrix.

    Let G=(V,E) be a simple undirected graph.  Using the connection between
    the powers of the adjacency matrix and the number of walks in the graph,
    the communicability between nodes u and v is [1]_,

    .. math::
        C(u,v) = (e^A)_{uv},

    where `A` is the adjacency matrix of G.

    References
    ----------
    .. [1] Ernesto Estrada, Naomichi Hatano,
       "Communicability in complex networks",
       Phys. Rev. E 77, 036111 (2008).
       https://arxiv.org/abs/0707.0756

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
    >>> c = nx.communicability_exp(G)
    """
    import scipy as sp

    nodelist = list(G)  # fixes the row/column ordering of the matrix
    adjacency = nx.to_numpy_array(G, nodelist)
    # Weights are ignored: reduce to a 0-1 adjacency matrix.
    adjacency[adjacency != 0.0] = 1
    # The communicability matrix is the matrix exponential of the adjacency.
    comm_matrix = sp.linalg.expm(adjacency)
    index = {node: position for position, node in enumerate(nodelist)}
    return {
        u: {v: float(comm_matrix[index[u], index[v]]) for v in G} for u in G
    }
minigpt2/lib/python3.10/site-packages/networkx/algorithms/efficiency_measures.py ADDED
@@ -0,0 +1,167 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Provides functions for computing the efficiency of nodes and graphs."""
2
+
3
+ import networkx as nx
4
+ from networkx.exception import NetworkXNoPath
5
+
6
+ from ..utils import not_implemented_for
7
+
8
+ __all__ = ["efficiency", "local_efficiency", "global_efficiency"]
9
+
10
+
11
@not_implemented_for("directed")
@nx._dispatchable
def efficiency(G, u, v):
    """Returns the efficiency of a pair of nodes in a graph.

    The *efficiency* of a pair of nodes is the multiplicative inverse of the
    shortest path distance between the nodes [1]_. Returns 0 if no path
    between nodes.

    Parameters
    ----------
    G : :class:`networkx.Graph`
        An undirected graph for which to compute the average local efficiency.
    u, v : node
        Nodes in the graph ``G``.

    Returns
    -------
    float
        Multiplicative inverse of the shortest path distance between the nodes.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.efficiency(G, 2, 3)  # this gives efficiency for node 2 and 3
    0.5

    Notes
    -----
    Edge weights are ignored when computing the shortest path distances.

    See also
    --------
    local_efficiency
    global_efficiency

    References
    ----------
    .. [1] Latora, Vito, and Massimo Marchiori.
           "Efficient behavior of small-world networks."
           *Physical Review Letters* 87.19 (2001): 198701.
           <https://doi.org/10.1103/PhysRevLett.87.198701>

    """
    try:
        distance = nx.shortest_path_length(G, u, v)
    except NetworkXNoPath:
        # Disconnected pair: efficiency is defined to be zero.
        return 0
    return 1 / distance
60
+
61
+
62
@not_implemented_for("directed")
@nx._dispatchable
def global_efficiency(G):
    """Returns the average global efficiency of the graph.

    The *efficiency* of a pair of nodes in a graph is the multiplicative
    inverse of the shortest path distance between the nodes. The *average
    global efficiency* of a graph is the average efficiency of all pairs of
    nodes [1]_.

    Parameters
    ----------
    G : :class:`networkx.Graph`
        An undirected graph for which to compute the average global efficiency.

    Returns
    -------
    float
        The average global efficiency of the graph.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> round(nx.global_efficiency(G), 12)
    0.916666666667

    Notes
    -----
    Edge weights are ignored when computing the shortest path distances.

    See also
    --------
    local_efficiency

    References
    ----------
    .. [1] Latora, Vito, and Massimo Marchiori.
           "Efficient behavior of small-world networks."
           *Physical Review Letters* 87.19 (2001): 198701.
           <https://doi.org/10.1103/PhysRevLett.87.198701>

    """
    n = len(G)
    possible_pairs = n * (n - 1)
    if possible_pairs == 0:
        # Fewer than two nodes: no pairs to average over.
        return 0
    # Sum the inverse shortest-path distances over all ordered pairs;
    # zero-distance entries (a node to itself) are excluded.
    inverse_distance_sum = sum(
        1 / distance
        for _, distances in nx.all_pairs_shortest_path_length(G)
        for distance in distances.values()
        if distance > 0
    )
    # TODO This can be made more efficient by computing all pairs shortest
    # path lengths in parallel.
    return inverse_distance_sum / possible_pairs
121
+
122
+
123
@not_implemented_for("directed")
@nx._dispatchable
def local_efficiency(G):
    """Returns the average local efficiency of the graph.

    The *efficiency* of a pair of nodes in a graph is the multiplicative
    inverse of the shortest path distance between the nodes. The *local
    efficiency* of a node in the graph is the average global efficiency of the
    subgraph induced by the neighbors of the node. The *average local
    efficiency* is the average of the local efficiencies of each node [1]_.

    Parameters
    ----------
    G : :class:`networkx.Graph`
        An undirected graph for which to compute the average local efficiency.

    Returns
    -------
    float
        The average local efficiency of the graph.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.local_efficiency(G)
    0.9166666666666667

    Notes
    -----
    Edge weights are ignored when computing the shortest path distances.

    See also
    --------
    global_efficiency

    References
    ----------
    .. [1] Latora, Vito, and Massimo Marchiori.
           "Efficient behavior of small-world networks."
           *Physical Review Letters* 87.19 (2001): 198701.
           <https://doi.org/10.1103/PhysRevLett.87.198701>

    """
    # Average, over all nodes, of the global efficiency of the subgraph
    # induced by each node's neighborhood.
    total = 0
    for node in G:
        total += global_efficiency(G.subgraph(G[node]))
    return total / len(G)
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__init__.py ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ from networkx.algorithms.isomorphism.isomorph import *
2
+ from networkx.algorithms.isomorphism.vf2userfunc import *
3
+ from networkx.algorithms.isomorphism.matchhelpers import *
4
+ from networkx.algorithms.isomorphism.temporalisomorphvf2 import *
5
+ from networkx.algorithms.isomorphism.ismags import *
6
+ from networkx.algorithms.isomorphism.tree_isomorphism import *
7
+ from networkx.algorithms.isomorphism.vf2pp import *
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (575 Bytes). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/ismags.cpython-310.pyc ADDED
Binary file (33 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorph.cpython-310.pyc ADDED
Binary file (7.74 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-310.pyc ADDED
Binary file (28.5 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-310.pyc ADDED
Binary file (10.9 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-310.pyc ADDED
Binary file (10.8 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-310.pyc ADDED
Binary file (7.44 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2pp.cpython-310.pyc ADDED
Binary file (28.5 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2userfunc.cpython-310.pyc ADDED
Binary file (6.47 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/ismags.py ADDED
@@ -0,0 +1,1163 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ ISMAGS Algorithm
3
+ ================
4
+
5
+ Provides a Python implementation of the ISMAGS algorithm. [1]_
6
+
7
+ It is capable of finding (subgraph) isomorphisms between two graphs, taking the
8
+ symmetry of the subgraph into account. In most cases the VF2 algorithm is
9
+ faster (at least on small graphs) than this implementation, but in some cases
10
+ there is an exponential number of isomorphisms that are symmetrically
11
+ equivalent. In that case, the ISMAGS algorithm will provide only one solution
12
+ per symmetry group.
13
+
14
+ >>> petersen = nx.petersen_graph()
15
+ >>> ismags = nx.isomorphism.ISMAGS(petersen, petersen)
16
+ >>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=False))
17
+ >>> len(isomorphisms)
18
+ 120
19
+ >>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=True))
20
+ >>> answer = [{0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9}]
21
+ >>> answer == isomorphisms
22
+ True
23
+
24
+ In addition, this implementation also provides an interface to find the
25
+ largest common induced subgraph [2]_ between any two graphs, again taking
26
+ symmetry into account. Given `graph` and `subgraph` the algorithm will remove
27
+ nodes from the `subgraph` until `subgraph` is isomorphic to a subgraph of
28
+ `graph`. Since only the symmetry of `subgraph` is taken into account it is
29
+ worth thinking about how you provide your graphs:
30
+
31
+ >>> graph1 = nx.path_graph(4)
32
+ >>> graph2 = nx.star_graph(3)
33
+ >>> ismags = nx.isomorphism.ISMAGS(graph1, graph2)
34
+ >>> ismags.is_isomorphic()
35
+ False
36
+ >>> largest_common_subgraph = list(ismags.largest_common_subgraph())
37
+ >>> answer = [{1: 0, 0: 1, 2: 2}, {2: 0, 1: 1, 3: 2}]
38
+ >>> answer == largest_common_subgraph
39
+ True
40
+ >>> ismags2 = nx.isomorphism.ISMAGS(graph2, graph1)
41
+ >>> largest_common_subgraph = list(ismags2.largest_common_subgraph())
42
+ >>> answer = [
43
+ ... {1: 0, 0: 1, 2: 2},
44
+ ... {1: 0, 0: 1, 3: 2},
45
+ ... {2: 0, 0: 1, 1: 2},
46
+ ... {2: 0, 0: 1, 3: 2},
47
+ ... {3: 0, 0: 1, 1: 2},
48
+ ... {3: 0, 0: 1, 2: 2},
49
+ ... ]
50
+ >>> answer == largest_common_subgraph
51
+ True
52
+
53
+ However, when not taking symmetry into account, it doesn't matter:
54
+
55
+ >>> largest_common_subgraph = list(ismags.largest_common_subgraph(symmetry=False))
56
+ >>> answer = [
57
+ ... {1: 0, 0: 1, 2: 2},
58
+ ... {1: 0, 2: 1, 0: 2},
59
+ ... {2: 0, 1: 1, 3: 2},
60
+ ... {2: 0, 3: 1, 1: 2},
61
+ ... {1: 0, 0: 1, 2: 3},
62
+ ... {1: 0, 2: 1, 0: 3},
63
+ ... {2: 0, 1: 1, 3: 3},
64
+ ... {2: 0, 3: 1, 1: 3},
65
+ ... {1: 0, 0: 2, 2: 3},
66
+ ... {1: 0, 2: 2, 0: 3},
67
+ ... {2: 0, 1: 2, 3: 3},
68
+ ... {2: 0, 3: 2, 1: 3},
69
+ ... ]
70
+ >>> answer == largest_common_subgraph
71
+ True
72
+ >>> largest_common_subgraph = list(ismags2.largest_common_subgraph(symmetry=False))
73
+ >>> answer = [
74
+ ... {1: 0, 0: 1, 2: 2},
75
+ ... {1: 0, 0: 1, 3: 2},
76
+ ... {2: 0, 0: 1, 1: 2},
77
+ ... {2: 0, 0: 1, 3: 2},
78
+ ... {3: 0, 0: 1, 1: 2},
79
+ ... {3: 0, 0: 1, 2: 2},
80
+ ... {1: 1, 0: 2, 2: 3},
81
+ ... {1: 1, 0: 2, 3: 3},
82
+ ... {2: 1, 0: 2, 1: 3},
83
+ ... {2: 1, 0: 2, 3: 3},
84
+ ... {3: 1, 0: 2, 1: 3},
85
+ ... {3: 1, 0: 2, 2: 3},
86
+ ... ]
87
+ >>> answer == largest_common_subgraph
88
+ True
89
+
90
+ Notes
91
+ -----
92
+ - The current implementation works for undirected graphs only. The algorithm
93
+ in general should work for directed graphs as well though.
94
+ - Node keys for both provided graphs need to be fully orderable as well as
95
+ hashable.
96
+ - Node and edge equality is assumed to be transitive: if A is equal to B, and
97
+ B is equal to C, then A is equal to C.
98
+
99
+ References
100
+ ----------
101
+ .. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
102
+ M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
103
+ Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
104
+ Enumeration", PLoS One 9(5): e97896, 2014.
105
+ https://doi.org/10.1371/journal.pone.0097896
106
+ .. [2] https://en.wikipedia.org/wiki/Maximum_common_induced_subgraph
107
+ """
108
+
109
+ __all__ = ["ISMAGS"]
110
+
111
+ import itertools
112
+ from collections import Counter, defaultdict
113
+ from functools import reduce, wraps
114
+
115
+
116
def are_all_equal(iterable):
    """
    Returns ``True`` if and only if all elements in `iterable` are equal; and
    ``False`` otherwise.

    Parameters
    ----------
    iterable: collections.abc.Iterable
        The container whose elements will be checked.

    Returns
    -------
    bool
        ``True`` iff all elements in `iterable` compare equal, ``False``
        otherwise.
    """
    # Reject multidimensional array-likes (anything exposing a >1-D ``shape``):
    # row-wise comparison would not collapse to a single boolean.
    shape = getattr(iterable, "shape", ())
    if len(shape) > 1:
        message = "The function does not works on multidimensional arrays."
        raise NotImplementedError(message) from None

    elements = iter(iterable)
    reference = next(elements, None)
    return all(element == reference for element in elements)
144
+
145
+
146
def make_partitions(items, test):
    """
    Partitions items into sets based on the outcome of ``test(item1, item2)``.
    Pairs of items for which `test` returns `True` end up in the same set.

    Parameters
    ----------
    items : collections.abc.Iterable[collections.abc.Hashable]
        Items to partition
    test : collections.abc.Callable[collections.abc.Hashable, collections.abc.Hashable]
        A function that will be called with 2 arguments, taken from items.
        Should return `True` if those 2 items need to end up in the same
        partition, and `False` otherwise.

    Returns
    -------
    list[set]
        A list of sets, with each set containing part of the items in `items`,
        such that ``all(test(*pair) for pair in itertools.combinations(set, 2))
        == True``

    Notes
    -----
    The function `test` is assumed to be transitive: if ``test(a, b)`` and
    ``test(b, c)`` return ``True``, then ``test(a, c)`` must also be ``True``.
    """
    partitions = []
    for element in items:
        # Transitivity lets us compare against a single representative
        # of each existing partition.
        home = next(
            (part for part in partitions if test(element, next(iter(part)))),
            None,
        )
        if home is None:
            partitions.append({element})
        else:
            home.add(element)
    return partitions
182
+
183
+
184
def partition_to_color(partitions):
    """
    Creates a dictionary that maps each item in each partition to the index of
    the partition to which it belongs.

    Parameters
    ----------
    partitions: collections.abc.Sequence[collections.abc.Iterable]
        As returned by :func:`make_partitions`.

    Returns
    -------
    dict
    """
    # The partition's position in the sequence serves as its "color".
    return {
        member: color
        for color, partition in enumerate(partitions)
        for member in partition
    }
203
+
204
+
205
def intersect(collection_of_sets):
    """
    Given an collection of sets, returns the intersection of those sets.

    Parameters
    ----------
    collection_of_sets: collections.abc.Collection[set]
        A collection of sets.

    Returns
    -------
    set
        An intersection of all sets in `collection_of_sets`. Will have the same
        type as the item initially taken from `collection_of_sets`.
    """
    remaining = list(collection_of_sets)
    # The last set removed determines the type of the returned value.
    seed = remaining.pop()
    result = set(seed)
    for other in remaining:
        result.intersection_update(other)
    return type(seed)(result)
224
+
225
+
226
class ISMAGS:
    """
    Implements the ISMAGS subgraph matching algorithm. [1]_ ISMAGS stands for
    "Index-based Subgraph Matching Algorithm with General Symmetries". As the
    name implies, it is symmetry aware and will only generate non-symmetric
    isomorphisms.

    Notes
    -----
    The implementation imposes additional conditions compared to the VF2
    algorithm on the graphs provided and the comparison functions
    (:attr:`node_equality` and :attr:`edge_equality`):

    - Node keys in both graphs must be orderable as well as hashable.
    - Equality must be transitive: if A is equal to B, and B is equal to C,
      then A must be equal to C.

    Attributes
    ----------
    graph: networkx.Graph
    subgraph: networkx.Graph
    node_equality: collections.abc.Callable
        The function called to see if two nodes should be considered equal.
        It's signature looks like this:
        ``f(graph1: networkx.Graph, node1, graph2: networkx.Graph, node2) -> bool``.
        `node1` is a node in `graph1`, and `node2` a node in `graph2`.
        Constructed from the argument `node_match`.
    edge_equality: collections.abc.Callable
        The function called to see if two edges should be considered equal.
        It's signature looks like this:
        ``f(graph1: networkx.Graph, edge1, graph2: networkx.Graph, edge2) -> bool``.
        `edge1` is an edge in `graph1`, and `edge2` an edge in `graph2`.
        Constructed from the argument `edge_match`.

    References
    ----------
    .. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
       M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
       Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
       Enumeration", PLoS One 9(5): e97896, 2014.
       https://doi.org/10.1371/journal.pone.0097896
    """

    def __init__(self, graph, subgraph, node_match=None, edge_match=None, cache=None):
        """
        Parameters
        ----------
        graph: networkx.Graph
        subgraph: networkx.Graph
        node_match: collections.abc.Callable or None
            Function used to determine whether two nodes are equivalent. Its
            signature should look like ``f(n1: dict, n2: dict) -> bool``, with
            `n1` and `n2` node property dicts. See also
            :func:`~networkx.algorithms.isomorphism.categorical_node_match` and
            friends.
            If `None`, all nodes are considered equal.
        edge_match: collections.abc.Callable or None
            Function used to determine whether two edges are equivalent. Its
            signature should look like ``f(e1: dict, e2: dict) -> bool``, with
            `e1` and `e2` edge property dicts. See also
            :func:`~networkx.algorithms.isomorphism.categorical_edge_match` and
            friends.
            If `None`, all edges are considered equal.
        cache: collections.abc.Mapping
            A cache used for caching graph symmetries.
        """
        # TODO: graph and subgraph setter methods that invalidate the caches.
        # TODO: allow for precomputed partitions and colors
        self.graph = graph
        self.subgraph = subgraph
        self._symmetry_cache = cache
        # Naming conventions are taken from the original paper. For your
        # sanity:
        #   sg: subgraph
        #   g: graph
        #   e: edge(s)
        #   n: node(s)
        # So: sgn means "subgraph nodes".
        # All of the attributes below are computed lazily by the corresponding
        # properties (e.g. ``_sgn_partitions``); ``None`` means "not yet
        # computed".
        self._sgn_partitions_ = None
        self._sge_partitions_ = None

        self._sgn_colors_ = None
        self._sge_colors_ = None

        self._gn_partitions_ = None
        self._ge_partitions_ = None

        self._gn_colors_ = None
        self._ge_colors_ = None

        self._node_compat_ = None
        self._edge_compat_ = None

        if node_match is None:
            # All nodes are equal: a single partition per graph, and the
            # (only) subgraph color is compatible with the (only) graph color.
            self.node_equality = self._node_match_maker(lambda n1, n2: True)
            self._sgn_partitions_ = [set(self.subgraph.nodes)]
            self._gn_partitions_ = [set(self.graph.nodes)]
            self._node_compat_ = {0: 0}
        else:
            self.node_equality = self._node_match_maker(node_match)
        if edge_match is None:
            # Same shortcut for edges when all edges are considered equal.
            self.edge_equality = self._edge_match_maker(lambda e1, e2: True)
            self._sge_partitions_ = [set(self.subgraph.edges)]
            self._ge_partitions_ = [set(self.graph.edges)]
            self._edge_compat_ = {0: 0}
        else:
            self.edge_equality = self._edge_match_maker(edge_match)

    @property
    def _sgn_partitions(self):
        # Lazily partition subgraph nodes into equivalence classes under
        # ``node_equality``.
        if self._sgn_partitions_ is None:

            def nodematch(node1, node2):
                return self.node_equality(self.subgraph, node1, self.subgraph, node2)

            self._sgn_partitions_ = make_partitions(self.subgraph.nodes, nodematch)
        return self._sgn_partitions_

    @property
    def _sge_partitions(self):
        # Lazily partition subgraph edges into equivalence classes under
        # ``edge_equality``.
        if self._sge_partitions_ is None:

            def edgematch(edge1, edge2):
                return self.edge_equality(self.subgraph, edge1, self.subgraph, edge2)

            self._sge_partitions_ = make_partitions(self.subgraph.edges, edgematch)
        return self._sge_partitions_

    @property
    def _gn_partitions(self):
        # Lazily partition graph nodes into equivalence classes under
        # ``node_equality``.
        if self._gn_partitions_ is None:

            def nodematch(node1, node2):
                return self.node_equality(self.graph, node1, self.graph, node2)

            self._gn_partitions_ = make_partitions(self.graph.nodes, nodematch)
        return self._gn_partitions_

    @property
    def _ge_partitions(self):
        # Lazily partition graph edges into equivalence classes under
        # ``edge_equality``.
        if self._ge_partitions_ is None:

            def edgematch(edge1, edge2):
                return self.edge_equality(self.graph, edge1, self.graph, edge2)

            self._ge_partitions_ = make_partitions(self.graph.edges, edgematch)
        return self._ge_partitions_

    @property
    def _sgn_colors(self):
        # Mapping of subgraph node -> partition index ("color").
        if self._sgn_colors_ is None:
            self._sgn_colors_ = partition_to_color(self._sgn_partitions)
        return self._sgn_colors_

    @property
    def _sge_colors(self):
        # Mapping of subgraph edge -> partition index ("color").
        if self._sge_colors_ is None:
            self._sge_colors_ = partition_to_color(self._sge_partitions)
        return self._sge_colors_

    @property
    def _gn_colors(self):
        # Mapping of graph node -> partition index ("color").
        if self._gn_colors_ is None:
            self._gn_colors_ = partition_to_color(self._gn_partitions)
        return self._gn_colors_

    @property
    def _ge_colors(self):
        # Mapping of graph edge -> partition index ("color").
        if self._ge_colors_ is None:
            self._ge_colors_ = partition_to_color(self._ge_partitions)
        return self._ge_colors_

    @property
    def _node_compatibility(self):
        # Mapping of subgraph node color -> compatible graph node color.
        # Compatibility is decided by comparing one representative from each
        # partition (valid because equality is assumed transitive).
        if self._node_compat_ is not None:
            return self._node_compat_
        self._node_compat_ = {}
        for sgn_part_color, gn_part_color in itertools.product(
            range(len(self._sgn_partitions)), range(len(self._gn_partitions))
        ):
            sgn = next(iter(self._sgn_partitions[sgn_part_color]))
            gn = next(iter(self._gn_partitions[gn_part_color]))
            if self.node_equality(self.subgraph, sgn, self.graph, gn):
                self._node_compat_[sgn_part_color] = gn_part_color
        return self._node_compat_

    @property
    def _edge_compatibility(self):
        # Mapping of subgraph edge color -> compatible graph edge color,
        # analogous to ``_node_compatibility``.
        if self._edge_compat_ is not None:
            return self._edge_compat_
        self._edge_compat_ = {}
        for sge_part_color, ge_part_color in itertools.product(
            range(len(self._sge_partitions)), range(len(self._ge_partitions))
        ):
            sge = next(iter(self._sge_partitions[sge_part_color]))
            ge = next(iter(self._ge_partitions[ge_part_color]))
            if self.edge_equality(self.subgraph, sge, self.graph, ge):
                self._edge_compat_[sge_part_color] = ge_part_color
        return self._edge_compat_

    @staticmethod
    def _node_match_maker(cmp):
        # Adapt a ``f(n1_attrs, n2_attrs)`` comparator into the internal
        # ``f(graph1, node1, graph2, node2)`` signature.
        @wraps(cmp)
        def comparer(graph1, node1, graph2, node2):
            return cmp(graph1.nodes[node1], graph2.nodes[node2])

        return comparer

    @staticmethod
    def _edge_match_maker(cmp):
        # Adapt a ``f(e1_attrs, e2_attrs)`` comparator into the internal
        # ``f(graph1, edge1, graph2, edge2)`` signature.
        @wraps(cmp)
        def comparer(graph1, edge1, graph2, edge2):
            return cmp(graph1.edges[edge1], graph2.edges[edge2])

        return comparer

    def find_isomorphisms(self, symmetry=True):
        """Find all subgraph isomorphisms between subgraph and graph

        Finds isomorphisms where :attr:`subgraph` <= :attr:`graph`.

        Parameters
        ----------
        symmetry: bool
            Whether symmetry should be taken into account. If False, found
            isomorphisms may be symmetrically equivalent.

        Yields
        ------
        dict
            The found isomorphism mappings of {graph_node: subgraph_node}.
        """
        # The networkx VF2 algorithm is slightly funny in when it yields an
        # empty dict and when not.
        if not self.subgraph:
            yield {}
            return
        elif not self.graph:
            return
        elif len(self.graph) < len(self.subgraph):
            return

        if symmetry:
            _, cosets = self.analyze_symmetry(
                self.subgraph, self._sgn_partitions, self._sge_colors
            )
            constraints = self._make_constraints(cosets)
        else:
            constraints = []

        candidates = self._find_nodecolor_candidates()
        la_candidates = self._get_lookahead_candidates()
        for sgn in self.subgraph:
            extra_candidates = la_candidates[sgn]
            if extra_candidates:
                candidates[sgn] = candidates[sgn] | {frozenset(extra_candidates)}

        if any(candidates.values()):
            # Start mapping from the subgraph node with the fewest candidates;
            # intersect its candidate sets up front so _map_nodes starts from
            # the tightest possible option set.
            start_sgn = min(candidates, key=lambda n: min(candidates[n], key=len))
            candidates[start_sgn] = (intersect(candidates[start_sgn]),)
            yield from self._map_nodes(start_sgn, candidates, constraints)
        else:
            return

    @staticmethod
    def _find_neighbor_color_count(graph, node, node_color, edge_color):
        """
        For `node` in `graph`, count the number of edges of a specific color
        it has to nodes of a specific color.
        """
        counts = Counter()
        neighbors = graph[node]
        for neighbor in neighbors:
            n_color = node_color[neighbor]
            # Edge colors are keyed on one orientation of the edge only, so
            # look up both (node, neighbor) and (neighbor, node).
            if (node, neighbor) in edge_color:
                e_color = edge_color[node, neighbor]
            else:
                e_color = edge_color[neighbor, node]
            counts[e_color, n_color] += 1
        return counts

    def _get_lookahead_candidates(self):
        """
        Returns a mapping of {subgraph node: collection of graph nodes} for
        which the graph nodes are feasible candidates for the subgraph node, as
        determined by looking ahead one edge.
        """
        g_counts = {}
        for gn in self.graph:
            g_counts[gn] = self._find_neighbor_color_count(
                self.graph, gn, self._gn_colors, self._ge_colors
            )
        candidates = defaultdict(set)
        for sgn in self.subgraph:
            sg_count = self._find_neighbor_color_count(
                self.subgraph, sgn, self._sgn_colors, self._sge_colors
            )
            # Translate subgraph (edge color, node color) pairs into their
            # compatible graph colors; pairs without a compatible graph color
            # are dropped (no graph node can ever satisfy them).
            new_sg_count = Counter()
            for (sge_color, sgn_color), count in sg_count.items():
                try:
                    ge_color = self._edge_compatibility[sge_color]
                    gn_color = self._node_compatibility[sgn_color]
                except KeyError:
                    pass
                else:
                    new_sg_count[ge_color, gn_color] = count

            for gn, g_count in g_counts.items():
                # gn is feasible for sgn only if it has at least as many
                # neighbors of each (edge color, node color) kind.
                if all(new_sg_count[x] <= g_count[x] for x in new_sg_count):
                    # Valid candidate
                    candidates[sgn].add(gn)
        return candidates

    def largest_common_subgraph(self, symmetry=True):
        """
        Find the largest common induced subgraphs between :attr:`subgraph` and
        :attr:`graph`.

        Parameters
        ----------
        symmetry: bool
            Whether symmetry should be taken into account. If False, found
            largest common subgraphs may be symmetrically equivalent.

        Yields
        ------
        dict
            The found isomorphism mappings of {graph_node: subgraph_node}.
        """
        # The networkx VF2 algorithm is slightly funny in when it yields an
        # empty dict and when not.
        if not self.subgraph:
            yield {}
            return
        elif not self.graph:
            return

        if symmetry:
            _, cosets = self.analyze_symmetry(
                self.subgraph, self._sgn_partitions, self._sge_colors
            )
            constraints = self._make_constraints(cosets)
        else:
            constraints = []

        candidates = self._find_nodecolor_candidates()

        if any(candidates.values()):
            yield from self._largest_common_subgraph(candidates, constraints)
        else:
            return

    def analyze_symmetry(self, graph, node_partitions, edge_colors):
        """
        Find a minimal set of permutations and corresponding co-sets that
        describe the symmetry of `graph`, given the node and edge equalities
        given by `node_partitions` and `edge_colors`, respectively.

        Parameters
        ----------
        graph : networkx.Graph
            The graph whose symmetry should be analyzed.
        node_partitions : list of sets
            A list of sets containing node keys. Node keys in the same set
            are considered equivalent. Every node key in `graph` should be in
            exactly one of the sets. If all nodes are equivalent, this should
            be ``[set(graph.nodes)]``.
        edge_colors : dict mapping edges to their colors
            A dict mapping every edge in `graph` to its corresponding color.
            Edges with the same color are considered equivalent. If all edges
            are equivalent, this should be ``{e: 0 for e in graph.edges}``.


        Returns
        -------
        set[frozenset]
            The found permutations. This is a set of frozensets of pairs of node
            keys which can be exchanged without changing :attr:`subgraph`.
        dict[collections.abc.Hashable, set[collections.abc.Hashable]]
            The found co-sets. The co-sets is a dictionary of
            ``{node key: set of node keys}``.
            Every key-value pair describes which ``values`` can be interchanged
            without changing nodes less than ``key``.
        """
        if self._symmetry_cache is not None:
            # Key the cache on everything that determines the symmetry result.
            key = hash(
                (
                    tuple(graph.nodes),
                    tuple(graph.edges),
                    tuple(map(tuple, node_partitions)),
                    tuple(edge_colors.items()),
                )
            )
            if key in self._symmetry_cache:
                return self._symmetry_cache[key]
        node_partitions = list(
            self._refine_node_partitions(graph, node_partitions, edge_colors)
        )
        assert len(node_partitions) == 1
        node_partitions = node_partitions[0]
        permutations, cosets = self._process_ordered_pair_partitions(
            graph, node_partitions, node_partitions, edge_colors
        )
        if self._symmetry_cache is not None:
            self._symmetry_cache[key] = permutations, cosets
        return permutations, cosets

    def is_isomorphic(self, symmetry=False):
        """
        Returns True if :attr:`graph` is isomorphic to :attr:`subgraph` and
        False otherwise.

        Returns
        -------
        bool
        """
        return len(self.subgraph) == len(self.graph) and self.subgraph_is_isomorphic(
            symmetry
        )

    def subgraph_is_isomorphic(self, symmetry=False):
        """
        Returns True if a subgraph of :attr:`graph` is isomorphic to
        :attr:`subgraph` and False otherwise.

        Returns
        -------
        bool
        """
        # symmetry=False, since we only need to know whether there is any
        # example; figuring out all symmetry elements probably costs more time
        # than it gains.
        isom = next(self.subgraph_isomorphisms_iter(symmetry=symmetry), None)
        return isom is not None

    def isomorphisms_iter(self, symmetry=True):
        """
        Does the same as :meth:`find_isomorphisms` if :attr:`graph` and
        :attr:`subgraph` have the same number of nodes.
        """
        if len(self.graph) == len(self.subgraph):
            yield from self.subgraph_isomorphisms_iter(symmetry=symmetry)

    def subgraph_isomorphisms_iter(self, symmetry=True):
        """Alternative name for :meth:`find_isomorphisms`."""
        return self.find_isomorphisms(symmetry)

    def _find_nodecolor_candidates(self):
        """
        Per node in subgraph find all nodes in graph that have the same color.

        Returns a dict of {subgraph node: frozenset of frozensets of graph
        nodes}; each inner frozenset is one constraint set, and a real
        candidate must be in all of them.
        """
        candidates = defaultdict(set)
        for sgn in self.subgraph.nodes:
            sgn_color = self._sgn_colors[sgn]
            if sgn_color in self._node_compatibility:
                gn_color = self._node_compatibility[sgn_color]
                candidates[sgn].add(frozenset(self._gn_partitions[gn_color]))
            else:
                # No compatible graph color: this subgraph node can never be
                # mapped.
                candidates[sgn].add(frozenset())
        candidates = dict(candidates)
        for sgn, options in candidates.items():
            candidates[sgn] = frozenset(options)
        return candidates

    @staticmethod
    def _make_constraints(cosets):
        """
        Turn cosets into constraints.

        Each constraint ``(node_i, node_t)`` means node_i must be mapped to a
        smaller graph node than node_t, which prunes symmetric duplicates.
        """
        constraints = []
        for node_i, node_ts in cosets.items():
            for node_t in node_ts:
                if node_i != node_t:
                    # Node i must be smaller than node t.
                    constraints.append((node_i, node_t))
        return constraints

    @staticmethod
    def _find_node_edge_color(graph, node_colors, edge_colors):
        """
        For every node in graph, come up with a color that combines 1) the
        color of the node, and 2) the number of edges of a color to each type
        of node.
        """
        counts = defaultdict(lambda: defaultdict(int))
        for node1, node2 in graph.edges:
            if (node1, node2) in edge_colors:
                # FIXME directed graphs
                ecolor = edge_colors[node1, node2]
            else:
                ecolor = edge_colors[node2, node1]
            # Count per node how many edges it has of what color to nodes of
            # what color
            counts[node1][ecolor, node_colors[node2]] += 1
            counts[node2][ecolor, node_colors[node1]] += 1

        node_edge_colors = {}
        for node in graph.nodes:
            node_edge_colors[node] = node_colors[node], set(counts[node].items())

        return node_edge_colors

    @staticmethod
    def _get_permutations_by_length(items):
        """
        Get all permutations of items, but only permute items with the same
        length.

        >>> found = list(ISMAGS._get_permutations_by_length([[1], [2], [3, 4], [4, 5]]))
        >>> answer = [
        ...     (([1], [2]), ([3, 4], [4, 5])),
        ...     (([1], [2]), ([4, 5], [3, 4])),
        ...     (([2], [1]), ([3, 4], [4, 5])),
        ...     (([2], [1]), ([4, 5], [3, 4])),
        ... ]
        >>> found == answer
        True
        """
        by_len = defaultdict(list)
        for item in items:
            by_len[len(item)].append(item)

        yield from itertools.product(
            *(itertools.permutations(by_len[l]) for l in sorted(by_len))
        )

    @classmethod
    def _refine_node_partitions(cls, graph, node_partitions, edge_colors, branch=False):
        """
        Given a partition of nodes in graph, make the partitions smaller such
        that all nodes in a partition have 1) the same color, and 2) the same
        number of edges to specific other partitions.
        """

        def equal_color(node1, node2):
            return node_edge_colors[node1] == node_edge_colors[node2]

        node_partitions = list(node_partitions)
        node_colors = partition_to_color(node_partitions)
        node_edge_colors = cls._find_node_edge_color(graph, node_colors, edge_colors)
        if all(
            are_all_equal(node_edge_colors[node] for node in partition)
            for partition in node_partitions
        ):
            # Fixed point reached: every partition is already homogeneous.
            yield node_partitions
            return

        new_partitions = []
        output = [new_partitions]
        for partition in node_partitions:
            if not are_all_equal(node_edge_colors[node] for node in partition):
                refined = make_partitions(partition, equal_color)
                if (
                    branch
                    and len(refined) != 1
                    and len({len(r) for r in refined}) != len([len(r) for r in refined])
                ):
                    # This is where it breaks. There are multiple new cells
                    # in refined with the same length, and their order
                    # matters.
                    # So option 1) Hit it with a big hammer and simply make all
                    # orderings.
                    # NOTE(review): `permutations` is a generator; if `output`
                    # holds more than one partial result the inner loop will
                    # find it exhausted after the first pass — confirm this
                    # branch combination is intended.
                    permutations = cls._get_permutations_by_length(refined)
                    new_output = []
                    for n_p in output:
                        for permutation in permutations:
                            new_output.append(n_p + list(permutation[0]))
                    output = new_output
                else:
                    for n_p in output:
                        n_p.extend(sorted(refined, key=len))
            else:
                for n_p in output:
                    n_p.append(partition)
        for n_p in output:
            # Recurse until a fixed point is reached for every ordering.
            yield from cls._refine_node_partitions(graph, n_p, edge_colors, branch)

    def _edges_of_same_color(self, sgn1, sgn2):
        """
        Returns all edges in :attr:`graph` that have the same colour as the
        edge between sgn1 and sgn2 in :attr:`subgraph`.
        """
        if (sgn1, sgn2) in self._sge_colors:
            # FIXME directed graphs
            sge_color = self._sge_colors[sgn1, sgn2]
        else:
            sge_color = self._sge_colors[sgn2, sgn1]
        if sge_color in self._edge_compatibility:
            ge_color = self._edge_compatibility[sge_color]
            g_edges = self._ge_partitions[ge_color]
        else:
            g_edges = []
        return g_edges

    def _map_nodes(self, sgn, candidates, constraints, mapping=None, to_be_mapped=None):
        """
        Find all subgraph isomorphisms honoring constraints.

        Recursive backtracking: tries every candidate `gn` for `sgn`, narrows
        the candidate sets of the remaining subgraph nodes, and recurses on
        the node with the fewest candidates.
        """
        if mapping is None:
            mapping = {}
        else:
            mapping = mapping.copy()
        if to_be_mapped is None:
            to_be_mapped = set(self.subgraph.nodes)

        # Note, we modify candidates here. Doesn't seem to affect results, but
        # remember this.
        # candidates = candidates.copy()
        sgn_candidates = intersect(candidates[sgn])
        candidates[sgn] = frozenset([sgn_candidates])
        for gn in sgn_candidates:
            # We're going to try to map sgn to gn.
            if gn in mapping.values() or sgn not in to_be_mapped:
                # gn is already mapped to something
                continue  # pragma: no cover

            # REDUCTION and COMBINATION
            mapping[sgn] = gn
            # BASECASE
            if to_be_mapped == set(mapping.keys()):
                yield {v: k for k, v in mapping.items()}
                continue
            left_to_map = to_be_mapped - set(mapping.keys())

            new_candidates = candidates.copy()
            sgn_nbrs = set(self.subgraph[sgn])
            not_gn_nbrs = set(self.graph.nodes) - set(self.graph[gn])
            for sgn2 in left_to_map:
                if sgn2 not in sgn_nbrs:
                    # Non-neighbors of sgn must map to non-neighbors of gn.
                    gn2_options = not_gn_nbrs
                else:
                    # Get all edges to gn of the right color:
                    g_edges = self._edges_of_same_color(sgn, sgn2)
                    # FIXME directed graphs
                    # And all nodes involved in those which are connected to gn
                    gn2_options = {n for e in g_edges for n in e if gn in e}
                    # Node color compatibility should be taken care of by the
                    # initial candidate lists made by find_subgraphs

                # Add gn2_options to the right collection. Since new_candidates
                # is a dict of frozensets of frozensets of node indices it's
                # a bit clunky. We can't do .add, and + also doesn't work. We
                # could do |, but I deem union to be clearer.
                new_candidates[sgn2] = new_candidates[sgn2].union(
                    [frozenset(gn2_options)]
                )

                # Symmetry constraints impose an ordering between the graph
                # nodes that sgn and sgn2 may be mapped to.
                if (sgn, sgn2) in constraints:
                    gn2_options = {gn2 for gn2 in self.graph if gn2 > gn}
                elif (sgn2, sgn) in constraints:
                    gn2_options = {gn2 for gn2 in self.graph if gn2 < gn}
                else:
                    continue  # pragma: no cover
                new_candidates[sgn2] = new_candidates[sgn2].union(
                    [frozenset(gn2_options)]
                )

            # The next node is the one that is unmapped and has fewest
            # candidates
            next_sgn = min(left_to_map, key=lambda n: min(new_candidates[n], key=len))
            yield from self._map_nodes(
                next_sgn,
                new_candidates,
                constraints,
                mapping=mapping,
                to_be_mapped=to_be_mapped,
            )
            # Unmap sgn-gn. Strictly not necessary since it'd get overwritten
            # when making a new mapping for sgn.
            # del mapping[sgn]

    def _largest_common_subgraph(self, candidates, constraints, to_be_mapped=None):
        """
        Find all largest common subgraphs honoring constraints.
        """
        if to_be_mapped is None:
            to_be_mapped = {frozenset(self.subgraph.nodes)}

        # The LCS problem is basically a repeated subgraph isomorphism problem
        # with smaller and smaller subgraphs. We store the nodes that are
        # "part of" the subgraph in to_be_mapped, and we make it a little
        # smaller every iteration.

        current_size = len(next(iter(to_be_mapped), []))

        found_iso = False
        if current_size <= len(self.graph):
            # There's no point in trying to find isomorphisms of
            # graph >= subgraph if subgraph has more nodes than graph.

            # Try the isomorphism first with the nodes with lowest ID. So sort
            # them. Those are more likely to be part of the final
            # correspondence. This makes finding the first answer(s) faster. In
            # theory.
            for nodes in sorted(to_be_mapped, key=sorted):
                # Find the isomorphism between subgraph[to_be_mapped] <= graph
                next_sgn = min(nodes, key=lambda n: min(candidates[n], key=len))
                isomorphs = self._map_nodes(
                    next_sgn, candidates, constraints, to_be_mapped=nodes
                )

                # This is effectively `yield from isomorphs`, except that we look
                # whether an item was yielded.
                try:
                    item = next(isomorphs)
                except StopIteration:
                    pass
                else:
                    yield item
                    yield from isomorphs
                    found_iso = True

        # BASECASE
        if found_iso or current_size == 1:
            # Shrinking has no point because either 1) we end up with a smaller
            # common subgraph (and we want the largest), or 2) there'll be no
            # more subgraph.
            return

        left_to_be_mapped = set()
        for nodes in to_be_mapped:
            for sgn in nodes:
                # We're going to remove sgn from to_be_mapped, but subject to
                # symmetry constraints. We know that for every constraint we
                # have those subgraph nodes are equal. So whenever we would
                # remove the lower part of a constraint, remove the higher
                # instead. This is all dealth with by _remove_node. And because
                # left_to_be_mapped is a set, we don't do double work.

                # And finally, make the subgraph one node smaller.
                # REDUCTION
                new_nodes = self._remove_node(sgn, nodes, constraints)
                left_to_be_mapped.add(new_nodes)
        # COMBINATION
        yield from self._largest_common_subgraph(
            candidates, constraints, to_be_mapped=left_to_be_mapped
        )

    @staticmethod
    def _remove_node(node, nodes, constraints):
        """
        Returns a new set where node has been removed from nodes, subject to
        symmetry constraints. We know, that for every constraint we have
        those subgraph nodes are equal. So whenever we would remove the
        lower part of a constraint, remove the higher instead.
        """
        while True:
            for low, high in constraints:
                if low == node and high in nodes:
                    node = high
                    break
            else:  # no break, couldn't find node in constraints
                break
        return frozenset(nodes - {node})

    @staticmethod
    def _find_permutations(top_partitions, bottom_partitions):
        """
        Return the pairs of top/bottom partitions where the partitions are
        different. Ensures that all partitions in both top and bottom
        partitions have size 1.
        """
        # Find permutations
        permutations = set()
        for top, bot in zip(top_partitions, bottom_partitions):
            # top and bot have only one element
            if len(top) != 1 or len(bot) != 1:
                raise IndexError(
                    "Not all nodes are coupled. This is"
                    f" impossible: {top_partitions}, {bottom_partitions}"
                )
            if top != bot:
                permutations.add(frozenset((next(iter(top)), next(iter(bot)))))
        return permutations

    @staticmethod
    def _update_orbits(orbits, permutations):
        """
        Update orbits based on permutations. Orbits is modified in place.
        For every pair of items in permutations their respective orbits are
        merged.
        """
        for permutation in permutations:
            node, node2 = permutation
            # Find the orbits that contain node and node2, and replace the
            # orbit containing node with the union
            first = second = None
            for idx, orbit in enumerate(orbits):
                if first is not None and second is not None:
                    break
                if node in orbit:
                    first = idx
                if node2 in orbit:
                    second = idx
            if first != second:
                orbits[first].update(orbits[second])
                del orbits[second]

    def _couple_nodes(
        self,
        top_partitions,
        bottom_partitions,
        pair_idx,
        t_node,
        b_node,
        graph,
        edge_colors,
    ):
        """
        Generate new partitions from top and bottom_partitions where t_node is
        coupled to b_node. pair_idx is the index of the partitions where t_ and
        b_node can be found.
        """
        t_partition = top_partitions[pair_idx]
        b_partition = bottom_partitions[pair_idx]
        assert t_node in t_partition and b_node in b_partition
        # Couple node to node2. This means they get their own partition
        new_top_partitions = [top.copy() for top in top_partitions]
        new_bottom_partitions = [bot.copy() for bot in bottom_partitions]
        new_t_groups = {t_node}, t_partition - {t_node}
        new_b_groups = {b_node}, b_partition - {b_node}
        # Replace the old partitions with the coupled ones
        del new_top_partitions[pair_idx]
        del new_bottom_partitions[pair_idx]
        new_top_partitions[pair_idx:pair_idx] = new_t_groups
        new_bottom_partitions[pair_idx:pair_idx] = new_b_groups

        new_top_partitions = self._refine_node_partitions(
            graph, new_top_partitions, edge_colors
        )
        new_bottom_partitions = self._refine_node_partitions(
            graph, new_bottom_partitions, edge_colors, branch=True
        )
        new_top_partitions = list(new_top_partitions)
        assert len(new_top_partitions) == 1
        new_top_partitions = new_top_partitions[0]
        for bot in new_bottom_partitions:
            yield list(new_top_partitions), bot

    def _process_ordered_pair_partitions(
        self,
        graph,
        top_partitions,
        bottom_partitions,
        edge_colors,
        orbits=None,
        cosets=None,
    ):
        """
        Processes ordered pair partitions as per the reference paper. Finds and
        returns all permutations and cosets that leave the graph unchanged.
        """
        if orbits is None:
            orbits = [{node} for node in graph.nodes]
        else:
            # Note that we don't copy orbits when we are given one. This means
            # we leak information between the recursive branches. This is
            # intentional!
            orbits = orbits
        if cosets is None:
            cosets = {}
        else:
            cosets = cosets.copy()

        assert all(
            len(t_p) == len(b_p) for t_p, b_p in zip(top_partitions, bottom_partitions)
        )

        # BASECASE
        if all(len(top) == 1 for top in top_partitions):
            # All nodes are mapped
            permutations = self._find_permutations(top_partitions, bottom_partitions)
            self._update_orbits(orbits, permutations)
            if permutations:
                return [permutations], cosets
            else:
                return [], cosets

        permutations = []
        unmapped_nodes = {
            (node, idx)
            for idx, t_partition in enumerate(top_partitions)
            for node in t_partition
            if len(t_partition) > 1
        }
        node, pair_idx = min(unmapped_nodes)
        b_partition = bottom_partitions[pair_idx]

        for node2 in sorted(b_partition):
            if len(b_partition) == 1:
                # Can never result in symmetry
                continue
            if node != node2 and any(
                node in orbit and node2 in orbit for orbit in orbits
            ):
                # Orbit prune branch
                continue
            # REDUCTION
            # Couple node to node2
            partitions = self._couple_nodes(
                top_partitions,
                bottom_partitions,
                pair_idx,
                node,
                node2,
                graph,
                edge_colors,
            )
            for opp in partitions:
                new_top_partitions, new_bottom_partitions = opp

                new_perms, new_cosets = self._process_ordered_pair_partitions(
                    graph,
                    new_top_partitions,
                    new_bottom_partitions,
                    edge_colors,
                    orbits,
                    cosets,
                )
                # COMBINATION
                permutations += new_perms
                cosets.update(new_cosets)

        mapped = {
            k
            for top, bottom in zip(top_partitions, bottom_partitions)
            for k in top
            if len(top) == 1 and top == bottom
        }
        ks = {k for k in graph.nodes if k < node}
        # Have all nodes with ID < node been mapped?
        find_coset = ks <= mapped and node not in cosets
        if find_coset:
            # Find the orbit that contains node
            for orbit in orbits:
                if node in orbit:
                    cosets[node] = orbit.copy()
        return permutations, cosets
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorph.py ADDED
@@ -0,0 +1,249 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Graph isomorphism functions.
3
+ """
4
+
5
+ import networkx as nx
6
+ from networkx.exception import NetworkXError
7
+
8
+ __all__ = [
9
+ "could_be_isomorphic",
10
+ "fast_could_be_isomorphic",
11
+ "faster_could_be_isomorphic",
12
+ "is_isomorphic",
13
+ ]
14
+
15
+
16
@nx._dispatchable(graphs={"G1": 0, "G2": 1})
def could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.
    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
       The two graphs G1 and G2 must be the same type.

    Returns
    -------
    bool
        False if the graphs are certainly not isomorphic; True if they
        *may* be isomorphic (further checking is required).

    Notes
    -----
    Checks for matching degree, triangle, and number of cliques sequences.
    The triangle sequence contains the number of triangles each node is part of.
    The clique sequence contains for each node the number of maximal cliques
    involving that node.

    """
    # Local imports keep this stdlib dependency self-contained.
    from collections import Counter
    from itertools import chain

    # Check global properties: isomorphic graphs must have the same order.
    if G1.order() != G2.order():
        return False

    # Check local properties.  For each graph, build the sorted multiset of
    # per-node invariants [degree, triangle count, maximal-clique count].
    # Clique membership is counted with one Counter pass over all maximal
    # cliques -- O(total clique size) -- instead of scanning the full clique
    # list once per node.  Counter returns 0 for nodes absent from every
    # clique, matching the original per-node sum.
    t1 = nx.triangles(G1)
    c1 = Counter(chain.from_iterable(nx.find_cliques(G1)))
    props1 = sorted([d, t1[v], c1[v]] for v, d in G1.degree())

    t2 = nx.triangles(G2)
    c2 = Counter(chain.from_iterable(nx.find_cliques(G2)))
    props2 = sorted([d, t2[v], c2[v]] for v, d in G2.degree())

    # Equal invariant multisets are necessary -- but not sufficient -- for
    # the graphs to be isomorphic.
    return props1 == props2


graph_could_be_isomorphic = could_be_isomorphic
62
+
63
+
64
@nx._dispatchable(graphs={"G1": 0, "G2": 1})
def fast_could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.

    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
       The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Checks for matching degree and triangle sequences. The triangle
    sequence contains the number of triangles each node is part of.
    """
    # Isomorphic graphs necessarily have the same number of nodes.
    if G1.order() != G2.order():
        return False

    # Per-node invariant fingerprint: the sorted multiset of
    # [degree, triangle-count] pairs for each graph.
    tri1 = nx.triangles(G1)
    fingerprint1 = sorted([deg, tri1[node]] for node, deg in G1.degree())

    tri2 = nx.triangles(G2)
    fingerprint2 = sorted([deg, tri2[node]] for node, deg in G2.degree())

    # Matching fingerprints are necessary but not sufficient for isomorphism.
    return fingerprint1 == fingerprint2


fast_graph_could_be_isomorphic = fast_could_be_isomorphic
103
+
104
+
105
@nx._dispatchable(graphs={"G1": 0, "G2": 1})
def faster_could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.

    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
       The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Checks for matching degree sequences.
    """
    # Isomorphic graphs necessarily have the same number of nodes.
    if G1.order() != G2.order():
        return False

    # Compare the sorted degree sequences of both graphs; a mismatch rules
    # out isomorphism, a match leaves it possible.
    degrees1 = sorted(deg for _, deg in G1.degree())
    degrees2 = sorted(deg for _, deg in G2.degree())
    return degrees1 == degrees2


faster_graph_could_be_isomorphic = faster_could_be_isomorphic
136
+
137
+
138
@nx._dispatchable(
    graphs={"G1": 0, "G2": 1},
    preserve_edge_attrs="edge_match",
    preserve_node_attrs="node_match",
)
def is_isomorphic(G1, G2, node_match=None, edge_match=None):
    """Returns True if the graphs G1 and G2 are isomorphic and False otherwise.

    Parameters
    ----------
    G1, G2: graphs
        The two graphs G1 and G2 must be the same type.

    node_match : callable
        A function that returns True if node n1 in G1 and n2 in G2 should
        be considered equal during the isomorphism test.
        If node_match is not specified then node attributes are not considered.

        The function will be called like

           node_match(G1.nodes[n1], G2.nodes[n2]).

        That is, the function will receive the node attribute dictionaries
        for n1 and n2 as inputs.

    edge_match : callable
        A function that returns True if the edge attribute dictionary
        for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should
        be considered equal during the isomorphism test.  If edge_match is
        not specified then edge attributes are not considered.

        The function will be called like

           edge_match(G1[u1][v1], G2[u2][v2]).

        That is, the function will receive the edge attribute dictionaries
        of the edges under consideration.

    Raises
    ------
    NetworkXError
        If one graph is directed and the other is not.

    Notes
    -----
    Uses the vf2 algorithm [1]_.

    Examples
    --------
    >>> import networkx.algorithms.isomorphism as iso

    For digraphs G1 and G2, using 'weight' edge attribute (default: 1)

    >>> G1 = nx.DiGraph()
    >>> G2 = nx.DiGraph()
    >>> nx.add_path(G1, [1, 2, 3, 4], weight=1)
    >>> nx.add_path(G2, [10, 20, 30, 40], weight=2)
    >>> em = iso.numerical_edge_match("weight", 1)
    >>> nx.is_isomorphic(G1, G2)  # no weights considered
    True
    >>> nx.is_isomorphic(G1, G2, edge_match=em)  # match weights
    False

    For multidigraphs G1 and G2, using 'fill' node attribute (default: '')

    >>> G1 = nx.MultiDiGraph()
    >>> G2 = nx.MultiDiGraph()
    >>> G1.add_nodes_from([1, 2, 3], fill="red")
    >>> G2.add_nodes_from([10, 20, 30, 40], fill="red")
    >>> nx.add_path(G1, [1, 2, 3, 4], weight=3, linewidth=2.5)
    >>> nx.add_path(G2, [10, 20, 30, 40], weight=3)
    >>> nm = iso.categorical_node_match("fill", "red")
    >>> nx.is_isomorphic(G1, G2, node_match=nm)
    True

    For multidigraphs G1 and G2, using 'weight' edge attribute (default: 7)

    >>> G1.add_edge(1, 2, weight=7)
    1
    >>> G2.add_edge(10, 20)
    1
    >>> em = iso.numerical_multiedge_match("weight", 7, rtol=1e-6)
    >>> nx.is_isomorphic(G1, G2, edge_match=em)
    True

    For multigraphs G1 and G2, using 'weight' and 'linewidth' edge attributes
    with default values 7 and 2.5. Also using 'fill' node attribute with
    default value 'red'.

    >>> em = iso.numerical_multiedge_match(["weight", "linewidth"], [7, 2.5])
    >>> nm = iso.categorical_node_match("fill", "red")
    >>> nx.is_isomorphic(G1, G2, edge_match=em, node_match=nm)
    True

    See Also
    --------
    numerical_node_match, numerical_edge_match, numerical_multiedge_match
    categorical_node_match, categorical_edge_match, categorical_multiedge_match

    References
    ----------
    .. [1] L. P. Cordella, P. Foggia, C. Sansone, M. Vento,
       "An Improved Algorithm for Matching Large Graphs",
       3rd IAPR-TC15 Workshop  on Graph-based Representations in
       Pattern Recognition, Cuen, pp. 149-159, 2001.
       https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs
    """
    # Guard clause: both graphs must share directedness, otherwise no
    # matcher applies.
    if G1.is_directed() != G2.is_directed():
        raise NetworkXError("Graphs G1 and G2 are not of the same type.")

    # Pick the matcher class appropriate for the (shared) graph type.
    matcher_cls = (
        nx.algorithms.isomorphism.DiGraphMatcher
        if G1.is_directed()
        else nx.algorithms.isomorphism.GraphMatcher
    )

    matcher = matcher_cls(G1, G2, node_match=node_match, edge_match=edge_match)
    return matcher.is_isomorphic()
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py ADDED
@@ -0,0 +1,1238 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ *************
3
+ VF2 Algorithm
4
+ *************
5
+
6
+ An implementation of VF2 algorithm for graph isomorphism testing.
7
+
8
+ The simplest interface to use this module is to call the
9
+ :func:`is_isomorphic <networkx.algorithms.isomorphism.is_isomorphic>`
10
+ function.
11
+
12
+ Introduction
13
+ ------------
14
+
15
+ The GraphMatcher and DiGraphMatcher are responsible for matching
16
+ graphs or directed graphs in a predetermined manner. This
17
+ usually means a check for an isomorphism, though other checks
18
+ are also possible. For example, a subgraph of one graph
19
+ can be checked for isomorphism to a second graph.
20
+
21
+ Matching is done via syntactic feasibility. It is also possible
22
+ to check for semantic feasibility. Feasibility, then, is defined
23
+ as the logical AND of the two functions.
24
+
25
+ To include a semantic check, the (Di)GraphMatcher class should be
26
+ subclassed, and the
27
+ :meth:`semantic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.semantic_feasibility>`
28
+ function should be redefined. By default, the semantic feasibility function always
29
+ returns ``True``. The effect of this is that semantics are not
30
+ considered in the matching of G1 and G2.
31
+
32
+ Examples
33
+ --------
34
+
35
+ Suppose G1 and G2 are isomorphic graphs. Verification is as follows:
36
+
37
+ >>> from networkx.algorithms import isomorphism
38
+ >>> G1 = nx.path_graph(4)
39
+ >>> G2 = nx.path_graph(4)
40
+ >>> GM = isomorphism.GraphMatcher(G1, G2)
41
+ >>> GM.is_isomorphic()
42
+ True
43
+
44
+ GM.mapping stores the isomorphism mapping from G1 to G2.
45
+
46
+ >>> GM.mapping
47
+ {0: 0, 1: 1, 2: 2, 3: 3}
48
+
49
+
50
+ Suppose G1 and G2 are isomorphic directed graphs.
51
+ Verification is as follows:
52
+
53
+ >>> G1 = nx.path_graph(4, create_using=nx.DiGraph)
54
+ >>> G2 = nx.path_graph(4, create_using=nx.DiGraph)
55
+ >>> DiGM = isomorphism.DiGraphMatcher(G1, G2)
56
+ >>> DiGM.is_isomorphic()
57
+ True
58
+
59
+ DiGM.mapping stores the isomorphism mapping from G1 to G2.
60
+
61
+ >>> DiGM.mapping
62
+ {0: 0, 1: 1, 2: 2, 3: 3}
63
+
64
+
65
+
66
+ Subgraph Isomorphism
67
+ --------------------
68
+ Graph theory literature can be ambiguous about the meaning of the
69
+ above statement, and we seek to clarify it now.
70
+
71
+ In the VF2 literature, a mapping ``M`` is said to be a graph-subgraph
72
+ isomorphism iff ``M`` is an isomorphism between ``G2`` and a subgraph of ``G1``.
73
+ Thus, to say that ``G1`` and ``G2`` are graph-subgraph isomorphic is to say
74
+ that a subgraph of ``G1`` is isomorphic to ``G2``.
75
+
76
+ Other literature uses the phrase 'subgraph isomorphic' as in '``G1`` does
77
+ not have a subgraph isomorphic to ``G2``'. Another use is as an adverb
78
+ modifying 'isomorphic'. Thus, to say that ``G1`` and ``G2`` are subgraph isomorphic
79
+ is to say that a subgraph of ``G1`` is isomorphic to ``G2``.
80
+
81
+ Finally, the term 'subgraph' can have multiple meanings. In this
82
+ context, 'subgraph' always means a 'node-induced subgraph'. Edge-induced
83
+ subgraph isomorphisms are not directly supported, but one should be
84
+ able to perform the check by making use of
85
+ :func:`line_graph <networkx.generators.line.line_graph>`. For
86
+ subgraphs which are not induced, the term 'monomorphism' is preferred
87
+ over 'isomorphism'.
88
+
89
+ Let ``G = (N, E)`` be a graph with a set of nodes ``N`` and set of edges ``E``.
90
+
91
+ If ``G' = (N', E')`` is a subgraph, then:
92
+ ``N'`` is a subset of ``N`` and
93
+ ``E'`` is a subset of ``E``.
94
+
95
+ If ``G' = (N', E')`` is a node-induced subgraph, then:
96
+ ``N'`` is a subset of ``N`` and
97
+ ``E'`` is the subset of edges in ``E`` relating nodes in ``N'``.
98
+
99
+ If ``G' = (N', E')`` is an edge-induced subgraph, then:
100
+ ``N'`` is the subset of nodes in ``N`` related by edges in ``E'`` and
101
+ ``E'`` is a subset of ``E``.
102
+
103
+ If ``G' = (N', E')`` is a monomorphism, then:
104
+ ``N'`` is a subset of ``N`` and
105
+ ``E'`` is a subset of the set of edges in ``E`` relating nodes in ``N'``.
106
+
107
+ Note that if ``G'`` is a node-induced subgraph of ``G``, then it is always a
108
+ subgraph monomorphism of ``G``, but the opposite is not always true, as a
109
+ monomorphism can have fewer edges.
110
+
111
+ References
112
+ ----------
113
+ [1] Luigi P. Cordella, Pasquale Foggia, Carlo Sansone, Mario Vento,
114
+ "A (Sub)Graph Isomorphism Algorithm for Matching Large Graphs",
115
+ IEEE Transactions on Pattern Analysis and Machine Intelligence,
116
+ vol. 26, no. 10, pp. 1367-1372, Oct., 2004.
117
+ http://ieeexplore.ieee.org/iel5/34/29305/01323804.pdf
118
+
119
+ [2] L. P. Cordella, P. Foggia, C. Sansone, M. Vento, "An Improved
120
+ Algorithm for Matching Large Graphs", 3rd IAPR-TC15 Workshop
121
+ on Graph-based Representations in Pattern Recognition, Cuen,
122
+ pp. 149-159, 2001.
123
+ https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs
124
+
125
+ See Also
126
+ --------
127
+ :meth:`semantic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.semantic_feasibility>`
128
+ :meth:`syntactic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.syntactic_feasibility>`
129
+
130
+ Notes
131
+ -----
132
+
133
+ The implementation handles both directed and undirected graphs as well
134
+ as multigraphs.
135
+
136
+ In general, the subgraph isomorphism problem is NP-complete whereas the
137
+ graph isomorphism problem is most likely not NP-complete (although no
138
+ polynomial-time algorithm is known to exist).
139
+
140
+ """
141
+
142
+ # This work was originally coded by Christopher Ellison
143
+ # as part of the Computational Mechanics Python (CMPy) project.
144
+ # James P. Crutchfield, principal investigator.
145
+ # Complexity Sciences Center and Physics Department, UC Davis.
146
+
147
+ import sys
148
+
149
+ __all__ = ["GraphMatcher", "DiGraphMatcher"]
150
+
151
+
152
class GraphMatcher:
    """Implementation of VF2 algorithm for matching undirected graphs.

    Suitable for Graph and MultiGraph instances.
    """

    def __init__(self, G1, G2):
        """Initialize GraphMatcher.

        Parameters
        ----------
        G1,G2: NetworkX Graph or MultiGraph instances.
           The two graphs to check for isomorphism or monomorphism.

        Examples
        --------
        To create a GraphMatcher which checks for syntactic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> G1 = nx.path_graph(4)
        >>> G2 = nx.path_graph(4)
        >>> GM = isomorphism.GraphMatcher(G1, G2)
        """
        self.G1 = G1
        self.G2 = G2
        self.G1_nodes = set(G1.nodes())
        self.G2_nodes = set(G2.nodes())
        # Fixed iteration order of G2's nodes; used as a deterministic
        # tie-breaker when choosing the "minimum" candidate node.
        self.G2_node_order = {n: i for i, n in enumerate(G2)}

        # Set recursion limit.  match() recurses once per mapped node, so
        # the search can go len(G2) levels deep.
        self.old_recursion_limit = sys.getrecursionlimit()
        expected_max_recursion_level = len(self.G2)
        if self.old_recursion_limit < 1.5 * expected_max_recursion_level:
            # Give some breathing room.
            sys.setrecursionlimit(int(1.5 * expected_max_recursion_level))

        # Declare that we will be searching for a graph-graph isomorphism.
        self.test = "graph"

        # Initialize state
        self.initialize()

    def reset_recursion_limit(self):
        """Restores the recursion limit."""
        # TODO:
        # Currently, we use recursion and set the recursion level higher.
        # It would be nice to restore the level, but because the
        # (Di)GraphMatcher classes make use of cyclic references, garbage
        # collection will never happen when we define __del__() to
        # restore the recursion level. The result is a memory leak.
        # So for now, we do not automatically restore the recursion level,
        # and instead provide a method to do this manually. Eventually,
        # we should turn this into a non-recursive implementation.
        sys.setrecursionlimit(self.old_recursion_limit)

    def candidate_pairs_iter(self):
        """Iterator over candidate pairs of nodes in G1 and G2."""

        # All computations are done using the current state!

        G1_nodes = self.G1_nodes
        G2_nodes = self.G2_nodes
        # min_key orders G2 nodes by their fixed enumeration index so the
        # choice of "minimum" node is deterministic across runs.
        min_key = self.G2_node_order.__getitem__

        # First we compute the inout-terminal sets: nodes adjacent to the
        # partial mapping but not yet mapped themselves.
        T1_inout = [node for node in self.inout_1 if node not in self.core_1]
        T2_inout = [node for node in self.inout_2 if node not in self.core_2]

        # If T1_inout and T2_inout are both nonempty.
        # P(s) = T1_inout x {min T2_inout}
        if T1_inout and T2_inout:
            node_2 = min(T2_inout, key=min_key)
            for node_1 in T1_inout:
                yield node_1, node_2

        else:
            # If T1_inout and T2_inout were both empty....
            # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
            # if not (T1_inout or T2_inout):  # as suggested by [2], incorrect
            if 1:  # as inferred from [1], correct
                # First we determine the candidate node for G2
                other_node = min(G2_nodes - set(self.core_2), key=min_key)
                for node in self.G1:
                    if node not in self.core_1:
                        yield node, other_node

        # For all other cases, we don't have any candidate pairs.

    def initialize(self):
        """Reinitializes the state of the algorithm.

        This method should be redefined if using something other than GMState.
        If only subclassing GraphMatcher, a redefinition is not necessary.

        """

        # core_1[n] contains the index of the node paired with n, which is m,
        #           provided n is in the mapping.
        # core_2[m] contains the index of the node paired with m, which is n,
        #           provided m is in the mapping.
        self.core_1 = {}
        self.core_2 = {}

        # See the paper for definitions of M_x and T_x^{y}

        # inout_1[n]  is non-zero if n is in M_1 or in T_1^{inout}
        # inout_2[m]  is non-zero if m is in M_2 or in T_2^{inout}
        #
        # The value stored is the depth of the SSR tree when the node became
        # part of the corresponding set.
        self.inout_1 = {}
        self.inout_2 = {}
        # Practically, these sets simply store the nodes in the subgraph.

        self.state = GMState(self)

        # Provide a convenient way to access the isomorphism mapping.
        self.mapping = self.core_1.copy()

    def is_isomorphic(self):
        """Returns True if G1 and G2 are isomorphic graphs."""

        # Let's do two very quick checks!
        # QUESTION: Should we call faster_graph_could_be_isomorphic(G1,G2)?
        # For now, I just copy the code.

        # Check global properties
        if self.G1.order() != self.G2.order():
            return False

        # Check local properties: degree sequences must match.
        d1 = sorted(d for n, d in self.G1.degree())
        d2 = sorted(d for n, d in self.G2.degree())
        if d1 != d2:
            return False

        # The cheap invariants passed; run the full VF2 search and report
        # whether at least one complete mapping exists.
        try:
            x = next(self.isomorphisms_iter())
            return True
        except StopIteration:
            return False

    def isomorphisms_iter(self):
        """Generator over isomorphisms between G1 and G2."""
        # Declare that we are looking for a graph-graph isomorphism.
        self.test = "graph"
        self.initialize()
        yield from self.match()

    def match(self):
        """Extends the isomorphism mapping.

        This function is called recursively to determine if a complete
        isomorphism can be found between G1 and G2.  It cleans up the class
        variables after each recursive call. If an isomorphism is found,
        we yield the mapping.

        """
        if len(self.core_1) == len(self.G2):
            # Save the final mapping, otherwise garbage collection deletes it.
            self.mapping = self.core_1.copy()
            # The mapping is complete.
            yield self.mapping
        else:
            for G1_node, G2_node in self.candidate_pairs_iter():
                if self.syntactic_feasibility(G1_node, G2_node):
                    if self.semantic_feasibility(G1_node, G2_node):
                        # Recursive call, adding the feasible state.
                        # Constructing the state object mutates the matcher's
                        # core/inout structures in place.
                        newstate = self.state.__class__(self, G1_node, G2_node)
                        yield from self.match()

                        # restore data structures
                        newstate.restore()

    def semantic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is semantically feasible.

        The semantic feasibility function should return True if it is
        acceptable to add the candidate pair (G1_node, G2_node) to the current
        partial isomorphism mapping.  The logic should focus on semantic
        information contained in the edge data or a formalized node class.

        By acceptable, we mean that the subsequent mapping can still become a
        complete isomorphism mapping.  Thus, if adding the candidate pair
        definitely makes it so that the subsequent mapping cannot become a
        complete isomorphism mapping, then this function must return False.

        The default semantic feasibility function always returns True. The
        effect is that semantics are not considered in the matching of G1
        and G2.

        The semantic checks might differ based on the what type of test is
        being performed.  A keyword description of the test is stored in
        self.test.  Here is a quick description of the currently implemented
        tests::

          test='graph'
            Indicates that the graph matcher is looking for a graph-graph
            isomorphism.

          test='subgraph'
            Indicates that the graph matcher is looking for a subgraph-graph
            isomorphism such that a subgraph of G1 is isomorphic to G2.

          test='mono'
            Indicates that the graph matcher is looking for a subgraph-graph
            monomorphism such that a subgraph of G1 is monomorphic to G2.

        Any subclass which redefines semantic_feasibility() must maintain
        the above form to keep the match() method functional. Implementations
        should consider multigraphs.
        """
        return True

    def subgraph_is_isomorphic(self):
        """Returns `True` if a subgraph of ``G1`` is isomorphic to ``G2``.

        Examples
        --------
        When creating the `GraphMatcher`, the order of the arguments is important

        >>> G = nx.Graph([("A", "B"), ("B", "C"), ("A", "C")])
        >>> H = nx.Graph([(0, 1), (1, 2), (0, 2), (1, 3), (0, 4)])

        Check whether a subgraph of G is isomorphic to H:

        >>> isomatcher = nx.isomorphism.GraphMatcher(G, H)
        >>> isomatcher.subgraph_is_isomorphic()
        False

        Check whether a subgraph of H is isomorphic to G:

        >>> isomatcher = nx.isomorphism.GraphMatcher(H, G)
        >>> isomatcher.subgraph_is_isomorphic()
        True
        """
        try:
            x = next(self.subgraph_isomorphisms_iter())
            return True
        except StopIteration:
            return False

    def subgraph_is_monomorphic(self):
        """Returns `True` if a subgraph of ``G1`` is monomorphic to ``G2``.

        Examples
        --------
        When creating the `GraphMatcher`, the order of the arguments is important.

        >>> G = nx.Graph([("A", "B"), ("B", "C")])
        >>> H = nx.Graph([(0, 1), (1, 2), (0, 2)])

        Check whether a subgraph of G is monomorphic to H:

        >>> isomatcher = nx.isomorphism.GraphMatcher(G, H)
        >>> isomatcher.subgraph_is_monomorphic()
        False

        Check whether a subgraph of H is isomorphic to G:

        >>> isomatcher = nx.isomorphism.GraphMatcher(H, G)
        >>> isomatcher.subgraph_is_monomorphic()
        True
        """
        try:
            x = next(self.subgraph_monomorphisms_iter())
            return True
        except StopIteration:
            return False

    def subgraph_isomorphisms_iter(self):
        """Generator over isomorphisms between a subgraph of ``G1`` and ``G2``.

        Examples
        --------
        When creating the `GraphMatcher`, the order of the arguments is important

        >>> G = nx.Graph([("A", "B"), ("B", "C"), ("A", "C")])
        >>> H = nx.Graph([(0, 1), (1, 2), (0, 2), (1, 3), (0, 4)])

        Yield isomorphic mappings between ``H`` and subgraphs of ``G``:

        >>> isomatcher = nx.isomorphism.GraphMatcher(G, H)
        >>> list(isomatcher.subgraph_isomorphisms_iter())
        []

        Yield isomorphic mappings between ``G`` and subgraphs of ``H``:

        >>> isomatcher = nx.isomorphism.GraphMatcher(H, G)
        >>> next(isomatcher.subgraph_isomorphisms_iter())
        {0: 'A', 1: 'B', 2: 'C'}

        """
        # Declare that we are looking for graph-subgraph isomorphism.
        self.test = "subgraph"
        self.initialize()
        yield from self.match()

    def subgraph_monomorphisms_iter(self):
        """Generator over monomorphisms between a subgraph of ``G1`` and ``G2``.

        Examples
        --------
        When creating the `GraphMatcher`, the order of the arguments is important.

        >>> G = nx.Graph([("A", "B"), ("B", "C")])
        >>> H = nx.Graph([(0, 1), (1, 2), (0, 2)])

        Yield monomorphic mappings between ``H`` and subgraphs of ``G``:

        >>> isomatcher = nx.isomorphism.GraphMatcher(G, H)
        >>> list(isomatcher.subgraph_monomorphisms_iter())
        []

        Yield monomorphic mappings between ``G`` and subgraphs of ``H``:

        >>> isomatcher = nx.isomorphism.GraphMatcher(H, G)
        >>> next(isomatcher.subgraph_monomorphisms_iter())
        {0: 'A', 1: 'B', 2: 'C'}
        """
        # Declare that we are looking for graph-subgraph monomorphism.
        self.test = "mono"
        self.initialize()
        yield from self.match()

    def syntactic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is syntactically feasible.

        This function returns True if it is adding the candidate pair
        to the current partial isomorphism/monomorphism mapping is allowable.
        The addition is allowable if the inclusion of the candidate pair does
        not make it impossible for an isomorphism/monomorphism to be found.
        """

        # The VF2 algorithm was designed to work with graphs having, at most,
        # one edge connecting any two nodes.  This is not the case when
        # dealing with MultiGraphs.
        #
        # Basically, when we test the look-ahead rules R_neighbor, we will
        # make sure that the number of edges are checked. We also add
        # a R_self check to verify that the number of selfloops is acceptable.
        #
        # Users might be comparing Graph instances with MultiGraph instances.
        # So the generic GraphMatcher class must work with MultiGraphs.
        # Care must be taken since the value in the innermost dictionary is a
        # singlet for Graph instances.  For MultiGraphs, the value in the
        # innermost dictionary is a list.

        ###
        # Test at each step to get a return value as soon as possible.
        ###

        # Look ahead 0

        # R_self

        # The number of selfloops for G1_node must equal the number of
        # self-loops for G2_node. Without this check, we would fail on
        # R_neighbor at the next recursion level. But it is good to prune the
        # search tree now.

        if self.test == "mono":
            # Monomorphism: G1 may have extra edges, so only require >=.
            if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges(
                G2_node, G2_node
            ):
                return False
        else:
            # Isomorphism: self-loop counts must match exactly.
            if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(
                G2_node, G2_node
            ):
                return False

        # R_neighbor

        # For each neighbor n' of n in the partial mapping, the corresponding
        # node m' is a neighbor of m, and vice versa. Also, the number of
        # edges must be equal.
        if self.test != "mono":
            for neighbor in self.G1[G1_node]:
                if neighbor in self.core_1:
                    if self.core_1[neighbor] not in self.G2[G2_node]:
                        return False
                    elif self.G1.number_of_edges(
                        neighbor, G1_node
                    ) != self.G2.number_of_edges(self.core_1[neighbor], G2_node):
                        return False

        for neighbor in self.G2[G2_node]:
            if neighbor in self.core_2:
                if self.core_2[neighbor] not in self.G1[G1_node]:
                    return False
                elif self.test == "mono":
                    # Monomorphism: G1 may carry more parallel edges than G2.
                    if self.G1.number_of_edges(
                        self.core_2[neighbor], G1_node
                    ) < self.G2.number_of_edges(neighbor, G2_node):
                        return False
                else:
                    if self.G1.number_of_edges(
                        self.core_2[neighbor], G1_node
                    ) != self.G2.number_of_edges(neighbor, G2_node):
                        return False

        if self.test != "mono":
            # Look ahead 1

            # R_terminout
            # The number of neighbors of n in T_1^{inout} is equal to the
            # number of neighbors of m that are in T_2^{inout}, and vice versa.
            num1 = 0
            for neighbor in self.G1[G1_node]:
                if (neighbor in self.inout_1) and (neighbor not in self.core_1):
                    num1 += 1
            num2 = 0
            for neighbor in self.G2[G2_node]:
                if (neighbor in self.inout_2) and (neighbor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # Look ahead 2

            # R_new

            # The number of neighbors of n that are neither in the core_1 nor
            # T_1^{inout} is equal to the number of neighbors of m
            # that are neither in core_2 nor T_2^{inout}.
            num1 = 0
            for neighbor in self.G1[G1_node]:
                if neighbor not in self.inout_1:
                    num1 += 1
            num2 = 0
            for neighbor in self.G2[G2_node]:
                if neighbor not in self.inout_2:
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

        # Otherwise, this node pair is syntactically feasible!
        return True
599
+
600
+
601
class DiGraphMatcher(GraphMatcher):
    """Implementation of VF2 algorithm for matching directed graphs.

    Suitable for DiGraph and MultiDiGraph instances.
    """

    def __init__(self, G1, G2):
        """Initialize DiGraphMatcher.

        G1 and G2 should be nx.DiGraph or nx.MultiDiGraph instances.

        Examples
        --------
        To create a GraphMatcher which checks for syntactic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
        >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
        >>> DiGM = isomorphism.DiGraphMatcher(G1, G2)
        """
        super().__init__(G1, G2)

    def candidate_pairs_iter(self):
        """Iterator over candidate pairs of nodes in G1 and G2."""

        # All computations are done using the current state!

        G1_nodes = self.G1_nodes
        G2_nodes = self.G2_nodes
        min_key = self.G2_node_order.__getitem__

        # First we compute the out-terminal sets.
        T1_out = [node for node in self.out_1 if node not in self.core_1]
        T2_out = [node for node in self.out_2 if node not in self.core_2]

        # If T1_out and T2_out are both nonempty.
        # P(s) = T1_out x {min T2_out}
        if T1_out and T2_out:
            node_2 = min(T2_out, key=min_key)
            for node_1 in T1_out:
                yield node_1, node_2

        # If T1_out and T2_out were both empty....
        # We compute the in-terminal sets.

        # elif not (T1_out or T2_out):   # as suggested by [2], incorrect
        else:  # as suggested by [1], correct
            T1_in = [node for node in self.in_1 if node not in self.core_1]
            T2_in = [node for node in self.in_2 if node not in self.core_2]

            # If T1_in and T2_in are both nonempty.
            # P(s) = T1_in x {min T2_in}
            if T1_in and T2_in:
                node_2 = min(T2_in, key=min_key)
                for node_1 in T1_in:
                    yield node_1, node_2

            # If all terminal sets are empty...
            # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}

            # elif not (T1_in or T2_in):   # as suggested by [2], incorrect
            else:  # as inferred from [1], correct
                node_2 = min(G2_nodes - set(self.core_2), key=min_key)
                for node_1 in G1_nodes:
                    if node_1 not in self.core_1:
                        yield node_1, node_2

        # For all other cases, we don't have any candidate pairs.

    def initialize(self):
        """Reinitializes the state of the algorithm.

        This method should be redefined if using something other than DiGMState.
        If only subclassing GraphMatcher, a redefinition is not necessary.
        """

        # core_1[n] contains the index of the node paired with n, which is m,
        # provided n is in the mapping.
        # core_2[m] contains the index of the node paired with m, which is n,
        # provided m is in the mapping.
        self.core_1 = {}
        self.core_2 = {}

        # See the paper for definitions of M_x and T_x^{y}

        # in_1[n] is non-zero if n is in M_1 or in T_1^{in}
        # out_1[n] is non-zero if n is in M_1 or in T_1^{out}
        #
        # in_2[m] is non-zero if m is in M_2 or in T_2^{in}
        # out_2[m] is non-zero if m is in M_2 or in T_2^{out}
        #
        # The value stored is the depth of the search tree when the node became
        # part of the corresponding set.
        self.in_1 = {}
        self.in_2 = {}
        self.out_1 = {}
        self.out_2 = {}

        self.state = DiGMState(self)

        # Provide a convenient way to access the isomorphism mapping.
        self.mapping = self.core_1.copy()

    def syntactic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is syntactically feasible.

        This function returns True if adding the candidate pair
        to the current partial isomorphism/monomorphism mapping is allowable.
        The addition is allowable if the inclusion of the candidate pair does
        not make it impossible for an isomorphism/monomorphism to be found.
        """

        # The VF2 algorithm was designed to work with graphs having, at most,
        # one edge connecting any two nodes. This is not the case when
        # dealing with MultiDiGraphs.
        #
        # Basically, when we test the look-ahead rules R_pred and R_succ, we
        # will make sure that the number of edges are checked. We also add
        # a R_self check to verify that the number of selfloops is acceptable.

        # Users might be comparing DiGraph instances with MultiDiGraph
        # instances. So the generic DiGraphMatcher class must work with
        # MultiDiGraphs. Care must be taken since the value in the innermost
        # dictionary is a singlet for DiGraph instances. For MultiDiGraphs,
        # the value in the innermost dictionary is a list.

        ###
        # Test at each step to get a return value as soon as possible.
        ###

        # Look ahead 0

        # R_self

        # The number of selfloops for G1_node must equal the number of
        # self-loops for G2_node. Without this check, we would fail on R_pred
        # at the next recursion level. This should prune the tree even further.
        if self.test == "mono":
            if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges(
                G2_node, G2_node
            ):
                return False
        else:
            if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(
                G2_node, G2_node
            ):
                return False

        # R_pred

        # For each predecessor n' of n in the partial mapping, the
        # corresponding node m' is a predecessor of m, and vice versa. Also,
        # the number of edges must be equal
        if self.test != "mono":
            for predecessor in self.G1.pred[G1_node]:
                if predecessor in self.core_1:
                    if self.core_1[predecessor] not in self.G2.pred[G2_node]:
                        return False
                    elif self.G1.number_of_edges(
                        predecessor, G1_node
                    ) != self.G2.number_of_edges(self.core_1[predecessor], G2_node):
                        return False

        for predecessor in self.G2.pred[G2_node]:
            if predecessor in self.core_2:
                if self.core_2[predecessor] not in self.G1.pred[G1_node]:
                    return False
                elif self.test == "mono":
                    if self.G1.number_of_edges(
                        self.core_2[predecessor], G1_node
                    ) < self.G2.number_of_edges(predecessor, G2_node):
                        return False
                else:
                    if self.G1.number_of_edges(
                        self.core_2[predecessor], G1_node
                    ) != self.G2.number_of_edges(predecessor, G2_node):
                        return False

        # R_succ

        # For each successor n' of n in the partial mapping, the corresponding
        # node m' is a successor of m, and vice versa. Also, the number of
        # edges must be equal.
        if self.test != "mono":
            for successor in self.G1[G1_node]:
                if successor in self.core_1:
                    if self.core_1[successor] not in self.G2[G2_node]:
                        return False
                    elif self.G1.number_of_edges(
                        G1_node, successor
                    ) != self.G2.number_of_edges(G2_node, self.core_1[successor]):
                        return False

        for successor in self.G2[G2_node]:
            if successor in self.core_2:
                if self.core_2[successor] not in self.G1[G1_node]:
                    return False
                elif self.test == "mono":
                    if self.G1.number_of_edges(
                        G1_node, self.core_2[successor]
                    ) < self.G2.number_of_edges(G2_node, successor):
                        return False
                else:
                    if self.G1.number_of_edges(
                        G1_node, self.core_2[successor]
                    ) != self.G2.number_of_edges(G2_node, successor):
                        return False

        if self.test != "mono":
            # Look ahead 1

            # R_termin
            # The number of predecessors of n that are in T_1^{in} is equal to the
            # number of predecessors of m that are in T_2^{in}.
            num1 = 0
            for predecessor in self.G1.pred[G1_node]:
                if (predecessor in self.in_1) and (predecessor not in self.core_1):
                    num1 += 1
            num2 = 0
            for predecessor in self.G2.pred[G2_node]:
                if (predecessor in self.in_2) and (predecessor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # The number of successors of n that are in T_1^{in} is equal to the
            # number of successors of m that are in T_2^{in}.
            num1 = 0
            for successor in self.G1[G1_node]:
                if (successor in self.in_1) and (successor not in self.core_1):
                    num1 += 1
            num2 = 0
            for successor in self.G2[G2_node]:
                if (successor in self.in_2) and (successor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # R_termout

            # The number of predecessors of n that are in T_1^{out} is equal to the
            # number of predecessors of m that are in T_2^{out}.
            num1 = 0
            for predecessor in self.G1.pred[G1_node]:
                if (predecessor in self.out_1) and (predecessor not in self.core_1):
                    num1 += 1
            num2 = 0
            for predecessor in self.G2.pred[G2_node]:
                if (predecessor in self.out_2) and (predecessor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # The number of successors of n that are in T_1^{out} is equal to the
            # number of successors of m that are in T_2^{out}.
            num1 = 0
            for successor in self.G1[G1_node]:
                if (successor in self.out_1) and (successor not in self.core_1):
                    num1 += 1
            num2 = 0
            for successor in self.G2[G2_node]:
                if (successor in self.out_2) and (successor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # Look ahead 2

            # R_new

            # The number of predecessors of n that are neither in the core_1 nor
            # T_1^{in} nor T_1^{out} is equal to the number of predecessors of m
            # that are neither in core_2 nor T_2^{in} nor T_2^{out}.
            num1 = 0
            for predecessor in self.G1.pred[G1_node]:
                if (predecessor not in self.in_1) and (predecessor not in self.out_1):
                    num1 += 1
            num2 = 0
            for predecessor in self.G2.pred[G2_node]:
                if (predecessor not in self.in_2) and (predecessor not in self.out_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # The number of successors of n that are neither in the core_1 nor
            # T_1^{in} nor T_1^{out} is equal to the number of successors of m
            # that are neither in core_2 nor T_2^{in} nor T_2^{out}.
            num1 = 0
            for successor in self.G1[G1_node]:
                if (successor not in self.in_1) and (successor not in self.out_1):
                    num1 += 1
            num2 = 0
            for successor in self.G2[G2_node]:
                if (successor not in self.in_2) and (successor not in self.out_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

        # Otherwise, this node pair is syntactically feasible!
        return True

    def subgraph_is_isomorphic(self):
        """Returns `True` if a subgraph of ``G1`` is isomorphic to ``G2``.

        Examples
        --------
        When creating the `DiGraphMatcher`, the order of the arguments is important

        >>> G = nx.DiGraph([("A", "B"), ("B", "A"), ("B", "C"), ("C", "B")])
        >>> H = nx.DiGraph(nx.path_graph(5))

        Check whether a subgraph of G is isomorphic to H:

        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
        >>> isomatcher.subgraph_is_isomorphic()
        False

        Check whether a subgraph of H is isomorphic to G:

        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
        >>> isomatcher.subgraph_is_isomorphic()
        True
        """
        return super().subgraph_is_isomorphic()

    def subgraph_is_monomorphic(self):
        """Returns `True` if a subgraph of ``G1`` is monomorphic to ``G2``.

        Examples
        --------
        When creating the `DiGraphMatcher`, the order of the arguments is important.

        >>> G = nx.DiGraph([("A", "B"), ("C", "B"), ("D", "C")])
        >>> H = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 2)])

        Check whether a subgraph of G is monomorphic to H:

        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
        >>> isomatcher.subgraph_is_monomorphic()
        False

        Check whether a subgraph of H is monomorphic to G:

        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
        >>> isomatcher.subgraph_is_monomorphic()
        True
        """
        return super().subgraph_is_monomorphic()

    def subgraph_isomorphisms_iter(self):
        """Generator over isomorphisms between a subgraph of ``G1`` and ``G2``.

        Examples
        --------
        When creating the `DiGraphMatcher`, the order of the arguments is important

        >>> G = nx.DiGraph([("B", "C"), ("C", "B"), ("C", "D"), ("D", "C")])
        >>> H = nx.DiGraph(nx.path_graph(5))

        Yield isomorphic mappings between ``H`` and subgraphs of ``G``:

        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
        >>> list(isomatcher.subgraph_isomorphisms_iter())
        []

        Yield isomorphic mappings between ``G`` and subgraphs of ``H``:

        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
        >>> next(isomatcher.subgraph_isomorphisms_iter())
        {0: 'B', 1: 'C', 2: 'D'}
        """
        return super().subgraph_isomorphisms_iter()

    def subgraph_monomorphisms_iter(self):
        """Generator over monomorphisms between a subgraph of ``G1`` and ``G2``.

        Examples
        --------
        When creating the `DiGraphMatcher`, the order of the arguments is important.

        >>> G = nx.DiGraph([("A", "B"), ("C", "B"), ("D", "C")])
        >>> H = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 2)])

        Yield monomorphic mappings between ``H`` and subgraphs of ``G``:

        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
        >>> list(isomatcher.subgraph_monomorphisms_iter())
        []

        Yield monomorphic mappings between ``G`` and subgraphs of ``H``:

        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
        >>> next(isomatcher.subgraph_monomorphisms_iter())
        {3: 'A', 2: 'B', 1: 'C', 0: 'D'}
        """
        return super().subgraph_monomorphisms_iter()
1021
+
1022
+
1023
class GMState:
    """Internal bookkeeping for one level of the VF2 search in GraphMatcher.

    Each instance records the node pair added at one depth of the
    depth-first search, plus the depth at which nodes entered the shared
    ``inout`` vectors, so that ``restore`` can undo exactly this step.
    At most G2.order() instances exist at a time, mirroring the search
    depth of the VF2 algorithm.
    """

    def __init__(self, GM, G1_node=None, G2_node=None):
        """Apply (and remember) the addition of a candidate pair.

        Pass the owning GraphMatcher and the new node pair to extend the
        current mapping; omit the pair to reset the matcher's state.
        """
        self.GM = GM

        # Last stored node pair; stays None unless a pair is added below.
        self.G1_node = None
        self.G2_node = None
        self.depth = len(GM.core_1)

        if G1_node is None or G2_node is None:
            # No pair supplied: reset the matcher's shared state vectors.
            GM.core_1 = {}
            GM.core_2 = {}
            GM.inout_1 = {}
            GM.inout_2 = {}

        # Explicit None tests: node 0 is a perfectly valid node.
        if G1_node is not None and G2_node is not None:
            # Extend the isomorphism mapping with the new pair.
            GM.core_1[G1_node] = G2_node
            GM.core_2[G2_node] = G1_node
            self.G1_node = G1_node
            self.G2_node = G2_node

            # Depth after this pair has been added.
            self.depth = len(GM.core_1)

            # The newly mapped nodes enter the inout vectors first
            # (setdefault preserves an earlier, shallower depth tag)...
            GM.inout_1.setdefault(G1_node, self.depth)
            GM.inout_2.setdefault(G2_node, self.depth)

            # ...then every unmapped neighbor of a mapped node, tagged with
            # the current depth so restore() can peel them off later.
            frontier = {
                nbr
                for mapped in GM.core_1
                for nbr in GM.G1[mapped]
                if nbr not in GM.core_1
            }
            for nbr in frontier:
                GM.inout_1.setdefault(nbr, self.depth)

            frontier = {
                nbr
                for mapped in GM.core_2
                for nbr in GM.G2[mapped]
                if nbr not in GM.core_2
            }
            for nbr in frontier:
                GM.inout_2.setdefault(nbr, self.depth)

    def restore(self):
        """Undo every change this state applied to the matcher."""
        # Remove the stored pair from the core mappings (explicit None
        # tests again, since node 0 is valid).
        if self.G1_node is not None and self.G2_node is not None:
            del self.GM.core_1[self.G1_node]
            del self.GM.core_2[self.G2_node]

        # Drop every inout entry that was recorded at this depth.
        for vector in (self.GM.inout_1, self.GM.inout_2):
            stale = [node for node, tag in vector.items() if tag == self.depth]
            for node in stale:
                del vector[node]
1109
+
1110
+
1111
class DiGMState:
    """Internal bookkeeping for one level of the VF2 search in DiGraphMatcher.

    Each instance records the node pair added at one depth of the
    depth-first search, plus the depth at which nodes entered the shared
    ``in``/``out`` vectors, so that ``restore`` can undo exactly this
    step. At most G2.order() instances exist at a time, mirroring the
    search depth of the VF2 algorithm.
    """

    def __init__(self, GM, G1_node=None, G2_node=None):
        """Apply (and remember) the addition of a candidate pair.

        Pass the owning DiGraphMatcher and the new node pair to extend the
        current mapping; omit the pair to reset the matcher's state.
        """
        self.GM = GM

        # Last stored node pair; stays None unless a pair is added below.
        self.G1_node = None
        self.G2_node = None
        self.depth = len(GM.core_1)

        if G1_node is None or G2_node is None:
            # No pair supplied: reset the matcher's shared state vectors.
            GM.core_1 = {}
            GM.core_2 = {}
            GM.in_1 = {}
            GM.in_2 = {}
            GM.out_1 = {}
            GM.out_2 = {}

        # Explicit None tests: node 0 is a perfectly valid node.
        if G1_node is not None and G2_node is not None:
            # Extend the isomorphism mapping with the new pair.
            GM.core_1[G1_node] = G2_node
            GM.core_2[G2_node] = G1_node
            self.G1_node = G1_node
            self.G2_node = G2_node

            # Depth after this pair has been added.
            self.depth = len(GM.core_1)

            # The newly mapped nodes enter their in/out vectors first
            # (setdefault preserves an earlier, shallower depth tag)...
            for vector in (GM.in_1, GM.out_1):
                vector.setdefault(G1_node, self.depth)
            for vector in (GM.in_2, GM.out_2):
                vector.setdefault(G2_node, self.depth)

            # ...then every unmapped predecessor/successor of a mapped
            # node, tagged with the current depth so restore() can peel
            # them off later. Order matches T_1^{in}, T_2^{in},
            # T_1^{out}, T_2^{out}.
            for adjacent, core, vector in (
                (GM.G1.predecessors, GM.core_1, GM.in_1),
                (GM.G2.predecessors, GM.core_2, GM.in_2),
                (GM.G1.successors, GM.core_1, GM.out_1),
                (GM.G2.successors, GM.core_2, GM.out_2),
            ):
                frontier = {
                    other
                    for mapped in core
                    for other in adjacent(mapped)
                    if other not in core
                }
                for other in frontier:
                    vector.setdefault(other, self.depth)

    def restore(self):
        """Undo every change this state applied to the matcher."""
        # Remove the stored pair from the core mappings (explicit None
        # tests again, since node 0 is valid).
        if self.G1_node is not None and self.G2_node is not None:
            del self.GM.core_1[self.G1_node]
            del self.GM.core_2[self.G2_node]

        # Drop every in/out entry that was recorded at this depth.
        for vector in (self.GM.in_1, self.GM.in_2, self.GM.out_1, self.GM.out_2):
            stale = [node for node, tag in vector.items() if tag == self.depth]
            for node in stale:
                del vector[node]
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/matchhelpers.py ADDED
@@ -0,0 +1,352 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions which help end users define customize node_match and
2
+ edge_match functions to use during isomorphism checks.
3
+ """
4
+
5
+ import math
6
+ import types
7
+ from itertools import permutations
8
+
9
+ __all__ = [
10
+ "categorical_node_match",
11
+ "categorical_edge_match",
12
+ "categorical_multiedge_match",
13
+ "numerical_node_match",
14
+ "numerical_edge_match",
15
+ "numerical_multiedge_match",
16
+ "generic_node_match",
17
+ "generic_edge_match",
18
+ "generic_multiedge_match",
19
+ ]
20
+
21
+
22
def copyfunc(f, name=None):
    """Return a copy of function *f*, optionally renamed to *name*.

    The copy shares f's code, globals, defaults and closure, so it behaves
    exactly like the original; it exists so module-level aliases (e.g.
    ``categorical_edge_match``) can carry their own ``__name__`` and
    ``__doc__`` without mutating the source function.
    """
    g = types.FunctionType(
        f.__code__, f.__globals__, name or f.__name__, f.__defaults__, f.__closure__
    )
    # types.FunctionType() does not carry these over; without them a copy
    # of a function with keyword-only defaults raises TypeError when those
    # arguments are omitted, and any function attributes would be lost.
    g.__kwdefaults__ = f.__kwdefaults__
    g.__dict__.update(f.__dict__)
    return g
27
+
28
+
29
def allclose(x, y, rtol=1.0000000000000001e-05, atol=1e-08):
    """Returns True if x and y are sufficiently close, elementwise.

    Iterables are compared pairwise (extra trailing elements in the
    longer one are ignored, as with ``zip``). Weights are assumed to be
    finite; see numpy.allclose() for reference.

    Parameters
    ----------
    rtol : float
        The relative error tolerance.
    atol : float
        The absolute error tolerance.

    """
    for xi, yi in zip(x, y):
        if not math.isclose(xi, yi, rel_tol=rtol, abs_tol=atol):
            return False
    return True
42
+
43
+
44
# Shared docstring template for the categorical_* factories below; the
# "node" wording is rewritten to "edge" for the edge/multiedge variants
# via str.replace() further down.
categorical_doc = """
Returns a comparison function for a categorical node attribute.

The value(s) of the attr(s) must be hashable and comparable via the ==
operator since they are placed into a set([]) object. If the sets from
G1 and G2 are the same, then the constructed function returns True.

Parameters
----------
attr : string | list
    The categorical node attribute to compare, or a list of categorical
    node attributes to compare.
default : value | list
    The default value for the categorical node attribute, or a list of
    default values for the categorical node attributes.

Returns
-------
match : function
    The customized, categorical `node_match` function.

Examples
--------
>>> import networkx.algorithms.isomorphism as iso
>>> nm = iso.categorical_node_match("size", 1)
>>> nm = iso.categorical_node_match(["color", "size"], ["red", 2])

"""
72
+
73
+
74
def categorical_node_match(attr, default):
    # Normalize the single-attribute form to a list of (name, default)
    # pairs so one closure covers both call signatures.
    if isinstance(attr, str):
        pairs = [(attr, default)]
    else:
        pairs = list(zip(attr, default))

    def match(data1, data2):
        # Attributes are equal when every named value (or its default)
        # agrees between the two data dicts.
        return all(data1.get(name, d) == data2.get(name, d) for name, d in pairs)

    return match
87
+
88
+
89
# Edge matching is identical to node matching for categorical attributes;
# the function is copied so it can carry its own __name__ and __doc__
# (assigned below) without mutating the node version.
categorical_edge_match = copyfunc(categorical_node_match, "categorical_edge_match")
90
+
91
+
92
def categorical_multiedge_match(attr, default):
    # Build a per-edge signature function, then compare the multisets of
    # signatures as sets (duplicate signatures collapse, as before).
    if isinstance(attr, str):

        def signature(data):
            return data.get(attr, default)

    else:
        pairs = list(zip(attr, default))

        def signature(data):
            return tuple(data.get(name, d) for name, d in pairs)

    def match(datasets1, datasets2):
        sigs1 = {signature(data) for data in datasets1.values()}
        sigs2 = {signature(data) for data in datasets2.values()}
        return sigs1 == sigs2

    return match
115
+
116
+
117
# Docstrings for categorical functions.
categorical_node_match.__doc__ = categorical_doc
categorical_edge_match.__doc__ = categorical_doc.replace("node", "edge")
# The multiedge docstring also needs the function name in the examples
# rewritten, hence the second replace().
tmpdoc = categorical_doc.replace("node", "edge")
tmpdoc = tmpdoc.replace("categorical_edge_match", "categorical_multiedge_match")
categorical_multiedge_match.__doc__ = tmpdoc
123
+
124
+
125
# Shared docstring template for the numerical_* factories below; the
# "node" wording is rewritten to "edge" for the edge/multiedge variants
# via str.replace() further down.
numerical_doc = """
Returns a comparison function for a numerical node attribute.

The value(s) of the attr(s) must be numerical and sortable. If the
sorted list of values from G1 and G2 are the same within some
tolerance, then the constructed function returns True.

Parameters
----------
attr : string | list
    The numerical node attribute to compare, or a list of numerical
    node attributes to compare.
default : value | list
    The default value for the numerical node attribute, or a list of
    default values for the numerical node attributes.
rtol : float
    The relative error tolerance.
atol : float
    The absolute error tolerance.

Returns
-------
match : function
    The customized, numerical `node_match` function.

Examples
--------
>>> import networkx.algorithms.isomorphism as iso
>>> nm = iso.numerical_node_match("weight", 1.0)
>>> nm = iso.numerical_node_match(["weight", "linewidth"], [0.25, 0.5])

"""
157
+
158
+
159
def numerical_node_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08):
    if isinstance(attr, str):

        def match(data1, data2):
            # Single attribute: compare directly with the tolerances.
            v1 = data1.get(attr, default)
            v2 = data2.get(attr, default)
            return math.isclose(v1, v2, rel_tol=rtol, abs_tol=atol)

    else:
        pairs = list(zip(attr, default))

        def match(data1, data2):
            # Multiple attributes: every value pair must be close.
            values1 = [data1.get(name, d) for name, d in pairs]
            values2 = [data2.get(name, d) for name, d in pairs]
            return all(
                math.isclose(v1, v2, rel_tol=rtol, abs_tol=atol)
                for v1, v2 in zip(values1, values2)
            )

    return match
179
+
180
+
181
# Edge matching is identical to node matching for numerical attributes;
# the function is copied so it can carry its own __name__ and __doc__
# (assigned below) without mutating the node version.
numerical_edge_match = copyfunc(numerical_node_match, "numerical_edge_match")
182
+
183
+
184
def numerical_multiedge_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08):
    def close(xs, ys):
        # Pairwise tolerance comparison; extra trailing elements in the
        # longer sequence are ignored, exactly as zip() truncates.
        return all(
            math.isclose(x, y, rel_tol=rtol, abs_tol=atol) for x, y in zip(xs, ys)
        )

    if isinstance(attr, str):

        def match(datasets1, datasets2):
            # Compare the sorted multisets of single-attribute values.
            values1 = sorted(data.get(attr, default) for data in datasets1.values())
            values2 = sorted(data.get(attr, default) for data in datasets2.values())
            return close(values1, values2)

    else:
        pairs = list(zip(attr, default))

        def match(datasets1, datasets2):
            # Compare the sorted multisets of per-edge attribute tuples,
            # component by component.
            values1 = sorted(
                tuple(data.get(name, d) for name, d in pairs)
                for data in datasets1.values()
            )
            values2 = sorted(
                tuple(data.get(name, d) for name, d in pairs)
                for data in datasets2.values()
            )
            return all(close(t1, t2) for t1, t2 in zip(values1, values2))

    return match
213
+
214
+
215
# Docstrings for numerical functions.
numerical_node_match.__doc__ = numerical_doc
numerical_edge_match.__doc__ = numerical_doc.replace("node", "edge")
# The multiedge docstring also needs the function name in the examples
# rewritten, hence the second replace().
tmpdoc = numerical_doc.replace("node", "edge")
tmpdoc = tmpdoc.replace("numerical_edge_match", "numerical_multiedge_match")
numerical_multiedge_match.__doc__ = tmpdoc
221
+
222
+
223
# Shared docstring template for the generic_* factories below; the
# "node" wording is rewritten to "edge" for the edge variant via
# str.replace() further down (generic_multiedge_match has its own).
generic_doc = """
Returns a comparison function for a generic attribute.

The value(s) of the attr(s) are compared using the specified
operators. If all the attributes are equal, then the constructed
function returns True.

Parameters
----------
attr : string | list
    The node attribute to compare, or a list of node attributes
    to compare.
default : value | list
    The default value for the node attribute, or a list of
    default values for the node attributes.
op : callable | list
    The operator to use when comparing attribute values, or a list
    of operators to use when comparing values for each attribute.

Returns
-------
match : function
    The customized, generic `node_match` function.

Examples
--------
>>> from operator import eq
>>> from math import isclose
>>> from networkx.algorithms.isomorphism import generic_node_match
>>> nm = generic_node_match("weight", 1.0, isclose)
>>> nm = generic_node_match("color", "red", eq)
>>> nm = generic_node_match(["weight", "color"], [1.0, "red"], [isclose, eq])

"""
257
+
258
+
259
def generic_node_match(attr, default, op):
    if isinstance(attr, str):

        def match(data1, data2):
            # Single attribute: apply the operator to the two values
            # (or their defaults) and return its result.
            return op(data1.get(attr, default), data2.get(attr, default))

    else:
        triples = list(zip(attr, default, op))

        def match(data1, data2):
            # Multiple attributes: every per-attribute operator must
            # report a truthy comparison.
            return all(
                compare(data1.get(name, d), data2.get(name, d))
                for name, d, compare in triples
            )

    return match
276
+
277
+
278
# Edge matching is identical to node matching for generic attributes;
# the function is copied so it can carry its own __name__ and __doc__
# (assigned below) without mutating the node version.
generic_edge_match = copyfunc(generic_node_match, "generic_edge_match")
279
+
280
+
281
def generic_multiedge_match(attr, default, op):
    """Returns a comparison function for a generic attribute.

    The value(s) of the attr(s) are compared using the specified
    operators. If all the attributes are equal, then the constructed
    function returns True. Potentially, the constructed edge_match
    function can be slow since it must verify that no isomorphism
    exists between the multiedges before it returns False.

    Parameters
    ----------
    attr : string | list
        The edge attribute to compare, or a list of edge attributes
        to compare.
    default : value | list
        The default value for the edge attribute, or a list of
        default values for the edge attributes.
    op : callable | list
        The operator to use when comparing attribute values, or a list
        of operators to use when comparing values for each attribute.

    Returns
    -------
    match : function
        The customized, generic `edge_match` function.

    Examples
    --------
    >>> from operator import eq
    >>> from math import isclose
    >>> from networkx.algorithms.isomorphism import generic_node_match
    >>> nm = generic_node_match("weight", 1.0, isclose)
    >>> nm = generic_node_match("color", "red", eq)
    >>> nm = generic_node_match(["weight", "color"], [1.0, "red"], [isclose, eq])

    """
    # Generic and slow by design: every possible pairing between the two
    # edge sets must be tried before concluding there is no match.
    if isinstance(attr, str):
        attr, default, op = [attr], [default], [op]
    pairs = list(zip(attr, default))

    def match(datasets1, datasets2):
        # Reduce each edge's data dict to a tuple of attribute values.
        sigs1 = [
            tuple(data.get(name, d) for name, d in pairs)
            for data in datasets1.values()
        ]
        sigs2 = [
            tuple(data.get(name, d) for name, d in pairs)
            for data in datasets2.values()
        ]
        # Try every ordering of the second edge set against the first;
        # one fully-agreeing pairing is enough.
        for candidate in permutations(sigs2):
            if all(
                all(compare(x, y) for x, y, compare in zip(t1, t2, op))
                for t1, t2 in zip(sigs1, candidate)
            ):
                return True
        return False

    return match
348
+
349
+
350
# Docstrings for generic functions.
generic_node_match.__doc__ = generic_doc
generic_edge_match.__doc__ = generic_doc.replace("node", "edge")
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py ADDED
@@ -0,0 +1,308 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ *****************************
3
+ Time-respecting VF2 Algorithm
4
+ *****************************
5
+
6
+ An extension of the VF2 algorithm for time-respecting graph isomorphism
7
+ testing in temporal graphs.
8
+
9
+ A temporal graph is one in which edges contain a datetime attribute,
10
+ denoting when interaction occurred between the incident nodes. A
11
+ time-respecting subgraph of a temporal graph is a subgraph such that
12
+ all interactions incident to a node occurred within a time threshold,
13
+ delta, of each other. A directed time-respecting subgraph has the
14
+ added constraint that incoming interactions to a node must precede
15
+ outgoing interactions from the same node - this enforces a sense of
16
+ directed flow.
17
+
18
+ Introduction
19
+ ------------
20
+
21
+ The TimeRespectingGraphMatcher and TimeRespectingDiGraphMatcher
22
+ extend the GraphMatcher and DiGraphMatcher classes, respectively,
23
+ to include temporal constraints on matches. This is achieved through
24
+ a semantic check, via the semantic_feasibility() function.
25
+
26
+ As well as including G1 (the graph in which to seek embeddings) and
27
+ G2 (the subgraph structure of interest), the name of the temporal
28
+ attribute on the edges and the time threshold, delta, must be supplied
29
+ as arguments to the matching constructors.
30
+
31
+ A delta of zero is the strictest temporal constraint on the match -
32
+ only embeddings in which all interactions occur at the same time will
33
+ be returned. A delta of one day will allow embeddings in which
34
+ adjacent interactions occur up to a day apart.
35
+
36
+ Examples
37
+ --------
38
+
39
+ Examples will be provided when the datetime type has been incorporated.
40
+
41
+
42
+ Temporal Subgraph Isomorphism
43
+ -----------------------------
44
+
45
+ A brief discussion of the somewhat diverse current literature will be
46
+ included here.
47
+
48
+ References
49
+ ----------
50
+
51
+ [1] Redmond, U. and Cunningham, P. Temporal subgraph isomorphism. In:
52
+ The 2013 IEEE/ACM International Conference on Advances in Social
53
+ Networks Analysis and Mining (ASONAM). Niagara Falls, Canada; 2013:
54
+ pages 1451 - 1452. [65]
55
+
56
+ For a discussion of the literature on temporal networks:
57
+
58
+ [3] P. Holme and J. Saramaki. Temporal networks. Physics Reports,
59
+ 519(3):97–125, 2012.
60
+
61
+ Notes
62
+ -----
63
+
64
+ Handles directed and undirected graphs and graphs with parallel edges.
65
+
66
+ """
67
+
68
+ import networkx as nx
69
+
70
+ from .isomorphvf2 import DiGraphMatcher, GraphMatcher
71
+
72
+ __all__ = ["TimeRespectingGraphMatcher", "TimeRespectingDiGraphMatcher"]
73
+
74
+
75
class TimeRespectingGraphMatcher(GraphMatcher):
    def __init__(self, G1, G2, temporal_attribute_name, delta):
        """Initialize TimeRespectingGraphMatcher.

        G1 and G2 should be nx.Graph or nx.MultiGraph instances.

        Examples
        --------
        To create a TimeRespectingGraphMatcher which checks for
        syntactic and semantic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> from datetime import timedelta
        >>> G1 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))

        >>> G2 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))

        >>> GM = isomorphism.TimeRespectingGraphMatcher(
        ...     G1, G2, "date", timedelta(days=1)
        ... )
        """
        self.temporal_attribute_name = temporal_attribute_name
        self.delta = delta
        super().__init__(G1, G2)

    def one_hop(self, Gx, Gx_node, neighbors):
        """
        Edges one hop out from a node in the mapping should be
        time-respecting with respect to each other.

        Raises
        ------
        ValueError
            If any incident edge lacks a value for the temporal attribute.
        """
        dates = []
        # BUGFIX: the original branched on ``isinstance(Gx, nx.Graph)``, but
        # nx.MultiGraph subclasses nx.Graph, so the multigraph branch was
        # unreachable and MultiGraph inputs raised KeyError when the attribute
        # name was looked up in the key -> data dict.  Branch on
        # ``Gx.is_multigraph()`` instead.
        if Gx.is_multigraph():
            # MultiGraph G[u][v] maps edge key -> data dictionary, so every
            # parallel edge between the node pair must be inspected.
            for n in neighbors:
                for edge in Gx[Gx_node][n].values():
                    dates.append(edge[self.temporal_attribute_name])
        else:
            # Graph G[u][v] returns the data dictionary directly.
            for n in neighbors:
                dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
        if any(x is None for x in dates):
            raise ValueError("Datetime not supplied for at least one edge.")
        # All incident interaction times must fall inside one delta window.
        return not dates or max(dates) - min(dates) <= self.delta

    def two_hop(self, Gx, core_x, Gx_node, neighbors):
        """
        Paths of length 2 from Gx_node should be time-respecting.
        """
        return all(
            self.one_hop(Gx, v, [n for n in Gx[v] if n in core_x] + [Gx_node])
            for v in neighbors
        )

    def semantic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is semantically
        feasible.

        Any subclass which redefines semantic_feasibility() must
        maintain the self.tests if needed, to keep the match() method
        functional. Implementations should consider multigraphs.
        """
        # Only neighbors that are already part of the partial mapping matter.
        neighbors = [n for n in self.G1[G1_node] if n in self.core_1]
        if not self.one_hop(self.G1, G1_node, neighbors):  # Fail fast on first node.
            return False
        if not self.two_hop(self.G1, self.core_1, G1_node, neighbors):
            return False
        # Otherwise, this node is semantically feasible!
        return True
142
+
143
+
144
class TimeRespectingDiGraphMatcher(DiGraphMatcher):
    def __init__(self, G1, G2, temporal_attribute_name, delta):
        """Initialize TimeRespectingDiGraphMatcher.

        G1 and G2 should be nx.DiGraph or nx.MultiDiGraph instances.

        Examples
        --------
        To create a TimeRespectingDiGraphMatcher which checks for
        syntactic and semantic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> from datetime import timedelta
        >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))

        >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))

        >>> GM = isomorphism.TimeRespectingDiGraphMatcher(
        ...     G1, G2, "date", timedelta(days=1)
        ... )
        """
        self.temporal_attribute_name = temporal_attribute_name
        self.delta = delta
        super().__init__(G1, G2)

    def get_pred_dates(self, Gx, Gx_node, core_x, pred):
        """
        Get the dates of edges from predecessors.

        BUGFIX: the original branched on ``isinstance(Gx, nx.DiGraph)``, but
        nx.MultiDiGraph subclasses nx.DiGraph, so the multigraph branch was
        unreachable and MultiDiGraph inputs raised KeyError.  Branch on
        ``Gx.is_multigraph()`` instead.
        """
        pred_dates = []
        if Gx.is_multigraph():
            # MultiDiGraph G[u][v] maps edge key -> data dict; collect every
            # parallel edge between the node pair.
            for n in pred:
                for edge in Gx[n][Gx_node].values():
                    pred_dates.append(edge[self.temporal_attribute_name])
        else:
            # DiGraph G[u][v] returns the data dictionary directly.
            for n in pred:
                pred_dates.append(Gx[n][Gx_node][self.temporal_attribute_name])
        return pred_dates

    def get_succ_dates(self, Gx, Gx_node, core_x, succ):
        """
        Get the dates of edges to successors.

        Mirrors :meth:`get_pred_dates` with the edge direction reversed.
        """
        succ_dates = []
        if Gx.is_multigraph():
            for n in succ:
                for edge in Gx[Gx_node][n].values():
                    succ_dates.append(edge[self.temporal_attribute_name])
        else:
            for n in succ:
                succ_dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
        return succ_dates

    def one_hop(self, Gx, Gx_node, core_x, pred, succ):
        """
        Check the temporal constraints around the ego node.
        """
        pred_dates = self.get_pred_dates(Gx, Gx_node, core_x, pred)
        succ_dates = self.get_succ_dates(Gx, Gx_node, core_x, succ)
        return self.test_one(pred_dates, succ_dates) and self.test_two(
            pred_dates, succ_dates
        )

    def two_hop_pred(self, Gx, Gx_node, core_x, pred):
        """
        Check the constraints for each predecessor of the ego node.
        """
        return all(
            self.one_hop(
                Gx,
                p,
                core_x,
                self.preds(Gx, core_x, p),
                self.succs(Gx, core_x, p, Gx_node),
            )
            for p in pred
        )

    def two_hop_succ(self, Gx, Gx_node, core_x, succ):
        """
        Check the constraints for each successor of the ego node.
        """
        return all(
            self.one_hop(
                Gx,
                s,
                core_x,
                self.preds(Gx, core_x, s, Gx_node),
                self.succs(Gx, core_x, s),
            )
            for s in succ
        )

    def preds(self, Gx, core_x, v, Gx_node=None):
        """Predecessors of ``v`` already in the mapping, plus ``Gx_node`` if given.

        BUGFIX: the original used ``if Gx_node:``, which silently dropped
        falsy node labels such as ``0`` or ``""``; test against ``None``
        explicitly instead.
        """
        pred = [n for n in Gx.predecessors(v) if n in core_x]
        if Gx_node is not None:
            pred.append(Gx_node)
        return pred

    def succs(self, Gx, core_x, v, Gx_node=None):
        """Successors of ``v`` already in the mapping, plus ``Gx_node`` if given."""
        succ = [n for n in Gx.successors(v) if n in core_x]
        if Gx_node is not None:
            succ.append(Gx_node)
        return succ

    def test_one(self, pred_dates, succ_dates):
        """
        Edges one hop out from Gx_node in the mapping should be
        time-respecting with respect to each other, regardless of
        direction.

        Raises
        ------
        ValueError
            If any edge lacks a value for the temporal attribute.
        """
        dates = pred_dates + succ_dates
        if any(x is None for x in dates):
            raise ValueError("Date or datetime not supplied for at least one edge.")
        # All incident interaction times must fall inside one delta window.
        # (Uses min/max instead of the original in-place sort, avoiding the
        # side effect of mutating the caller's lists.)
        return not dates or max(dates) - min(dates) <= self.delta

    def test_two(self, pred_dates, succ_dates):
        """
        Edges from a dual Gx_node in the mapping should be ordered in
        a time-respecting manner: every incoming interaction must not
        follow any outgoing one.
        """
        # First out before last in; negative of the necessary condition for
        # time-respect.
        if pred_dates and succ_dates and min(succ_dates) < max(pred_dates):
            return False
        return True

    def semantic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is semantically
        feasible.

        Any subclass which redefines semantic_feasibility() must
        maintain the self.tests if needed, to keep the match() method
        functional. Implementations should consider multigraphs.
        """
        pred, succ = (
            [n for n in self.G1.predecessors(G1_node) if n in self.core_1],
            [n for n in self.G1.successors(G1_node) if n in self.core_1],
        )
        if not self.one_hop(
            self.G1, G1_node, self.core_1, pred, succ
        ):  # Fail fast on first node.
            return False
        if not self.two_hop_pred(self.G1, G1_node, self.core_1, pred):
            return False
        if not self.two_hop_succ(self.G1, G1_node, self.core_1, succ):
            return False
        # Otherwise, this node is semantically feasible!
        return True
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__init__.py ADDED
File without changes
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (191 Bytes). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_ismags.cpython-310.pyc ADDED
Binary file (9.17 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphism.cpython-310.pyc ADDED
Binary file (2.26 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphvf2.cpython-310.pyc ADDED
Binary file (8.84 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_match_helpers.cpython-310.pyc ADDED
Binary file (2.25 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_temporalisomorphvf2.cpython-310.pyc ADDED
Binary file (7.11 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_tree_isomorphism.cpython-310.pyc ADDED
Binary file (4.62 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp.cpython-310.pyc ADDED
Binary file (31.8 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp_helpers.cpython-310.pyc ADDED
Binary file (50.3 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2userfunc.cpython-310.pyc ADDED
Binary file (7.14 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99 ADDED
Binary file (1.44 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99 ADDED
Binary file (1.44 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99 ADDED
Binary file (310 Bytes). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99 ADDED
Binary file (1.6 kB). View file
 
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py ADDED
@@ -0,0 +1,327 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Tests for ISMAGS isomorphism algorithm.
3
+ """
4
+
5
+ import pytest
6
+
7
+ import networkx as nx
8
+ from networkx.algorithms import isomorphism as iso
9
+
10
+
11
+ def _matches_to_sets(matches):
12
+ """
13
+ Helper function to facilitate comparing collections of dictionaries in
14
+ which order does not matter.
15
+ """
16
+ return {frozenset(m.items()) for m in matches}
17
+
18
+
19
class TestSelfIsomorphism:
    # Each entry is a (node_data, edge_data) pair describing a small
    # symmetric graph; node_data may be empty (or a plain node iterable)
    # when node labels are not needed.
    data = [
        (
            [
                (0, {"name": "a"}),
                (1, {"name": "a"}),
                (2, {"name": "b"}),
                (3, {"name": "b"}),
                (4, {"name": "a"}),
                (5, {"name": "a"}),
            ],
            [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5)],
        ),
        (range(1, 5), [(1, 2), (2, 4), (4, 3), (3, 1)]),
        (
            [],
            [
                (0, 1),
                (1, 2),
                (2, 3),
                (3, 4),
                (4, 5),
                (5, 0),
                (0, 6),
                (6, 7),
                (2, 8),
                (8, 9),
                (4, 10),
                (10, 11),
            ],
        ),
        ([], [(0, 1), (1, 2), (1, 4), (2, 3), (3, 5), (3, 6)]),
    ]

    def test_self_isomorphism(self):
        """
        For some small, symmetric graphs, make sure that 1) they are isomorphic
        to themselves, and 2) that only the identity mapping is found.
        """
        for node_data, edge_data in self.data:
            graph = nx.Graph()
            graph.add_nodes_from(node_data)
            graph.add_edges_from(edge_data)

            # Nodes may only map to nodes with an equal "name" attribute.
            ismags = iso.ISMAGS(
                graph, graph, node_match=iso.categorical_node_match("name", None)
            )
            assert ismags.is_isomorphic()
            assert ismags.subgraph_is_isomorphic()
            # With symmetry handling enabled, only the identity survives.
            assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [
                {n: n for n in graph.nodes}
            ]

    def test_edgecase_self_isomorphism(self):
        """
        This edgecase is one of the cases in which it is hard to find all
        symmetry elements.
        """
        graph = nx.Graph()
        nx.add_path(graph, range(5))
        graph.add_edges_from([(2, 5), (5, 6)])

        ismags = iso.ISMAGS(graph, graph)
        ismags_answer = list(ismags.find_isomorphisms(True))
        assert ismags_answer == [{n: n for n in graph.nodes}]

        # Relabeling the nodes must not change the outcome: still identity.
        graph = nx.relabel_nodes(graph, {0: 0, 1: 1, 2: 2, 3: 3, 4: 6, 5: 4, 6: 5})
        ismags = iso.ISMAGS(graph, graph)
        ismags_answer = list(ismags.find_isomorphisms(True))
        assert ismags_answer == [{n: n for n in graph.nodes}]

    def test_directed_self_isomorphism(self):
        """
        For some small, directed, symmetric graphs, make sure that 1) they are
        isomorphic to themselves, and 2) that only the identity mapping is
        found.
        """
        # NOTE(review): despite the name and docstring, this builds undirected
        # nx.Graph instances, making it identical to test_self_isomorphism —
        # confirm whether nx.DiGraph was intended here.
        for node_data, edge_data in self.data:
            graph = nx.Graph()
            graph.add_nodes_from(node_data)
            graph.add_edges_from(edge_data)

            ismags = iso.ISMAGS(
                graph, graph, node_match=iso.categorical_node_match("name", None)
            )
            assert ismags.is_isomorphic()
            assert ismags.subgraph_is_isomorphic()
            assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [
                {n: n for n in graph.nodes}
            ]
109
+
110
+
111
class TestSubgraphIsomorphism:
    """ISMAGS subgraph isomorphism queries on small hand-built graphs."""

    def test_isomorphism(self):
        # The pattern is a plain 4-cycle.
        pattern = nx.Graph()
        nx.add_cycle(pattern, range(4))

        # The target is the same 4-cycle with a pendant node on each corner.
        target = nx.Graph()
        nx.add_cycle(target, range(4))
        target.add_edges_from(list(zip(target, range(4, 8))))
        matcher = iso.ISMAGS(target, pattern)
        assert list(matcher.subgraph_isomorphisms_iter(symmetry=True)) == [
            {node: node for node in pattern.nodes}
        ]

    def test_isomorphism2(self):
        # A 3-node path embedded in a star around node 1.
        pattern = nx.Graph()
        nx.add_path(pattern, range(3))

        target = pattern.copy()
        target.add_edge(1, 3)

        matcher = iso.ISMAGS(target, pattern)
        found = matcher.subgraph_isomorphisms_iter(symmetry=True)
        symmetric = [
            {0: 0, 1: 1, 2: 2},
            {0: 0, 1: 1, 3: 2},
            {2: 0, 1: 1, 3: 2},
        ]
        assert _matches_to_sets(found) == _matches_to_sets(symmetric)

        # Without symmetry handling the mirrored embeddings also appear.
        found = matcher.subgraph_isomorphisms_iter(symmetry=False)
        asymmetric = [
            {0: 2, 1: 1, 2: 0},
            {0: 2, 1: 1, 3: 0},
            {2: 2, 1: 1, 3: 0},
        ]
        assert _matches_to_sets(found) == _matches_to_sets(symmetric + asymmetric)

    def test_labeled_nodes(self):
        # A triangle whose node 1 carries an attribute.
        pattern = nx.Graph()
        nx.add_cycle(pattern, range(3))
        pattern.nodes[1]["attr"] = True

        target = pattern.copy()
        target.add_edge(1, 3)
        matcher = iso.ISMAGS(target, pattern, node_match=lambda x, y: x == y)
        found = matcher.subgraph_isomorphisms_iter(symmetry=True)
        symmetric = [{0: 0, 1: 1, 2: 2}]
        assert _matches_to_sets(found) == _matches_to_sets(symmetric)

        found = matcher.subgraph_isomorphisms_iter(symmetry=False)
        asymmetric = [{0: 2, 1: 1, 2: 0}]
        assert _matches_to_sets(found) == _matches_to_sets(symmetric + asymmetric)

    def test_labeled_edges(self):
        # A triangle whose edge (1, 2) carries an attribute.
        pattern = nx.Graph()
        nx.add_cycle(pattern, range(3))
        pattern.edges[1, 2]["attr"] = True

        target = pattern.copy()
        target.add_edge(1, 3)
        matcher = iso.ISMAGS(target, pattern, edge_match=lambda x, y: x == y)
        found = matcher.subgraph_isomorphisms_iter(symmetry=True)
        symmetric = [{0: 0, 1: 1, 2: 2}]
        assert _matches_to_sets(found) == _matches_to_sets(symmetric)

        found = matcher.subgraph_isomorphisms_iter(symmetry=False)
        asymmetric = [{1: 2, 0: 0, 2: 1}]
        assert _matches_to_sets(found) == _matches_to_sets(symmetric + asymmetric)
185
+
186
+
187
class TestWikipediaExample:
    """ISMAGS full-graph isomorphism on the classic Wikipedia example pair."""

    # Nodes 'a', 'b', 'c' and 'd' form a column.
    # Nodes 'g', 'h', 'i' and 'j' form a column.
    g1edges = [
        ["a", "g"], ["a", "h"], ["a", "i"],
        ["b", "g"], ["b", "h"], ["b", "j"],
        ["c", "g"], ["c", "i"], ["c", "j"],
        ["d", "h"], ["d", "i"], ["d", "j"],
    ]

    # Nodes 1,2,3,4 form the clockwise corners of a large square.
    # Nodes 5,6,7,8 form the clockwise corners of a small square.
    g2edges = [
        [1, 2], [2, 3], [3, 4], [4, 1],
        [5, 6], [6, 7], [7, 8], [8, 5],
        [1, 5], [2, 6], [3, 7], [4, 8],
    ]

    def test_graph(self):
        columns = nx.Graph()
        columns.add_edges_from(self.g1edges)
        squares = nx.Graph()
        squares.add_edges_from(self.g2edges)
        matcher = iso.ISMAGS(columns, squares)
        assert matcher.is_isomorphic()
229
+
230
+
231
class TestLargestCommonSubgraph:
    def test_mcis(self):
        """Largest common subgraph of two color-labeled example graphs."""
        # Example graphs from DOI: 10.1002/spe.588
        graph1 = nx.Graph()
        graph1.add_edges_from([(1, 2), (2, 3), (2, 4), (3, 4), (4, 5)])
        graph1.nodes[1]["color"] = 0

        graph2 = nx.Graph()
        graph2.add_edges_from(
            [(1, 2), (2, 3), (2, 4), (3, 4), (3, 5), (5, 6), (5, 7), (6, 7)]
        )
        graph2.nodes[1]["color"] = 1
        graph2.nodes[6]["color"] = 2
        graph2.nodes[7]["color"] = 2

        ismags = iso.ISMAGS(
            graph1, graph2, node_match=iso.categorical_node_match("color", None)
        )
        # No full subgraph embedding exists under these color constraints.
        assert list(ismags.subgraph_isomorphisms_iter(True)) == []
        assert list(ismags.subgraph_isomorphisms_iter(False)) == []
        found_mcis = _matches_to_sets(ismags.largest_common_subgraph())
        expected = _matches_to_sets(
            [{2: 2, 3: 4, 4: 3, 5: 5}, {2: 4, 3: 2, 4: 3, 5: 5}]
        )
        assert expected == found_mcis

        # Swapping the argument order must give the same answer, reversed.
        ismags = iso.ISMAGS(
            graph2, graph1, node_match=iso.categorical_node_match("color", None)
        )
        assert list(ismags.subgraph_isomorphisms_iter(True)) == []
        assert list(ismags.subgraph_isomorphisms_iter(False)) == []
        found_mcis = _matches_to_sets(ismags.largest_common_subgraph())
        # Same answer, but reversed.
        expected = _matches_to_sets(
            [{2: 2, 3: 4, 4: 3, 5: 5}, {4: 2, 2: 3, 3: 4, 5: 5}]
        )
        assert expected == found_mcis

    def test_symmetry_mcis(self):
        """Largest common subgraph where symmetry handling matters."""
        graph1 = nx.Graph()
        nx.add_path(graph1, range(4))

        graph2 = nx.Graph()
        nx.add_path(graph2, range(3))
        graph2.add_edge(1, 3)

        # Only the symmetry of graph2 is taken into account here.
        ismags1 = iso.ISMAGS(
            graph1, graph2, node_match=iso.categorical_node_match("color", None)
        )
        assert list(ismags1.subgraph_isomorphisms_iter(True)) == []
        found_mcis = _matches_to_sets(ismags1.largest_common_subgraph())
        expected = _matches_to_sets([{0: 0, 1: 1, 2: 2}, {1: 0, 3: 2, 2: 1}])
        assert expected == found_mcis

        # Only the symmetry of graph1 is taken into account here.
        ismags2 = iso.ISMAGS(
            graph2, graph1, node_match=iso.categorical_node_match("color", None)
        )
        assert list(ismags2.subgraph_isomorphisms_iter(True)) == []
        found_mcis = _matches_to_sets(ismags2.largest_common_subgraph())
        expected = _matches_to_sets(
            [
                {3: 2, 0: 0, 1: 1},
                {2: 0, 0: 2, 1: 1},
                {3: 0, 0: 2, 1: 1},
                {3: 0, 1: 1, 2: 2},
                {0: 0, 1: 1, 2: 2},
                {2: 0, 3: 2, 1: 1},
            ]
        )

        assert expected == found_mcis

        # With symmetry handling disabled, both directions must agree once the
        # mappings from the second matcher are inverted.
        found_mcis1 = _matches_to_sets(ismags1.largest_common_subgraph(False))
        found_mcis2 = ismags2.largest_common_subgraph(False)
        found_mcis2 = [{v: k for k, v in d.items()} for d in found_mcis2]
        found_mcis2 = _matches_to_sets(found_mcis2)

        expected = _matches_to_sets(
            [
                {3: 2, 1: 3, 2: 1},
                {2: 0, 0: 2, 1: 1},
                {1: 2, 3: 3, 2: 1},
                {3: 0, 1: 3, 2: 1},
                {0: 2, 2: 3, 1: 1},
                {3: 0, 1: 2, 2: 1},
                {2: 0, 0: 3, 1: 1},
                {0: 0, 2: 3, 1: 1},
                {1: 0, 3: 3, 2: 1},
                {1: 0, 3: 2, 2: 1},
                {0: 3, 1: 1, 2: 2},
                {0: 0, 1: 1, 2: 2},
            ]
        )
        assert expected == found_mcis1
        assert expected == found_mcis2
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms import isomorphism as iso
5
+
6
+
7
class TestIsomorph:
    """Isomorphism heuristics and the full check on six tiny fixture graphs."""

    @classmethod
    def setup_class(cls):
        # Each fixture graph is built directly from its edge list.
        cls.G1 = nx.Graph([[1, 2], [1, 3], [1, 5], [2, 3]])
        cls.G2 = nx.Graph([[10, 20], [20, 30], [10, 30], [10, 50]])
        cls.G3 = nx.Graph([[1, 2], [1, 3], [1, 5], [2, 5]])
        cls.G4 = nx.Graph([[1, 2], [1, 3], [1, 5], [2, 4]])
        cls.G5 = nx.Graph([[1, 2], [1, 3]])
        cls.G6 = nx.Graph([[10, 20], [20, 30], [10, 30], [10, 50], [20, 50]])

    def test_could_be_isomorphic(self):
        assert iso.could_be_isomorphic(self.G1, self.G2)
        assert iso.could_be_isomorphic(self.G1, self.G3)
        assert not iso.could_be_isomorphic(self.G1, self.G4)
        assert iso.could_be_isomorphic(self.G3, self.G2)
        assert not iso.could_be_isomorphic(self.G1, self.G6)

    def test_fast_could_be_isomorphic(self):
        assert iso.fast_could_be_isomorphic(self.G3, self.G2)
        assert not iso.fast_could_be_isomorphic(self.G3, self.G5)
        assert not iso.fast_could_be_isomorphic(self.G1, self.G6)

    def test_faster_could_be_isomorphic(self):
        assert iso.faster_could_be_isomorphic(self.G3, self.G2)
        assert not iso.faster_could_be_isomorphic(self.G3, self.G5)
        assert not iso.faster_could_be_isomorphic(self.G1, self.G6)

    def test_is_isomorphic(self):
        assert iso.is_isomorphic(self.G1, self.G2)
        assert not iso.is_isomorphic(self.G1, self.G4)
        # Directed copies behave the same way.
        assert iso.is_isomorphic(self.G1.to_directed(), self.G2.to_directed())
        assert not iso.is_isomorphic(self.G1.to_directed(), self.G4.to_directed())
        # Mixing directed and undirected graphs is rejected outright.
        with pytest.raises(
            nx.NetworkXError, match="Graphs G1 and G2 are not of the same type."
        ):
            iso.is_isomorphic(self.G1.to_directed(), self.G1)
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py ADDED
@@ -0,0 +1,410 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Tests for VF2 isomorphism algorithm.
3
+ """
4
+
5
+ import importlib.resources
6
+ import os
7
+ import random
8
+ import struct
9
+
10
+ import networkx as nx
11
+ from networkx.algorithms import isomorphism as iso
12
+
13
+
14
class TestWikipediaExample:
    """VF2 on the graph-isomorphism example pair from Wikipedia."""

    # Source: https://en.wikipedia.org/wiki/Graph_isomorphism

    # Nodes 'a', 'b', 'c' and 'd' form a column.
    # Nodes 'g', 'h', 'i' and 'j' form a column.
    g1edges = [
        ["a", "g"], ["a", "h"], ["a", "i"],
        ["b", "g"], ["b", "h"], ["b", "j"],
        ["c", "g"], ["c", "i"], ["c", "j"],
        ["d", "h"], ["d", "i"], ["d", "j"],
    ]

    # Nodes 1,2,3,4 form the clockwise corners of a large square.
    # Nodes 5,6,7,8 form the clockwise corners of a small square.
    g2edges = [
        [1, 2], [2, 3], [3, 4], [4, 1],
        [5, 6], [6, 7], [7, 8], [8, 5],
        [1, 5], [2, 6], [3, 7], [4, 8],
    ]

    def test_graph(self):
        columns = nx.Graph()
        columns.add_edges_from(self.g1edges)
        squares = nx.Graph()
        squares.add_edges_from(self.g2edges)
        matcher = iso.GraphMatcher(columns, squares)
        assert matcher.is_isomorphic()
        # Just testing some cases
        assert matcher.subgraph_is_monomorphic()

        mapping = sorted(matcher.mapping.items())

        # this mapping is only one of the possibilities
        # so this test needs to be reconsidered
        # isomap = [('a', 1), ('b', 6), ('c', 3), ('d', 8),
        #           ('g', 2), ('h', 5), ('i', 4), ('j', 7)]
        # assert_equal(mapping, isomap)

    def test_subgraph(self):
        columns = nx.Graph()
        columns.add_edges_from(self.g1edges)
        squares = nx.Graph()
        squares.add_edges_from(self.g2edges)
        # An induced 4-cycle of the cube embeds in the bipartite graph.
        cycle = squares.subgraph([1, 2, 3, 4])
        matcher = iso.GraphMatcher(columns, cycle)
        assert matcher.subgraph_is_isomorphic()

    def test_subgraph_mono(self):
        columns = nx.Graph()
        columns.add_edges_from(self.g1edges)
        path = nx.Graph()
        path.add_edges_from([[1, 2], [2, 3], [3, 4]])
        matcher = iso.GraphMatcher(columns, path)
        assert matcher.subgraph_is_monomorphic()
85
+
86
+
87
class TestVF2GraphDB:
    # https://web.archive.org/web/20090303210205/http://amalfi.dis.unina.it/graph/db/

    @staticmethod
    def create_graph(filename):
        """Creates a Graph instance from the filename.

        The file is assumed to be in the format from the VF2 graph database:
        a stream of little-endian unsigned 16-bit integers (struct format
        "<H", two bytes each).  The first value is the node count; then for
        each node (0-based numeration) comes its edge count followed by that
        many terminal node indices.
        """
        graph = nx.Graph()
        # BUGFIX: use a context manager so the file handle is closed even if
        # parsing raises; the original open()/close() pair leaked the handle
        # on any error between them.
        with open(filename, mode="rb") as fh:
            # Grab the number of nodes.
            # Node numeration is 0-based, so the first node has index 0.
            nodes = struct.unpack("<H", fh.read(2))[0]
            for from_node in range(nodes):
                # Get the number of edges.
                edges = struct.unpack("<H", fh.read(2))[0]
                for _ in range(edges):
                    # Get the terminal node.
                    to_node = struct.unpack("<H", fh.read(2))[0]
                    graph.add_edge(from_node, to_node)
        return graph

    def test_graph(self):
        """The iso_r01_s80 A/B pair from the database must be isomorphic."""
        head = importlib.resources.files("networkx.algorithms.isomorphism.tests")
        g1 = self.create_graph(head / "iso_r01_s80.A99")
        g2 = self.create_graph(head / "iso_r01_s80.B99")
        gm = iso.GraphMatcher(g1, g2)
        assert gm.is_isomorphic()

    def test_subgraph(self):
        # A is the subgraph
        # B is the full graph
        head = importlib.resources.files("networkx.algorithms.isomorphism.tests")
        subgraph = self.create_graph(head / "si2_b06_m200.A99")
        graph = self.create_graph(head / "si2_b06_m200.B99")
        gm = iso.GraphMatcher(graph, subgraph)
        assert gm.subgraph_is_isomorphic()
        # Just testing some cases
        assert gm.subgraph_is_monomorphic()

    # There isn't a similar test implemented for subgraph monomorphism,
    # feel free to create one.
141
+
142
+
143
class TestAtlas:
    @classmethod
    def setup_class(cls):
        # NOTE(review): ``global atlas`` publishes the imported module at
        # module scope — presumably a legacy pattern; confirm whether anything
        # else relies on it before removing.
        global atlas
        from networkx.generators import atlas

        cls.GAG = atlas.graph_atlas_g()

    def test_graph_atlas(self):
        """Each atlas graph must be isomorphic to random relabelings of itself."""
        # Atlas = nx.graph_atlas_g()[0:208] # 208, 6 nodes or less
        Atlas = self.GAG[0:100]
        alphabet = list(range(26))
        for graph in Atlas:
            nlist = list(graph)
            labels = alphabet[: len(nlist)]
            # Ten random relabelings per graph keep the test cheap but varied.
            for s in range(10):
                random.shuffle(labels)
                d = dict(zip(nlist, labels))
                relabel = nx.relabel_nodes(graph, d)
                gm = iso.GraphMatcher(graph, relabel)
                assert gm.is_isomorphic()
165
+
166
+ def test_multiedge():
167
+ # Simple test for multigraphs
168
+ # Need something much more rigorous
169
+ edges = [
170
+ (0, 1),
171
+ (1, 2),
172
+ (2, 3),
173
+ (3, 4),
174
+ (4, 5),
175
+ (5, 6),
176
+ (6, 7),
177
+ (7, 8),
178
+ (8, 9),
179
+ (9, 10),
180
+ (10, 11),
181
+ (10, 11),
182
+ (11, 12),
183
+ (11, 12),
184
+ (12, 13),
185
+ (12, 13),
186
+ (13, 14),
187
+ (13, 14),
188
+ (14, 15),
189
+ (14, 15),
190
+ (15, 16),
191
+ (15, 16),
192
+ (16, 17),
193
+ (16, 17),
194
+ (17, 18),
195
+ (17, 18),
196
+ (18, 19),
197
+ (18, 19),
198
+ (19, 0),
199
+ (19, 0),
200
+ ]
201
+ nodes = list(range(20))
202
+
203
+ for g1 in [nx.MultiGraph(), nx.MultiDiGraph()]:
204
+ g1.add_edges_from(edges)
205
+ for _ in range(10):
206
+ new_nodes = list(nodes)
207
+ random.shuffle(new_nodes)
208
+ d = dict(zip(nodes, new_nodes))
209
+ g2 = nx.relabel_nodes(g1, d)
210
+ if not g1.is_directed():
211
+ gm = iso.GraphMatcher(g1, g2)
212
+ else:
213
+ gm = iso.DiGraphMatcher(g1, g2)
214
+ assert gm.is_isomorphic()
215
+ # Testing if monomorphism works in multigraphs
216
+ assert gm.subgraph_is_monomorphic()
217
+
218
+
219
+ def test_selfloop():
220
+ # Simple test for graphs with selfloops
221
+ edges = [
222
+ (0, 1),
223
+ (0, 2),
224
+ (1, 2),
225
+ (1, 3),
226
+ (2, 2),
227
+ (2, 4),
228
+ (3, 1),
229
+ (3, 2),
230
+ (4, 2),
231
+ (4, 5),
232
+ (5, 4),
233
+ ]
234
+ nodes = list(range(6))
235
+
236
+ for g1 in [nx.Graph(), nx.DiGraph()]:
237
+ g1.add_edges_from(edges)
238
+ for _ in range(100):
239
+ new_nodes = list(nodes)
240
+ random.shuffle(new_nodes)
241
+ d = dict(zip(nodes, new_nodes))
242
+ g2 = nx.relabel_nodes(g1, d)
243
+ if not g1.is_directed():
244
+ gm = iso.GraphMatcher(g1, g2)
245
+ else:
246
+ gm = iso.DiGraphMatcher(g1, g2)
247
+ assert gm.is_isomorphic()
248
+
249
+
250
+ def test_selfloop_mono():
251
+ # Simple test for graphs with selfloops
252
+ edges0 = [
253
+ (0, 1),
254
+ (0, 2),
255
+ (1, 2),
256
+ (1, 3),
257
+ (2, 4),
258
+ (3, 1),
259
+ (3, 2),
260
+ (4, 2),
261
+ (4, 5),
262
+ (5, 4),
263
+ ]
264
+ edges = edges0 + [(2, 2)]
265
+ nodes = list(range(6))
266
+
267
+ for g1 in [nx.Graph(), nx.DiGraph()]:
268
+ g1.add_edges_from(edges)
269
+ for _ in range(100):
270
+ new_nodes = list(nodes)
271
+ random.shuffle(new_nodes)
272
+ d = dict(zip(nodes, new_nodes))
273
+ g2 = nx.relabel_nodes(g1, d)
274
+ g2.remove_edges_from(nx.selfloop_edges(g2))
275
+ if not g1.is_directed():
276
+ gm = iso.GraphMatcher(g2, g1)
277
+ else:
278
+ gm = iso.DiGraphMatcher(g2, g1)
279
+ assert not gm.subgraph_is_monomorphic()
280
+
281
+
282
+ def test_isomorphism_iter1():
283
+ # As described in:
284
+ # http://groups.google.com/group/networkx-discuss/browse_thread/thread/2ff65c67f5e3b99f/d674544ebea359bb?fwc=1
285
+ g1 = nx.DiGraph()
286
+ g2 = nx.DiGraph()
287
+ g3 = nx.DiGraph()
288
+ g1.add_edge("A", "B")
289
+ g1.add_edge("B", "C")
290
+ g2.add_edge("Y", "Z")
291
+ g3.add_edge("Z", "Y")
292
+ gm12 = iso.DiGraphMatcher(g1, g2)
293
+ gm13 = iso.DiGraphMatcher(g1, g3)
294
+ x = list(gm12.subgraph_isomorphisms_iter())
295
+ y = list(gm13.subgraph_isomorphisms_iter())
296
+ assert {"A": "Y", "B": "Z"} in x
297
+ assert {"B": "Y", "C": "Z"} in x
298
+ assert {"A": "Z", "B": "Y"} in y
299
+ assert {"B": "Z", "C": "Y"} in y
300
+ assert len(x) == len(y)
301
+ assert len(x) == 2
302
+
303
+
304
+ def test_monomorphism_iter1():
305
+ g1 = nx.DiGraph()
306
+ g2 = nx.DiGraph()
307
+ g1.add_edge("A", "B")
308
+ g1.add_edge("B", "C")
309
+ g1.add_edge("C", "A")
310
+ g2.add_edge("X", "Y")
311
+ g2.add_edge("Y", "Z")
312
+ gm12 = iso.DiGraphMatcher(g1, g2)
313
+ x = list(gm12.subgraph_monomorphisms_iter())
314
+ assert {"A": "X", "B": "Y", "C": "Z"} in x
315
+ assert {"A": "Y", "B": "Z", "C": "X"} in x
316
+ assert {"A": "Z", "B": "X", "C": "Y"} in x
317
+ assert len(x) == 3
318
+ gm21 = iso.DiGraphMatcher(g2, g1)
319
+ # Check if StopIteration exception returns False
320
+ assert not gm21.subgraph_is_monomorphic()
321
+
322
+
323
+ def test_isomorphism_iter2():
324
+ # Path
325
+ for L in range(2, 10):
326
+ g1 = nx.path_graph(L)
327
+ gm = iso.GraphMatcher(g1, g1)
328
+ s = len(list(gm.isomorphisms_iter()))
329
+ assert s == 2
330
+ # Cycle
331
+ for L in range(3, 10):
332
+ g1 = nx.cycle_graph(L)
333
+ gm = iso.GraphMatcher(g1, g1)
334
+ s = len(list(gm.isomorphisms_iter()))
335
+ assert s == 2 * L
336
+
337
+
338
+ def test_multiple():
339
+ # Verify that we can use the graph matcher multiple times
340
+ edges = [("A", "B"), ("B", "A"), ("B", "C")]
341
+ for g1, g2 in [(nx.Graph(), nx.Graph()), (nx.DiGraph(), nx.DiGraph())]:
342
+ g1.add_edges_from(edges)
343
+ g2.add_edges_from(edges)
344
+ g3 = nx.subgraph(g2, ["A", "B"])
345
+ if not g1.is_directed():
346
+ gmA = iso.GraphMatcher(g1, g2)
347
+ gmB = iso.GraphMatcher(g1, g3)
348
+ else:
349
+ gmA = iso.DiGraphMatcher(g1, g2)
350
+ gmB = iso.DiGraphMatcher(g1, g3)
351
+ assert gmA.is_isomorphic()
352
+ g2.remove_node("C")
353
+ if not g1.is_directed():
354
+ gmA = iso.GraphMatcher(g1, g2)
355
+ else:
356
+ gmA = iso.DiGraphMatcher(g1, g2)
357
+ assert gmA.subgraph_is_isomorphic()
358
+ assert gmB.subgraph_is_isomorphic()
359
+ assert gmA.subgraph_is_monomorphic()
360
+ assert gmB.subgraph_is_monomorphic()
361
+
362
+
363
+ # for m in [gmB.mapping, gmB.mapping]:
364
+ # assert_true(m['A'] == 'A')
365
+ # assert_true(m['B'] == 'B')
366
+ # assert_true('C' not in m)
367
+
368
+
369
+ def test_noncomparable_nodes():
370
+ node1 = object()
371
+ node2 = object()
372
+ node3 = object()
373
+
374
+ # Graph
375
+ G = nx.path_graph([node1, node2, node3])
376
+ gm = iso.GraphMatcher(G, G)
377
+ assert gm.is_isomorphic()
378
+ # Just testing some cases
379
+ assert gm.subgraph_is_monomorphic()
380
+
381
+ # DiGraph
382
+ G = nx.path_graph([node1, node2, node3], create_using=nx.DiGraph)
383
+ H = nx.path_graph([node3, node2, node1], create_using=nx.DiGraph)
384
+ dgm = iso.DiGraphMatcher(G, H)
385
+ assert dgm.is_isomorphic()
386
+ # Just testing some cases
387
+ assert gm.subgraph_is_monomorphic()
388
+
389
+
390
+ def test_monomorphism_edge_match():
391
+ G = nx.DiGraph()
392
+ G.add_node(1)
393
+ G.add_node(2)
394
+ G.add_edge(1, 2, label="A")
395
+ G.add_edge(2, 1, label="B")
396
+ G.add_edge(2, 2, label="C")
397
+
398
+ SG = nx.DiGraph()
399
+ SG.add_node(5)
400
+ SG.add_node(6)
401
+ SG.add_edge(5, 6, label="A")
402
+
403
+ gm = iso.DiGraphMatcher(G, SG, edge_match=iso.categorical_edge_match("label", None))
404
+ assert gm.subgraph_is_monomorphic()
405
+
406
+
407
+ def test_isomorphvf2pp_multidigraphs():
408
+ g = nx.MultiDiGraph({0: [1, 1, 2, 2, 3], 1: [2, 3, 3], 2: [3]})
409
+ h = nx.MultiDiGraph({0: [1, 1, 2, 2, 3], 1: [2, 3, 3], 3: [2]})
410
+ assert not (nx.vf2pp_is_isomorphic(g, h))
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_match_helpers.py ADDED
@@ -0,0 +1,64 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from operator import eq
2
+
3
+ import networkx as nx
4
+ from networkx.algorithms import isomorphism as iso
5
+
6
+
7
+ def test_categorical_node_match():
8
+ nm = iso.categorical_node_match(["x", "y", "z"], [None] * 3)
9
+ assert nm({"x": 1, "y": 2, "z": 3}, {"x": 1, "y": 2, "z": 3})
10
+ assert not nm({"x": 1, "y": 2, "z": 2}, {"x": 1, "y": 2, "z": 1})
11
+
12
+
13
+ class TestGenericMultiEdgeMatch:
14
+ def setup_method(self):
15
+ self.G1 = nx.MultiDiGraph()
16
+ self.G2 = nx.MultiDiGraph()
17
+ self.G3 = nx.MultiDiGraph()
18
+ self.G4 = nx.MultiDiGraph()
19
+ attr_dict1 = {"id": "edge1", "minFlow": 0, "maxFlow": 10}
20
+ attr_dict2 = {"id": "edge2", "minFlow": -3, "maxFlow": 7}
21
+ attr_dict3 = {"id": "edge3", "minFlow": 13, "maxFlow": 117}
22
+ attr_dict4 = {"id": "edge4", "minFlow": 13, "maxFlow": 117}
23
+ attr_dict5 = {"id": "edge5", "minFlow": 8, "maxFlow": 12}
24
+ attr_dict6 = {"id": "edge6", "minFlow": 8, "maxFlow": 12}
25
+ for attr_dict in [
26
+ attr_dict1,
27
+ attr_dict2,
28
+ attr_dict3,
29
+ attr_dict4,
30
+ attr_dict5,
31
+ attr_dict6,
32
+ ]:
33
+ self.G1.add_edge(1, 2, **attr_dict)
34
+ for attr_dict in [
35
+ attr_dict5,
36
+ attr_dict3,
37
+ attr_dict6,
38
+ attr_dict1,
39
+ attr_dict4,
40
+ attr_dict2,
41
+ ]:
42
+ self.G2.add_edge(2, 3, **attr_dict)
43
+ for attr_dict in [attr_dict3, attr_dict5]:
44
+ self.G3.add_edge(3, 4, **attr_dict)
45
+ for attr_dict in [attr_dict6, attr_dict4]:
46
+ self.G4.add_edge(4, 5, **attr_dict)
47
+
48
+ def test_generic_multiedge_match(self):
49
+ full_match = iso.generic_multiedge_match(
50
+ ["id", "flowMin", "flowMax"], [None] * 3, [eq] * 3
51
+ )
52
+ flow_match = iso.generic_multiedge_match(
53
+ ["flowMin", "flowMax"], [None] * 2, [eq] * 2
54
+ )
55
+ min_flow_match = iso.generic_multiedge_match("flowMin", None, eq)
56
+ id_match = iso.generic_multiedge_match("id", None, eq)
57
+ assert flow_match(self.G1[1][2], self.G2[2][3])
58
+ assert min_flow_match(self.G1[1][2], self.G2[2][3])
59
+ assert id_match(self.G1[1][2], self.G2[2][3])
60
+ assert full_match(self.G1[1][2], self.G2[2][3])
61
+ assert flow_match(self.G3[3][4], self.G4[4][5])
62
+ assert min_flow_match(self.G3[3][4], self.G4[4][5])
63
+ assert not id_match(self.G3[3][4], self.G4[4][5])
64
+ assert not full_match(self.G3[3][4], self.G4[4][5])
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py ADDED
@@ -0,0 +1,212 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Tests for the temporal aspect of the Temporal VF2 isomorphism algorithm.
3
+ """
4
+
5
+ from datetime import date, datetime, timedelta
6
+
7
+ import networkx as nx
8
+ from networkx.algorithms import isomorphism as iso
9
+
10
+
11
+ def provide_g1_edgelist():
12
+ return [(0, 1), (0, 2), (1, 2), (2, 4), (1, 3), (3, 4), (4, 5)]
13
+
14
+
15
+ def put_same_time(G, att_name):
16
+ for e in G.edges(data=True):
17
+ e[2][att_name] = date(2015, 1, 1)
18
+ return G
19
+
20
+
21
+ def put_same_datetime(G, att_name):
22
+ for e in G.edges(data=True):
23
+ e[2][att_name] = datetime(2015, 1, 1)
24
+ return G
25
+
26
+
27
+ def put_sequence_time(G, att_name):
28
+ current_date = date(2015, 1, 1)
29
+ for e in G.edges(data=True):
30
+ current_date += timedelta(days=1)
31
+ e[2][att_name] = current_date
32
+ return G
33
+
34
+
35
+ def put_time_config_0(G, att_name):
36
+ G[0][1][att_name] = date(2015, 1, 2)
37
+ G[0][2][att_name] = date(2015, 1, 2)
38
+ G[1][2][att_name] = date(2015, 1, 3)
39
+ G[1][3][att_name] = date(2015, 1, 1)
40
+ G[2][4][att_name] = date(2015, 1, 1)
41
+ G[3][4][att_name] = date(2015, 1, 3)
42
+ G[4][5][att_name] = date(2015, 1, 3)
43
+ return G
44
+
45
+
46
+ def put_time_config_1(G, att_name):
47
+ G[0][1][att_name] = date(2015, 1, 2)
48
+ G[0][2][att_name] = date(2015, 1, 1)
49
+ G[1][2][att_name] = date(2015, 1, 3)
50
+ G[1][3][att_name] = date(2015, 1, 1)
51
+ G[2][4][att_name] = date(2015, 1, 2)
52
+ G[3][4][att_name] = date(2015, 1, 4)
53
+ G[4][5][att_name] = date(2015, 1, 3)
54
+ return G
55
+
56
+
57
+ def put_time_config_2(G, att_name):
58
+ G[0][1][att_name] = date(2015, 1, 1)
59
+ G[0][2][att_name] = date(2015, 1, 1)
60
+ G[1][2][att_name] = date(2015, 1, 3)
61
+ G[1][3][att_name] = date(2015, 1, 2)
62
+ G[2][4][att_name] = date(2015, 1, 2)
63
+ G[3][4][att_name] = date(2015, 1, 3)
64
+ G[4][5][att_name] = date(2015, 1, 2)
65
+ return G
66
+
67
+
68
+ class TestTimeRespectingGraphMatcher:
69
+ """
70
+ A test class for the undirected temporal graph matcher.
71
+ """
72
+
73
+ def provide_g1_topology(self):
74
+ G1 = nx.Graph()
75
+ G1.add_edges_from(provide_g1_edgelist())
76
+ return G1
77
+
78
+ def provide_g2_path_3edges(self):
79
+ G2 = nx.Graph()
80
+ G2.add_edges_from([(0, 1), (1, 2), (2, 3)])
81
+ return G2
82
+
83
+ def test_timdelta_zero_timeRespecting_returnsTrue(self):
84
+ G1 = self.provide_g1_topology()
85
+ temporal_name = "date"
86
+ G1 = put_same_time(G1, temporal_name)
87
+ G2 = self.provide_g2_path_3edges()
88
+ d = timedelta()
89
+ gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
90
+ assert gm.subgraph_is_isomorphic()
91
+
92
+ def test_timdelta_zero_datetime_timeRespecting_returnsTrue(self):
93
+ G1 = self.provide_g1_topology()
94
+ temporal_name = "date"
95
+ G1 = put_same_datetime(G1, temporal_name)
96
+ G2 = self.provide_g2_path_3edges()
97
+ d = timedelta()
98
+ gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
99
+ assert gm.subgraph_is_isomorphic()
100
+
101
+ def test_attNameStrange_timdelta_zero_timeRespecting_returnsTrue(self):
102
+ G1 = self.provide_g1_topology()
103
+ temporal_name = "strange_name"
104
+ G1 = put_same_time(G1, temporal_name)
105
+ G2 = self.provide_g2_path_3edges()
106
+ d = timedelta()
107
+ gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
108
+ assert gm.subgraph_is_isomorphic()
109
+
110
+ def test_notTimeRespecting_returnsFalse(self):
111
+ G1 = self.provide_g1_topology()
112
+ temporal_name = "date"
113
+ G1 = put_sequence_time(G1, temporal_name)
114
+ G2 = self.provide_g2_path_3edges()
115
+ d = timedelta()
116
+ gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
117
+ assert not gm.subgraph_is_isomorphic()
118
+
119
+ def test_timdelta_one_config0_returns_no_embeddings(self):
120
+ G1 = self.provide_g1_topology()
121
+ temporal_name = "date"
122
+ G1 = put_time_config_0(G1, temporal_name)
123
+ G2 = self.provide_g2_path_3edges()
124
+ d = timedelta(days=1)
125
+ gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
126
+ count_match = len(list(gm.subgraph_isomorphisms_iter()))
127
+ assert count_match == 0
128
+
129
+ def test_timdelta_one_config1_returns_four_embedding(self):
130
+ G1 = self.provide_g1_topology()
131
+ temporal_name = "date"
132
+ G1 = put_time_config_1(G1, temporal_name)
133
+ G2 = self.provide_g2_path_3edges()
134
+ d = timedelta(days=1)
135
+ gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
136
+ count_match = len(list(gm.subgraph_isomorphisms_iter()))
137
+ assert count_match == 4
138
+
139
+ def test_timdelta_one_config2_returns_ten_embeddings(self):
140
+ G1 = self.provide_g1_topology()
141
+ temporal_name = "date"
142
+ G1 = put_time_config_2(G1, temporal_name)
143
+ G2 = self.provide_g2_path_3edges()
144
+ d = timedelta(days=1)
145
+ gm = iso.TimeRespectingGraphMatcher(G1, G2, temporal_name, d)
146
+ L = list(gm.subgraph_isomorphisms_iter())
147
+ count_match = len(list(gm.subgraph_isomorphisms_iter()))
148
+ assert count_match == 10
149
+
150
+
151
+ class TestDiTimeRespectingGraphMatcher:
152
+ """
153
+ A test class for the directed time-respecting graph matcher.
154
+ """
155
+
156
+ def provide_g1_topology(self):
157
+ G1 = nx.DiGraph()
158
+ G1.add_edges_from(provide_g1_edgelist())
159
+ return G1
160
+
161
+ def provide_g2_path_3edges(self):
162
+ G2 = nx.DiGraph()
163
+ G2.add_edges_from([(0, 1), (1, 2), (2, 3)])
164
+ return G2
165
+
166
+ def test_timdelta_zero_same_dates_returns_true(self):
167
+ G1 = self.provide_g1_topology()
168
+ temporal_name = "date"
169
+ G1 = put_same_time(G1, temporal_name)
170
+ G2 = self.provide_g2_path_3edges()
171
+ d = timedelta()
172
+ gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
173
+ assert gm.subgraph_is_isomorphic()
174
+
175
+ def test_attNameStrange_timdelta_zero_same_dates_returns_true(self):
176
+ G1 = self.provide_g1_topology()
177
+ temporal_name = "strange"
178
+ G1 = put_same_time(G1, temporal_name)
179
+ G2 = self.provide_g2_path_3edges()
180
+ d = timedelta()
181
+ gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
182
+ assert gm.subgraph_is_isomorphic()
183
+
184
+ def test_timdelta_one_config0_returns_no_embeddings(self):
185
+ G1 = self.provide_g1_topology()
186
+ temporal_name = "date"
187
+ G1 = put_time_config_0(G1, temporal_name)
188
+ G2 = self.provide_g2_path_3edges()
189
+ d = timedelta(days=1)
190
+ gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
191
+ count_match = len(list(gm.subgraph_isomorphisms_iter()))
192
+ assert count_match == 0
193
+
194
+ def test_timdelta_one_config1_returns_one_embedding(self):
195
+ G1 = self.provide_g1_topology()
196
+ temporal_name = "date"
197
+ G1 = put_time_config_1(G1, temporal_name)
198
+ G2 = self.provide_g2_path_3edges()
199
+ d = timedelta(days=1)
200
+ gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
201
+ count_match = len(list(gm.subgraph_isomorphisms_iter()))
202
+ assert count_match == 1
203
+
204
+ def test_timdelta_one_config2_returns_two_embeddings(self):
205
+ G1 = self.provide_g1_topology()
206
+ temporal_name = "date"
207
+ G1 = put_time_config_2(G1, temporal_name)
208
+ G2 = self.provide_g2_path_3edges()
209
+ d = timedelta(days=1)
210
+ gm = iso.TimeRespectingDiGraphMatcher(G1, G2, temporal_name, d)
211
+ count_match = len(list(gm.subgraph_isomorphisms_iter()))
212
+ assert count_match == 2
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py ADDED
@@ -0,0 +1,292 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import random
2
+ import time
3
+
4
+ import pytest
5
+
6
+ import networkx as nx
7
+ from networkx.algorithms.isomorphism.tree_isomorphism import (
8
+ rooted_tree_isomorphism,
9
+ tree_isomorphism,
10
+ )
11
+ from networkx.classes.function import is_directed
12
+
13
+
14
+ @pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph))
15
+ def test_tree_isomorphism_raises_on_directed_and_multigraphs(graph_constructor):
16
+ t1 = graph_constructor([(0, 1)])
17
+ t2 = graph_constructor([(1, 2)])
18
+ with pytest.raises(nx.NetworkXNotImplemented):
19
+ nx.isomorphism.tree_isomorphism(t1, t2)
20
+
21
+
22
+ # have this work for graph
23
+ # given two trees (either the directed or undirected)
24
+ # transform t2 according to the isomorphism
25
+ # and confirm it is identical to t1
26
+ # randomize the order of the edges when constructing
27
+ def check_isomorphism(t1, t2, isomorphism):
28
+ # get the name of t1, given the name in t2
29
+ mapping = {v2: v1 for (v1, v2) in isomorphism}
30
+
31
+ # these should be the same
32
+ d1 = is_directed(t1)
33
+ d2 = is_directed(t2)
34
+ assert d1 == d2
35
+
36
+ edges_1 = []
37
+ for u, v in t1.edges():
38
+ if d1:
39
+ edges_1.append((u, v))
40
+ else:
41
+ # if not directed, then need to
42
+ # put the edge in a consistent direction
43
+ if u < v:
44
+ edges_1.append((u, v))
45
+ else:
46
+ edges_1.append((v, u))
47
+
48
+ edges_2 = []
49
+ for u, v in t2.edges():
50
+ # translate to names for t1
51
+ u = mapping[u]
52
+ v = mapping[v]
53
+ if d2:
54
+ edges_2.append((u, v))
55
+ else:
56
+ if u < v:
57
+ edges_2.append((u, v))
58
+ else:
59
+ edges_2.append((v, u))
60
+
61
+ return sorted(edges_1) == sorted(edges_2)
62
+
63
+
64
+ def test_hardcoded():
65
+ print("hardcoded test")
66
+
67
+ # define a test problem
68
+ edges_1 = [
69
+ ("a", "b"),
70
+ ("a", "c"),
71
+ ("a", "d"),
72
+ ("b", "e"),
73
+ ("b", "f"),
74
+ ("e", "j"),
75
+ ("e", "k"),
76
+ ("c", "g"),
77
+ ("c", "h"),
78
+ ("g", "m"),
79
+ ("d", "i"),
80
+ ("f", "l"),
81
+ ]
82
+
83
+ edges_2 = [
84
+ ("v", "y"),
85
+ ("v", "z"),
86
+ ("u", "x"),
87
+ ("q", "u"),
88
+ ("q", "v"),
89
+ ("p", "t"),
90
+ ("n", "p"),
91
+ ("n", "q"),
92
+ ("n", "o"),
93
+ ("o", "r"),
94
+ ("o", "s"),
95
+ ("s", "w"),
96
+ ]
97
+
98
+ # there are two possible correct isomorphisms
99
+ # it currently returns isomorphism1
100
+ # but the second is also correct
101
+ isomorphism1 = [
102
+ ("a", "n"),
103
+ ("b", "q"),
104
+ ("c", "o"),
105
+ ("d", "p"),
106
+ ("e", "v"),
107
+ ("f", "u"),
108
+ ("g", "s"),
109
+ ("h", "r"),
110
+ ("i", "t"),
111
+ ("j", "y"),
112
+ ("k", "z"),
113
+ ("l", "x"),
114
+ ("m", "w"),
115
+ ]
116
+
117
+ # could swap y and z
118
+ isomorphism2 = [
119
+ ("a", "n"),
120
+ ("b", "q"),
121
+ ("c", "o"),
122
+ ("d", "p"),
123
+ ("e", "v"),
124
+ ("f", "u"),
125
+ ("g", "s"),
126
+ ("h", "r"),
127
+ ("i", "t"),
128
+ ("j", "z"),
129
+ ("k", "y"),
130
+ ("l", "x"),
131
+ ("m", "w"),
132
+ ]
133
+
134
+ t1 = nx.Graph()
135
+ t1.add_edges_from(edges_1)
136
+ root1 = "a"
137
+
138
+ t2 = nx.Graph()
139
+ t2.add_edges_from(edges_2)
140
+ root2 = "n"
141
+
142
+ isomorphism = sorted(rooted_tree_isomorphism(t1, root1, t2, root2))
143
+
144
+ # is correct by hand
145
+ assert isomorphism in (isomorphism1, isomorphism2)
146
+
147
+ # check algorithmically
148
+ assert check_isomorphism(t1, t2, isomorphism)
149
+
150
+ # try again as digraph
151
+ t1 = nx.DiGraph()
152
+ t1.add_edges_from(edges_1)
153
+ root1 = "a"
154
+
155
+ t2 = nx.DiGraph()
156
+ t2.add_edges_from(edges_2)
157
+ root2 = "n"
158
+
159
+ isomorphism = sorted(rooted_tree_isomorphism(t1, root1, t2, root2))
160
+
161
+ # is correct by hand
162
+ assert isomorphism in (isomorphism1, isomorphism2)
163
+
164
+ # check algorithmically
165
+ assert check_isomorphism(t1, t2, isomorphism)
166
+
167
+
168
+ # randomly swap a tuple (a,b)
169
+ def random_swap(t):
170
+ (a, b) = t
171
+ if random.randint(0, 1) == 1:
172
+ return (a, b)
173
+ else:
174
+ return (b, a)
175
+
176
+
177
+ # given a tree t1, create a new tree t2
178
+ # that is isomorphic to t1, with a known isomorphism
179
+ # and test that our algorithm found the right one
180
+ def positive_single_tree(t1):
181
+ assert nx.is_tree(t1)
182
+
183
+ nodes1 = list(t1.nodes())
184
+ # get a random permutation of this
185
+ nodes2 = nodes1.copy()
186
+ random.shuffle(nodes2)
187
+
188
+ # this is one isomorphism, however they may be multiple
189
+ # so we don't necessarily get this one back
190
+ someisomorphism = list(zip(nodes1, nodes2))
191
+
192
+ # map from old to new
193
+ map1to2 = dict(someisomorphism)
194
+
195
+ # get the edges with the transformed names
196
+ edges2 = [random_swap((map1to2[u], map1to2[v])) for (u, v) in t1.edges()]
197
+ # randomly permute, to ensure we're not relying on edge order somehow
198
+ random.shuffle(edges2)
199
+
200
+ # so t2 is isomorphic to t1
201
+ t2 = nx.Graph()
202
+ t2.add_edges_from(edges2)
203
+
204
+ # lets call our code to see if t1 and t2 are isomorphic
205
+ isomorphism = tree_isomorphism(t1, t2)
206
+
207
+ # make sure we got a correct solution
208
+ # although not necessarily someisomorphism
209
+ assert len(isomorphism) > 0
210
+ assert check_isomorphism(t1, t2, isomorphism)
211
+
212
+
213
+ # run positive_single_tree over all the
214
+ # non-isomorphic trees for k from 4 to maxk
215
+ # k = 4 is the first level that has more than 1 non-isomorphic tree
216
+ # k = 13 takes about 2.86 seconds to run on my laptop
217
+ # larger values run slow down significantly
218
+ # as the number of trees grows rapidly
219
+ def test_positive(maxk=14):
220
+ print("positive test")
221
+
222
+ for k in range(2, maxk + 1):
223
+ start_time = time.time()
224
+ trial = 0
225
+ for t in nx.nonisomorphic_trees(k):
226
+ positive_single_tree(t)
227
+ trial += 1
228
+ print(k, trial, time.time() - start_time)
229
+
230
+
231
+ # test the trivial case of a single node in each tree
232
+ # note that nonisomorphic_trees doesn't work for k = 1
233
+ def test_trivial():
234
+ print("trivial test")
235
+
236
+ # back to an undirected graph
237
+ t1 = nx.Graph()
238
+ t1.add_node("a")
239
+ root1 = "a"
240
+
241
+ t2 = nx.Graph()
242
+ t2.add_node("n")
243
+ root2 = "n"
244
+
245
+ isomorphism = rooted_tree_isomorphism(t1, root1, t2, root2)
246
+
247
+ assert isomorphism == [("a", "n")]
248
+
249
+ assert check_isomorphism(t1, t2, isomorphism)
250
+
251
+
252
+ # test another trivial case where the two graphs have
253
+ # different numbers of nodes
254
+ def test_trivial_2():
255
+ print("trivial test 2")
256
+
257
+ edges_1 = [("a", "b"), ("a", "c")]
258
+
259
+ edges_2 = [("v", "y")]
260
+
261
+ t1 = nx.Graph()
262
+ t1.add_edges_from(edges_1)
263
+
264
+ t2 = nx.Graph()
265
+ t2.add_edges_from(edges_2)
266
+
267
+ isomorphism = tree_isomorphism(t1, t2)
268
+
269
+ # they cannot be isomorphic,
270
+ # since they have different numbers of nodes
271
+ assert isomorphism == []
272
+
273
+
274
+ # the function nonisomorphic_trees generates all the non-isomorphic
275
+ # trees of a given size. Take each pair of these and verify that
276
+ # they are not isomorphic
277
+ # k = 4 is the first level that has more than 1 non-isomorphic tree
278
+ # k = 11 takes about 4.76 seconds to run on my laptop
279
+ # larger values run slow down significantly
280
+ # as the number of trees grows rapidly
281
+ def test_negative(maxk=11):
282
+ print("negative test")
283
+
284
+ for k in range(4, maxk + 1):
285
+ test_trees = list(nx.nonisomorphic_trees(k))
286
+ start_time = time.time()
287
+ trial = 0
288
+ for i in range(len(test_trees) - 1):
289
+ for j in range(i + 1, len(test_trees)):
290
+ trial += 1
291
+ assert tree_isomorphism(test_trees[i], test_trees[j]) == []
292
+ print(k, trial, time.time() - start_time)
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp.py ADDED
@@ -0,0 +1,1608 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import itertools as it
2
+
3
+ import pytest
4
+
5
+ import networkx as nx
6
+ from networkx import vf2pp_is_isomorphic, vf2pp_isomorphism
7
+
8
+ labels_same = ["blue"]
9
+
10
+ labels_many = [
11
+ "white",
12
+ "red",
13
+ "blue",
14
+ "green",
15
+ "orange",
16
+ "black",
17
+ "purple",
18
+ "yellow",
19
+ "brown",
20
+ "cyan",
21
+ "solarized",
22
+ "pink",
23
+ "none",
24
+ ]
25
+
26
+
27
+ class TestPreCheck:
28
+ def test_first_graph_empty(self):
29
+ G1 = nx.Graph()
30
+ G2 = nx.Graph([(0, 1), (1, 2)])
31
+ assert not vf2pp_is_isomorphic(G1, G2)
32
+
33
+ def test_second_graph_empty(self):
34
+ G1 = nx.Graph([(0, 1), (1, 2)])
35
+ G2 = nx.Graph()
36
+ assert not vf2pp_is_isomorphic(G1, G2)
37
+
38
+ def test_different_order1(self):
39
+ G1 = nx.path_graph(5)
40
+ G2 = nx.path_graph(6)
41
+ assert not vf2pp_is_isomorphic(G1, G2)
42
+
43
+ def test_different_order2(self):
44
+ G1 = nx.barbell_graph(100, 20)
45
+ G2 = nx.barbell_graph(101, 20)
46
+ assert not vf2pp_is_isomorphic(G1, G2)
47
+
48
+ def test_different_order3(self):
49
+ G1 = nx.complete_graph(7)
50
+ G2 = nx.complete_graph(8)
51
+ assert not vf2pp_is_isomorphic(G1, G2)
52
+
53
+ def test_different_degree_sequences1(self):
54
+ G1 = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (0, 4)])
55
+ G2 = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (0, 4), (2, 5)])
56
+ assert not vf2pp_is_isomorphic(G1, G2)
57
+
58
+ G2.remove_node(3)
59
+ nx.set_node_attributes(G1, dict(zip(G1, it.cycle(["a"]))), "label")
60
+ nx.set_node_attributes(G2, dict(zip(G2, it.cycle("a"))), "label")
61
+
62
+ assert vf2pp_is_isomorphic(G1, G2)
63
+
64
+ def test_different_degree_sequences2(self):
65
+ G1 = nx.Graph(
66
+ [
67
+ (0, 1),
68
+ (1, 2),
69
+ (0, 2),
70
+ (2, 3),
71
+ (3, 4),
72
+ (4, 5),
73
+ (5, 6),
74
+ (6, 3),
75
+ (4, 7),
76
+ (7, 8),
77
+ (8, 3),
78
+ ]
79
+ )
80
+ G2 = G1.copy()
81
+ G2.add_edge(8, 0)
82
+ assert not vf2pp_is_isomorphic(G1, G2)
83
+
84
+ G1.add_edge(6, 1)
85
+ nx.set_node_attributes(G1, dict(zip(G1, it.cycle(["a"]))), "label")
86
+ nx.set_node_attributes(G2, dict(zip(G2, it.cycle("a"))), "label")
87
+
88
+ assert vf2pp_is_isomorphic(G1, G2)
89
+
90
+ def test_different_degree_sequences3(self):
91
+ G1 = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)])
92
+ G2 = nx.Graph(
93
+ [(0, 1), (0, 6), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)]
94
+ )
95
+ assert not vf2pp_is_isomorphic(G1, G2)
96
+
97
+ G1.add_edge(3, 5)
98
+ nx.set_node_attributes(G1, dict(zip(G1, it.cycle(["a"]))), "label")
99
+ nx.set_node_attributes(G2, dict(zip(G2, it.cycle("a"))), "label")
100
+
101
+ assert vf2pp_is_isomorphic(G1, G2)
102
+
103
+ def test_label_distribution(self):
104
+ G1 = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)])
105
+ G2 = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)])
106
+
107
+ colors1 = ["blue", "blue", "blue", "yellow", "black", "purple", "purple"]
108
+ colors2 = ["blue", "blue", "yellow", "yellow", "black", "purple", "purple"]
109
+
110
+ nx.set_node_attributes(G1, dict(zip(G1, it.cycle(colors1[::-1]))), "label")
111
+ nx.set_node_attributes(G2, dict(zip(G2, it.cycle(colors2[::-1]))), "label")
112
+
113
+ assert not vf2pp_is_isomorphic(G1, G2, node_label="label")
114
+ G2.nodes[3]["label"] = "blue"
115
+ assert vf2pp_is_isomorphic(G1, G2, node_label="label")
116
+
117
+
118
class TestAllGraphTypesEdgeCases:
    """Empty-graph edge cases, exercised for Graph, MultiGraph and DiGraph."""

    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph))
    def test_both_graphs_empty(self, graph_type):
        first, second = graph_type(), graph_type()
        # Two empty graphs yield no mapping.
        assert vf2pp_isomorphism(first, second) is None

        first.add_node(0)
        # One empty, one non-empty: no mapping in either direction.
        assert vf2pp_isomorphism(first, second) is None
        assert vf2pp_isomorphism(second, first) is None

        second.add_node(0)
        assert vf2pp_isomorphism(first, second) == {0: 0}

    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph))
    def test_first_graph_empty(self, graph_type):
        empty = graph_type()
        nonempty = graph_type([(0, 1)])
        assert vf2pp_isomorphism(empty, nonempty) is None

    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph))
    def test_second_graph_empty(self, graph_type):
        nonempty = graph_type([(0, 1)])
        empty = graph_type()
        assert vf2pp_isomorphism(nonempty, empty) is None
144
+
145
+
146
class TestGraphISOVF2pp:
    """VF2++ isomorphism tests on simple (non-multi) undirected graphs."""

    def test_custom_graph1_same_labels(self):
        G1 = nx.Graph()

        mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
        edges1 = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 6), (3, 4), (5, 1), (5, 2)]

        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Extra pendant node in G1 only: no longer isomorphic.
        G1.add_edge(3, 7)
        G1.nodes[7]["label"] = "blue"
        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # Mirror the change in G2 (plus one more edge each side).
        G2.add_edges_from([(mapped[3], "X"), (mapped[6], mapped[5])])
        G1.add_edge(4, 7)
        G2.nodes["X"]["label"] = "blue"
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Remove corresponding edges from both, keeping isomorphism.
        G1.remove_edges_from([(1, 4), (1, 3)])
        G2.remove_edges_from([(mapped[1], mapped[5]), (mapped[1], mapped[2])])
        assert vf2pp_isomorphism(G1, G2, node_label="label")

    def test_custom_graph1_different_labels(self):
        G1 = nx.Graph()

        mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
        edges1 = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 6), (3, 4), (5, 1), (5, 2)]

        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        # Distinct labels pin the mapping down to exactly `mapped`.
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
        nx.set_node_attributes(
            G2,
            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
            "label",
        )
        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

    def test_custom_graph2_same_labels(self):
        G1 = nx.Graph()

        mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
        edges1 = [(1, 2), (1, 5), (5, 6), (2, 3), (2, 4), (3, 4), (4, 5), (2, 7)]

        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")

        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Carve out two isomorphic subgraphs.
        G2.remove_edge(mapped[1], mapped[2])
        G2.add_edge(mapped[1], mapped[4])
        H1 = nx.Graph(G1.subgraph([2, 3, 4, 7]))
        H2 = nx.Graph(G2.subgraph([mapped[1], mapped[4], mapped[5], mapped[6]]))
        assert vf2pp_isomorphism(H1, H2, node_label="label")

        # Grow both subgraphs in lockstep.
        H1.add_edges_from([(3, 7), (4, 7)])
        H2.add_edges_from([(mapped[1], mapped[6]), (mapped[4], mapped[6])])
        assert vf2pp_isomorphism(H1, H2, node_label="label")

    def test_custom_graph2_different_labels(self):
        G1 = nx.Graph()

        mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
        edges1 = [(1, 2), (1, 5), (5, 6), (2, 3), (2, 4), (3, 4), (4, 5), (2, 7)]

        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
        nx.set_node_attributes(
            G2,
            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
            "label",
        )

        # Add matching isolated nodes with matching labels.
        G1.add_node(0)
        G2.add_node("Z")
        G1.nodes[0]["label"] = G1.nodes[1]["label"]
        G2.nodes["Z"]["label"] = G1.nodes[1]["label"]
        mapped.update({0: "Z"})

        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

        # A single mismatched label kills the isomorphism.
        G2.nodes["Z"]["label"] = G1.nodes[2]["label"]
        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # One-sided extra edge: still not isomorphic.
        G1.nodes[0]["label"] = "blue"
        G2.nodes["Z"]["label"] = "blue"
        G1.add_edge(0, 1)

        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # Matching edge on the other side restores the exact mapping.
        G2.add_edge("Z", "A")
        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

    def test_custom_graph3_same_labels(self):
        G1 = nx.Graph()

        mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
        edges1 = [
            (1, 2), (1, 3), (2, 3), (3, 4), (4, 5), (4, 7),
            (4, 9), (5, 8), (8, 9), (5, 6), (6, 7), (5, 2),
        ]
        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Cross-connect in a way that breaks the correspondence.
        G1.add_edges_from([(6, 9), (7, 8)])
        G2.add_edges_from([(mapped[6], mapped[8]), (mapped[7], mapped[9])])
        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # Complete the crossings on both sides: isomorphic again.
        G1.add_edges_from([(6, 8), (7, 9)])
        G2.add_edges_from([(mapped[6], mapped[9]), (mapped[7], mapped[8])])
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Densify and add a fresh isolated node to each graph.
        G1.add_edges_from([(2, 7), (3, 6)])
        G2.add_edges_from([(mapped[2], mapped[7]), (mapped[3], mapped[6])])
        G1.add_node(10)
        G2.add_node("Z")
        G1.nodes[10]["label"] = "blue"
        G2.nodes["Z"]["label"] = "blue"

        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Attach the new node to opposite sides of each graph.
        G1.add_edges_from([(10, 1), (10, 5), (10, 8)])
        G2.add_edges_from([("Z", mapped[1]), ("Z", mapped[4]), ("Z", mapped[9])])
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Two subgraphs that are not isomorphic but are easy to make so.
        H1 = nx.Graph(G1.subgraph([2, 3, 4, 5, 6, 7, 10]))
        H2 = nx.Graph(
            G2.subgraph(
                [mapped[4], mapped[5], mapped[6], mapped[7], mapped[8], mapped[9], "Z"]
            )
        )
        assert vf2pp_isomorphism(H1, H2, node_label="label") is None

        # Rewire both until they match.
        H1.add_edges_from([(10, 2), (10, 6), (3, 6), (2, 7), (2, 6), (3, 7)])
        H2.add_edges_from(
            [("Z", mapped[7]), (mapped[6], mapped[9]), (mapped[7], mapped[8])]
        )
        assert vf2pp_isomorphism(H1, H2, node_label="label")

        # One edge each, but in non-corresponding positions.
        H1.add_edge(3, 5)
        H2.add_edge(mapped[5], mapped[7])
        assert vf2pp_isomorphism(H1, H2, node_label="label") is None

    def test_custom_graph3_different_labels(self):
        G1 = nx.Graph()

        mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
        edges1 = [
            (1, 2), (1, 3), (2, 3), (3, 4), (4, 5), (4, 7),
            (4, 9), (5, 8), (8, 9), (5, 6), (6, 7), (5, 2),
        ]
        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
        nx.set_node_attributes(
            G2,
            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
            "label",
        )
        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

        # One-sided extra edge breaks it.
        G1.add_edge(1, 7)
        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # The corresponding edge in G2 restores the exact mapping.
        G2.add_edge(9, 1)
        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

        # Matching extra node on each side.
        G1.add_node("A")
        G2.add_node("K")
        G1.nodes["A"]["label"] = "green"
        G2.nodes["K"]["label"] = "green"
        mapped.update({"A": "K"})

        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

        # Attach "A" to one side of G1 and "K" to the opposite side of G2.
        G1.add_edge("A", 6)
        G2.add_edge("K", 5)
        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # Symmetrize the structure; labels still force the old mapping, so fail.
        G1.add_edge(1, 5)
        G1.add_edge(2, 9)
        G2.add_edge(9, 3)
        G2.add_edge(8, 4)
        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # Uniform labels make the two symmetric sides interchangeable.
        for node in G1.nodes():
            G1.nodes[node]["label"] = "red"
            G2.nodes[mapped[node]]["label"] = "red"

        assert vf2pp_isomorphism(G1, G2, node_label="label")

    def test_custom_graph4_different_labels(self):
        G1 = nx.Graph()
        edges1 = [
            (1, 2), (2, 3), (3, 8), (3, 4), (4, 5), (4, 6), (3, 6),
            (8, 7), (8, 9), (5, 9), (10, 11), (11, 12), (12, 13), (11, 13),
        ]

        mapped = {
            1: "n", 2: "m", 3: "l", 4: "j", 5: "k", 6: "i", 7: "g",
            8: "h", 9: "f", 10: "b", 11: "a", 12: "d", 13: "e",
        }

        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
        nx.set_node_attributes(
            G2,
            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
            "label",
        )
        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

    def test_custom_graph4_same_labels(self):
        G1 = nx.Graph()
        edges1 = [
            (1, 2), (2, 3), (3, 8), (3, 4), (4, 5), (4, 6), (3, 6),
            (8, 7), (8, 9), (5, 9), (10, 11), (11, 12), (12, 13), (11, 13),
        ]

        mapped = {
            1: "n", 2: "m", 3: "l", 4: "j", 5: "k", 6: "i", 7: "g",
            8: "h", 9: "f", 10: "b", 11: "a", 12: "d", 13: "e",
        }

        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Isolated nodes with mismatched labels.
        G1.add_node(0)
        G2.add_node("z")
        G1.nodes[0]["label"] = "green"
        G2.nodes["z"]["label"] = "blue"

        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # Align the labels.
        G2.nodes["z"]["label"] = "green"
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Rewire both graphs while preserving isomorphism.
        G1.add_edge(2, 5)
        G2.remove_edge("i", "l")
        G2.add_edge("g", "l")
        G2.add_edge("m", "f")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Shrink the disconnected component on both sides.
        G1.remove_node(13)
        G2.remove_node("d")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Hook the new node into the (now path-of-3) disconnected component.
        G1.add_edge(0, 10)
        G2.add_edge("e", "z")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Join the two components into a single connected graph.
        G1.add_edge(11, 3)
        G1.add_edge(0, 8)
        G2.add_edge("a", "l")
        G2.add_edge("z", "j")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

    def test_custom_graph5_same_labels(self):
        G1 = nx.Graph()
        edges1 = [
            (1, 5), (1, 2), (1, 4), (2, 3), (2, 6), (3, 4),
            (3, 7), (4, 8), (5, 8), (5, 6), (6, 7), (7, 8),
        ]
        mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}

        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Different but symmetry-preserving edge additions on each side.
        G1.add_edges_from([(3, 6), (2, 7), (2, 5), (1, 3), (4, 7), (6, 8)])
        G2.add_edges_from(
            [
                (mapped[6], mapped[3]),
                (mapped[2], mapped[7]),
                (mapped[1], mapped[6]),
                (mapped[5], mapped[7]),
                (mapped[3], mapped[8]),
                (mapped[2], mapped[4]),
            ]
        )
        assert vf2pp_isomorphism(G1, G2, node_label="label")

        # Two different but isomorphic subgraphs of G1 and G2.
        H1 = nx.Graph(G1.subgraph([1, 5, 8, 6, 7, 3]))
        H2 = nx.Graph(
            G2.subgraph(
                [mapped[1], mapped[4], mapped[8], mapped[7], mapped[3], mapped[5]]
            )
        )
        assert vf2pp_isomorphism(H1, H2, node_label="label")

        # Delete corresponding nodes.
        H1.remove_node(8)
        H2.remove_node(mapped[7])
        assert vf2pp_isomorphism(H1, H2, node_label="label")

        # Re-orient one edge of H1, maintaining isomorphism.
        H1.add_edge(1, 6)
        H1.remove_edge(3, 6)
        assert vf2pp_isomorphism(H1, H2, node_label="label")

    def test_custom_graph5_different_labels(self):
        G1 = nx.Graph()
        edges1 = [
            (1, 5), (1, 2), (1, 4), (2, 3), (2, 6), (3, 4),
            (3, 7), (4, 8), (5, 8), (5, 6), (6, 7), (7, 8),
        ]
        mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}

        G1.add_edges_from(edges1)
        G2 = nx.relabel_nodes(G1, mapped)

        colors = ["red", "blue", "grey", "none", "brown", "solarized", "yellow", "pink"]
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
        nx.set_node_attributes(
            G2,
            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
            "label",
        )
        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

        # Give each matched pair of nodes deliberately different colors.
        for c, node in enumerate(G1.nodes()):
            G1.nodes[node]["label"] = colors[c]
            G2.nodes[mapped[node]]["label"] = colors[(c + 3) % len(colors)]

        assert vf2pp_isomorphism(G1, G2, node_label="label") is None

        # Compare symmetric, uniformly re-colored subgraphs of G1 and G2.
        H1 = G1.subgraph([1, 5])
        H2 = G2.subgraph(["i", "c"])
        for node1, node2 in zip(H1.nodes(), H2.nodes()):
            H1.nodes[node1]["label"] = "red"
            H2.nodes[node2]["label"] = "red"

        assert vf2pp_isomorphism(H1, H2, node_label="label")

    def test_disconnected_graph_all_same_labels(self):
        G1 = nx.Graph()
        G1.add_nodes_from(list(range(10)))

        mapped = {n: 9 - n for n in range(10)}
        G2 = nx.relabel_nodes(G1, mapped)
        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
        assert vf2pp_isomorphism(G1, G2, node_label="label")

    def test_disconnected_graph_all_different_labels(self):
        G1 = nx.Graph()
        G1.add_nodes_from(list(range(10)))

        mapped = {n: 9 - n for n in range(10)}
        G2 = nx.relabel_nodes(G1, mapped)

        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
        nx.set_node_attributes(
            G2,
            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
            "label",
        )
        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped

    def test_disconnected_graph_some_same_labels(self):
        G1 = nx.Graph()
        G1.add_nodes_from(list(range(10)))

        mapped = {n: 9 - n for n in range(10)}
        G2 = nx.relabel_nodes(G1, mapped)

        colors = [
            "white", "white", "white", "purple", "purple",
            "red", "red", "pink", "pink", "pink",
        ]

        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(colors))), "label")
        nx.set_node_attributes(
            G2, dict(zip([mapped[n] for n in G1], it.cycle(colors))), "label"
        )

        assert vf2pp_isomorphism(G1, G2, node_label="label")
672
+
673
+
674
+ class TestMultiGraphISOVF2pp:
675
+ def test_custom_multigraph1_same_labels(self):
676
+ G1 = nx.MultiGraph()
677
+
678
+ mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
679
+ edges1 = [
680
+ (1, 2),
681
+ (1, 3),
682
+ (1, 4),
683
+ (1, 4),
684
+ (1, 4),
685
+ (2, 3),
686
+ (2, 6),
687
+ (2, 6),
688
+ (3, 4),
689
+ (3, 4),
690
+ (5, 1),
691
+ (5, 1),
692
+ (5, 2),
693
+ (5, 2),
694
+ ]
695
+
696
+ G1.add_edges_from(edges1)
697
+ G2 = nx.relabel_nodes(G1, mapped)
698
+
699
+ nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
700
+ nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
701
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
702
+ assert m
703
+
704
+ # Transfer the 2-clique to the right side of G1
705
+ G1.remove_edges_from([(2, 6), (2, 6)])
706
+ G1.add_edges_from([(3, 6), (3, 6)])
707
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
708
+ assert not m
709
+
710
+ # Delete an edges, making them symmetrical, so the position of the 2-clique doesn't matter
711
+ G2.remove_edge(mapped[1], mapped[4])
712
+ G1.remove_edge(1, 4)
713
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
714
+ assert m
715
+
716
+ # Add self-loops
717
+ G1.add_edges_from([(5, 5), (5, 5), (1, 1)])
718
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
719
+ assert not m
720
+
721
+ # Compensate in G2
722
+ G2.add_edges_from(
723
+ [(mapped[1], mapped[1]), (mapped[4], mapped[4]), (mapped[4], mapped[4])]
724
+ )
725
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
726
+ assert m
727
+
728
+ def test_custom_multigraph1_different_labels(self):
729
+ G1 = nx.MultiGraph()
730
+
731
+ mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
732
+ edges1 = [
733
+ (1, 2),
734
+ (1, 3),
735
+ (1, 4),
736
+ (1, 4),
737
+ (1, 4),
738
+ (2, 3),
739
+ (2, 6),
740
+ (2, 6),
741
+ (3, 4),
742
+ (3, 4),
743
+ (5, 1),
744
+ (5, 1),
745
+ (5, 2),
746
+ (5, 2),
747
+ ]
748
+
749
+ G1.add_edges_from(edges1)
750
+ G2 = nx.relabel_nodes(G1, mapped)
751
+
752
+ nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
753
+ nx.set_node_attributes(
754
+ G2,
755
+ dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
756
+ "label",
757
+ )
758
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
759
+ assert m
760
+ assert m == mapped
761
+
762
+ # Re-structure G1, maintaining the degree sequence
763
+ G1.remove_edge(1, 4)
764
+ G1.add_edge(1, 5)
765
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
766
+ assert not m
767
+
768
+ # Restructure G2, making it isomorphic to G1
769
+ G2.remove_edge("A", "D")
770
+ G2.add_edge("A", "Z")
771
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
772
+ assert m
773
+ assert m == mapped
774
+
775
+ # Add edge from node to itself
776
+ G1.add_edges_from([(6, 6), (6, 6), (6, 6)])
777
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
778
+ assert not m
779
+
780
+ # Same for G2
781
+ G2.add_edges_from([("E", "E"), ("E", "E"), ("E", "E")])
782
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
783
+ assert m
784
+ assert m == mapped
785
+
786
+ def test_custom_multigraph2_same_labels(self):
787
+ G1 = nx.MultiGraph()
788
+
789
+ mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
790
+ edges1 = [
791
+ (1, 2),
792
+ (1, 2),
793
+ (1, 5),
794
+ (1, 5),
795
+ (1, 5),
796
+ (5, 6),
797
+ (2, 3),
798
+ (2, 3),
799
+ (2, 4),
800
+ (3, 4),
801
+ (3, 4),
802
+ (4, 5),
803
+ (4, 5),
804
+ (4, 5),
805
+ (2, 7),
806
+ (2, 7),
807
+ (2, 7),
808
+ ]
809
+
810
+ G1.add_edges_from(edges1)
811
+ G2 = nx.relabel_nodes(G1, mapped)
812
+
813
+ nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
814
+ nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
815
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
816
+ assert m
817
+
818
+ # Obtain two non-isomorphic subgraphs from the graph
819
+ G2.remove_edges_from([(mapped[1], mapped[2]), (mapped[1], mapped[2])])
820
+ G2.add_edge(mapped[1], mapped[4])
821
+ H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 7]))
822
+ H2 = nx.MultiGraph(G2.subgraph([mapped[1], mapped[4], mapped[5], mapped[6]]))
823
+
824
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
825
+ assert not m
826
+
827
+ # Make them isomorphic
828
+ H1.remove_edge(3, 4)
829
+ H1.add_edges_from([(2, 3), (2, 4), (2, 4)])
830
+ H2.add_edges_from([(mapped[5], mapped[6]), (mapped[5], mapped[6])])
831
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
832
+ assert m
833
+
834
+ # Remove triangle edge
835
+ H1.remove_edges_from([(2, 3), (2, 3), (2, 3)])
836
+ H2.remove_edges_from([(mapped[5], mapped[4])] * 3)
837
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
838
+ assert m
839
+
840
+ # Change the edge orientation such that H1 is rotated H2
841
+ H1.remove_edges_from([(2, 7), (2, 7)])
842
+ H1.add_edges_from([(3, 4), (3, 4)])
843
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
844
+ assert m
845
+
846
+ # Add extra edges maintaining degree sequence, but in a non-symmetrical manner
847
+ H2.add_edge(mapped[5], mapped[1])
848
+ H1.add_edge(3, 4)
849
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
850
+ assert not m
851
+
852
+ def test_custom_multigraph2_different_labels(self):
853
+ G1 = nx.MultiGraph()
854
+
855
+ mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
856
+ edges1 = [
857
+ (1, 2),
858
+ (1, 2),
859
+ (1, 5),
860
+ (1, 5),
861
+ (1, 5),
862
+ (5, 6),
863
+ (2, 3),
864
+ (2, 3),
865
+ (2, 4),
866
+ (3, 4),
867
+ (3, 4),
868
+ (4, 5),
869
+ (4, 5),
870
+ (4, 5),
871
+ (2, 7),
872
+ (2, 7),
873
+ (2, 7),
874
+ ]
875
+
876
+ G1.add_edges_from(edges1)
877
+ G2 = nx.relabel_nodes(G1, mapped)
878
+
879
+ nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
880
+ nx.set_node_attributes(
881
+ G2,
882
+ dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
883
+ "label",
884
+ )
885
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
886
+ assert m
887
+ assert m == mapped
888
+
889
+ # Re-structure G1
890
+ G1.remove_edge(2, 7)
891
+ G1.add_edge(5, 6)
892
+
893
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
894
+ assert not m
895
+
896
+ # Same for G2
897
+ G2.remove_edge("B", "C")
898
+ G2.add_edge("G", "F")
899
+
900
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
901
+ assert m
902
+ assert m == mapped
903
+
904
+ # Delete node from G1 and G2, keeping them isomorphic
905
+ G1.remove_node(3)
906
+ G2.remove_node("D")
907
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
908
+ assert m
909
+
910
+ # Change G1 edges
911
+ G1.remove_edge(1, 2)
912
+ G1.remove_edge(2, 7)
913
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
914
+ assert not m
915
+
916
+ # Make G2 identical to G1, but with different edge orientation and different labels
917
+ G2.add_edges_from([("A", "C"), ("C", "E"), ("C", "E")])
918
+ G2.remove_edges_from(
919
+ [("A", "G"), ("A", "G"), ("F", "G"), ("E", "G"), ("E", "G")]
920
+ )
921
+
922
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
923
+ assert not m
924
+
925
+ # Make all labels the same, so G1 and G2 are also isomorphic
926
+ for n1, n2 in zip(G1.nodes(), G2.nodes()):
927
+ G1.nodes[n1]["label"] = "blue"
928
+ G2.nodes[n2]["label"] = "blue"
929
+
930
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
931
+ assert m
932
+
933
+ def test_custom_multigraph3_same_labels(self):
934
+ G1 = nx.MultiGraph()
935
+
936
+ mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
937
+ edges1 = [
938
+ (1, 2),
939
+ (1, 3),
940
+ (1, 3),
941
+ (2, 3),
942
+ (2, 3),
943
+ (3, 4),
944
+ (4, 5),
945
+ (4, 7),
946
+ (4, 9),
947
+ (4, 9),
948
+ (4, 9),
949
+ (5, 8),
950
+ (5, 8),
951
+ (8, 9),
952
+ (8, 9),
953
+ (5, 6),
954
+ (6, 7),
955
+ (6, 7),
956
+ (6, 7),
957
+ (5, 2),
958
+ ]
959
+ G1.add_edges_from(edges1)
960
+ G2 = nx.relabel_nodes(G1, mapped)
961
+
962
+ nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
963
+ nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
964
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
965
+ assert m
966
+
967
+ # Connect nodes maintaining symmetry
968
+ G1.add_edges_from([(6, 9), (7, 8), (5, 8), (4, 9), (4, 9)])
969
+ G2.add_edges_from(
970
+ [
971
+ (mapped[6], mapped[8]),
972
+ (mapped[7], mapped[9]),
973
+ (mapped[5], mapped[8]),
974
+ (mapped[4], mapped[9]),
975
+ (mapped[4], mapped[9]),
976
+ ]
977
+ )
978
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
979
+ assert not m
980
+
981
+ # Make isomorphic
982
+ G1.add_edges_from([(6, 8), (6, 8), (7, 9), (7, 9), (7, 9)])
983
+ G2.add_edges_from(
984
+ [
985
+ (mapped[6], mapped[8]),
986
+ (mapped[6], mapped[9]),
987
+ (mapped[7], mapped[8]),
988
+ (mapped[7], mapped[9]),
989
+ (mapped[7], mapped[9]),
990
+ ]
991
+ )
992
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
993
+ assert m
994
+
995
+ # Connect more nodes
996
+ G1.add_edges_from([(2, 7), (2, 7), (3, 6), (3, 6)])
997
+ G2.add_edges_from(
998
+ [
999
+ (mapped[2], mapped[7]),
1000
+ (mapped[2], mapped[7]),
1001
+ (mapped[3], mapped[6]),
1002
+ (mapped[3], mapped[6]),
1003
+ ]
1004
+ )
1005
+ G1.add_node(10)
1006
+ G2.add_node("Z")
1007
+ G1.nodes[10]["label"] = "blue"
1008
+ G2.nodes["Z"]["label"] = "blue"
1009
+
1010
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1011
+ assert m
1012
+
1013
+ # Connect the newly added node, to opposite sides of the graph
1014
+ G1.add_edges_from([(10, 1), (10, 5), (10, 8), (10, 10), (10, 10)])
1015
+ G2.add_edges_from(
1016
+ [
1017
+ ("Z", mapped[1]),
1018
+ ("Z", mapped[4]),
1019
+ ("Z", mapped[9]),
1020
+ ("Z", "Z"),
1021
+ ("Z", "Z"),
1022
+ ]
1023
+ )
1024
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1025
+ assert not m
1026
+
1027
+ # We connected the new node to opposite sides, so G1 must be symmetrical to G2. Re-structure them to be so
1028
+ G1.remove_edges_from([(1, 3), (4, 9), (4, 9), (7, 9)])
1029
+ G2.remove_edges_from(
1030
+ [
1031
+ (mapped[1], mapped[3]),
1032
+ (mapped[4], mapped[9]),
1033
+ (mapped[4], mapped[9]),
1034
+ (mapped[7], mapped[9]),
1035
+ ]
1036
+ )
1037
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1038
+ assert m
1039
+
1040
+ # Get two subgraphs that are not isomorphic but are easy to make
1041
+ H1 = nx.Graph(G1.subgraph([2, 3, 4, 5, 6, 7, 10]))
1042
+ H2 = nx.Graph(
1043
+ G2.subgraph(
1044
+ [mapped[4], mapped[5], mapped[6], mapped[7], mapped[8], mapped[9], "Z"]
1045
+ )
1046
+ )
1047
+
1048
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
1049
+ assert not m
1050
+
1051
+ # Restructure both to make them isomorphic
1052
+ H1.add_edges_from([(10, 2), (10, 6), (3, 6), (2, 7), (2, 6), (3, 7)])
1053
+ H2.add_edges_from(
1054
+ [("Z", mapped[7]), (mapped[6], mapped[9]), (mapped[7], mapped[8])]
1055
+ )
1056
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
1057
+ assert m
1058
+
1059
+ # Remove one self-loop in H2
1060
+ H2.remove_edge("Z", "Z")
1061
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
1062
+ assert not m
1063
+
1064
+ # Compensate in H1
1065
+ H1.remove_edge(10, 10)
1066
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
1067
+ assert m
1068
+
1069
+ def test_custom_multigraph3_different_labels(self):
1070
+ G1 = nx.MultiGraph()
1071
+
1072
+ mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
1073
+ edges1 = [
1074
+ (1, 2),
1075
+ (1, 3),
1076
+ (1, 3),
1077
+ (2, 3),
1078
+ (2, 3),
1079
+ (3, 4),
1080
+ (4, 5),
1081
+ (4, 7),
1082
+ (4, 9),
1083
+ (4, 9),
1084
+ (4, 9),
1085
+ (5, 8),
1086
+ (5, 8),
1087
+ (8, 9),
1088
+ (8, 9),
1089
+ (5, 6),
1090
+ (6, 7),
1091
+ (6, 7),
1092
+ (6, 7),
1093
+ (5, 2),
1094
+ ]
1095
+
1096
+ G1.add_edges_from(edges1)
1097
+ G2 = nx.relabel_nodes(G1, mapped)
1098
+
1099
+ nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
1100
+ nx.set_node_attributes(
1101
+ G2,
1102
+ dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
1103
+ "label",
1104
+ )
1105
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1106
+ assert m
1107
+ assert m == mapped
1108
+
1109
+ # Delete edge maintaining isomorphism
1110
+ G1.remove_edge(4, 9)
1111
+ G2.remove_edge(4, 6)
1112
+
1113
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1114
+ assert m
1115
+ assert m == mapped
1116
+
1117
+ # Change edge orientation such that G1 mirrors G2
1118
+ G1.add_edges_from([(4, 9), (1, 2), (1, 2)])
1119
+ G1.remove_edges_from([(1, 3), (1, 3)])
1120
+ G2.add_edges_from([(3, 5), (7, 9)])
1121
+ G2.remove_edge(8, 9)
1122
+
1123
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1124
+ assert not m
1125
+
1126
+ # Make all labels the same, so G1 and G2 are also isomorphic
1127
+ for n1, n2 in zip(G1.nodes(), G2.nodes()):
1128
+ G1.nodes[n1]["label"] = "blue"
1129
+ G2.nodes[n2]["label"] = "blue"
1130
+
1131
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1132
+ assert m
1133
+
1134
+ G1.add_node(10)
1135
+ G2.add_node("Z")
1136
+ G1.nodes[10]["label"] = "green"
1137
+ G2.nodes["Z"]["label"] = "green"
1138
+
1139
+ # Add different number of edges between the new nodes and themselves
1140
+ G1.add_edges_from([(10, 10), (10, 10)])
1141
+ G2.add_edges_from([("Z", "Z")])
1142
+
1143
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1144
+ assert not m
1145
+
1146
+ # Make the number of self-edges equal
1147
+ G1.remove_edge(10, 10)
1148
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1149
+ assert m
1150
+
1151
+ # Connect the new node to the graph
1152
+ G1.add_edges_from([(10, 3), (10, 4)])
1153
+ G2.add_edges_from([("Z", 8), ("Z", 3)])
1154
+
1155
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1156
+ assert m
1157
+
1158
+ # Remove central node
1159
+ G1.remove_node(4)
1160
+ G2.remove_node(3)
1161
+ G1.add_edges_from([(5, 6), (5, 6), (5, 7)])
1162
+ G2.add_edges_from([(1, 6), (1, 6), (6, 2)])
1163
+
1164
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1165
+ assert m
1166
+
1167
+ def test_custom_multigraph4_same_labels(self):
1168
+ G1 = nx.MultiGraph()
1169
+ edges1 = [
1170
+ (1, 2),
1171
+ (1, 2),
1172
+ (2, 2),
1173
+ (2, 3),
1174
+ (3, 8),
1175
+ (3, 8),
1176
+ (3, 4),
1177
+ (4, 5),
1178
+ (4, 5),
1179
+ (4, 5),
1180
+ (4, 6),
1181
+ (3, 6),
1182
+ (3, 6),
1183
+ (6, 6),
1184
+ (8, 7),
1185
+ (7, 7),
1186
+ (8, 9),
1187
+ (9, 9),
1188
+ (8, 9),
1189
+ (8, 9),
1190
+ (5, 9),
1191
+ (10, 11),
1192
+ (11, 12),
1193
+ (12, 13),
1194
+ (11, 13),
1195
+ (10, 10),
1196
+ (10, 11),
1197
+ (11, 13),
1198
+ ]
1199
+
1200
+ mapped = {
1201
+ 1: "n",
1202
+ 2: "m",
1203
+ 3: "l",
1204
+ 4: "j",
1205
+ 5: "k",
1206
+ 6: "i",
1207
+ 7: "g",
1208
+ 8: "h",
1209
+ 9: "f",
1210
+ 10: "b",
1211
+ 11: "a",
1212
+ 12: "d",
1213
+ 13: "e",
1214
+ }
1215
+
1216
+ G1.add_edges_from(edges1)
1217
+ G2 = nx.relabel_nodes(G1, mapped)
1218
+
1219
+ nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
1220
+ nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
1221
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1222
+ assert m
1223
+
1224
+ # Add extra but corresponding edges to both graphs
1225
+ G1.add_edges_from([(2, 2), (2, 3), (2, 8), (3, 4)])
1226
+ G2.add_edges_from([("m", "m"), ("m", "l"), ("m", "h"), ("l", "j")])
1227
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1228
+ assert m
1229
+
1230
+ # Obtain subgraphs
1231
+ H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 6, 10, 11, 12, 13]))
1232
+ H2 = nx.MultiGraph(
1233
+ G2.subgraph(
1234
+ [
1235
+ mapped[2],
1236
+ mapped[3],
1237
+ mapped[8],
1238
+ mapped[9],
1239
+ mapped[10],
1240
+ mapped[11],
1241
+ mapped[12],
1242
+ mapped[13],
1243
+ ]
1244
+ )
1245
+ )
1246
+
1247
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
1248
+ assert not m
1249
+
1250
+ # Make them isomorphic
1251
+ H2.remove_edges_from(
1252
+ [(mapped[3], mapped[2]), (mapped[9], mapped[8]), (mapped[2], mapped[2])]
1253
+ )
1254
+ H2.add_edges_from([(mapped[9], mapped[9]), (mapped[2], mapped[8])])
1255
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
1256
+ assert m
1257
+
1258
+ # Re-structure the disconnected sub-graph
1259
+ H1.remove_node(12)
1260
+ H2.remove_node(mapped[12])
1261
+ H1.add_edge(13, 13)
1262
+ H2.add_edge(mapped[13], mapped[13])
1263
+
1264
+ # Connect the two disconnected components, forming a single graph
1265
+ H1.add_edges_from([(3, 13), (6, 11)])
1266
+ H2.add_edges_from([(mapped[8], mapped[10]), (mapped[2], mapped[11])])
1267
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
1268
+ assert m
1269
+
1270
+ # Change orientation of self-loops in one graph, maintaining the degree sequence
1271
+ H1.remove_edges_from([(2, 2), (3, 6)])
1272
+ H1.add_edges_from([(6, 6), (2, 3)])
1273
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
1274
+ assert not m
1275
+
1276
+ def test_custom_multigraph4_different_labels(self):
1277
+ G1 = nx.MultiGraph()
1278
+ edges1 = [
1279
+ (1, 2),
1280
+ (1, 2),
1281
+ (2, 2),
1282
+ (2, 3),
1283
+ (3, 8),
1284
+ (3, 8),
1285
+ (3, 4),
1286
+ (4, 5),
1287
+ (4, 5),
1288
+ (4, 5),
1289
+ (4, 6),
1290
+ (3, 6),
1291
+ (3, 6),
1292
+ (6, 6),
1293
+ (8, 7),
1294
+ (7, 7),
1295
+ (8, 9),
1296
+ (9, 9),
1297
+ (8, 9),
1298
+ (8, 9),
1299
+ (5, 9),
1300
+ (10, 11),
1301
+ (11, 12),
1302
+ (12, 13),
1303
+ (11, 13),
1304
+ ]
1305
+
1306
+ mapped = {
1307
+ 1: "n",
1308
+ 2: "m",
1309
+ 3: "l",
1310
+ 4: "j",
1311
+ 5: "k",
1312
+ 6: "i",
1313
+ 7: "g",
1314
+ 8: "h",
1315
+ 9: "f",
1316
+ 10: "b",
1317
+ 11: "a",
1318
+ 12: "d",
1319
+ 13: "e",
1320
+ }
1321
+
1322
+ G1.add_edges_from(edges1)
1323
+ G2 = nx.relabel_nodes(G1, mapped)
1324
+
1325
+ nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
1326
+ nx.set_node_attributes(
1327
+ G2,
1328
+ dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
1329
+ "label",
1330
+ )
1331
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1332
+ assert m == mapped
1333
+
1334
+ # Add extra but corresponding edges to both graphs
1335
+ G1.add_edges_from([(2, 2), (2, 3), (2, 8), (3, 4)])
1336
+ G2.add_edges_from([("m", "m"), ("m", "l"), ("m", "h"), ("l", "j")])
1337
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1338
+ assert m == mapped
1339
+
1340
+ # Obtain isomorphic subgraphs
1341
+ H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 6]))
1342
+ H2 = nx.MultiGraph(G2.subgraph(["m", "l", "j", "i"]))
1343
+
1344
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
1345
+ assert m
1346
+
1347
+ # Delete the 3-clique, keeping only the path-graph. Also, H1 mirrors H2
1348
+ H1.remove_node(4)
1349
+ H2.remove_node("j")
1350
+ H1.remove_edges_from([(2, 2), (2, 3), (6, 6)])
1351
+ H2.remove_edges_from([("l", "i"), ("m", "m"), ("m", "m")])
1352
+
1353
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
1354
+ assert not m
1355
+
1356
+ # Assign the same labels so that mirroring means isomorphic
1357
+ for n1, n2 in zip(H1.nodes(), H2.nodes()):
1358
+ H1.nodes[n1]["label"] = "red"
1359
+ H2.nodes[n2]["label"] = "red"
1360
+
1361
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
1362
+ assert m
1363
+
1364
+ # Leave only one node with self-loop
1365
+ H1.remove_nodes_from([3, 6])
1366
+ H2.remove_nodes_from(["m", "l"])
1367
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
1368
+ assert m
1369
+
1370
+ # Remove one self-loop from H1
1371
+ H1.remove_edge(2, 2)
1372
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
1373
+ assert not m
1374
+
1375
+ # Same for H2
1376
+ H2.remove_edge("i", "i")
1377
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
1378
+ assert m
1379
+
1380
+ # Compose H1 with the disconnected sub-graph of G1. Same for H2
1381
+ S1 = nx.compose(H1, nx.MultiGraph(G1.subgraph([10, 11, 12, 13])))
1382
+ S2 = nx.compose(H2, nx.MultiGraph(G2.subgraph(["a", "b", "d", "e"])))
1383
+
1384
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
1385
+ assert m
1386
+
1387
+ # Connect the two components
1388
+ S1.add_edges_from([(13, 13), (13, 13), (2, 13)])
1389
+ S2.add_edges_from([("a", "a"), ("a", "a"), ("i", "e")])
1390
+ m = vf2pp_isomorphism(H1, H2, node_label="label")
1391
+ assert m
1392
+
1393
+ def test_custom_multigraph5_same_labels(self):
1394
+ G1 = nx.MultiGraph()
1395
+
1396
+ edges1 = [
1397
+ (1, 5),
1398
+ (1, 2),
1399
+ (1, 4),
1400
+ (2, 3),
1401
+ (2, 6),
1402
+ (3, 4),
1403
+ (3, 7),
1404
+ (4, 8),
1405
+ (5, 8),
1406
+ (5, 6),
1407
+ (6, 7),
1408
+ (7, 8),
1409
+ ]
1410
+ mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}
1411
+
1412
+ G1.add_edges_from(edges1)
1413
+ G2 = nx.relabel_nodes(G1, mapped)
1414
+ nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
1415
+ nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
1416
+
1417
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1418
+ assert m
1419
+
1420
+ # Add multiple edges and self-loops, maintaining isomorphism
1421
+ G1.add_edges_from(
1422
+ [(1, 2), (1, 2), (3, 7), (8, 8), (8, 8), (7, 8), (2, 3), (5, 6)]
1423
+ )
1424
+ G2.add_edges_from(
1425
+ [
1426
+ ("a", "h"),
1427
+ ("a", "h"),
1428
+ ("d", "j"),
1429
+ ("c", "c"),
1430
+ ("c", "c"),
1431
+ ("j", "c"),
1432
+ ("d", "h"),
1433
+ ("g", "b"),
1434
+ ]
1435
+ )
1436
+
1437
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1438
+ assert m
1439
+
1440
+ # Make G2 to be the rotated G1
1441
+ G2.remove_edges_from(
1442
+ [
1443
+ ("a", "h"),
1444
+ ("a", "h"),
1445
+ ("d", "j"),
1446
+ ("c", "c"),
1447
+ ("c", "c"),
1448
+ ("j", "c"),
1449
+ ("d", "h"),
1450
+ ("g", "b"),
1451
+ ]
1452
+ )
1453
+ G2.add_edges_from(
1454
+ [
1455
+ ("d", "i"),
1456
+ ("a", "h"),
1457
+ ("g", "b"),
1458
+ ("g", "b"),
1459
+ ("i", "i"),
1460
+ ("i", "i"),
1461
+ ("b", "j"),
1462
+ ("d", "j"),
1463
+ ]
1464
+ )
1465
+
1466
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1467
+ assert m
1468
+
1469
+ def test_disconnected_multigraph_all_same_labels(self):
1470
+ G1 = nx.MultiGraph()
1471
+ G1.add_nodes_from(list(range(10)))
1472
+ G1.add_edges_from([(i, i) for i in range(10)])
1473
+
1474
+ mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
1475
+ G2 = nx.relabel_nodes(G1, mapped)
1476
+
1477
+ nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
1478
+ nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
1479
+
1480
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1481
+ assert m
1482
+
1483
+ # Add self-loops to non-mapped nodes. Should be the same, as the graph is disconnected.
1484
+ G1.add_edges_from([(i, i) for i in range(5, 8)] * 3)
1485
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1486
+ assert not m
1487
+
1488
+ # Compensate in G2
1489
+ G2.add_edges_from([(i, i) for i in range(3)] * 3)
1490
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1491
+ assert m
1492
+
1493
+ # Add one more self-loop in G2
1494
+ G2.add_edges_from([(0, 0), (1, 1), (1, 1)])
1495
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1496
+ assert not m
1497
+
1498
+ # Compensate in G1
1499
+ G1.add_edges_from([(5, 5), (7, 7), (7, 7)])
1500
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1501
+ assert m
1502
+
1503
+ def test_disconnected_multigraph_all_different_labels(self):
1504
+ G1 = nx.MultiGraph()
1505
+ G1.add_nodes_from(list(range(10)))
1506
+ G1.add_edges_from([(i, i) for i in range(10)])
1507
+
1508
+ mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
1509
+ G2 = nx.relabel_nodes(G1, mapped)
1510
+
1511
+ nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
1512
+ nx.set_node_attributes(
1513
+ G2,
1514
+ dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
1515
+ "label",
1516
+ )
1517
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1518
+ assert m
1519
+ assert m == mapped
1520
+
1521
+ # Add self-loops to non-mapped nodes. Now it is not the same, as there are different labels
1522
+ G1.add_edges_from([(i, i) for i in range(5, 8)] * 3)
1523
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1524
+ assert not m
1525
+
1526
+ # Add self-loops to non mapped nodes in G2 as well
1527
+ G2.add_edges_from([(mapped[i], mapped[i]) for i in range(3)] * 7)
1528
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1529
+ assert not m
1530
+
1531
+ # Add self-loops to mapped nodes in G2
1532
+ G2.add_edges_from([(mapped[i], mapped[i]) for i in range(5, 8)] * 3)
1533
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1534
+ assert not m
1535
+
1536
+ # Add self-loops to G1 so that they are even in both graphs
1537
+ G1.add_edges_from([(i, i) for i in range(3)] * 7)
1538
+ m = vf2pp_isomorphism(G1, G2, node_label="label")
1539
+ assert m
1540
+
1541
+
1542
+ class TestDiGraphISOVF2pp:
1543
+ def test_wikipedia_graph(self):
1544
+ edges1 = [
1545
+ (1, 5),
1546
+ (1, 2),
1547
+ (1, 4),
1548
+ (3, 2),
1549
+ (6, 2),
1550
+ (3, 4),
1551
+ (7, 3),
1552
+ (4, 8),
1553
+ (5, 8),
1554
+ (6, 5),
1555
+ (6, 7),
1556
+ (7, 8),
1557
+ ]
1558
+ mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}
1559
+
1560
+ G1 = nx.DiGraph(edges1)
1561
+ G2 = nx.relabel_nodes(G1, mapped)
1562
+
1563
+ assert vf2pp_isomorphism(G1, G2) == mapped
1564
+
1565
+ # Change the direction of an edge
1566
+ G1.remove_edge(1, 5)
1567
+ G1.add_edge(5, 1)
1568
+ assert vf2pp_isomorphism(G1, G2) is None
1569
+
1570
+ def test_non_isomorphic_same_degree_sequence(self):
1571
+ r"""
1572
+ G1 G2
1573
+ x--------------x x--------------x
1574
+ | \ | | \ |
1575
+ | x-------x | | x-------x |
1576
+ | | | | | | | |
1577
+ | x-------x | | x-------x |
1578
+ | / | | \ |
1579
+ x--------------x x--------------x
1580
+ """
1581
+ edges1 = [
1582
+ (1, 5),
1583
+ (1, 2),
1584
+ (4, 1),
1585
+ (3, 2),
1586
+ (3, 4),
1587
+ (4, 8),
1588
+ (5, 8),
1589
+ (6, 5),
1590
+ (6, 7),
1591
+ (7, 8),
1592
+ ]
1593
+ edges2 = [
1594
+ (1, 5),
1595
+ (1, 2),
1596
+ (4, 1),
1597
+ (3, 2),
1598
+ (4, 3),
1599
+ (5, 8),
1600
+ (6, 5),
1601
+ (6, 7),
1602
+ (3, 7),
1603
+ (8, 7),
1604
+ ]
1605
+
1606
+ G1 = nx.DiGraph(edges1)
1607
+ G2 = nx.DiGraph(edges2)
1608
+ assert vf2pp_isomorphism(G1, G2) is None
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py ADDED
@@ -0,0 +1,3106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import itertools as it
2
+
3
+ import pytest
4
+
5
+ import networkx as nx
6
+ from networkx import vf2pp_is_isomorphic, vf2pp_isomorphism
7
+ from networkx.algorithms.isomorphism.vf2pp import (
8
+ _consistent_PT,
9
+ _cut_PT,
10
+ _feasibility,
11
+ _find_candidates,
12
+ _find_candidates_Di,
13
+ _GraphParameters,
14
+ _initialize_parameters,
15
+ _matching_order,
16
+ _restore_Tinout,
17
+ _restore_Tinout_Di,
18
+ _StateParameters,
19
+ _update_Tinout,
20
+ )
21
+
22
+ labels_same = ["blue"]
23
+
24
+ labels_many = [
25
+ "white",
26
+ "red",
27
+ "blue",
28
+ "green",
29
+ "orange",
30
+ "black",
31
+ "purple",
32
+ "yellow",
33
+ "brown",
34
+ "cyan",
35
+ "solarized",
36
+ "pink",
37
+ "none",
38
+ ]
39
+
40
+
41
+ class TestNodeOrdering:
42
+ def test_empty_graph(self):
43
+ G1 = nx.Graph()
44
+ G2 = nx.Graph()
45
+ gparams = _GraphParameters(G1, G2, None, None, None, None, None)
46
+ assert len(set(_matching_order(gparams))) == 0
47
+
48
+ def test_single_node(self):
49
+ G1 = nx.Graph()
50
+ G2 = nx.Graph()
51
+ G1.add_node(1)
52
+ G2.add_node(1)
53
+
54
+ nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
55
+ nx.set_node_attributes(
56
+ G2,
57
+ dict(zip(G2, it.cycle(labels_many))),
58
+ "label",
59
+ )
60
+ l1, l2 = (
61
+ nx.get_node_attributes(G1, "label"),
62
+ nx.get_node_attributes(G2, "label"),
63
+ )
64
+
65
+ gparams = _GraphParameters(
66
+ G1,
67
+ G2,
68
+ l1,
69
+ l2,
70
+ nx.utils.groups(l1),
71
+ nx.utils.groups(l2),
72
+ nx.utils.groups(dict(G2.degree())),
73
+ )
74
+ m = _matching_order(gparams)
75
+ assert m == [1]
76
+
77
+ def test_matching_order(self):
78
+ labels = [
79
+ "blue",
80
+ "blue",
81
+ "red",
82
+ "red",
83
+ "red",
84
+ "red",
85
+ "green",
86
+ "green",
87
+ "green",
88
+ "yellow",
89
+ "purple",
90
+ "purple",
91
+ "blue",
92
+ "blue",
93
+ ]
94
+ G1 = nx.Graph(
95
+ [
96
+ (0, 1),
97
+ (0, 2),
98
+ (1, 2),
99
+ (2, 5),
100
+ (2, 4),
101
+ (1, 3),
102
+ (1, 4),
103
+ (3, 6),
104
+ (4, 6),
105
+ (6, 7),
106
+ (7, 8),
107
+ (9, 10),
108
+ (9, 11),
109
+ (11, 12),
110
+ (11, 13),
111
+ (12, 13),
112
+ (10, 13),
113
+ ]
114
+ )
115
+ G2 = G1.copy()
116
+ nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels))), "label")
117
+ nx.set_node_attributes(
118
+ G2,
119
+ dict(zip(G2, it.cycle(labels))),
120
+ "label",
121
+ )
122
+ l1, l2 = (
123
+ nx.get_node_attributes(G1, "label"),
124
+ nx.get_node_attributes(G2, "label"),
125
+ )
126
+ gparams = _GraphParameters(
127
+ G1,
128
+ G2,
129
+ l1,
130
+ l2,
131
+ nx.utils.groups(l1),
132
+ nx.utils.groups(l2),
133
+ nx.utils.groups(dict(G2.degree())),
134
+ )
135
+
136
+ expected = [9, 11, 10, 13, 12, 1, 2, 4, 0, 3, 6, 5, 7, 8]
137
+ assert _matching_order(gparams) == expected
138
+
139
+ def test_matching_order_all_branches(self):
140
+ G1 = nx.Graph(
141
+ [(0, 1), (0, 2), (0, 3), (0, 4), (1, 2), (1, 3), (1, 4), (2, 4), (3, 4)]
142
+ )
143
+ G1.add_node(5)
144
+ G2 = G1.copy()
145
+
146
+ G1.nodes[0]["label"] = "black"
147
+ G1.nodes[1]["label"] = "blue"
148
+ G1.nodes[2]["label"] = "blue"
149
+ G1.nodes[3]["label"] = "red"
150
+ G1.nodes[4]["label"] = "red"
151
+ G1.nodes[5]["label"] = "blue"
152
+
153
+ G2.nodes[0]["label"] = "black"
154
+ G2.nodes[1]["label"] = "blue"
155
+ G2.nodes[2]["label"] = "blue"
156
+ G2.nodes[3]["label"] = "red"
157
+ G2.nodes[4]["label"] = "red"
158
+ G2.nodes[5]["label"] = "blue"
159
+
160
+ l1, l2 = (
161
+ nx.get_node_attributes(G1, "label"),
162
+ nx.get_node_attributes(G2, "label"),
163
+ )
164
+ gparams = _GraphParameters(
165
+ G1,
166
+ G2,
167
+ l1,
168
+ l2,
169
+ nx.utils.groups(l1),
170
+ nx.utils.groups(l2),
171
+ nx.utils.groups(dict(G2.degree())),
172
+ )
173
+
174
+ expected = [0, 4, 1, 3, 2, 5]
175
+ assert _matching_order(gparams) == expected
176
+
177
+
178
+ class TestGraphCandidateSelection:
179
+ G1_edges = [
180
+ (1, 2),
181
+ (1, 4),
182
+ (1, 5),
183
+ (2, 3),
184
+ (2, 4),
185
+ (3, 4),
186
+ (4, 5),
187
+ (1, 6),
188
+ (6, 7),
189
+ (6, 8),
190
+ (8, 9),
191
+ (7, 9),
192
+ ]
193
+ mapped = {
194
+ 0: "x",
195
+ 1: "a",
196
+ 2: "b",
197
+ 3: "c",
198
+ 4: "d",
199
+ 5: "e",
200
+ 6: "f",
201
+ 7: "g",
202
+ 8: "h",
203
+ 9: "i",
204
+ }
205
+
206
+ def test_no_covered_neighbors_no_labels(self):
207
+ G1 = nx.Graph()
208
+ G1.add_edges_from(self.G1_edges)
209
+ G1.add_node(0)
210
+ G2 = nx.relabel_nodes(G1, self.mapped)
211
+
212
+ G1_degree = dict(G1.degree)
213
+ l1 = dict(G1.nodes(data="label", default=-1))
214
+ l2 = dict(G2.nodes(data="label", default=-1))
215
+ gparams = _GraphParameters(
216
+ G1,
217
+ G2,
218
+ l1,
219
+ l2,
220
+ nx.utils.groups(l1),
221
+ nx.utils.groups(l2),
222
+ nx.utils.groups(dict(G2.degree())),
223
+ )
224
+
225
+ m = {9: self.mapped[9], 1: self.mapped[1]}
226
+ m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
227
+
228
+ T1 = {7, 8, 2, 4, 5}
229
+ T1_tilde = {0, 3, 6}
230
+ T2 = {"g", "h", "b", "d", "e"}
231
+ T2_tilde = {"x", "c", "f"}
232
+
233
+ sparams = _StateParameters(
234
+ m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
235
+ )
236
+
237
+ u = 3
238
+ candidates = _find_candidates(u, gparams, sparams, G1_degree)
239
+ assert candidates == {self.mapped[u]}
240
+
241
+ u = 0
242
+ candidates = _find_candidates(u, gparams, sparams, G1_degree)
243
+ assert candidates == {self.mapped[u]}
244
+
245
+ m.pop(9)
246
+ m_rev.pop(self.mapped[9])
247
+
248
+ T1 = {2, 4, 5, 6}
249
+ T1_tilde = {0, 3, 7, 8, 9}
250
+ T2 = {"g", "h", "b", "d", "e", "f"}
251
+ T2_tilde = {"x", "c", "g", "h", "i"}
252
+
253
+ sparams = _StateParameters(
254
+ m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
255
+ )
256
+
257
+ u = 7
258
+ candidates = _find_candidates(u, gparams, sparams, G1_degree)
259
+ assert candidates == {
260
+ self.mapped[u],
261
+ self.mapped[8],
262
+ self.mapped[3],
263
+ self.mapped[9],
264
+ }
265
+
266
+ def test_no_covered_neighbors_with_labels(self):
267
+ G1 = nx.Graph()
268
+ G1.add_edges_from(self.G1_edges)
269
+ G1.add_node(0)
270
+ G2 = nx.relabel_nodes(G1, self.mapped)
271
+
272
+ G1_degree = dict(G1.degree)
273
+ nx.set_node_attributes(
274
+ G1,
275
+ dict(zip(G1, it.cycle(labels_many))),
276
+ "label",
277
+ )
278
+ nx.set_node_attributes(
279
+ G2,
280
+ dict(
281
+ zip(
282
+ [self.mapped[n] for n in G1],
283
+ it.cycle(labels_many),
284
+ )
285
+ ),
286
+ "label",
287
+ )
288
+ l1 = dict(G1.nodes(data="label", default=-1))
289
+ l2 = dict(G2.nodes(data="label", default=-1))
290
+ gparams = _GraphParameters(
291
+ G1,
292
+ G2,
293
+ l1,
294
+ l2,
295
+ nx.utils.groups(l1),
296
+ nx.utils.groups(l2),
297
+ nx.utils.groups(dict(G2.degree())),
298
+ )
299
+
300
+ m = {9: self.mapped[9], 1: self.mapped[1]}
301
+ m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
302
+
303
+ T1 = {7, 8, 2, 4, 5, 6}
304
+ T1_tilde = {0, 3}
305
+ T2 = {"g", "h", "b", "d", "e", "f"}
306
+ T2_tilde = {"x", "c"}
307
+
308
+ sparams = _StateParameters(
309
+ m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
310
+ )
311
+
312
+ u = 3
313
+ candidates = _find_candidates(u, gparams, sparams, G1_degree)
314
+ assert candidates == {self.mapped[u]}
315
+
316
+ u = 0
317
+ candidates = _find_candidates(u, gparams, sparams, G1_degree)
318
+ assert candidates == {self.mapped[u]}
319
+
320
+ # Change label of disconnected node
321
+ G1.nodes[u]["label"] = "blue"
322
+ l1 = dict(G1.nodes(data="label", default=-1))
323
+ l2 = dict(G2.nodes(data="label", default=-1))
324
+ gparams = _GraphParameters(
325
+ G1,
326
+ G2,
327
+ l1,
328
+ l2,
329
+ nx.utils.groups(l1),
330
+ nx.utils.groups(l2),
331
+ nx.utils.groups(dict(G2.degree())),
332
+ )
333
+
334
+ # No candidate
335
+ candidates = _find_candidates(u, gparams, sparams, G1_degree)
336
+ assert candidates == set()
337
+
338
+ m.pop(9)
339
+ m_rev.pop(self.mapped[9])
340
+
341
+ T1 = {2, 4, 5, 6}
342
+ T1_tilde = {0, 3, 7, 8, 9}
343
+ T2 = {"b", "d", "e", "f"}
344
+ T2_tilde = {"x", "c", "g", "h", "i"}
345
+
346
+ sparams = _StateParameters(
347
+ m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
348
+ )
349
+
350
+ u = 7
351
+ candidates = _find_candidates(u, gparams, sparams, G1_degree)
352
+ assert candidates == {self.mapped[u]}
353
+
354
+ G1.nodes[8]["label"] = G1.nodes[7]["label"]
355
+ G2.nodes[self.mapped[8]]["label"] = G1.nodes[7]["label"]
356
+ l1 = dict(G1.nodes(data="label", default=-1))
357
+ l2 = dict(G2.nodes(data="label", default=-1))
358
+ gparams = _GraphParameters(
359
+ G1,
360
+ G2,
361
+ l1,
362
+ l2,
363
+ nx.utils.groups(l1),
364
+ nx.utils.groups(l2),
365
+ nx.utils.groups(dict(G2.degree())),
366
+ )
367
+
368
+ candidates = _find_candidates(u, gparams, sparams, G1_degree)
369
+ assert candidates == {self.mapped[u], self.mapped[8]}
370
+
371
+ def test_covered_neighbors_no_labels(self):
372
+ G1 = nx.Graph()
373
+ G1.add_edges_from(self.G1_edges)
374
+ G1.add_node(0)
375
+ G2 = nx.relabel_nodes(G1, self.mapped)
376
+
377
+ G1_degree = dict(G1.degree)
378
+ l1 = dict(G1.nodes(data=None, default=-1))
379
+ l2 = dict(G2.nodes(data=None, default=-1))
380
+ gparams = _GraphParameters(
381
+ G1,
382
+ G2,
383
+ l1,
384
+ l2,
385
+ nx.utils.groups(l1),
386
+ nx.utils.groups(l2),
387
+ nx.utils.groups(dict(G2.degree())),
388
+ )
389
+
390
+ m = {9: self.mapped[9], 1: self.mapped[1]}
391
+ m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
392
+
393
+ T1 = {7, 8, 2, 4, 5, 6}
394
+ T1_tilde = {0, 3}
395
+ T2 = {"g", "h", "b", "d", "e", "f"}
396
+ T2_tilde = {"x", "c"}
397
+
398
+ sparams = _StateParameters(
399
+ m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
400
+ )
401
+
402
+ u = 5
403
+ candidates = _find_candidates(u, gparams, sparams, G1_degree)
404
+ assert candidates == {self.mapped[u]}
405
+
406
+ u = 6
407
+ candidates = _find_candidates(u, gparams, sparams, G1_degree)
408
+ assert candidates == {self.mapped[u], self.mapped[2]}
409
+
410
+ def test_covered_neighbors_with_labels(self):
411
+ G1 = nx.Graph()
412
+ G1.add_edges_from(self.G1_edges)
413
+ G1.add_node(0)
414
+ G2 = nx.relabel_nodes(G1, self.mapped)
415
+
416
+ G1_degree = dict(G1.degree)
417
+ nx.set_node_attributes(
418
+ G1,
419
+ dict(zip(G1, it.cycle(labels_many))),
420
+ "label",
421
+ )
422
+ nx.set_node_attributes(
423
+ G2,
424
+ dict(
425
+ zip(
426
+ [self.mapped[n] for n in G1],
427
+ it.cycle(labels_many),
428
+ )
429
+ ),
430
+ "label",
431
+ )
432
+ l1 = dict(G1.nodes(data="label", default=-1))
433
+ l2 = dict(G2.nodes(data="label", default=-1))
434
+ gparams = _GraphParameters(
435
+ G1,
436
+ G2,
437
+ l1,
438
+ l2,
439
+ nx.utils.groups(l1),
440
+ nx.utils.groups(l2),
441
+ nx.utils.groups(dict(G2.degree())),
442
+ )
443
+
444
+ m = {9: self.mapped[9], 1: self.mapped[1]}
445
+ m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
446
+
447
+ T1 = {7, 8, 2, 4, 5, 6}
448
+ T1_tilde = {0, 3}
449
+ T2 = {"g", "h", "b", "d", "e", "f"}
450
+ T2_tilde = {"x", "c"}
451
+
452
+ sparams = _StateParameters(
453
+ m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
454
+ )
455
+
456
+ u = 5
457
+ candidates = _find_candidates(u, gparams, sparams, G1_degree)
458
+ assert candidates == {self.mapped[u]}
459
+
460
+ u = 6
461
+ candidates = _find_candidates(u, gparams, sparams, G1_degree)
462
+ assert candidates == {self.mapped[u]}
463
+
464
+ # Assign to 2, the same label as 6
465
+ G1.nodes[2]["label"] = G1.nodes[u]["label"]
466
+ G2.nodes[self.mapped[2]]["label"] = G1.nodes[u]["label"]
467
+ l1 = dict(G1.nodes(data="label", default=-1))
468
+ l2 = dict(G2.nodes(data="label", default=-1))
469
+ gparams = _GraphParameters(
470
+ G1,
471
+ G2,
472
+ l1,
473
+ l2,
474
+ nx.utils.groups(l1),
475
+ nx.utils.groups(l2),
476
+ nx.utils.groups(dict(G2.degree())),
477
+ )
478
+
479
+ candidates = _find_candidates(u, gparams, sparams, G1_degree)
480
+ assert candidates == {self.mapped[u], self.mapped[2]}
481
+
482
+
483
+ class TestDiGraphCandidateSelection:
484
+ G1_edges = [
485
+ (1, 2),
486
+ (1, 4),
487
+ (5, 1),
488
+ (2, 3),
489
+ (4, 2),
490
+ (3, 4),
491
+ (4, 5),
492
+ (1, 6),
493
+ (6, 7),
494
+ (6, 8),
495
+ (8, 9),
496
+ (7, 9),
497
+ ]
498
+ mapped = {
499
+ 0: "x",
500
+ 1: "a",
501
+ 2: "b",
502
+ 3: "c",
503
+ 4: "d",
504
+ 5: "e",
505
+ 6: "f",
506
+ 7: "g",
507
+ 8: "h",
508
+ 9: "i",
509
+ }
510
+
511
+ def test_no_covered_neighbors_no_labels(self):
512
+ G1 = nx.DiGraph()
513
+ G1.add_edges_from(self.G1_edges)
514
+ G1.add_node(0)
515
+ G2 = nx.relabel_nodes(G1, self.mapped)
516
+
517
+ G1_degree = {
518
+ n: (in_degree, out_degree)
519
+ for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
520
+ }
521
+
522
+ l1 = dict(G1.nodes(data="label", default=-1))
523
+ l2 = dict(G2.nodes(data="label", default=-1))
524
+ gparams = _GraphParameters(
525
+ G1,
526
+ G2,
527
+ l1,
528
+ l2,
529
+ nx.utils.groups(l1),
530
+ nx.utils.groups(l2),
531
+ nx.utils.groups(
532
+ {
533
+ node: (in_degree, out_degree)
534
+ for (node, in_degree), (_, out_degree) in zip(
535
+ G2.in_degree(), G2.out_degree()
536
+ )
537
+ }
538
+ ),
539
+ )
540
+
541
+ m = {9: self.mapped[9], 1: self.mapped[1]}
542
+ m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
543
+
544
+ T1_out = {2, 4, 6}
545
+ T1_in = {5, 7, 8}
546
+ T1_tilde = {0, 3}
547
+ T2_out = {"b", "d", "f"}
548
+ T2_in = {"e", "g", "h"}
549
+ T2_tilde = {"x", "c"}
550
+
551
+ sparams = _StateParameters(
552
+ m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
553
+ )
554
+
555
+ u = 3
556
+ candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
557
+ assert candidates == {self.mapped[u]}
558
+
559
+ u = 0
560
+ candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
561
+ assert candidates == {self.mapped[u]}
562
+
563
+ m.pop(9)
564
+ m_rev.pop(self.mapped[9])
565
+
566
+ T1_out = {2, 4, 6}
567
+ T1_in = {5}
568
+ T1_tilde = {0, 3, 7, 8, 9}
569
+ T2_out = {"b", "d", "f"}
570
+ T2_in = {"e"}
571
+ T2_tilde = {"x", "c", "g", "h", "i"}
572
+
573
+ sparams = _StateParameters(
574
+ m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
575
+ )
576
+
577
+ u = 7
578
+ candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
579
+ assert candidates == {self.mapped[u], self.mapped[8], self.mapped[3]}
580
+
581
+ def test_no_covered_neighbors_with_labels(self):
582
+ G1 = nx.DiGraph()
583
+ G1.add_edges_from(self.G1_edges)
584
+ G1.add_node(0)
585
+ G2 = nx.relabel_nodes(G1, self.mapped)
586
+
587
+ G1_degree = {
588
+ n: (in_degree, out_degree)
589
+ for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
590
+ }
591
+ nx.set_node_attributes(
592
+ G1,
593
+ dict(zip(G1, it.cycle(labels_many))),
594
+ "label",
595
+ )
596
+ nx.set_node_attributes(
597
+ G2,
598
+ dict(
599
+ zip(
600
+ [self.mapped[n] for n in G1],
601
+ it.cycle(labels_many),
602
+ )
603
+ ),
604
+ "label",
605
+ )
606
+ l1 = dict(G1.nodes(data="label", default=-1))
607
+ l2 = dict(G2.nodes(data="label", default=-1))
608
+ gparams = _GraphParameters(
609
+ G1,
610
+ G2,
611
+ l1,
612
+ l2,
613
+ nx.utils.groups(l1),
614
+ nx.utils.groups(l2),
615
+ nx.utils.groups(
616
+ {
617
+ node: (in_degree, out_degree)
618
+ for (node, in_degree), (_, out_degree) in zip(
619
+ G2.in_degree(), G2.out_degree()
620
+ )
621
+ }
622
+ ),
623
+ )
624
+
625
+ m = {9: self.mapped[9], 1: self.mapped[1]}
626
+ m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
627
+
628
+ T1_out = {2, 4, 6}
629
+ T1_in = {5, 7, 8}
630
+ T1_tilde = {0, 3}
631
+ T2_out = {"b", "d", "f"}
632
+ T2_in = {"e", "g", "h"}
633
+ T2_tilde = {"x", "c"}
634
+
635
+ sparams = _StateParameters(
636
+ m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
637
+ )
638
+
639
+ u = 3
640
+ candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
641
+ assert candidates == {self.mapped[u]}
642
+
643
+ u = 0
644
+ candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
645
+ assert candidates == {self.mapped[u]}
646
+
647
+ # Change label of disconnected node
648
+ G1.nodes[u]["label"] = "blue"
649
+ l1 = dict(G1.nodes(data="label", default=-1))
650
+ l2 = dict(G2.nodes(data="label", default=-1))
651
+ gparams = _GraphParameters(
652
+ G1,
653
+ G2,
654
+ l1,
655
+ l2,
656
+ nx.utils.groups(l1),
657
+ nx.utils.groups(l2),
658
+ nx.utils.groups(
659
+ {
660
+ node: (in_degree, out_degree)
661
+ for (node, in_degree), (_, out_degree) in zip(
662
+ G2.in_degree(), G2.out_degree()
663
+ )
664
+ }
665
+ ),
666
+ )
667
+
668
+ # No candidate
669
+ candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
670
+ assert candidates == set()
671
+
672
+ m.pop(9)
673
+ m_rev.pop(self.mapped[9])
674
+
675
+ T1_out = {2, 4, 6}
676
+ T1_in = {5}
677
+ T1_tilde = {0, 3, 7, 8, 9}
678
+ T2_out = {"b", "d", "f"}
679
+ T2_in = {"e"}
680
+ T2_tilde = {"x", "c", "g", "h", "i"}
681
+
682
+ sparams = _StateParameters(
683
+ m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
684
+ )
685
+
686
+ u = 7
687
+ candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
688
+ assert candidates == {self.mapped[u]}
689
+
690
+ G1.nodes[8]["label"] = G1.nodes[7]["label"]
691
+ G2.nodes[self.mapped[8]]["label"] = G1.nodes[7]["label"]
692
+ l1 = dict(G1.nodes(data="label", default=-1))
693
+ l2 = dict(G2.nodes(data="label", default=-1))
694
+ gparams = _GraphParameters(
695
+ G1,
696
+ G2,
697
+ l1,
698
+ l2,
699
+ nx.utils.groups(l1),
700
+ nx.utils.groups(l2),
701
+ nx.utils.groups(
702
+ {
703
+ node: (in_degree, out_degree)
704
+ for (node, in_degree), (_, out_degree) in zip(
705
+ G2.in_degree(), G2.out_degree()
706
+ )
707
+ }
708
+ ),
709
+ )
710
+
711
+ candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
712
+ assert candidates == {self.mapped[u], self.mapped[8]}
713
+
714
+ def test_covered_neighbors_no_labels(self):
715
+ G1 = nx.DiGraph()
716
+ G1.add_edges_from(self.G1_edges)
717
+ G1.add_node(0)
718
+ G2 = nx.relabel_nodes(G1, self.mapped)
719
+
720
+ G1_degree = {
721
+ n: (in_degree, out_degree)
722
+ for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
723
+ }
724
+
725
+ l1 = dict(G1.nodes(data=None, default=-1))
726
+ l2 = dict(G2.nodes(data=None, default=-1))
727
+ gparams = _GraphParameters(
728
+ G1,
729
+ G2,
730
+ l1,
731
+ l2,
732
+ nx.utils.groups(l1),
733
+ nx.utils.groups(l2),
734
+ nx.utils.groups(
735
+ {
736
+ node: (in_degree, out_degree)
737
+ for (node, in_degree), (_, out_degree) in zip(
738
+ G2.in_degree(), G2.out_degree()
739
+ )
740
+ }
741
+ ),
742
+ )
743
+
744
+ m = {9: self.mapped[9], 1: self.mapped[1]}
745
+ m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
746
+
747
+ T1_out = {2, 4, 6}
748
+ T1_in = {5, 7, 8}
749
+ T1_tilde = {0, 3}
750
+ T2_out = {"b", "d", "f"}
751
+ T2_in = {"e", "g", "h"}
752
+ T2_tilde = {"x", "c"}
753
+
754
+ sparams = _StateParameters(
755
+ m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
756
+ )
757
+
758
+ u = 5
759
+ candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
760
+ assert candidates == {self.mapped[u]}
761
+
762
+ u = 6
763
+ candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
764
+ assert candidates == {self.mapped[u]}
765
+
766
+ # Change the direction of an edge to make the degree orientation same as first candidate of u.
767
+ G1.remove_edge(4, 2)
768
+ G1.add_edge(2, 4)
769
+ G2.remove_edge("d", "b")
770
+ G2.add_edge("b", "d")
771
+
772
+ gparams = _GraphParameters(
773
+ G1,
774
+ G2,
775
+ l1,
776
+ l2,
777
+ nx.utils.groups(l1),
778
+ nx.utils.groups(l2),
779
+ nx.utils.groups(
780
+ {
781
+ node: (in_degree, out_degree)
782
+ for (node, in_degree), (_, out_degree) in zip(
783
+ G2.in_degree(), G2.out_degree()
784
+ )
785
+ }
786
+ ),
787
+ )
788
+
789
+ candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
790
+ assert candidates == {self.mapped[u], self.mapped[2]}
791
+
792
+ def test_covered_neighbors_with_labels(self):
793
+ G1 = nx.DiGraph()
794
+ G1.add_edges_from(self.G1_edges)
795
+ G1.add_node(0)
796
+ G2 = nx.relabel_nodes(G1, self.mapped)
797
+
798
+ G1.remove_edge(4, 2)
799
+ G1.add_edge(2, 4)
800
+ G2.remove_edge("d", "b")
801
+ G2.add_edge("b", "d")
802
+
803
+ G1_degree = {
804
+ n: (in_degree, out_degree)
805
+ for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
806
+ }
807
+
808
+ nx.set_node_attributes(
809
+ G1,
810
+ dict(zip(G1, it.cycle(labels_many))),
811
+ "label",
812
+ )
813
+ nx.set_node_attributes(
814
+ G2,
815
+ dict(
816
+ zip(
817
+ [self.mapped[n] for n in G1],
818
+ it.cycle(labels_many),
819
+ )
820
+ ),
821
+ "label",
822
+ )
823
+ l1 = dict(G1.nodes(data="label", default=-1))
824
+ l2 = dict(G2.nodes(data="label", default=-1))
825
+ gparams = _GraphParameters(
826
+ G1,
827
+ G2,
828
+ l1,
829
+ l2,
830
+ nx.utils.groups(l1),
831
+ nx.utils.groups(l2),
832
+ nx.utils.groups(
833
+ {
834
+ node: (in_degree, out_degree)
835
+ for (node, in_degree), (_, out_degree) in zip(
836
+ G2.in_degree(), G2.out_degree()
837
+ )
838
+ }
839
+ ),
840
+ )
841
+
842
+ m = {9: self.mapped[9], 1: self.mapped[1]}
843
+ m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
844
+
845
+ T1_out = {2, 4, 6}
846
+ T1_in = {5, 7, 8}
847
+ T1_tilde = {0, 3}
848
+ T2_out = {"b", "d", "f"}
849
+ T2_in = {"e", "g", "h"}
850
+ T2_tilde = {"x", "c"}
851
+
852
+ sparams = _StateParameters(
853
+ m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
854
+ )
855
+
856
+ u = 5
857
+ candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
858
+ assert candidates == {self.mapped[u]}
859
+
860
+ u = 6
861
+ candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
862
+ assert candidates == {self.mapped[u]}
863
+
864
+ # Assign to 2, the same label as 6
865
+ G1.nodes[2]["label"] = G1.nodes[u]["label"]
866
+ G2.nodes[self.mapped[2]]["label"] = G1.nodes[u]["label"]
867
+ l1 = dict(G1.nodes(data="label", default=-1))
868
+ l2 = dict(G2.nodes(data="label", default=-1))
869
+ gparams = _GraphParameters(
870
+ G1,
871
+ G2,
872
+ l1,
873
+ l2,
874
+ nx.utils.groups(l1),
875
+ nx.utils.groups(l2),
876
+ nx.utils.groups(
877
+ {
878
+ node: (in_degree, out_degree)
879
+ for (node, in_degree), (_, out_degree) in zip(
880
+ G2.in_degree(), G2.out_degree()
881
+ )
882
+ }
883
+ ),
884
+ )
885
+
886
+ candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
887
+ assert candidates == {self.mapped[u], self.mapped[2]}
888
+
889
+ # Change the direction of an edge to make the degree orientation same as first candidate of u.
890
+ G1.remove_edge(2, 4)
891
+ G1.add_edge(4, 2)
892
+ G2.remove_edge("b", "d")
893
+ G2.add_edge("d", "b")
894
+
895
+ gparams = _GraphParameters(
896
+ G1,
897
+ G2,
898
+ l1,
899
+ l2,
900
+ nx.utils.groups(l1),
901
+ nx.utils.groups(l2),
902
+ nx.utils.groups(
903
+ {
904
+ node: (in_degree, out_degree)
905
+ for (node, in_degree), (_, out_degree) in zip(
906
+ G2.in_degree(), G2.out_degree()
907
+ )
908
+ }
909
+ ),
910
+ )
911
+
912
+ candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
913
+ assert candidates == {self.mapped[u]}
914
+
915
+ def test_same_in_out_degrees_no_candidate(self):
916
+ g1 = nx.DiGraph([(4, 1), (4, 2), (3, 4), (5, 4), (6, 4)])
917
+ g2 = nx.DiGraph([(1, 4), (2, 4), (3, 4), (4, 5), (4, 6)])
918
+
919
+ l1 = dict(g1.nodes(data=None, default=-1))
920
+ l2 = dict(g2.nodes(data=None, default=-1))
921
+ gparams = _GraphParameters(
922
+ g1,
923
+ g2,
924
+ l1,
925
+ l2,
926
+ nx.utils.groups(l1),
927
+ nx.utils.groups(l2),
928
+ nx.utils.groups(
929
+ {
930
+ node: (in_degree, out_degree)
931
+ for (node, in_degree), (_, out_degree) in zip(
932
+ g2.in_degree(), g2.out_degree()
933
+ )
934
+ }
935
+ ),
936
+ )
937
+
938
+ g1_degree = {
939
+ n: (in_degree, out_degree)
940
+ for (n, in_degree), (_, out_degree) in zip(g1.in_degree, g1.out_degree)
941
+ }
942
+
943
+ m = {1: 1, 2: 2, 3: 3}
944
+ m_rev = m.copy()
945
+
946
+ T1_out = {4}
947
+ T1_in = {4}
948
+ T1_tilde = {5, 6}
949
+ T2_out = {4}
950
+ T2_in = {4}
951
+ T2_tilde = {5, 6}
952
+
953
+ sparams = _StateParameters(
954
+ m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
955
+ )
956
+
957
+ u = 4
958
+ # despite the same in and out degree, there's no candidate for u=4
959
+ candidates = _find_candidates_Di(u, gparams, sparams, g1_degree)
960
+ assert candidates == set()
961
+ # Notice how the regular candidate selection method returns wrong result.
962
+ assert _find_candidates(u, gparams, sparams, g1_degree) == {4}
963
+
964
+
965
+ class TestGraphISOFeasibility:
966
+ def test_const_covered_neighbors(self):
967
+ G1 = nx.Graph([(0, 1), (1, 2), (3, 0), (3, 2)])
968
+ G2 = nx.Graph([("a", "b"), ("b", "c"), ("k", "a"), ("k", "c")])
969
+ gparams = _GraphParameters(G1, G2, None, None, None, None, None)
970
+ sparams = _StateParameters(
971
+ {0: "a", 1: "b", 2: "c"},
972
+ {"a": 0, "b": 1, "c": 2},
973
+ None,
974
+ None,
975
+ None,
976
+ None,
977
+ None,
978
+ None,
979
+ None,
980
+ None,
981
+ )
982
+ u, v = 3, "k"
983
+ assert _consistent_PT(u, v, gparams, sparams)
984
+
985
+ def test_const_no_covered_neighbors(self):
986
+ G1 = nx.Graph([(0, 1), (1, 2), (3, 4), (3, 5)])
987
+ G2 = nx.Graph([("a", "b"), ("b", "c"), ("k", "w"), ("k", "z")])
988
+ gparams = _GraphParameters(G1, G2, None, None, None, None, None)
989
+ sparams = _StateParameters(
990
+ {0: "a", 1: "b", 2: "c"},
991
+ {"a": 0, "b": 1, "c": 2},
992
+ None,
993
+ None,
994
+ None,
995
+ None,
996
+ None,
997
+ None,
998
+ None,
999
+ None,
1000
+ )
1001
+ u, v = 3, "k"
1002
+ assert _consistent_PT(u, v, gparams, sparams)
1003
+
1004
+ def test_const_mixed_covered_uncovered_neighbors(self):
1005
+ G1 = nx.Graph([(0, 1), (1, 2), (3, 0), (3, 2), (3, 4), (3, 5)])
1006
+ G2 = nx.Graph(
1007
+ [("a", "b"), ("b", "c"), ("k", "a"), ("k", "c"), ("k", "w"), ("k", "z")]
1008
+ )
1009
+ gparams = _GraphParameters(G1, G2, None, None, None, None, None)
1010
+ sparams = _StateParameters(
1011
+ {0: "a", 1: "b", 2: "c"},
1012
+ {"a": 0, "b": 1, "c": 2},
1013
+ None,
1014
+ None,
1015
+ None,
1016
+ None,
1017
+ None,
1018
+ None,
1019
+ None,
1020
+ None,
1021
+ )
1022
+ u, v = 3, "k"
1023
+ assert _consistent_PT(u, v, gparams, sparams)
1024
+
1025
+ def test_const_fail_cases(self):
1026
+ G1 = nx.Graph(
1027
+ [
1028
+ (0, 1),
1029
+ (1, 2),
1030
+ (10, 0),
1031
+ (10, 3),
1032
+ (10, 4),
1033
+ (10, 5),
1034
+ (10, 6),
1035
+ (4, 1),
1036
+ (5, 3),
1037
+ ]
1038
+ )
1039
+ G2 = nx.Graph(
1040
+ [
1041
+ ("a", "b"),
1042
+ ("b", "c"),
1043
+ ("k", "a"),
1044
+ ("k", "d"),
1045
+ ("k", "e"),
1046
+ ("k", "f"),
1047
+ ("k", "g"),
1048
+ ("e", "b"),
1049
+ ("f", "d"),
1050
+ ]
1051
+ )
1052
+ gparams = _GraphParameters(G1, G2, None, None, None, None, None)
1053
+ sparams = _StateParameters(
1054
+ {0: "a", 1: "b", 2: "c", 3: "d"},
1055
+ {"a": 0, "b": 1, "c": 2, "d": 3},
1056
+ None,
1057
+ None,
1058
+ None,
1059
+ None,
1060
+ None,
1061
+ None,
1062
+ None,
1063
+ None,
1064
+ )
1065
+ u, v = 10, "k"
1066
+ assert _consistent_PT(u, v, gparams, sparams)
1067
+
1068
+ # Delete one uncovered neighbor of u. Notice how it still passes the test.
1069
+ # Two reasons for this:
1070
+ # 1. If u, v had different degrees from the beginning, they wouldn't
1071
+ # be selected as candidates in the first place.
1072
+ # 2. Even if they are selected, consistency is basically 1-look-ahead,
1073
+ # meaning that we take into consideration the relation of the
1074
+ # candidates with their mapped neighbors. The node we deleted is
1075
+ # not a covered neighbor.
1076
+ # Such nodes will be checked by the cut_PT function, which is
1077
+ # basically the 2-look-ahead, checking the relation of the
1078
+ # candidates with T1, T2 (in which belongs the node we just deleted).
1079
+ G1.remove_node(6)
1080
+ assert _consistent_PT(u, v, gparams, sparams)
1081
+
1082
+ # Add one more covered neighbor of u in G1
1083
+ G1.add_edge(u, 2)
1084
+ assert not _consistent_PT(u, v, gparams, sparams)
1085
+
1086
+ # Compensate in G2
1087
+ G2.add_edge(v, "c")
1088
+ assert _consistent_PT(u, v, gparams, sparams)
1089
+
1090
+ # Add one more covered neighbor of v in G2
1091
+ G2.add_edge(v, "x")
1092
+ G1.add_node(7)
1093
+ sparams.mapping.update({7: "x"})
1094
+ sparams.reverse_mapping.update({"x": 7})
1095
+ assert not _consistent_PT(u, v, gparams, sparams)
1096
+
1097
+ # Compendate in G1
1098
+ G1.add_edge(u, 7)
1099
+ assert _consistent_PT(u, v, gparams, sparams)
1100
+
1101
+ @pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph))
1102
+ def test_cut_inconsistent_labels(self, graph_type):
1103
+ G1 = graph_type(
1104
+ [
1105
+ (0, 1),
1106
+ (1, 2),
1107
+ (10, 0),
1108
+ (10, 3),
1109
+ (10, 4),
1110
+ (10, 5),
1111
+ (10, 6),
1112
+ (4, 1),
1113
+ (5, 3),
1114
+ ]
1115
+ )
1116
+ G2 = graph_type(
1117
+ [
1118
+ ("a", "b"),
1119
+ ("b", "c"),
1120
+ ("k", "a"),
1121
+ ("k", "d"),
1122
+ ("k", "e"),
1123
+ ("k", "f"),
1124
+ ("k", "g"),
1125
+ ("e", "b"),
1126
+ ("f", "d"),
1127
+ ]
1128
+ )
1129
+
1130
+ l1 = {n: "blue" for n in G1.nodes()}
1131
+ l2 = {n: "blue" for n in G2.nodes()}
1132
+ l1.update({6: "green"}) # Change the label of one neighbor of u
1133
+
1134
+ gparams = _GraphParameters(
1135
+ G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
1136
+ )
1137
+ sparams = _StateParameters(
1138
+ {0: "a", 1: "b", 2: "c", 3: "d"},
1139
+ {"a": 0, "b": 1, "c": 2, "d": 3},
1140
+ None,
1141
+ None,
1142
+ None,
1143
+ None,
1144
+ None,
1145
+ None,
1146
+ None,
1147
+ None,
1148
+ )
1149
+
1150
+ u, v = 10, "k"
1151
+ assert _cut_PT(u, v, gparams, sparams)
1152
+
1153
+ def test_cut_consistent_labels(self):
1154
+ G1 = nx.Graph(
1155
+ [
1156
+ (0, 1),
1157
+ (1, 2),
1158
+ (10, 0),
1159
+ (10, 3),
1160
+ (10, 4),
1161
+ (10, 5),
1162
+ (10, 6),
1163
+ (4, 1),
1164
+ (5, 3),
1165
+ ]
1166
+ )
1167
+ G2 = nx.Graph(
1168
+ [
1169
+ ("a", "b"),
1170
+ ("b", "c"),
1171
+ ("k", "a"),
1172
+ ("k", "d"),
1173
+ ("k", "e"),
1174
+ ("k", "f"),
1175
+ ("k", "g"),
1176
+ ("e", "b"),
1177
+ ("f", "d"),
1178
+ ]
1179
+ )
1180
+
1181
+ l1 = {n: "blue" for n in G1.nodes()}
1182
+ l2 = {n: "blue" for n in G2.nodes()}
1183
+
1184
+ gparams = _GraphParameters(
1185
+ G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
1186
+ )
1187
+ sparams = _StateParameters(
1188
+ {0: "a", 1: "b", 2: "c", 3: "d"},
1189
+ {"a": 0, "b": 1, "c": 2, "d": 3},
1190
+ {4, 5},
1191
+ None,
1192
+ {6},
1193
+ None,
1194
+ {"e", "f"},
1195
+ None,
1196
+ {"g"},
1197
+ None,
1198
+ )
1199
+
1200
+ u, v = 10, "k"
1201
+ assert not _cut_PT(u, v, gparams, sparams)
1202
+
1203
+ def test_cut_same_labels(self):
1204
+ G1 = nx.Graph(
1205
+ [
1206
+ (0, 1),
1207
+ (1, 2),
1208
+ (10, 0),
1209
+ (10, 3),
1210
+ (10, 4),
1211
+ (10, 5),
1212
+ (10, 6),
1213
+ (4, 1),
1214
+ (5, 3),
1215
+ ]
1216
+ )
1217
+ mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"}
1218
+ G2 = nx.relabel_nodes(G1, mapped)
1219
+ l1 = {n: "blue" for n in G1.nodes()}
1220
+ l2 = {n: "blue" for n in G2.nodes()}
1221
+
1222
+ gparams = _GraphParameters(
1223
+ G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
1224
+ )
1225
+ sparams = _StateParameters(
1226
+ {0: "a", 1: "b", 2: "c", 3: "d"},
1227
+ {"a": 0, "b": 1, "c": 2, "d": 3},
1228
+ {4, 5},
1229
+ None,
1230
+ {6},
1231
+ None,
1232
+ {"e", "f"},
1233
+ None,
1234
+ {"g"},
1235
+ None,
1236
+ )
1237
+
1238
+ u, v = 10, "k"
1239
+ assert not _cut_PT(u, v, gparams, sparams)
1240
+
1241
+ # Change intersection between G1[u] and T1, so it's not the same as the one between G2[v] and T2
1242
+ G1.remove_edge(u, 4)
1243
+ assert _cut_PT(u, v, gparams, sparams)
1244
+
1245
+ # Compensate in G2
1246
+ G2.remove_edge(v, mapped[4])
1247
+ assert not _cut_PT(u, v, gparams, sparams)
1248
+
1249
+ # Change intersection between G2[v] and T2_tilde, so it's not the same as the one between G1[u] and T1_tilde
1250
+ G2.remove_edge(v, mapped[6])
1251
+ assert _cut_PT(u, v, gparams, sparams)
1252
+
1253
+ # Compensate in G1
1254
+ G1.remove_edge(u, 6)
1255
+ assert not _cut_PT(u, v, gparams, sparams)
1256
+
1257
+ # Add disconnected nodes, which will form the new Ti_out
1258
+ G1.add_nodes_from([6, 7, 8])
1259
+ G2.add_nodes_from(["g", "y", "z"])
1260
+ sparams.T1_tilde.update({6, 7, 8})
1261
+ sparams.T2_tilde.update({"g", "y", "z"})
1262
+
1263
+ l1 = {n: "blue" for n in G1.nodes()}
1264
+ l2 = {n: "blue" for n in G2.nodes()}
1265
+ gparams = _GraphParameters(
1266
+ G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
1267
+ )
1268
+
1269
+ assert not _cut_PT(u, v, gparams, sparams)
1270
+
1271
+ # Add some new nodes to the mapping
1272
+ sparams.mapping.update({6: "g", 7: "y"})
1273
+ sparams.reverse_mapping.update({"g": 6, "y": 7})
1274
+
1275
+ # Add more nodes to T1, T2.
1276
+ G1.add_edges_from([(6, 20), (7, 20), (6, 21)])
1277
+ G2.add_edges_from([("g", "i"), ("g", "j"), ("y", "j")])
1278
+
1279
+ sparams.mapping.update({20: "j", 21: "i"})
1280
+ sparams.reverse_mapping.update({"j": 20, "i": 21})
1281
+ sparams.T1.update({20, 21})
1282
+ sparams.T2.update({"i", "j"})
1283
+ sparams.T1_tilde.difference_update({6, 7})
1284
+ sparams.T2_tilde.difference_update({"g", "y"})
1285
+
1286
+ assert not _cut_PT(u, v, gparams, sparams)
1287
+
1288
+ # Add nodes from the new T1 and T2, as neighbors of u and v respectively
1289
+ G1.add_edges_from([(u, 20), (u, 21)])
1290
+ G2.add_edges_from([(v, "i"), (v, "j")])
1291
+ l1 = {n: "blue" for n in G1.nodes()}
1292
+ l2 = {n: "blue" for n in G2.nodes()}
1293
+ gparams = _GraphParameters(
1294
+ G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
1295
+ )
1296
+
1297
+ assert not _cut_PT(u, v, gparams, sparams)
1298
+
1299
+ # Change the edges, maintaining the G1[u]-T1 intersection
1300
+ G1.remove_edge(u, 20)
1301
+ G1.add_edge(u, 4)
1302
+ assert not _cut_PT(u, v, gparams, sparams)
1303
+
1304
+ # Connect u to 8 which is still in T1_tilde
1305
+ G1.add_edge(u, 8)
1306
+ assert _cut_PT(u, v, gparams, sparams)
1307
+
1308
+ # Same for v and z, so that inters(G1[u], T1out) == inters(G2[v], T2out)
1309
+ G2.add_edge(v, "z")
1310
+ assert not _cut_PT(u, v, gparams, sparams)
1311
+
1312
+ def test_cut_different_labels(self):
1313
+ G1 = nx.Graph(
1314
+ [
1315
+ (0, 1),
1316
+ (1, 2),
1317
+ (1, 14),
1318
+ (0, 4),
1319
+ (1, 5),
1320
+ (2, 6),
1321
+ (3, 7),
1322
+ (3, 6),
1323
+ (4, 10),
1324
+ (4, 9),
1325
+ (6, 10),
1326
+ (20, 9),
1327
+ (20, 15),
1328
+ (20, 12),
1329
+ (20, 11),
1330
+ (12, 13),
1331
+ (11, 13),
1332
+ (20, 8),
1333
+ (20, 3),
1334
+ (20, 5),
1335
+ (20, 0),
1336
+ ]
1337
+ )
1338
+ mapped = {
1339
+ 0: "a",
1340
+ 1: "b",
1341
+ 2: "c",
1342
+ 3: "d",
1343
+ 4: "e",
1344
+ 5: "f",
1345
+ 6: "g",
1346
+ 7: "h",
1347
+ 8: "i",
1348
+ 9: "j",
1349
+ 10: "k",
1350
+ 11: "l",
1351
+ 12: "m",
1352
+ 13: "n",
1353
+ 14: "o",
1354
+ 15: "p",
1355
+ 20: "x",
1356
+ }
1357
+ G2 = nx.relabel_nodes(G1, mapped)
1358
+
1359
+ l1 = {n: "none" for n in G1.nodes()}
1360
+ l2 = {}
1361
+
1362
+ l1.update(
1363
+ {
1364
+ 9: "blue",
1365
+ 15: "blue",
1366
+ 12: "blue",
1367
+ 11: "green",
1368
+ 3: "green",
1369
+ 8: "red",
1370
+ 0: "red",
1371
+ 5: "yellow",
1372
+ }
1373
+ )
1374
+ l2.update({mapped[n]: l for n, l in l1.items()})
1375
+
1376
+ gparams = _GraphParameters(
1377
+ G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
1378
+ )
1379
+ sparams = _StateParameters(
1380
+ {0: "a", 1: "b", 2: "c", 3: "d"},
1381
+ {"a": 0, "b": 1, "c": 2, "d": 3},
1382
+ {4, 5, 6, 7, 14},
1383
+ None,
1384
+ {9, 10, 15, 12, 11, 13, 8},
1385
+ None,
1386
+ {"e", "f", "g", "h", "o"},
1387
+ None,
1388
+ {"j", "k", "l", "m", "n", "i", "p"},
1389
+ None,
1390
+ )
1391
+
1392
+ u, v = 20, "x"
1393
+ assert not _cut_PT(u, v, gparams, sparams)
1394
+
1395
+ # Change the orientation of the labels on neighbors of u compared to neighbors of v. Leave the structure intact
1396
+ l1.update({9: "red"})
1397
+ assert _cut_PT(u, v, gparams, sparams)
1398
+
1399
+ # compensate in G2
1400
+ l2.update({mapped[9]: "red"})
1401
+ assert not _cut_PT(u, v, gparams, sparams)
1402
+
1403
+ # Change the intersection of G1[u] and T1
1404
+ G1.add_edge(u, 4)
1405
+ assert _cut_PT(u, v, gparams, sparams)
1406
+
1407
+ # Same for G2[v] and T2
1408
+ G2.add_edge(v, mapped[4])
1409
+ assert not _cut_PT(u, v, gparams, sparams)
1410
+
1411
+ # Change the intersection of G2[v] and T2_tilde
1412
+ G2.remove_edge(v, mapped[8])
1413
+ assert _cut_PT(u, v, gparams, sparams)
1414
+
1415
+ # Same for G1[u] and T1_tilde
1416
+ G1.remove_edge(u, 8)
1417
+ assert not _cut_PT(u, v, gparams, sparams)
1418
+
1419
+ # Place 8 and mapped[8] in T1 and T2 respectively, by connecting it to covered nodes
1420
+ G1.add_edge(8, 3)
1421
+ G2.add_edge(mapped[8], mapped[3])
1422
+ sparams.T1.add(8)
1423
+ sparams.T2.add(mapped[8])
1424
+ sparams.T1_tilde.remove(8)
1425
+ sparams.T2_tilde.remove(mapped[8])
1426
+
1427
+ assert not _cut_PT(u, v, gparams, sparams)
1428
+
1429
+ # Remove neighbor of u from T1
1430
+ G1.remove_node(5)
1431
+ l1.pop(5)
1432
+ sparams.T1.remove(5)
1433
+ assert _cut_PT(u, v, gparams, sparams)
1434
+
1435
+ # Same in G2
1436
+ G2.remove_node(mapped[5])
1437
+ l2.pop(mapped[5])
1438
+ sparams.T2.remove(mapped[5])
1439
+ assert not _cut_PT(u, v, gparams, sparams)
1440
+
1441
+ def test_feasibility_same_labels(self):
1442
+ G1 = nx.Graph(
1443
+ [
1444
+ (0, 1),
1445
+ (1, 2),
1446
+ (1, 14),
1447
+ (0, 4),
1448
+ (1, 5),
1449
+ (2, 6),
1450
+ (3, 7),
1451
+ (3, 6),
1452
+ (4, 10),
1453
+ (4, 9),
1454
+ (6, 10),
1455
+ (20, 9),
1456
+ (20, 15),
1457
+ (20, 12),
1458
+ (20, 11),
1459
+ (12, 13),
1460
+ (11, 13),
1461
+ (20, 8),
1462
+ (20, 2),
1463
+ (20, 5),
1464
+ (20, 0),
1465
+ ]
1466
+ )
1467
+ mapped = {
1468
+ 0: "a",
1469
+ 1: "b",
1470
+ 2: "c",
1471
+ 3: "d",
1472
+ 4: "e",
1473
+ 5: "f",
1474
+ 6: "g",
1475
+ 7: "h",
1476
+ 8: "i",
1477
+ 9: "j",
1478
+ 10: "k",
1479
+ 11: "l",
1480
+ 12: "m",
1481
+ 13: "n",
1482
+ 14: "o",
1483
+ 15: "p",
1484
+ 20: "x",
1485
+ }
1486
+ G2 = nx.relabel_nodes(G1, mapped)
1487
+
1488
+ l1 = {n: "blue" for n in G1.nodes()}
1489
+ l2 = {mapped[n]: "blue" for n in G1.nodes()}
1490
+
1491
+ gparams = _GraphParameters(
1492
+ G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
1493
+ )
1494
+ sparams = _StateParameters(
1495
+ {0: "a", 1: "b", 2: "c", 3: "d"},
1496
+ {"a": 0, "b": 1, "c": 2, "d": 3},
1497
+ {4, 5, 6, 7, 14},
1498
+ None,
1499
+ {9, 10, 15, 12, 11, 13, 8},
1500
+ None,
1501
+ {"e", "f", "g", "h", "o"},
1502
+ None,
1503
+ {"j", "k", "l", "m", "n", "i", "p"},
1504
+ None,
1505
+ )
1506
+
1507
+ u, v = 20, "x"
1508
+ assert not _cut_PT(u, v, gparams, sparams)
1509
+
1510
+ # Change structure in G2 such that, ONLY consistency is harmed
1511
+ G2.remove_edge(mapped[20], mapped[2])
1512
+ G2.add_edge(mapped[20], mapped[3])
1513
+
1514
+ # Consistency check fails, while the cutting rules are satisfied!
1515
+ assert not _cut_PT(u, v, gparams, sparams)
1516
+ assert not _consistent_PT(u, v, gparams, sparams)
1517
+
1518
+ # Compensate in G1 and make it consistent
1519
+ G1.remove_edge(20, 2)
1520
+ G1.add_edge(20, 3)
1521
+ assert not _cut_PT(u, v, gparams, sparams)
1522
+ assert _consistent_PT(u, v, gparams, sparams)
1523
+
1524
+ # ONLY fail the cutting check
1525
+ G2.add_edge(v, mapped[10])
1526
+ assert _cut_PT(u, v, gparams, sparams)
1527
+ assert _consistent_PT(u, v, gparams, sparams)
1528
+
1529
+ def test_feasibility_different_labels(self):
1530
+ G1 = nx.Graph(
1531
+ [
1532
+ (0, 1),
1533
+ (1, 2),
1534
+ (1, 14),
1535
+ (0, 4),
1536
+ (1, 5),
1537
+ (2, 6),
1538
+ (3, 7),
1539
+ (3, 6),
1540
+ (4, 10),
1541
+ (4, 9),
1542
+ (6, 10),
1543
+ (20, 9),
1544
+ (20, 15),
1545
+ (20, 12),
1546
+ (20, 11),
1547
+ (12, 13),
1548
+ (11, 13),
1549
+ (20, 8),
1550
+ (20, 2),
1551
+ (20, 5),
1552
+ (20, 0),
1553
+ ]
1554
+ )
1555
+ mapped = {
1556
+ 0: "a",
1557
+ 1: "b",
1558
+ 2: "c",
1559
+ 3: "d",
1560
+ 4: "e",
1561
+ 5: "f",
1562
+ 6: "g",
1563
+ 7: "h",
1564
+ 8: "i",
1565
+ 9: "j",
1566
+ 10: "k",
1567
+ 11: "l",
1568
+ 12: "m",
1569
+ 13: "n",
1570
+ 14: "o",
1571
+ 15: "p",
1572
+ 20: "x",
1573
+ }
1574
+ G2 = nx.relabel_nodes(G1, mapped)
1575
+
1576
+ l1 = {n: "none" for n in G1.nodes()}
1577
+ l2 = {}
1578
+
1579
+ l1.update(
1580
+ {
1581
+ 9: "blue",
1582
+ 15: "blue",
1583
+ 12: "blue",
1584
+ 11: "green",
1585
+ 2: "green",
1586
+ 8: "red",
1587
+ 0: "red",
1588
+ 5: "yellow",
1589
+ }
1590
+ )
1591
+ l2.update({mapped[n]: l for n, l in l1.items()})
1592
+
1593
+ gparams = _GraphParameters(
1594
+ G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
1595
+ )
1596
+ sparams = _StateParameters(
1597
+ {0: "a", 1: "b", 2: "c", 3: "d"},
1598
+ {"a": 0, "b": 1, "c": 2, "d": 3},
1599
+ {4, 5, 6, 7, 14},
1600
+ None,
1601
+ {9, 10, 15, 12, 11, 13, 8},
1602
+ None,
1603
+ {"e", "f", "g", "h", "o"},
1604
+ None,
1605
+ {"j", "k", "l", "m", "n", "i", "p"},
1606
+ None,
1607
+ )
1608
+
1609
+ u, v = 20, "x"
1610
+ assert not _cut_PT(u, v, gparams, sparams)
1611
+
1612
+ # Change structure in G2 such that, ONLY consistency is harmed
1613
+ G2.remove_edge(mapped[20], mapped[2])
1614
+ G2.add_edge(mapped[20], mapped[3])
1615
+ l2.update({mapped[3]: "green"})
1616
+
1617
+ # Consistency check fails, while the cutting rules are satisfied!
1618
+ assert not _cut_PT(u, v, gparams, sparams)
1619
+ assert not _consistent_PT(u, v, gparams, sparams)
1620
+
1621
+ # Compensate in G1 and make it consistent
1622
+ G1.remove_edge(20, 2)
1623
+ G1.add_edge(20, 3)
1624
+ l1.update({3: "green"})
1625
+ assert not _cut_PT(u, v, gparams, sparams)
1626
+ assert _consistent_PT(u, v, gparams, sparams)
1627
+
1628
+ # ONLY fail the cutting check
1629
+ l1.update({5: "red"})
1630
+ assert _cut_PT(u, v, gparams, sparams)
1631
+ assert _consistent_PT(u, v, gparams, sparams)
1632
+
1633
+
1634
+ class TestMultiGraphISOFeasibility:
1635
+ def test_const_covered_neighbors(self):
1636
+ G1 = nx.MultiGraph(
1637
+ [(0, 1), (0, 1), (1, 2), (3, 0), (3, 0), (3, 0), (3, 2), (3, 2)]
1638
+ )
1639
+ G2 = nx.MultiGraph(
1640
+ [
1641
+ ("a", "b"),
1642
+ ("a", "b"),
1643
+ ("b", "c"),
1644
+ ("k", "a"),
1645
+ ("k", "a"),
1646
+ ("k", "a"),
1647
+ ("k", "c"),
1648
+ ("k", "c"),
1649
+ ]
1650
+ )
1651
+ gparams = _GraphParameters(G1, G2, None, None, None, None, None)
1652
+ sparams = _StateParameters(
1653
+ {0: "a", 1: "b", 2: "c"},
1654
+ {"a": 0, "b": 1, "c": 2},
1655
+ None,
1656
+ None,
1657
+ None,
1658
+ None,
1659
+ None,
1660
+ None,
1661
+ None,
1662
+ None,
1663
+ )
1664
+ u, v = 3, "k"
1665
+ assert _consistent_PT(u, v, gparams, sparams)
1666
+
1667
+ def test_const_no_covered_neighbors(self):
1668
+ G1 = nx.MultiGraph([(0, 1), (0, 1), (1, 2), (3, 4), (3, 4), (3, 5)])
1669
+ G2 = nx.MultiGraph([("a", "b"), ("b", "c"), ("k", "w"), ("k", "w"), ("k", "z")])
1670
+ gparams = _GraphParameters(G1, G2, None, None, None, None, None)
1671
+ sparams = _StateParameters(
1672
+ {0: "a", 1: "b", 2: "c"},
1673
+ {"a": 0, "b": 1, "c": 2},
1674
+ None,
1675
+ None,
1676
+ None,
1677
+ None,
1678
+ None,
1679
+ None,
1680
+ None,
1681
+ None,
1682
+ )
1683
+ u, v = 3, "k"
1684
+ assert _consistent_PT(u, v, gparams, sparams)
1685
+
1686
+ def test_const_mixed_covered_uncovered_neighbors(self):
1687
+ G1 = nx.MultiGraph(
1688
+ [(0, 1), (1, 2), (3, 0), (3, 0), (3, 0), (3, 2), (3, 2), (3, 4), (3, 5)]
1689
+ )
1690
+ G2 = nx.MultiGraph(
1691
+ [
1692
+ ("a", "b"),
1693
+ ("b", "c"),
1694
+ ("k", "a"),
1695
+ ("k", "a"),
1696
+ ("k", "a"),
1697
+ ("k", "c"),
1698
+ ("k", "c"),
1699
+ ("k", "w"),
1700
+ ("k", "z"),
1701
+ ]
1702
+ )
1703
+ gparams = _GraphParameters(G1, G2, None, None, None, None, None)
1704
+ sparams = _StateParameters(
1705
+ {0: "a", 1: "b", 2: "c"},
1706
+ {"a": 0, "b": 1, "c": 2},
1707
+ None,
1708
+ None,
1709
+ None,
1710
+ None,
1711
+ None,
1712
+ None,
1713
+ None,
1714
+ None,
1715
+ )
1716
+ u, v = 3, "k"
1717
+ assert _consistent_PT(u, v, gparams, sparams)
1718
+
1719
+ def test_const_fail_cases(self):
1720
+ G1 = nx.MultiGraph(
1721
+ [
1722
+ (0, 1),
1723
+ (1, 2),
1724
+ (10, 0),
1725
+ (10, 0),
1726
+ (10, 0),
1727
+ (10, 3),
1728
+ (10, 3),
1729
+ (10, 4),
1730
+ (10, 5),
1731
+ (10, 6),
1732
+ (10, 6),
1733
+ (4, 1),
1734
+ (5, 3),
1735
+ ]
1736
+ )
1737
+ mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"}
1738
+ G2 = nx.relabel_nodes(G1, mapped)
1739
+
1740
+ gparams = _GraphParameters(G1, G2, None, None, None, None, None)
1741
+ sparams = _StateParameters(
1742
+ {0: "a", 1: "b", 2: "c", 3: "d"},
1743
+ {"a": 0, "b": 1, "c": 2, "d": 3},
1744
+ None,
1745
+ None,
1746
+ None,
1747
+ None,
1748
+ None,
1749
+ None,
1750
+ None,
1751
+ None,
1752
+ )
1753
+ u, v = 10, "k"
1754
+ assert _consistent_PT(u, v, gparams, sparams)
1755
+
1756
+ # Delete one uncovered neighbor of u. Notice how it still passes the test. Two reasons for this:
1757
+ # 1. If u, v had different degrees from the beginning, they wouldn't be selected as candidates in the first
1758
+ # place.
1759
+ # 2. Even if they are selected, consistency is basically 1-look-ahead, meaning that we take into consideration
1760
+ # the relation of the candidates with their mapped neighbors. The node we deleted is not a covered neighbor.
1761
+ # Such nodes will be checked by the cut_PT function, which is basically the 2-look-ahead, checking the
1762
+ # relation of the candidates with T1, T2 (in which belongs the node we just deleted).
1763
+ G1.remove_node(6)
1764
+ assert _consistent_PT(u, v, gparams, sparams)
1765
+
1766
+ # Add one more covered neighbor of u in G1
1767
+ G1.add_edge(u, 2)
1768
+ assert not _consistent_PT(u, v, gparams, sparams)
1769
+
1770
+ # Compensate in G2
1771
+ G2.add_edge(v, "c")
1772
+ assert _consistent_PT(u, v, gparams, sparams)
1773
+
1774
+ # Add one more covered neighbor of v in G2
1775
+ G2.add_edge(v, "x")
1776
+ G1.add_node(7)
1777
+ sparams.mapping.update({7: "x"})
1778
+ sparams.reverse_mapping.update({"x": 7})
1779
+ assert not _consistent_PT(u, v, gparams, sparams)
1780
+
1781
+ # Compensate in G1
1782
+ G1.add_edge(u, 7)
1783
+ assert _consistent_PT(u, v, gparams, sparams)
1784
+
1785
+ # Delete an edge between u and a covered neighbor
1786
+ G1.remove_edges_from([(u, 0), (u, 0)])
1787
+ assert not _consistent_PT(u, v, gparams, sparams)
1788
+
1789
+ # Compensate in G2
1790
+ G2.remove_edges_from([(v, mapped[0]), (v, mapped[0])])
1791
+ assert _consistent_PT(u, v, gparams, sparams)
1792
+
1793
+ # Remove an edge between v and a covered neighbor
1794
+ G2.remove_edge(v, mapped[3])
1795
+ assert not _consistent_PT(u, v, gparams, sparams)
1796
+
1797
+ # Compensate in G1
1798
+ G1.remove_edge(u, 3)
1799
+ assert _consistent_PT(u, v, gparams, sparams)
1800
+
1801
def test_cut_same_labels(self):
    """Exercise the cutting rule (_cut_PT) on a multigraph whose nodes all
    share one label, so only edge multiplicities and the intersections of
    the candidates' neighborhoods with T1/T2/T1_tilde/T2_tilde can cut.

    The test repeatedly makes G1 and G2 asymmetric around the candidate
    pair (u, v) (expecting a cut) and then compensates in the other graph
    to restore symmetry (expecting no cut).
    """
    G1 = nx.MultiGraph(
        [
            (0, 1),
            (1, 2),
            (10, 0),
            (10, 0),
            (10, 0),
            (10, 3),
            (10, 3),
            (10, 4),
            (10, 4),
            (10, 5),
            (10, 5),
            (10, 5),
            (10, 5),
            (10, 6),
            (10, 6),
            (4, 1),
            (5, 3),
        ]
    )
    mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"}
    G2 = nx.relabel_nodes(G1, mapped)
    # Uniform labels: the cutting rules can only react to structure.
    l1 = {n: "blue" for n in G1.nodes()}
    l2 = {n: "blue" for n in G2.nodes()}

    gparams = _GraphParameters(
        G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
    )
    sparams = _StateParameters(
        {0: "a", 1: "b", 2: "c", 3: "d"},
        {"a": 0, "b": 1, "c": 2, "d": 3},
        {4, 5},
        None,
        {6},
        None,
        {"e", "f"},
        None,
        {"g"},
        None,
    )

    u, v = 10, "k"
    assert not _cut_PT(u, v, gparams, sparams)

    # Remove one of the multiple edges between u and a neighbor
    G1.remove_edge(u, 4)
    assert _cut_PT(u, v, gparams, sparams)

    # Compensate in G2 (drop u's remaining parallel edge to 4, and both of
    # v's parallel edges to mapped[4], so multiplicities agree again)
    G1.remove_edge(u, 4)
    G2.remove_edges_from([(v, mapped[4]), (v, mapped[4])])
    assert not _cut_PT(u, v, gparams, sparams)

    # Change intersection between G2[v] and T2_tilde, so it's not the same as the one between G1[u] and T1_tilde
    G2.remove_edge(v, mapped[6])
    assert _cut_PT(u, v, gparams, sparams)

    # Compensate in G1
    G1.remove_edge(u, 6)
    assert not _cut_PT(u, v, gparams, sparams)

    # Add more edges between u and neighbor which belongs in T1_tilde
    G1.add_edges_from([(u, 5), (u, 5), (u, 5)])
    assert _cut_PT(u, v, gparams, sparams)

    # Compensate in G2
    G2.add_edges_from([(v, mapped[5]), (v, mapped[5]), (v, mapped[5])])
    assert not _cut_PT(u, v, gparams, sparams)

    # Add disconnected nodes, which will form the new Ti_out
    G1.add_nodes_from([6, 7, 8])
    G2.add_nodes_from(["g", "y", "z"])
    G1.add_edges_from([(u, 6), (u, 6), (u, 6), (u, 8)])
    G2.add_edges_from([(v, "g"), (v, "g"), (v, "g"), (v, "z")])

    sparams.T1_tilde.update({6, 7, 8})
    sparams.T2_tilde.update({"g", "y", "z"})

    # Rebuild the label mappings/groups since new nodes were added.
    l1 = {n: "blue" for n in G1.nodes()}
    l2 = {n: "blue" for n in G2.nodes()}
    gparams = _GraphParameters(
        G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
    )

    assert not _cut_PT(u, v, gparams, sparams)

    # Add some new nodes to the mapping
    sparams.mapping.update({6: "g", 7: "y"})
    sparams.reverse_mapping.update({"g": 6, "y": 7})

    # Add more nodes to T1, T2.
    G1.add_edges_from([(6, 20), (7, 20), (6, 21)])
    G2.add_edges_from([("g", "i"), ("g", "j"), ("y", "j")])

    sparams.T1.update({20, 21})
    sparams.T2.update({"i", "j"})
    # 6/7 ("g"/"y") are now covered, so they leave the tilde sets.
    sparams.T1_tilde.difference_update({6, 7})
    sparams.T2_tilde.difference_update({"g", "y"})

    assert not _cut_PT(u, v, gparams, sparams)

    # Remove some edges
    G2.remove_edge(v, "g")
    assert _cut_PT(u, v, gparams, sparams)

    G1.remove_edge(u, 6)
    G1.add_edge(u, 8)
    G2.add_edge(v, "z")
    assert not _cut_PT(u, v, gparams, sparams)

    # Add nodes from the new T1 and T2, as neighbors of u and v respectively
    G1.add_edges_from([(u, 20), (u, 20), (u, 20), (u, 21)])
    G2.add_edges_from([(v, "i"), (v, "i"), (v, "i"), (v, "j")])
    l1 = {n: "blue" for n in G1.nodes()}
    l2 = {n: "blue" for n in G2.nodes()}
    gparams = _GraphParameters(
        G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
    )

    assert not _cut_PT(u, v, gparams, sparams)

    # Change the edges
    G1.remove_edge(u, 20)
    G1.add_edge(u, 4)
    assert _cut_PT(u, v, gparams, sparams)

    G2.remove_edge(v, "i")
    G2.add_edge(v, mapped[4])
    assert not _cut_PT(u, v, gparams, sparams)
1932
+
1933
def test_cut_different_labels(self):
    """Exercise _cut_PT on a labeled multigraph.

    Starts from a label-symmetric pair (u, v) = (20, "x") and alternates
    between breaking symmetry on one side (label reassignment, extra or
    missing edges, frontier-set membership changes) — expecting a cut —
    and compensating on the other side — expecting no cut.
    """
    G1 = nx.MultiGraph(
        [
            (0, 1),
            (0, 1),
            (1, 2),
            (1, 2),
            (1, 14),
            (0, 4),
            (1, 5),
            (2, 6),
            (3, 7),
            (3, 6),
            (4, 10),
            (4, 9),
            (6, 10),
            (20, 9),
            (20, 9),
            (20, 9),
            (20, 15),
            (20, 15),
            (20, 12),
            (20, 11),
            (20, 11),
            (20, 11),
            (12, 13),
            (11, 13),
            (20, 8),
            (20, 8),
            (20, 3),
            (20, 3),
            (20, 5),
            (20, 5),
            (20, 5),
            (20, 0),
            (20, 0),
            (20, 0),
        ]
    )
    mapped = {
        0: "a",
        1: "b",
        2: "c",
        3: "d",
        4: "e",
        5: "f",
        6: "g",
        7: "h",
        8: "i",
        9: "j",
        10: "k",
        11: "l",
        12: "m",
        13: "n",
        14: "o",
        15: "p",
        20: "x",
    }
    G2 = nx.relabel_nodes(G1, mapped)

    # Default label "none"; selected nodes get distinguishing colors below.
    l1 = {n: "none" for n in G1.nodes()}
    l2 = {}

    l1.update(
        {
            9: "blue",
            15: "blue",
            12: "blue",
            11: "green",
            3: "green",
            8: "red",
            0: "red",
            5: "yellow",
        }
    )
    # Mirror G1's labeling onto G2 through the isomorphism "mapped".
    l2.update({mapped[n]: l for n, l in l1.items()})

    gparams = _GraphParameters(
        G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
    )
    sparams = _StateParameters(
        {0: "a", 1: "b", 2: "c", 3: "d"},
        {"a": 0, "b": 1, "c": 2, "d": 3},
        {4, 5, 6, 7, 14},
        None,
        {9, 10, 15, 12, 11, 13, 8},
        None,
        {"e", "f", "g", "h", "o"},
        None,
        {"j", "k", "l", "m", "n", "i", "p"},
        None,
    )

    u, v = 20, "x"
    assert not _cut_PT(u, v, gparams, sparams)

    # Change the orientation of the labels on neighbors of u compared to neighbors of v. Leave the structure intact
    l1.update({9: "red"})
    assert _cut_PT(u, v, gparams, sparams)

    # compensate in G2
    l2.update({mapped[9]: "red"})
    assert not _cut_PT(u, v, gparams, sparams)

    # Change the intersection of G1[u] and T1
    G1.add_edge(u, 4)
    assert _cut_PT(u, v, gparams, sparams)

    # Same for G2[v] and T2
    G2.add_edge(v, mapped[4])
    assert not _cut_PT(u, v, gparams, sparams)

    # Delete one from the multiple edges
    G2.remove_edge(v, mapped[8])
    assert _cut_PT(u, v, gparams, sparams)

    # Same for G1[u] and T1_tilde
    G1.remove_edge(u, 8)
    assert not _cut_PT(u, v, gparams, sparams)

    # Place 8 and mapped[8] in T1 and T2 respectively, by connecting it to covered nodes
    # NOTE: the extra (8, u) edge makes the counts uneven on purpose.
    G1.add_edges_from([(8, 3), (8, 3), (8, u)])
    G2.add_edges_from([(mapped[8], mapped[3]), (mapped[8], mapped[3])])
    sparams.T1.add(8)
    sparams.T2.add(mapped[8])
    sparams.T1_tilde.remove(8)
    sparams.T2_tilde.remove(mapped[8])

    assert _cut_PT(u, v, gparams, sparams)

    # Fix uneven edges
    G1.remove_edge(8, u)
    assert not _cut_PT(u, v, gparams, sparams)

    # Remove neighbor of u from T1
    G1.remove_node(5)
    l1.pop(5)
    sparams.T1.remove(5)
    assert _cut_PT(u, v, gparams, sparams)

    # Same in G2
    G2.remove_node(mapped[5])
    l2.pop(mapped[5])
    sparams.T2.remove(mapped[5])
    assert not _cut_PT(u, v, gparams, sparams)
2078
+
2079
def test_feasibility_same_labels(self):
    """Show that _cut_PT and _consistent_PT are independent checks on a
    uniformly-labeled multigraph: structure changes can fail consistency
    while the cutting rules still pass, and vice versa.
    """
    G1 = nx.MultiGraph(
        [
            (0, 1),
            (0, 1),
            (1, 2),
            (1, 2),
            (1, 14),
            (0, 4),
            (1, 5),
            (2, 6),
            (3, 7),
            (3, 6),
            (4, 10),
            (4, 9),
            (6, 10),
            (20, 9),
            (20, 9),
            (20, 9),
            (20, 15),
            (20, 15),
            (20, 12),
            (20, 11),
            (20, 11),
            (20, 11),
            (12, 13),
            (11, 13),
            (20, 8),
            (20, 8),
            (20, 3),
            (20, 3),
            (20, 5),
            (20, 5),
            (20, 5),
            (20, 0),
            (20, 0),
            (20, 0),
        ]
    )
    mapped = {
        0: "a",
        1: "b",
        2: "c",
        3: "d",
        4: "e",
        5: "f",
        6: "g",
        7: "h",
        8: "i",
        9: "j",
        10: "k",
        11: "l",
        12: "m",
        13: "n",
        14: "o",
        15: "p",
        20: "x",
    }
    G2 = nx.relabel_nodes(G1, mapped)
    # Uniform labels: only structure can distinguish the candidates.
    l1 = {n: "blue" for n in G1.nodes()}
    l2 = {mapped[n]: "blue" for n in G1.nodes()}

    gparams = _GraphParameters(
        G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
    )
    sparams = _StateParameters(
        {0: "a", 1: "b", 2: "c", 3: "d"},
        {"a": 0, "b": 1, "c": 2, "d": 3},
        {4, 5, 6, 7, 14},
        None,
        {9, 10, 15, 12, 11, 13, 8},
        None,
        {"e", "f", "g", "h", "o"},
        None,
        {"j", "k", "l", "m", "n", "i", "p"},
        None,
    )

    u, v = 20, "x"
    assert not _cut_PT(u, v, gparams, sparams)

    # Change structure in G2 such that, ONLY consistency is harmed
    G2.remove_edges_from([(mapped[20], mapped[3]), (mapped[20], mapped[3])])
    G2.add_edges_from([(mapped[20], mapped[2]), (mapped[20], mapped[2])])

    # Consistency check fails, while the cutting rules are satisfied!
    assert not _cut_PT(u, v, gparams, sparams)
    assert not _consistent_PT(u, v, gparams, sparams)

    # Compensate in G1 and make it consistent
    G1.remove_edges_from([(20, 3), (20, 3)])
    G1.add_edges_from([(20, 2), (20, 2)])
    assert not _cut_PT(u, v, gparams, sparams)
    assert _consistent_PT(u, v, gparams, sparams)

    # ONLY fail the cutting check
    G2.add_edges_from([(v, mapped[10])] * 5)
    assert _cut_PT(u, v, gparams, sparams)
    assert _consistent_PT(u, v, gparams, sparams)

    # Pass all tests
    G1.add_edges_from([(u, 10)] * 5)
    assert not _cut_PT(u, v, gparams, sparams)
    assert _consistent_PT(u, v, gparams, sparams)
2183
+
2184
def test_feasibility_different_labels(self):
    """Same independence check as test_feasibility_same_labels but on a
    labeled multigraph: consistency can fail alone (structure change
    between covered nodes) and cutting can fail alone (label change on a
    frontier neighbor).
    """
    G1 = nx.MultiGraph(
        [
            (0, 1),
            (0, 1),
            (1, 2),
            (1, 2),
            (1, 14),
            (0, 4),
            (1, 5),
            (2, 6),
            (3, 7),
            (3, 6),
            (4, 10),
            (4, 9),
            (6, 10),
            (20, 9),
            (20, 9),
            (20, 9),
            (20, 15),
            (20, 15),
            (20, 12),
            (20, 11),
            (20, 11),
            (20, 11),
            (12, 13),
            (11, 13),
            (20, 8),
            (20, 8),
            (20, 2),
            (20, 2),
            (20, 5),
            (20, 5),
            (20, 5),
            (20, 0),
            (20, 0),
            (20, 0),
        ]
    )
    mapped = {
        0: "a",
        1: "b",
        2: "c",
        3: "d",
        4: "e",
        5: "f",
        6: "g",
        7: "h",
        8: "i",
        9: "j",
        10: "k",
        11: "l",
        12: "m",
        13: "n",
        14: "o",
        15: "p",
        20: "x",
    }
    G2 = nx.relabel_nodes(G1, mapped)
    # Default label "none"; selected nodes get distinguishing colors below.
    l1 = {n: "none" for n in G1.nodes()}
    l2 = {}

    l1.update(
        {
            9: "blue",
            15: "blue",
            12: "blue",
            11: "green",
            2: "green",
            8: "red",
            0: "red",
            5: "yellow",
        }
    )
    # Mirror G1's labeling onto G2 through the isomorphism "mapped".
    l2.update({mapped[n]: l for n, l in l1.items()})

    gparams = _GraphParameters(
        G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
    )
    sparams = _StateParameters(
        {0: "a", 1: "b", 2: "c", 3: "d"},
        {"a": 0, "b": 1, "c": 2, "d": 3},
        {4, 5, 6, 7, 14},
        None,
        {9, 10, 15, 12, 11, 13, 8},
        None,
        {"e", "f", "g", "h", "o"},
        None,
        {"j", "k", "l", "m", "n", "i", "p"},
        None,
    )

    u, v = 20, "x"
    assert not _cut_PT(u, v, gparams, sparams)

    # Change structure in G2 such that, ONLY consistency is harmed
    G2.remove_edges_from([(mapped[20], mapped[2]), (mapped[20], mapped[2])])
    G2.add_edges_from([(mapped[20], mapped[3]), (mapped[20], mapped[3])])
    l2.update({mapped[3]: "green"})

    # Consistency check fails, while the cutting rules are satisfied!
    assert not _cut_PT(u, v, gparams, sparams)
    assert not _consistent_PT(u, v, gparams, sparams)

    # Compensate in G1 and make it consistent
    G1.remove_edges_from([(20, 2), (20, 2)])
    G1.add_edges_from([(20, 3), (20, 3)])
    l1.update({3: "green"})
    assert not _cut_PT(u, v, gparams, sparams)
    assert _consistent_PT(u, v, gparams, sparams)

    # ONLY fail the cutting check
    l1.update({5: "red"})
    assert _cut_PT(u, v, gparams, sparams)
    assert _consistent_PT(u, v, gparams, sparams)
2299
+
2300
+
2301
class TestDiGraphISOFeasibility:
    """Feasibility-rule tests (_consistent_PT / _cut_PT) for directed graphs.

    Mirrors the undirected/multigraph suites above, but the state carries
    separate in/out frontier sets (T1_out/T1_in and T2_out/T2_in), so edge
    direction matters for both consistency and cutting.
    """

    def test_const_covered_neighbors(self):
        """Candidate pair whose covered neighbors are connected with matching
        edge directions on both sides is consistent."""
        G1 = nx.DiGraph([(0, 1), (1, 2), (0, 3), (2, 3)])
        G2 = nx.DiGraph([("a", "b"), ("b", "c"), ("a", "k"), ("c", "k")])
        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
        sparams = _StateParameters(
            {0: "a", 1: "b", 2: "c"},
            {"a": 0, "b": 1, "c": 2},
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
        )
        u, v = 3, "k"
        assert _consistent_PT(u, v, gparams, sparams)

    def test_const_no_covered_neighbors(self):
        """Candidates with no covered neighbors at all are trivially
        consistent (nothing to compare against the mapping)."""
        G1 = nx.DiGraph([(0, 1), (1, 2), (3, 4), (3, 5)])
        G2 = nx.DiGraph([("a", "b"), ("b", "c"), ("k", "w"), ("k", "z")])
        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
        sparams = _StateParameters(
            {0: "a", 1: "b", 2: "c"},
            {"a": 0, "b": 1, "c": 2},
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
        )
        u, v = 3, "k"
        assert _consistent_PT(u, v, gparams, sparams)

    def test_const_mixed_covered_uncovered_neighbors(self):
        """Uncovered neighbors are ignored by consistency; only covered
        neighbors (0->a, 2->c here) must correspond."""
        G1 = nx.DiGraph([(0, 1), (1, 2), (3, 0), (3, 2), (3, 4), (3, 5)])
        G2 = nx.DiGraph(
            [("a", "b"), ("b", "c"), ("k", "a"), ("k", "c"), ("k", "w"), ("k", "z")]
        )
        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
        sparams = _StateParameters(
            {0: "a", 1: "b", 2: "c"},
            {"a": 0, "b": 1, "c": 2},
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
        )
        u, v = 3, "k"
        assert _consistent_PT(u, v, gparams, sparams)

    def test_const_fail_cases(self):
        """Walk through mutations that break and then restore consistency for
        the candidate pair (10, "k")."""
        G1 = nx.DiGraph(
            [
                (0, 1),
                (2, 1),
                (10, 0),
                (10, 3),
                (10, 4),
                (5, 10),
                (10, 6),
                (1, 4),
                (5, 3),
            ]
        )
        G2 = nx.DiGraph(
            [
                ("a", "b"),
                ("c", "b"),
                ("k", "a"),
                ("k", "d"),
                ("k", "e"),
                ("f", "k"),
                ("k", "g"),
                ("b", "e"),
                ("f", "d"),
            ]
        )
        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
        sparams = _StateParameters(
            {0: "a", 1: "b", 2: "c", 3: "d"},
            {"a": 0, "b": 1, "c": 2, "d": 3},
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
        )
        u, v = 10, "k"
        assert _consistent_PT(u, v, gparams, sparams)

        # Delete one uncovered neighbor of u. Notice how it still passes the
        # test. Two reasons for this:
        # 1. If u, v had different degrees from the beginning, they wouldn't
        #    be selected as candidates in the first place.
        # 2. Even if they are selected, consistency is basically
        #    1-look-ahead, meaning that we take into consideration the
        #    relation of the candidates with their mapped neighbors.
        #    The node we deleted is not a covered neighbor.
        #    Such nodes will be checked by the cut_PT function, which is
        #    basically the 2-look-ahead, checking the relation of the
        #    candidates with T1, T2 (in which belongs the node we just deleted).
        G1.remove_node(6)
        assert _consistent_PT(u, v, gparams, sparams)

        # Add one more covered neighbor of u in G1
        G1.add_edge(u, 2)
        assert not _consistent_PT(u, v, gparams, sparams)

        # Compensate in G2
        G2.add_edge(v, "c")
        assert _consistent_PT(u, v, gparams, sparams)

        # Add one more covered neighbor of v in G2
        G2.add_edge(v, "x")
        G1.add_node(7)
        sparams.mapping.update({7: "x"})
        sparams.reverse_mapping.update({"x": 7})
        assert not _consistent_PT(u, v, gparams, sparams)

        # Compensate in G1
        G1.add_edge(u, 7)
        assert _consistent_PT(u, v, gparams, sparams)

    def test_cut_inconsistent_labels(self):
        """A single mismatched label on a neighbor of u is enough to cut."""
        G1 = nx.DiGraph(
            [
                (0, 1),
                (2, 1),
                (10, 0),
                (10, 3),
                (10, 4),
                (5, 10),
                (10, 6),
                (1, 4),
                (5, 3),
            ]
        )
        G2 = nx.DiGraph(
            [
                ("a", "b"),
                ("c", "b"),
                ("k", "a"),
                ("k", "d"),
                ("k", "e"),
                ("f", "k"),
                ("k", "g"),
                ("b", "e"),
                ("f", "d"),
            ]
        )

        l1 = {n: "blue" for n in G1.nodes()}
        l2 = {n: "blue" for n in G2.nodes()}
        l1.update({5: "green"})  # Change the label of one neighbor of u

        gparams = _GraphParameters(
            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
        )
        sparams = _StateParameters(
            {0: "a", 1: "b", 2: "c", 3: "d"},
            {"a": 0, "b": 1, "c": 2, "d": 3},
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
        )

        u, v = 10, "k"
        assert _cut_PT(u, v, gparams, sparams)

    def test_cut_consistent_labels(self):
        """Baseline: identical structure, labels, and frontier sets on both
        sides must not be cut."""
        G1 = nx.DiGraph(
            [
                (0, 1),
                (2, 1),
                (10, 0),
                (10, 3),
                (10, 4),
                (5, 10),
                (10, 6),
                (1, 4),
                (5, 3),
            ]
        )
        G2 = nx.DiGraph(
            [
                ("a", "b"),
                ("c", "b"),
                ("k", "a"),
                ("k", "d"),
                ("k", "e"),
                ("f", "k"),
                ("k", "g"),
                ("b", "e"),
                ("f", "d"),
            ]
        )

        l1 = {n: "blue" for n in G1.nodes()}
        l2 = {n: "blue" for n in G2.nodes()}

        gparams = _GraphParameters(
            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
        )
        sparams = _StateParameters(
            {0: "a", 1: "b", 2: "c", 3: "d"},
            {"a": 0, "b": 1, "c": 2, "d": 3},
            {4},
            {5, 10},
            {6},
            None,
            {"e"},
            {"f", "k"},
            {"g"},
            None,
        )

        u, v = 10, "k"
        assert not _cut_PT(u, v, gparams, sparams)

    def test_cut_same_labels(self):
        """Directed analogue of the multigraph cut test: break and restore the
        intersections of the candidates' neighborhoods with the out-, in-,
        and tilde-frontier sets."""
        G1 = nx.DiGraph(
            [
                (0, 1),
                (2, 1),
                (10, 0),
                (10, 3),
                (10, 4),
                (5, 10),
                (10, 6),
                (1, 4),
                (5, 3),
            ]
        )
        mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"}
        G2 = nx.relabel_nodes(G1, mapped)
        l1 = {n: "blue" for n in G1.nodes()}
        l2 = {n: "blue" for n in G2.nodes()}

        gparams = _GraphParameters(
            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
        )
        sparams = _StateParameters(
            {0: "a", 1: "b", 2: "c", 3: "d"},
            {"a": 0, "b": 1, "c": 2, "d": 3},
            {4},
            {5, 10},
            {6},
            None,
            {"e"},
            {"f", "k"},
            {"g"},
            None,
        )

        u, v = 10, "k"
        assert not _cut_PT(u, v, gparams, sparams)

        # Change intersection between G1[u] and T1_out, so it's not the same as the one between G2[v] and T2_out
        G1.remove_edge(u, 4)
        assert _cut_PT(u, v, gparams, sparams)

        # Compensate in G2
        G2.remove_edge(v, mapped[4])
        assert not _cut_PT(u, v, gparams, sparams)

        # Change intersection between G1[u] and T1_in, so it's not the same as the one between G2[v] and T2_in
        G1.remove_edge(5, u)
        assert _cut_PT(u, v, gparams, sparams)

        # Compensate in G2
        G2.remove_edge(mapped[5], v)
        assert not _cut_PT(u, v, gparams, sparams)

        # Change intersection between G2[v] and T2_tilde, so it's not the same as the one between G1[u] and T1_tilde
        G2.remove_edge(v, mapped[6])
        assert _cut_PT(u, v, gparams, sparams)

        # Compensate in G1
        G1.remove_edge(u, 6)
        assert not _cut_PT(u, v, gparams, sparams)

        # Add disconnected nodes, which will form the new Ti_tilde
        G1.add_nodes_from([6, 7, 8])
        G2.add_nodes_from(["g", "y", "z"])
        sparams.T1_tilde.update({6, 7, 8})
        sparams.T2_tilde.update({"g", "y", "z"})

        # Rebuild the label mappings/groups since new nodes were added.
        l1 = {n: "blue" for n in G1.nodes()}
        l2 = {n: "blue" for n in G2.nodes()}
        gparams = _GraphParameters(
            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
        )

        assert not _cut_PT(u, v, gparams, sparams)

    def test_cut_different_labels(self):
        """Directed, labeled cut test for the candidate pair (20, "x"):
        label flips and edge changes must cut until mirrored on the other
        side."""
        G1 = nx.DiGraph(
            [
                (0, 1),
                (1, 2),
                (14, 1),
                (0, 4),
                (1, 5),
                (2, 6),
                (3, 7),
                (3, 6),
                (10, 4),
                (4, 9),
                (6, 10),
                (20, 9),
                (20, 15),
                (20, 12),
                (20, 11),
                (12, 13),
                (11, 13),
                (20, 8),
                (20, 3),
                (20, 5),
                (0, 20),
            ]
        )
        mapped = {
            0: "a",
            1: "b",
            2: "c",
            3: "d",
            4: "e",
            5: "f",
            6: "g",
            7: "h",
            8: "i",
            9: "j",
            10: "k",
            11: "l",
            12: "m",
            13: "n",
            14: "o",
            15: "p",
            20: "x",
        }
        G2 = nx.relabel_nodes(G1, mapped)

        # Default label "none"; selected nodes get distinguishing colors below.
        l1 = {n: "none" for n in G1.nodes()}
        l2 = {}

        l1.update(
            {
                9: "blue",
                15: "blue",
                12: "blue",
                11: "green",
                3: "green",
                8: "red",
                0: "red",
                5: "yellow",
            }
        )
        # Mirror G1's labeling onto G2 through the isomorphism "mapped".
        l2.update({mapped[n]: l for n, l in l1.items()})

        gparams = _GraphParameters(
            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
        )
        sparams = _StateParameters(
            {0: "a", 1: "b", 2: "c", 3: "d"},
            {"a": 0, "b": 1, "c": 2, "d": 3},
            {4, 5, 6, 7, 20},
            {14, 20},
            {9, 10, 15, 12, 11, 13, 8},
            None,
            {"e", "f", "g", "x"},
            {"o", "x"},
            {"j", "k", "l", "m", "n", "i", "p"},
            None,
        )

        u, v = 20, "x"
        assert not _cut_PT(u, v, gparams, sparams)

        # Change the orientation of the labels on neighbors of u compared to neighbors of v. Leave the structure intact
        l1.update({9: "red"})
        assert _cut_PT(u, v, gparams, sparams)

        # compensate in G2
        l2.update({mapped[9]: "red"})
        assert not _cut_PT(u, v, gparams, sparams)

        # Change the intersection of G1[u] and T1_out
        G1.add_edge(u, 4)
        assert _cut_PT(u, v, gparams, sparams)

        # Same for G2[v] and T2_out
        G2.add_edge(v, mapped[4])
        assert not _cut_PT(u, v, gparams, sparams)

        # Change the intersection of G1[u] and T1_in
        G1.add_edge(u, 14)
        assert _cut_PT(u, v, gparams, sparams)

        # Same for G2[v] and T2_in
        G2.add_edge(v, mapped[14])
        assert not _cut_PT(u, v, gparams, sparams)

        # Change the intersection of G2[v] and T2_tilde
        G2.remove_edge(v, mapped[8])
        assert _cut_PT(u, v, gparams, sparams)

        # Same for G1[u] and T1_tilde
        G1.remove_edge(u, 8)
        assert not _cut_PT(u, v, gparams, sparams)

        # Place 8 and mapped[8] in T1 and T2 respectively, by connecting it to covered nodes
        G1.add_edge(8, 3)
        G2.add_edge(mapped[8], mapped[3])
        sparams.T1.add(8)
        sparams.T2.add(mapped[8])
        sparams.T1_tilde.remove(8)
        sparams.T2_tilde.remove(mapped[8])

        assert not _cut_PT(u, v, gparams, sparams)

        # Remove neighbor of u from T1
        G1.remove_node(5)
        l1.pop(5)
        sparams.T1.remove(5)
        assert _cut_PT(u, v, gparams, sparams)

        # Same in G2
        G2.remove_node(mapped[5])
        l2.pop(mapped[5])
        sparams.T2.remove(mapped[5])
        assert not _cut_PT(u, v, gparams, sparams)

    def test_predecessor_T1_in_fail(self):
        """A deliberately incomplete T2_in (missing mapped[4] = "e") must
        trigger a cut for (6, "g"); adding it back restores feasibility."""
        G1 = nx.DiGraph(
            [(0, 1), (0, 3), (4, 0), (1, 5), (5, 2), (3, 6), (4, 6), (6, 5)]
        )
        mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g"}
        G2 = nx.relabel_nodes(G1, mapped)
        l1 = {n: "blue" for n in G1.nodes()}
        l2 = {n: "blue" for n in G2.nodes()}

        gparams = _GraphParameters(
            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
        )
        sparams = _StateParameters(
            {0: "a", 1: "b", 2: "c"},
            {"a": 0, "b": 1, "c": 2},
            {3, 5},
            {4, 5},
            {6},
            None,
            {"d", "f"},
            {"f"},  # mapped[4] is missing from T2_in
            {"g"},
            None,
        )

        u, v = 6, "g"
        assert _cut_PT(u, v, gparams, sparams)

        sparams.T2_in.add("e")
        assert not _cut_PT(u, v, gparams, sparams)
2781
+
2782
+
2783
class TestGraphTinoutUpdating:
    """Tests for frontier-set bookkeeping on undirected graphs:
    _update_Tinout when a node pair enters the mapping, and
    _restore_Tinout when a pair is popped during backtracking.

    The fixture is a 10-node graph (node 0 isolated) and its relabeled
    copy G2; "mapped" is the ground-truth isomorphism between them.
    """

    edges = [
        (1, 3),
        (2, 3),
        (3, 4),
        (4, 9),
        (4, 5),
        (3, 9),
        (5, 8),
        (5, 7),
        (8, 7),
        (6, 7),
    ]
    mapped = {
        0: "x",
        1: "a",
        2: "b",
        3: "c",
        4: "d",
        5: "e",
        6: "f",
        7: "g",
        8: "h",
        9: "i",
    }
    G1 = nx.Graph()
    G1.add_edges_from(edges)
    G1.add_node(0)  # isolated node: starts (and, on full restore, ends) in T1_tilde
    G2 = nx.relabel_nodes(G1, mapping=mapped)

    def test_updating(self):
        """Extend the mapping node by node and verify that T1/T2 (uncovered
        neighbors of covered nodes) and T1_tilde/T2_tilde (everything else
        uncovered) are updated correctly after each step."""
        G2_degree = dict(self.G2.degree)
        gparams, sparams = _initialize_parameters(self.G1, self.G2, G2_degree)
        # Undirected: the "in" slots of the state are unused (ignored here).
        m, m_rev, T1, _, T1_tilde, _, T2, _, T2_tilde, _ = sparams

        # Add node to the mapping
        m[4] = self.mapped[4]
        m_rev[self.mapped[4]] = 4
        _update_Tinout(4, self.mapped[4], gparams, sparams)

        assert T1 == {3, 5, 9}
        assert T2 == {"c", "i", "e"}
        assert T1_tilde == {0, 1, 2, 6, 7, 8}
        assert T2_tilde == {"x", "a", "b", "f", "g", "h"}

        # Add node to the mapping
        m[5] = self.mapped[5]
        m_rev.update({self.mapped[5]: 5})
        _update_Tinout(5, self.mapped[5], gparams, sparams)

        assert T1 == {3, 9, 8, 7}
        assert T2 == {"c", "i", "h", "g"}
        assert T1_tilde == {0, 1, 2, 6}
        assert T2_tilde == {"x", "a", "b", "f"}

        # Add node to the mapping
        m[6] = self.mapped[6]
        m_rev.update({self.mapped[6]: 6})
        _update_Tinout(6, self.mapped[6], gparams, sparams)

        assert T1 == {3, 9, 8, 7}
        assert T2 == {"c", "i", "h", "g"}
        assert T1_tilde == {0, 1, 2}
        assert T2_tilde == {"x", "a", "b"}

        # Add node to the mapping
        m[3] = self.mapped[3]
        m_rev.update({self.mapped[3]: 3})
        _update_Tinout(3, self.mapped[3], gparams, sparams)

        assert T1 == {1, 2, 9, 8, 7}
        assert T2 == {"a", "b", "i", "h", "g"}
        assert T1_tilde == {0}
        assert T2_tilde == {"x"}

        # Add node to the mapping (the isolated node 0 leaves the tilde sets)
        m[0] = self.mapped[0]
        m_rev.update({self.mapped[0]: 0})
        _update_Tinout(0, self.mapped[0], gparams, sparams)

        assert T1 == {1, 2, 9, 8, 7}
        assert T2 == {"a", "b", "i", "h", "g"}
        assert T1_tilde == set()
        assert T2_tilde == set()

    def test_restoring(self):
        """Start from a fully-advanced state and pop nodes from the mapping
        one by one, checking that _restore_Tinout rebuilds the frontier
        sets down to the initial all-tilde state."""
        m = {0: "x", 3: "c", 4: "d", 5: "e", 6: "f"}
        m_rev = {"x": 0, "c": 3, "d": 4, "e": 5, "f": 6}

        T1 = {1, 2, 7, 9, 8}
        T2 = {"a", "b", "g", "i", "h"}
        T1_tilde = set()
        T2_tilde = set()

        gparams = _GraphParameters(self.G1, self.G2, {}, {}, {}, {}, {})
        sparams = _StateParameters(
            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
        )

        # Remove a node from the mapping
        m.pop(0)
        m_rev.pop("x")
        _restore_Tinout(0, self.mapped[0], gparams, sparams)

        assert T1 == {1, 2, 7, 9, 8}
        assert T2 == {"a", "b", "g", "i", "h"}
        assert T1_tilde == {0}
        assert T2_tilde == {"x"}

        # Remove a node from the mapping
        m.pop(6)
        m_rev.pop("f")
        _restore_Tinout(6, self.mapped[6], gparams, sparams)

        assert T1 == {1, 2, 7, 9, 8}
        assert T2 == {"a", "b", "g", "i", "h"}
        assert T1_tilde == {0, 6}
        assert T2_tilde == {"x", "f"}

        # Remove a node from the mapping
        m.pop(3)
        m_rev.pop("c")
        _restore_Tinout(3, self.mapped[3], gparams, sparams)

        assert T1 == {7, 9, 8, 3}
        assert T2 == {"g", "i", "h", "c"}
        assert T1_tilde == {0, 6, 1, 2}
        assert T2_tilde == {"x", "f", "a", "b"}

        # Remove a node from the mapping
        m.pop(5)
        m_rev.pop("e")
        _restore_Tinout(5, self.mapped[5], gparams, sparams)

        assert T1 == {9, 3, 5}
        assert T2 == {"i", "c", "e"}
        assert T1_tilde == {0, 6, 1, 2, 7, 8}
        assert T2_tilde == {"x", "f", "a", "b", "g", "h"}

        # Remove a node from the mapping (back to the empty-mapping state)
        m.pop(4)
        m_rev.pop("d")
        _restore_Tinout(4, self.mapped[4], gparams, sparams)

        assert T1 == set()
        assert T2 == set()
        assert T1_tilde == set(self.G1.nodes())
        assert T2_tilde == set(self.G2.nodes())
2931
+
2932
+
2933
class TestDiGraphTinoutUpdating:
    """Exercise _update_Tinout / _restore_Tinout_Di on a fixed directed graph.

    The T*-sets are the VF2++ frontier sets: T1_out/T1_in are G1 nodes
    reachable from / reaching the current partial mapping, T1_tilde the
    uncovered remainder (same for G2 with the T2_* sets).
    """

    # Directed edge list of the 10-node test graph; node 0 is isolated.
    edges = [
        (1, 3),
        (3, 2),
        (3, 4),
        (4, 9),
        (4, 5),
        (3, 9),
        (5, 8),
        (5, 7),
        (8, 7),
        (7, 6),
    ]
    # Relabeling used to build G2 as an isomorphic copy of G1.
    mapped = {
        0: "x",
        1: "a",
        2: "b",
        3: "c",
        4: "d",
        5: "e",
        6: "f",
        7: "g",
        8: "h",
        9: "i",
    }
    G1 = nx.DiGraph(edges)
    G1.add_node(0)
    G2 = nx.relabel_nodes(G1, mapping=mapped)

    def test_updating(self):
        """Grow the mapping node by node and check the frontier sets."""
        # Directed degree is the (in_degree, out_degree) pair per node.
        G2_degree = {
            n: (in_degree, out_degree)
            for (n, in_degree), (_, out_degree) in zip(
                self.G2.in_degree, self.G2.out_degree
            )
        }
        gparams, sparams = _initialize_parameters(self.G1, self.G2, G2_degree)
        m, m_rev, T1_out, T1_in, T1_tilde, _, T2_out, T2_in, T2_tilde, _ = sparams

        # Add node to the mapping
        m[4] = self.mapped[4]
        m_rev[self.mapped[4]] = 4
        _update_Tinout(4, self.mapped[4], gparams, sparams)

        assert T1_out == {5, 9}
        assert T1_in == {3}
        assert T2_out == {"i", "e"}
        assert T2_in == {"c"}
        assert T1_tilde == {0, 1, 2, 6, 7, 8}
        assert T2_tilde == {"x", "a", "b", "f", "g", "h"}

        # Add node to the mapping
        m[5] = self.mapped[5]
        m_rev[self.mapped[5]] = 5
        _update_Tinout(5, self.mapped[5], gparams, sparams)

        assert T1_out == {9, 8, 7}
        assert T1_in == {3}
        assert T2_out == {"i", "g", "h"}
        assert T2_in == {"c"}
        assert T1_tilde == {0, 1, 2, 6}
        assert T2_tilde == {"x", "a", "b", "f"}

        # Add node to the mapping
        m[6] = self.mapped[6]
        m_rev[self.mapped[6]] = 6
        _update_Tinout(6, self.mapped[6], gparams, sparams)

        assert T1_out == {9, 8, 7}
        assert T1_in == {3, 7}
        assert T2_out == {"i", "g", "h"}
        assert T2_in == {"c", "g"}
        assert T1_tilde == {0, 1, 2}
        assert T2_tilde == {"x", "a", "b"}

        # Add node to the mapping
        m[3] = self.mapped[3]
        m_rev[self.mapped[3]] = 3
        _update_Tinout(3, self.mapped[3], gparams, sparams)

        assert T1_out == {9, 8, 7, 2}
        assert T1_in == {7, 1}
        assert T2_out == {"i", "g", "h", "b"}
        assert T2_in == {"g", "a"}
        assert T1_tilde == {0}
        assert T2_tilde == {"x"}

        # Add node to the mapping (isolated node 0 empties the tilde sets)
        m[0] = self.mapped[0]
        m_rev[self.mapped[0]] = 0
        _update_Tinout(0, self.mapped[0], gparams, sparams)

        assert T1_out == {9, 8, 7, 2}
        assert T1_in == {7, 1}
        assert T2_out == {"i", "g", "h", "b"}
        assert T2_in == {"g", "a"}
        assert T1_tilde == set()
        assert T2_tilde == set()

    def test_restoring(self):
        """Shrink a pre-built mapping and check the sets are restored."""
        # Partial mapping reached after mapping nodes 0, 3, 4, 5, 6.
        m = {0: "x", 3: "c", 4: "d", 5: "e", 6: "f"}
        m_rev = {"x": 0, "c": 3, "d": 4, "e": 5, "f": 6}

        T1_out = {2, 7, 9, 8}
        T1_in = {1, 7}
        T2_out = {"b", "g", "i", "h"}
        T2_in = {"a", "g"}
        T1_tilde = set()
        T2_tilde = set()

        # Label/degree caches are irrelevant here, so pass empty dicts.
        gparams = _GraphParameters(self.G1, self.G2, {}, {}, {}, {}, {})
        sparams = _StateParameters(
            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
        )

        # Remove a node from the mapping
        m.pop(0)
        m_rev.pop("x")
        _restore_Tinout_Di(0, self.mapped[0], gparams, sparams)

        assert T1_out == {2, 7, 9, 8}
        assert T1_in == {1, 7}
        assert T2_out == {"b", "g", "i", "h"}
        assert T2_in == {"a", "g"}
        assert T1_tilde == {0}
        assert T2_tilde == {"x"}

        # Remove a node from the mapping
        m.pop(6)
        m_rev.pop("f")
        _restore_Tinout_Di(6, self.mapped[6], gparams, sparams)

        assert T1_out == {2, 9, 8, 7}
        assert T1_in == {1}
        assert T2_out == {"b", "i", "h", "g"}
        assert T2_in == {"a"}
        assert T1_tilde == {0, 6}
        assert T2_tilde == {"x", "f"}

        # Remove a node from the mapping
        m.pop(3)
        m_rev.pop("c")
        _restore_Tinout_Di(3, self.mapped[3], gparams, sparams)

        assert T1_out == {9, 8, 7}
        assert T1_in == {3}
        assert T2_out == {"i", "h", "g"}
        assert T2_in == {"c"}
        assert T1_tilde == {0, 6, 1, 2}
        assert T2_tilde == {"x", "f", "a", "b"}

        # Remove a node from the mapping
        m.pop(5)
        m_rev.pop("e")
        _restore_Tinout_Di(5, self.mapped[5], gparams, sparams)

        assert T1_out == {9, 5}
        assert T1_in == {3}
        assert T2_out == {"i", "e"}
        assert T2_in == {"c"}
        assert T1_tilde == {0, 6, 1, 2, 8, 7}
        assert T2_tilde == {"x", "f", "a", "b", "h", "g"}

        # Remove the last mapped node: everything becomes uncovered again.
        m.pop(4)
        m_rev.pop("d")
        _restore_Tinout_Di(4, self.mapped[4], gparams, sparams)

        assert T1_out == set()
        assert T1_in == set()
        assert T2_out == set()
        assert T2_in == set()
        assert T1_tilde == set(self.G1.nodes())
        assert T2_tilde == set(self.G2.nodes())
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py ADDED
@@ -0,0 +1,200 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Tests for VF2 isomorphism algorithm for weighted graphs.
3
+ """
4
+
5
+ import math
6
+ from operator import eq
7
+
8
+ import networkx as nx
9
+ import networkx.algorithms.isomorphism as iso
10
+
11
+
12
def test_simple():
    """16 simple checks: matched weights succeed, perturbed weights fail.

    For each of the four graph classes, an identical subgraph copy must be
    isomorphic under a numerical weight match; then each combination of
    perturbing a regular edge (mod1) and/or a selfloop (mod2) must break
    the isomorphism.
    """
    # NOTE: removed an unused local `w = "weight"` from the original.
    edges = [(0, 0, 1), (0, 0, 1.5), (0, 1, 2), (1, 0, 3)]
    for g1 in [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]:
        g1.add_weighted_edges_from(edges)
        g2 = g1.subgraph(g1.nodes())
        if g1.is_multigraph():
            em = iso.numerical_multiedge_match("weight", 1)
        else:
            em = iso.numerical_edge_match("weight", 1)
        assert nx.is_isomorphic(g1, g2, edge_match=em)

        for mod1, mod2 in [(False, True), (True, False), (True, True)]:
            # mod1 tests a regular edge
            # mod2 tests a selfloop
            if g2.is_multigraph():
                if mod1:
                    data1 = {0: {"weight": 10}}
                if mod2:
                    data2 = {0: {"weight": 1}, 1: {"weight": 2.5}}
            else:
                if mod1:
                    data1 = {"weight": 10}
                if mod2:
                    data2 = {"weight": 2.5}

            # Fresh copy, then poke the internal adjacency structures to
            # change edge data without touching graph topology.
            g2 = g1.subgraph(g1.nodes()).copy()
            if mod1:
                if not g1.is_directed():
                    g2._adj[1][0] = data1
                    g2._adj[0][1] = data1
                else:
                    g2._succ[1][0] = data1
                    g2._pred[0][1] = data1
            if mod2:
                if not g1.is_directed():
                    g2._adj[0][0] = data2
                else:
                    g2._succ[0][0] = data2
                    g2._pred[0][0] = data2

            assert not nx.is_isomorphic(g1, g2, edge_match=em)
55
+
56
+
57
def test_weightkey():
    """Edge attributes absent from a graph fall back to the matcher default."""
    g1 = nx.DiGraph()
    g1.add_edge("A", "B", weight=1)
    g2 = nx.DiGraph()
    g2.add_edge("C", "D", weight=0)

    # Without an edge_match, edge data is ignored entirely.
    assert nx.is_isomorphic(g1, g2)

    # An attribute neither graph carries compares equal via the default.
    em = iso.numerical_edge_match("nonexistent attribute", 1)
    assert nx.is_isomorphic(g1, g2, edge_match=em)

    # Differing weights (1 vs 0) are detected.
    em = iso.numerical_edge_match("weight", 1)
    assert not nx.is_isomorphic(g1, g2, edge_match=em)

    # A missing weight compares equal to an explicit weight of 1 (the default).
    g2 = nx.DiGraph()
    g2.add_edge("C", "D")
    assert nx.is_isomorphic(g1, g2, edge_match=em)
73
+
74
+
75
class TestNodeMatch_Graph:
    """Node-color and edge-weight matching on two single-edge graphs."""

    def setup_method(self):
        self.g1 = nx.Graph()
        self.g2 = nx.Graph()
        self.build()

    def build(self):
        # Categorical matcher on node "color", numerical matcher on edge "weight".
        self.nm = iso.categorical_node_match("color", "")
        self.em = iso.numerical_edge_match("weight", 1)

        # g1: red "A" --(w=1)-- "B"   g2: blue "C" --(w=1)-- "D"
        self.g1.add_node("A", color="red")
        self.g2.add_node("C", color="blue")

        self.g1.add_edge("A", "B", weight=1)
        self.g2.add_edge("C", "D", weight=1)

    def test_noweight_nocolor(self):
        # Ignoring attributes, the graphs are plainly isomorphic.
        assert nx.is_isomorphic(self.g1, self.g2)

    def test_color1(self):
        # red vs blue nodes do not match
        assert not nx.is_isomorphic(self.g1, self.g2, node_match=self.nm)

    def test_color2(self):
        self.g1.nodes["A"]["color"] = "blue"
        assert nx.is_isomorphic(self.g1, self.g2, node_match=self.nm)

    def test_weight1(self):
        assert nx.is_isomorphic(self.g1, self.g2, edge_match=self.em)

    def test_weight2(self):
        self.g1.add_edge("A", "B", weight=2)
        assert not nx.is_isomorphic(self.g1, self.g2, edge_match=self.em)

    def test_colorsandweights1(self):
        # NOTE: local renamed from `iso` to `matched` — the original name
        # shadowed the imported `networkx.algorithms.isomorphism` module.
        matched = nx.is_isomorphic(
            self.g1, self.g2, node_match=self.nm, edge_match=self.em
        )
        assert not matched

    def test_colorsandweights2(self):
        self.g1.nodes["A"]["color"] = "blue"
        matched = nx.is_isomorphic(
            self.g1, self.g2, node_match=self.nm, edge_match=self.em
        )
        assert matched

    def test_colorsandweights3(self):
        # make the weights disagree
        self.g1.add_edge("A", "B", weight=2)
        assert not nx.is_isomorphic(
            self.g1, self.g2, node_match=self.nm, edge_match=self.em
        )
123
+
124
+
125
class TestEdgeMatch_MultiGraph:
    """Numerical, categorical and generic multiedge matching on triple edges."""

    def setup_method(self):
        self.g1 = nx.MultiGraph()
        self.g2 = nx.MultiGraph()
        self.GM = iso.MultiGraphMatcher
        self.build()

    def build(self):
        g1 = self.g1
        g2 = self.g2

        # We will assume integer weights only.
        g1.add_edge("A", "B", color="green", weight=0, size=0.5)
        g1.add_edge("A", "B", color="red", weight=1, size=0.35)
        g1.add_edge("A", "B", color="red", weight=2, size=0.65)

        g2.add_edge("C", "D", color="green", weight=1, size=0.5)
        g2.add_edge("C", "D", color="red", weight=0, size=0.45)
        g2.add_edge("C", "D", color="red", weight=2, size=0.65)

        if g1.is_multigraph():
            self.em = iso.numerical_multiedge_match("weight", 1)
            self.emc = iso.categorical_multiedge_match("color", "")
            self.emcm = iso.categorical_multiedge_match(["color", "weight"], ["", 1])
            self.emg1 = iso.generic_multiedge_match("color", "red", eq)
            self.emg2 = iso.generic_multiedge_match(
                ["color", "weight", "size"],
                ["red", 1, 0.5],
                [eq, eq, math.isclose],
            )
        else:
            self.em = iso.numerical_edge_match("weight", 1)
            self.emc = iso.categorical_edge_match("color", "")
            self.emcm = iso.categorical_edge_match(["color", "weight"], ["", 1])
            # Fixed for consistency: the original assigned
            # generic_multiedge_match here although every other matcher in
            # this (non-multigraph) branch is a plain-edge matcher. The
            # branch is unreachable for this class and its subclasses, so
            # behavior is unchanged.
            self.emg1 = iso.generic_edge_match("color", "red", eq)
            self.emg2 = iso.generic_edge_match(
                ["color", "weight", "size"],
                ["red", 1, 0.5],
                [eq, eq, math.isclose],
            )

    def test_weights_only(self):
        assert nx.is_isomorphic(self.g1, self.g2, edge_match=self.em)

    def test_colors_only(self):
        gm = self.GM(self.g1, self.g2, edge_match=self.emc)
        assert gm.is_isomorphic()

    def test_colorsandweights(self):
        gm = self.GM(self.g1, self.g2, edge_match=self.emcm)
        assert not gm.is_isomorphic()

    def test_generic1(self):
        gm = self.GM(self.g1, self.g2, edge_match=self.emg1)
        assert gm.is_isomorphic()

    def test_generic2(self):
        gm = self.GM(self.g1, self.g2, edge_match=self.emg2)
        assert not gm.is_isomorphic()
184
+
185
+
186
class TestEdgeMatch_DiGraph(TestNodeMatch_Graph):
    """Re-run the node/edge matching tests on directed graphs."""

    def setup_method(self):
        super().setup_method()
        self.g1 = nx.DiGraph()
        self.g2 = nx.DiGraph()
        self.build()
192
+
193
+
194
class TestEdgeMatch_MultiDiGraph(TestEdgeMatch_MultiGraph):
    """Re-run the multiedge matching tests on directed multigraphs."""

    def setup_method(self):
        super().setup_method()
        self.g1 = nx.MultiDiGraph()
        self.g2 = nx.MultiDiGraph()
        self.GM = iso.MultiDiGraphMatcher
        self.build()
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tree_isomorphism.py ADDED
@@ -0,0 +1,284 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ An algorithm for finding if two undirected trees are isomorphic,
3
+ and if so returns an isomorphism between the two sets of nodes.
4
+
5
+ This algorithm uses a routine to tell if two rooted trees (trees with a
6
+ specified root node) are isomorphic, which may be independently useful.
7
+
8
+ This implements an algorithm from:
9
+ The Design and Analysis of Computer Algorithms
10
+ by Aho, Hopcroft, and Ullman
11
+ Addison-Wesley Publishing 1974
12
+ Example 3.2 pp. 84-86.
13
+
14
+ A more understandable version of this algorithm is described in:
15
+ Homework Assignment 5
16
+ McGill University SOCS 308-250B, Winter 2002
17
+ by Matthew Suderman
18
+ http://crypto.cs.mcgill.ca/~crepeau/CS250/2004/HW5+.pdf
19
+ """
20
+
21
+ import networkx as nx
22
+ from networkx.utils.decorators import not_implemented_for
23
+
24
+ __all__ = ["rooted_tree_isomorphism", "tree_isomorphism"]
25
+
26
+
27
@nx._dispatchable(graphs={"t1": 0, "t2": 2}, returns_graph=True)
def root_trees(t1, root1, t2, root2):
    """Combine free trees `t1` and `t2` into a single rooted digraph.

    The trees are rooted at `root1` and `root2` respectively and their
    nodes are renamed to consecutive integers so that names are unique
    across both trees: a new "fake" super-root is node 0, `t1` becomes
    nodes 1 .. n, and `t2` becomes nodes n+1 .. 2n.

    Returns the tuple ``(dT, namemap, newroot1, newroot2)`` where `dT` is
    the combined digraph, `namemap` maps new integer names back to the
    original node names, and `newroot1`/`newroot2` are the renamed roots.
    """

    dT = nx.DiGraph()

    newroot1 = 1  # left root will be 1
    newroot2 = nx.number_of_nodes(t1) + 1  # right will be n+1

    # may be overlap in node names here so need separate maps
    # given the old name, what is the new
    namemap1 = {root1: newroot1}
    namemap2 = {root2: newroot2}

    # add an edge from our new root to root1 and root2
    dT.add_edge(0, namemap1[root1])
    dT.add_edge(0, namemap2[root2])

    # BFS from each root assigns new names in discovery order, which
    # guarantees a parent is renamed before any of its children.
    for i, (v1, v2) in enumerate(nx.bfs_edges(t1, root1)):
        namemap1[v2] = i + namemap1[root1] + 1
        dT.add_edge(namemap1[v1], namemap1[v2])

    for i, (v1, v2) in enumerate(nx.bfs_edges(t2, root2)):
        namemap2[v2] = i + namemap2[root2] + 1
        dT.add_edge(namemap2[v1], namemap2[v2])

    # now we really want the inverse of namemap1 and namemap2
    # giving the old name given the new
    # since the values of namemap1 and namemap2 are unique
    # there won't be collisions
    namemap = {}
    for old, new in namemap1.items():
        namemap[new] = old
    for old, new in namemap2.items():
        namemap[new] = old

    return (dT, namemap, newroot1, newroot2)
72
+
73
+
74
# figure out the level of each node, with 0 at root
@nx._dispatchable
def assign_levels(G, root):
    """Return a dict mapping each node of ``G`` to its BFS depth from ``root``."""
    depth = {root: 0}
    # bfs_edges yields each edge parent-first, so the parent's depth is
    # always known before its child is seen.
    for parent, child in nx.bfs_edges(G, root):
        depth[child] = depth[parent] + 1
    return depth
83
+
84
+
85
# now group the nodes at each level
def group_by_levels(levels):
    """Invert a node->level mapping into a level->list-of-nodes dict.

    Parameters
    ----------
    levels : dict
        Maps each node to its integer level.

    Returns
    -------
    dict
        Maps each level to the list of nodes at that level, in the
        iteration order of `levels`.
    """
    L = {}
    for n, lev in levels.items():
        # setdefault replaces the original explicit membership check
        L.setdefault(lev, []).append(n)
    return L
94
+
95
+
96
# now lets get the isomorphism by walking the ordered_children
def generate_isomorphism(v, w, M, ordered_children):
    """Append to `M` the node pairing of the subtrees rooted at `v` and `w`.

    `v` must come from tree 1 and `w` from tree 2 (hence ``v < w`` given the
    renumbering scheme). Children are paired positionally, so the caller must
    have sorted `ordered_children` into canonical order beforehand.
    """
    # make sure tree1 comes first
    assert v < w
    M.append((v, w))
    # The original used enumerate() but never used the index; plain zip
    # expresses the positional pairing directly.
    for x, y in zip(ordered_children[v], ordered_children[w]):
        generate_isomorphism(x, y, M, ordered_children)
103
+
104
+
105
@nx._dispatchable(graphs={"t1": 0, "t2": 2})
def rooted_tree_isomorphism(t1, root1, t2, root2):
    """
    Given two rooted trees `t1` and `t2`,
    with roots `root1` and `root2` respectively
    this routine will determine if they are isomorphic.

    These trees may be either directed or undirected,
    but if they are directed, all edges should flow from the root.

    It returns the isomorphism, a mapping of the nodes of `t1` onto the nodes
    of `t2`, such that two trees are then identical.

    Note that two trees may have more than one isomorphism, and this
    routine just returns one valid mapping.

    Parameters
    ----------
    t1 : NetworkX graph
        One of the trees being compared

    root1 : a node of `t1` which is the root of the tree

    t2 : undirected NetworkX graph
        The other tree being compared

    root2 : a node of `t2` which is the root of the tree

    This is a subroutine used to implement `tree_isomorphism`, but will
    be somewhat faster if you already have rooted trees.

    Returns
    -------
    isomorphism : list
        A list of pairs in which the left element is a node in `t1`
        and the right element is a node in `t2`. The pairs are in
        arbitrary order. If the nodes in one tree is mapped to the names in
        the other, then trees will be identical. Note that an isomorphism
        will not necessarily be unique.

        If `t1` and `t2` are not isomorphic, then it returns the empty list.
    """

    assert nx.is_tree(t1)
    assert nx.is_tree(t2)

    # get the rooted tree formed by combining them
    # with unique names
    (dT, namemap, newroot1, newroot2) = root_trees(t1, root1, t2, root2)

    # compute the distance from the root, with 0 for our fake super-root
    levels = assign_levels(dT, 0)

    # height
    h = max(levels.values())

    # collect nodes into a dict by level
    L = group_by_levels(levels)

    # each node has a label, initially set to 0
    label = {v: 0 for v in dT}
    # and also ordered_labels and ordered_children
    # which will store ordered tuples
    ordered_labels = {v: () for v in dT}
    ordered_children = {v: () for v in dT}

    # nothing to do on last level so start on h-1
    # also nothing to do for our fake level 0, so skip that
    for i in range(h - 1, 0, -1):
        # update the ordered_labels and ordered_children
        # for any children
        for v in L[i]:
            # nothing to do if no children
            if dT.out_degree(v) > 0:
                # get all the pairs of labels and nodes of children
                # and sort by labels
                s = sorted((label[u], u) for u in dT.successors(v))

                # invert to give a list of two tuples
                # the sorted labels, and the corresponding children
                ordered_labels[v], ordered_children[v] = list(zip(*s))

        # now collect and sort the sorted ordered_labels
        # for all nodes in L[i], carrying along the node
        forlabel = sorted((ordered_labels[v], v) for v in L[i])

        # now assign labels to these nodes, according to the sorted order
        # starting from 0, where identical ordered_labels get the same label
        # NOTE: the inner index was renamed from `i` to `idx` — the original
        # shadowed the outer level loop variable (harmless because the outer
        # loop reassigns it, but confusing to read).
        current = 0
        for idx, (ol, v) in enumerate(forlabel):
            # advance to next label if not 0, and different from previous
            if (idx != 0) and (ol != forlabel[idx - 1][0]):
                current += 1
            label[v] = current

    # they are isomorphic if the labels of newroot1 and newroot2 are 0
    isomorphism = []
    if label[newroot1] == 0 and label[newroot2] == 0:
        generate_isomorphism(newroot1, newroot2, isomorphism, ordered_children)

    # get the mapping back in terms of the old names
    # return in sorted order for neatness
    isomorphism = [(namemap[u], namemap[v]) for (u, v) in isomorphism]

    return isomorphism
210
+
211
+
212
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(graphs={"t1": 0, "t2": 1})
def tree_isomorphism(t1, t2):
    """
    Given two undirected (or free) trees `t1` and `t2`,
    this routine will determine if they are isomorphic.
    It returns the isomorphism, a mapping of the nodes of `t1` onto the nodes
    of `t2`, such that the two trees are then identical.

    Note that two trees may have more than one isomorphism, and this
    routine just returns one valid mapping.

    Parameters
    ----------
    t1 : undirected NetworkX graph
        One of the trees being compared

    t2 : undirected NetworkX graph
        The other tree being compared

    Returns
    -------
    isomorphism : list
        A list of pairs in which the left element is a node in `t1`
        and the right element is a node in `t2`. The pairs are in
        arbitrary order. If the nodes in one tree are mapped to the names in
        the other, then the trees will be identical. Note that an isomorphism
        will not necessarily be unique.

        If `t1` and `t2` are not isomorphic, then it returns the empty list.

    Notes
    -----
    This runs in O(n*log(n)) time for trees with n nodes.
    """

    assert nx.is_tree(t1)
    assert nx.is_tree(t2)

    # To be isomorphic, t1 and t2 must have the same number of nodes.
    if nx.number_of_nodes(t1) != nx.number_of_nodes(t2):
        return []

    # Another shortcut is that the sorted degree sequences need to be the same.
    degree_sequence1 = sorted(d for (n, d) in t1.degree())
    degree_sequence2 = sorted(d for (n, d) in t2.degree())

    if degree_sequence1 != degree_sequence2:
        return []

    # A tree can have either 1 or 2 centers.
    # If the number doesn't match then t1 and t2 are not isomorphic.
    center1 = nx.center(t1)
    center2 = nx.center(t2)

    if len(center1) != len(center2):
        return []

    # If there is only 1 center in each, then use it.
    if len(center1) == 1:
        return rooted_tree_isomorphism(t1, center1[0], t2, center2[0])

    # If there both have 2 centers, then try the first for t1
    # with the first for t2.
    attempts = rooted_tree_isomorphism(t1, center1[0], t2, center2[0])

    # If that worked we're done.
    if len(attempts) > 0:
        return attempts

    # Otherwise, try center1[0] with the center2[1], and see if that works.
    # (Any isomorphism must map center1[0] to one of t2's two centers, so
    # these two attempts are exhaustive.)
    return rooted_tree_isomorphism(t1, center1[0], t2, center2[1])
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/vf2pp.py ADDED
@@ -0,0 +1,1075 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ ***************
3
+ VF2++ Algorithm
4
+ ***************
5
+
6
+ An implementation of the VF2++ algorithm [1]_ for Graph Isomorphism testing.
7
+
8
+ The simplest interface to use this module is to call:
9
+
10
+ `vf2pp_is_isomorphic`: to check whether two graphs are isomorphic.
11
+ `vf2pp_isomorphism`: to obtain the node mapping between two graphs,
12
+ in case they are isomorphic.
13
+ `vf2pp_all_isomorphisms`: to generate all possible mappings between two graphs,
14
+ if isomorphic.
15
+
16
+ Introduction
17
+ ------------
18
+ The VF2++ algorithm, follows a similar logic to that of VF2, while also
19
+ introducing new easy-to-check cutting rules and determining the optimal access
20
+ order of nodes. It is also implemented in a non-recursive manner, which saves
21
+ both time and space, when compared to its previous counterpart.
22
+
23
+ The optimal node ordering is obtained after taking into consideration both the
24
+ degree but also the label rarity of each node.
25
+ This way we place the nodes that are more likely to match, first in the order,
26
+ thus examining the most promising branches in the beginning.
27
+ The rules also consider node labels, making it easier to prune unfruitful
28
+ branches early in the process.
29
+
30
+ Examples
31
+ --------
32
+
33
+ Suppose G1 and G2 are Isomorphic Graphs. Verification is as follows:
34
+
35
+ Without node labels:
36
+
37
+ >>> import networkx as nx
38
+ >>> G1 = nx.path_graph(4)
39
+ >>> G2 = nx.path_graph(4)
40
+ >>> nx.vf2pp_is_isomorphic(G1, G2, node_label=None)
41
+ True
42
+ >>> nx.vf2pp_isomorphism(G1, G2, node_label=None)
43
+ {1: 1, 2: 2, 0: 0, 3: 3}
44
+
45
+ With node labels:
46
+
47
+ >>> G1 = nx.path_graph(4)
48
+ >>> G2 = nx.path_graph(4)
49
+ >>> mapped = {1: 1, 2: 2, 3: 3, 0: 0}
50
+ >>> nx.set_node_attributes(
51
+ ... G1, dict(zip(G1, ["blue", "red", "green", "yellow"])), "label"
52
+ ... )
53
+ >>> nx.set_node_attributes(
54
+ ... G2,
55
+ ... dict(zip([mapped[u] for u in G1], ["blue", "red", "green", "yellow"])),
56
+ ... "label",
57
+ ... )
58
+ >>> nx.vf2pp_is_isomorphic(G1, G2, node_label="label")
59
+ True
60
+ >>> nx.vf2pp_isomorphism(G1, G2, node_label="label")
61
+ {1: 1, 2: 2, 0: 0, 3: 3}
62
+
63
+ References
64
+ ----------
65
+ .. [1] Jüttner, Alpár & Madarasi, Péter. (2018). "VF2++—An improved subgraph
66
+ isomorphism algorithm". Discrete Applied Mathematics. 242.
67
+ https://doi.org/10.1016/j.dam.2018.02.018
68
+
69
+ """
70
+
71
+ import collections
72
+
73
+ import networkx as nx
74
+
75
+ __all__ = ["vf2pp_isomorphism", "vf2pp_is_isomorphic", "vf2pp_all_isomorphisms"]
76
+
77
# Read-only data shared by every state of the VF2++ search:
# the two graphs, their per-node labels, label -> nodes indexes for both
# graphs, and a degree -> nodes index for G2.
_GraphParameters = collections.namedtuple(
    "_GraphParameters",
    [
        "G1",
        "G2",
        "G1_labels",
        "G2_labels",
        "nodes_of_G1Labels",
        "nodes_of_G2Labels",
        "G2_nodes_of_degree",
    ],
)

# Mutable state of the search: the partial mapping (and its inverse) plus
# the frontier sets — T1/T2 (neighbors of mapped nodes), T1_in/T2_in
# (incoming frontier, used for directed graphs), and T1_tilde/T2_tilde
# (nodes not yet covered by the mapping or a frontier).
_StateParameters = collections.namedtuple(
    "_StateParameters",
    [
        "mapping",
        "reverse_mapping",
        "T1",
        "T1_in",
        "T1_tilde",
        "T1_tilde_in",
        "T2",
        "T2_in",
        "T2_tilde",
        "T2_tilde_in",
    ],
)
105
+
106
+
107
@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"})
def vf2pp_isomorphism(G1, G2, node_label=None, default_label=None):
    """Return an isomorphic mapping between `G1` and `G2` if it exists.

    Parameters
    ----------
    G1, G2 : NetworkX Graph or MultiGraph instances.
        The two graphs to check for isomorphism.

    node_label : str, optional
        The name of the node attribute to be used when comparing nodes.
        The default is `None`, meaning node attributes are not considered
        in the comparison. Any node that doesn't have the `node_label`
        attribute uses `default_label` instead.

    default_label : scalar
        Default value to use when a node doesn't have an attribute
        named `node_label`. Default is `None`.

    Returns
    -------
    dict or None
        Node mapping if the two graphs are isomorphic. None otherwise.
    """
    # next() with a default replaces the original try/except StopIteration:
    # the first yielded mapping is returned, or None if the generator is empty.
    return next(vf2pp_all_isomorphisms(G1, G2, node_label, default_label), None)
136
+
137
+
138
@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"})
def vf2pp_is_isomorphic(G1, G2, node_label=None, default_label=None):
    """Examines whether G1 and G2 are isomorphic.

    Parameters
    ----------
    G1, G2 : NetworkX Graph or MultiGraph instances.
        The two graphs to check for isomorphism.

    node_label : str, optional
        The name of the node attribute to be used when comparing nodes.
        The default is `None`, meaning node attributes are not considered
        in the comparison. Any node that doesn't have the `node_label`
        attribute uses `default_label` instead.

    default_label : scalar
        Default value to use when a node doesn't have an attribute
        named `node_label`. Default is `None`.

    Returns
    -------
    bool
        True if the two graphs are isomorphic, False otherwise.
    """
    # Direct boolean expression replaces the original if/return True/return False.
    return vf2pp_isomorphism(G1, G2, node_label, default_label) is not None
165
+
166
+
167
@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"})
def vf2pp_all_isomorphisms(G1, G2, node_label=None, default_label=None):
    """Yields all the possible mappings between G1 and G2.

    Parameters
    ----------
    G1, G2 : NetworkX Graph or MultiGraph instances.
        The two graphs to check for isomorphism.

    node_label : str, optional
        The name of the node attribute to be used when comparing nodes.
        The default is `None`, meaning node attributes are not considered
        in the comparison. Any node that doesn't have the `node_label`
        attribute uses `default_label` instead.

    default_label : scalar
        Default value to use when a node doesn't have an attribute
        named `node_label`. Default is `None`.

    Yields
    ------
    dict
        Isomorphic mapping between the nodes in `G1` and `G2`.
    """
    # NOTE: inside a generator these `return False` statements simply end
    # iteration (the value is discarded) — callers just see an empty generator.
    if G1.number_of_nodes() == 0 or G2.number_of_nodes() == 0:
        return False

    # Create the degree dicts based on graph type
    if G1.is_directed():
        G1_degree = {
            n: (in_degree, out_degree)
            for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
        }
        G2_degree = {
            n: (in_degree, out_degree)
            for (n, in_degree), (_, out_degree) in zip(G2.in_degree, G2.out_degree)
        }
    else:
        G1_degree = dict(G1.degree)
        G2_degree = dict(G2.degree)

    # Select the undirected or directed variants of the helper routines.
    if not G1.is_directed():
        find_candidates = _find_candidates
        restore_Tinout = _restore_Tinout
    else:
        find_candidates = _find_candidates_Di
        restore_Tinout = _restore_Tinout_Di

    # Check that both graphs have the same number of nodes and degree sequence
    if G1.order() != G2.order():
        return False
    if sorted(G1_degree.values()) != sorted(G2_degree.values()):
        return False

    # Initialize parameters and cache necessary information about degree and labels
    graph_params, state_params = _initialize_parameters(
        G1, G2, G2_degree, node_label, default_label
    )

    # Check if G1 and G2 have the same labels, and that number of nodes per label is equal between the two graphs
    if not _precheck_label_properties(graph_params):
        return False

    # Calculate the optimal node ordering
    node_order = _matching_order(graph_params)

    # Initialize the stack of (node, candidate-iterator) pairs; the stack
    # depth mirrors the depth of the (non-recursive) DFS over partial mappings.
    stack = []
    candidates = iter(
        find_candidates(node_order[0], graph_params, state_params, G1_degree)
    )
    stack.append((node_order[0], candidates))

    mapping = state_params.mapping
    reverse_mapping = state_params.reverse_mapping

    # Index of the node from the order, currently being examined
    matching_node = 1

    while stack:
        current_node, candidate_nodes = stack[-1]

        try:
            candidate = next(candidate_nodes)
        except StopIteration:
            # If no remaining candidates, return to a previous state, and follow another branch
            stack.pop()
            matching_node -= 1
            if stack:
                # Pop the previously added u-v pair, and look for a different candidate _v for u
                popped_node1, _ = stack[-1]
                popped_node2 = mapping[popped_node1]
                mapping.pop(popped_node1)
                reverse_mapping.pop(popped_node2)
                restore_Tinout(popped_node1, popped_node2, graph_params, state_params)
            continue

        if _feasibility(current_node, candidate, graph_params, state_params):
            # Terminate if mapping is extended to its full
            if len(mapping) == G2.number_of_nodes() - 1:
                # Yield a copy so the caller's dict survives backtracking;
                # the shared `mapping` keeps being mutated afterwards.
                cp_mapping = mapping.copy()
                cp_mapping[current_node] = candidate
                yield cp_mapping
                continue

            # Feasibility rules pass, so extend the mapping and update the parameters
            mapping[current_node] = candidate
            reverse_mapping[candidate] = current_node
            _update_Tinout(current_node, candidate, graph_params, state_params)
            # Append the next node and its candidates to the stack
            candidates = iter(
                find_candidates(
                    node_order[matching_node], graph_params, state_params, G1_degree
                )
            )
            stack.append((node_order[matching_node], candidates))
            matching_node += 1
284
+
285
+
286
+ def _precheck_label_properties(graph_params):
287
+ G1, G2, G1_labels, G2_labels, nodes_of_G1Labels, nodes_of_G2Labels, _ = graph_params
288
+ if any(
289
+ label not in nodes_of_G1Labels or len(nodes_of_G1Labels[label]) != len(nodes)
290
+ for label, nodes in nodes_of_G2Labels.items()
291
+ ):
292
+ return False
293
+ return True
294
+
295
+
296
def _initialize_parameters(G1, G2, G2_degree, node_label=None, default_label=-1):
    """Initializes all the necessary parameters for VF2++

    Parameters
    ----------
    G1,G2: NetworkX Graph or MultiGraph instances.
        The two graphs to check for isomorphism or monomorphism

    G2_degree: dict
        Maps every node of G2 to its degree (a plain degree, or an
        (in_degree, out_degree) pair for directed graphs).

    node_label: str or None
        Name of the node attribute holding each node's label. None means the
        graphs are treated as unlabeled.

    default_label: object
        Label used for nodes that lack the ``node_label`` attribute.

    Returns
    -------
    graph_params: namedtuple
        Contains all the Graph-related parameters:

        G1,G2
        G1_labels,G2_labels: dict

    state_params: namedtuple
        Contains all the State-related parameters:

        mapping: dict
            The mapping as extended so far. Maps nodes of G1 to nodes of G2

        reverse_mapping: dict
            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed

        T1, T2: set
            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
            neighbors of nodes that are.

        T1_tilde, T2_tilde: set
            Ti_tilde contains all the nodes from Gi, that are neither in the mapping nor in Ti
    """
    # Resolve every node to a label, falling back to default_label.
    G1_labels = dict(G1.nodes(data=node_label, default=default_label))
    G2_labels = dict(G2.nodes(data=node_label, default=default_label))

    # Precompute label groups for both graphs and degree groups for G2 so the
    # candidate search can intersect cached sets instead of rescanning nodes.
    graph_params = _GraphParameters(
        G1,
        G2,
        G1_labels,
        G2_labels,
        nx.utils.groups(G1_labels),
        nx.utils.groups(G2_labels),
        nx.utils.groups(G2_degree),
    )

    # With an empty mapping no node has a covered neighbor: the frontier sets
    # (Ti, Ti_in) start empty and every node belongs to the unseen Ti_tilde.
    T1, T1_in = set(), set()
    T2, T2_in = set(), set()
    if G1.is_directed():
        T1_tilde, T1_tilde_in = (
            set(G1.nodes()),
            set(),
        )  # todo: do we need Ti_tilde_in? What nodes does it have?
        T2_tilde, T2_tilde_in = set(G2.nodes()), set()
    else:
        T1_tilde, T1_tilde_in = set(G1.nodes()), set()
        T2_tilde, T2_tilde_in = set(G2.nodes()), set()

    state_params = _StateParameters(
        {},
        {},
        T1,
        T1_in,
        T1_tilde,
        T1_tilde_in,
        T2,
        T2_in,
        T2_tilde,
        T2_tilde_in,
    )

    return graph_params, state_params
370
+
371
+
372
+ def _matching_order(graph_params):
373
+ """The node ordering as introduced in VF2++.
374
+
375
+ Notes
376
+ -----
377
+ Taking into account the structure of the Graph and the node labeling, the nodes are placed in an order such that,
378
+ most of the unfruitful/infeasible branches of the search space can be pruned on high levels, significantly
379
+ decreasing the number of visited states. The premise is that, the algorithm will be able to recognize
380
+ inconsistencies early, proceeding to go deep into the search tree only if it's needed.
381
+
382
+ Parameters
383
+ ----------
384
+ graph_params: namedtuple
385
+ Contains:
386
+
387
+ G1,G2: NetworkX Graph or MultiGraph instances.
388
+ The two graphs to check for isomorphism or monomorphism.
389
+
390
+ G1_labels,G2_labels: dict
391
+ The label of every node in G1 and G2 respectively.
392
+
393
+ Returns
394
+ -------
395
+ node_order: list
396
+ The ordering of the nodes.
397
+ """
398
+ G1, G2, G1_labels, _, _, nodes_of_G2Labels, _ = graph_params
399
+ if not G1 and not G2:
400
+ return {}
401
+
402
+ if G1.is_directed():
403
+ G1 = G1.to_undirected(as_view=True)
404
+
405
+ V1_unordered = set(G1.nodes())
406
+ label_rarity = {label: len(nodes) for label, nodes in nodes_of_G2Labels.items()}
407
+ used_degrees = {node: 0 for node in G1}
408
+ node_order = []
409
+
410
+ while V1_unordered:
411
+ max_rarity = min(label_rarity[G1_labels[x]] for x in V1_unordered)
412
+ rarest_nodes = [
413
+ n for n in V1_unordered if label_rarity[G1_labels[n]] == max_rarity
414
+ ]
415
+ max_node = max(rarest_nodes, key=G1.degree)
416
+
417
+ for dlevel_nodes in nx.bfs_layers(G1, max_node):
418
+ nodes_to_add = dlevel_nodes.copy()
419
+ while nodes_to_add:
420
+ max_used_degree = max(used_degrees[n] for n in nodes_to_add)
421
+ max_used_degree_nodes = [
422
+ n for n in nodes_to_add if used_degrees[n] == max_used_degree
423
+ ]
424
+ max_degree = max(G1.degree[n] for n in max_used_degree_nodes)
425
+ max_degree_nodes = [
426
+ n for n in max_used_degree_nodes if G1.degree[n] == max_degree
427
+ ]
428
+ next_node = min(
429
+ max_degree_nodes, key=lambda x: label_rarity[G1_labels[x]]
430
+ )
431
+
432
+ node_order.append(next_node)
433
+ for node in G1.neighbors(next_node):
434
+ used_degrees[node] += 1
435
+
436
+ nodes_to_add.remove(next_node)
437
+ label_rarity[G1_labels[next_node]] -= 1
438
+ V1_unordered.discard(next_node)
439
+
440
+ return node_order
441
+
442
+
443
+ def _find_candidates(
444
+ u, graph_params, state_params, G1_degree
445
+ ): # todo: make the 4th argument the degree of u
446
+ """Given node u of G1, finds the candidates of u from G2.
447
+
448
+ Parameters
449
+ ----------
450
+ u: Graph node
451
+ The node from G1 for which to find the candidates from G2.
452
+
453
+ graph_params: namedtuple
454
+ Contains all the Graph-related parameters:
455
+
456
+ G1,G2: NetworkX Graph or MultiGraph instances.
457
+ The two graphs to check for isomorphism or monomorphism
458
+
459
+ G1_labels,G2_labels: dict
460
+ The label of every node in G1 and G2 respectively
461
+
462
+ state_params: namedtuple
463
+ Contains all the State-related parameters:
464
+
465
+ mapping: dict
466
+ The mapping as extended so far. Maps nodes of G1 to nodes of G2
467
+
468
+ reverse_mapping: dict
469
+ The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
470
+
471
+ T1, T2: set
472
+ Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
473
+ neighbors of nodes that are.
474
+
475
+ T1_tilde, T2_tilde: set
476
+ Ti_tilde contains all the nodes from Gi, that are neither in the mapping nor in Ti
477
+
478
+ Returns
479
+ -------
480
+ candidates: set
481
+ The nodes from G2 which are candidates for u.
482
+ """
483
+ G1, G2, G1_labels, _, _, nodes_of_G2Labels, G2_nodes_of_degree = graph_params
484
+ mapping, reverse_mapping, _, _, _, _, _, _, T2_tilde, _ = state_params
485
+
486
+ covered_nbrs = [nbr for nbr in G1[u] if nbr in mapping]
487
+ if not covered_nbrs:
488
+ candidates = set(nodes_of_G2Labels[G1_labels[u]])
489
+ candidates.intersection_update(G2_nodes_of_degree[G1_degree[u]])
490
+ candidates.intersection_update(T2_tilde)
491
+ candidates.difference_update(reverse_mapping)
492
+ if G1.is_multigraph():
493
+ candidates.difference_update(
494
+ {
495
+ node
496
+ for node in candidates
497
+ if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
498
+ }
499
+ )
500
+ return candidates
501
+
502
+ nbr1 = covered_nbrs[0]
503
+ common_nodes = set(G2[mapping[nbr1]])
504
+
505
+ for nbr1 in covered_nbrs[1:]:
506
+ common_nodes.intersection_update(G2[mapping[nbr1]])
507
+
508
+ common_nodes.difference_update(reverse_mapping)
509
+ common_nodes.intersection_update(G2_nodes_of_degree[G1_degree[u]])
510
+ common_nodes.intersection_update(nodes_of_G2Labels[G1_labels[u]])
511
+ if G1.is_multigraph():
512
+ common_nodes.difference_update(
513
+ {
514
+ node
515
+ for node in common_nodes
516
+ if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
517
+ }
518
+ )
519
+ return common_nodes
520
+
521
+
522
def _find_candidates_Di(u, graph_params, state_params, G1_degree):
    """Directed variant of ``_find_candidates``: given node u of G1, finds the
    candidates of u from G2.

    Parameters
    ----------
    u: Graph node
        The node from G1 for which to find the candidates from G2.

    graph_params: namedtuple
        Contains all the Graph-related parameters (graphs, labels, and the
        precomputed label/degree groupings of G2).

    state_params: namedtuple
        Contains all the State-related parameters (mapping, reverse_mapping,
        frontier and unseen sets).

    G1_degree: dict
        Maps each node of G1 to its (in_degree, out_degree) pair.

    Returns
    -------
    candidates: set
        The nodes from G2 which are candidates for u.
    """
    G1, G2, G1_labels, _, _, nodes_of_G2Labels, G2_nodes_of_degree = graph_params
    mapping, reverse_mapping, _, _, _, _, _, _, T2_tilde, _ = state_params

    covered_successors = [succ for succ in G1[u] if succ in mapping]
    covered_predecessors = [pred for pred in G1.pred[u] if pred in mapping]

    if not (covered_successors or covered_predecessors):
        # u has no mapped neighbor yet: any unseen node with matching label
        # and (in, out)-degree pair (and, for multigraphs, the same number of
        # self-loops) is a candidate.
        candidates = set(nodes_of_G2Labels[G1_labels[u]])
        candidates.intersection_update(G2_nodes_of_degree[G1_degree[u]])
        candidates.intersection_update(T2_tilde)
        candidates.difference_update(reverse_mapping)
        if G1.is_multigraph():
            candidates.difference_update(
                {
                    node
                    for node in candidates
                    if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
                }
            )
        return candidates

    # NOTE(review): only one direction seeds the candidate pool — in-neighbors
    # of the mapped successors' images, or (if there are no covered
    # successors) out-neighbors of the mapped predecessors' images.
    # Constraints from the other direction are presumably enforced by the
    # later feasibility checks — TODO confirm against upstream.
    if covered_successors:
        succ1 = covered_successors[0]
        common_nodes = set(G2.pred[mapping[succ1]])

        for succ1 in covered_successors[1:]:
            common_nodes.intersection_update(G2.pred[mapping[succ1]])
    else:
        pred1 = covered_predecessors.pop()
        common_nodes = set(G2[mapping[pred1]])

        for pred1 in covered_predecessors:
            common_nodes.intersection_update(G2[mapping[pred1]])

    # Filter the pool by the remaining per-node requirements.
    common_nodes.difference_update(reverse_mapping)
    common_nodes.intersection_update(G2_nodes_of_degree[G1_degree[u]])
    common_nodes.intersection_update(nodes_of_G2Labels[G1_labels[u]])
    if G1.is_multigraph():
        common_nodes.difference_update(
            {
                node
                for node in common_nodes
                if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
            }
        )
    return common_nodes
569
+
570
+
571
def _feasibility(node1, node2, graph_params, state_params):
    """Decide whether mapping node1 (from G1) to node2 (from G2) is feasible.

    Applies the cutting rules first, and — for multigraphs only — the
    consistency rules on edge multiplicities afterwards.

    Parameters
    ----------
    node1, node2: Graph node
        The candidate pair of nodes being checked for matching.

    graph_params: namedtuple
        Contains all the Graph-related parameters (G1, G2, labels, groupings).

    state_params: namedtuple
        Contains all the State-related parameters (mapping, reverse_mapping,
        frontier and unseen sets).

    Returns
    -------
    True if all checks are successful, False otherwise.
    """
    if _cut_PT(node1, node2, graph_params, state_params):
        return False

    # Simple graphs don't need the multiplicity check: _cut_PT suffices.
    if graph_params.G1.is_multigraph() and not _consistent_PT(
        node1, node2, graph_params, state_params
    ):
        return False

    return True
623
+
624
+
625
def _cut_PT(u, v, graph_params, state_params):
    """Implements the cutting rules for the ISO problem.

    Parameters
    ----------
    u, v: Graph node
        The two candidate nodes being examined.

    graph_params: namedtuple
        Contains all the Graph-related parameters:

        G1,G2: NetworkX Graph or MultiGraph instances.
            The two graphs to check for isomorphism or monomorphism

        G1_labels,G2_labels: dict
            The label of every node in G1 and G2 respectively

    state_params: namedtuple
        Contains all the State-related parameters:

        mapping: dict
            The mapping as extended so far. Maps nodes of G1 to nodes of G2

        reverse_mapping: dict
            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed

        T1, T2: set
            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
            neighbors of nodes that are.

        T1_tilde, T2_tilde: set
            Ti_tilde contains all the nodes from Gi, that are neither in the mapping nor in Ti

    Returns
    -------
    True if we should prune this branch, i.e. the node pair failed the cutting checks. False otherwise.
    """
    G1, G2, G1_labels, G2_labels, _, _, _ = graph_params
    (
        _,
        _,
        T1,
        T1_in,
        T1_tilde,
        _,
        T2,
        T2_in,
        T2_tilde,
        _,
    ) = state_params

    # For directed graphs, also group the predecessors of u and v by label;
    # the label sets of both predecessor neighborhoods must coincide.
    u_labels_predecessors, v_labels_predecessors = {}, {}
    if G1.is_directed():
        u_labels_predecessors = nx.utils.groups(
            {n1: G1_labels[n1] for n1 in G1.pred[u]}
        )
        v_labels_predecessors = nx.utils.groups(
            {n2: G2_labels[n2] for n2 in G2.pred[v]}
        )

        if set(u_labels_predecessors.keys()) != set(v_labels_predecessors.keys()):
            return True

    u_labels_successors = nx.utils.groups({n1: G1_labels[n1] for n1 in G1[u]})
    v_labels_successors = nx.utils.groups({n2: G2_labels[n2] for n2 in G2[v]})

    # if the neighbors of u, do not have the same labels as those of v, NOT feasible.
    if set(u_labels_successors.keys()) != set(v_labels_successors.keys()):
        return True

    # Per label, the neighborhoods of u and v must intersect the frontier
    # (Ti, Ti_in) and unseen (Ti_tilde) sets with equal cardinality.
    for label, G1_nbh in u_labels_successors.items():
        G2_nbh = v_labels_successors[label]

        if G1.is_multigraph():
            # Check for every neighbor in the neighborhood, if u-nbr1 has same edges as v-nbr2
            u_nbrs_edges = sorted(G1.number_of_edges(u, x) for x in G1_nbh)
            v_nbrs_edges = sorted(G2.number_of_edges(v, x) for x in G2_nbh)
            if any(
                u_nbr_edges != v_nbr_edges
                for u_nbr_edges, v_nbr_edges in zip(u_nbrs_edges, v_nbrs_edges)
            ):
                return True

        if len(T1.intersection(G1_nbh)) != len(T2.intersection(G2_nbh)):
            return True
        if len(T1_tilde.intersection(G1_nbh)) != len(T2_tilde.intersection(G2_nbh)):
            return True
        if G1.is_directed() and len(T1_in.intersection(G1_nbh)) != len(
            T2_in.intersection(G2_nbh)
        ):
            return True

    if not G1.is_directed():
        return False

    # Directed graphs: repeat the same per-label counting for predecessors.
    for label, G1_pred in u_labels_predecessors.items():
        G2_pred = v_labels_predecessors[label]

        if G1.is_multigraph():
            # Check for every neighbor in the neighborhood, if u-nbr1 has same edges as v-nbr2
            u_pred_edges = sorted(G1.number_of_edges(u, x) for x in G1_pred)
            v_pred_edges = sorted(G2.number_of_edges(v, x) for x in G2_pred)
            if any(
                u_nbr_edges != v_nbr_edges
                for u_nbr_edges, v_nbr_edges in zip(u_pred_edges, v_pred_edges)
            ):
                return True

        if len(T1.intersection(G1_pred)) != len(T2.intersection(G2_pred)):
            return True
        if len(T1_tilde.intersection(G1_pred)) != len(T2_tilde.intersection(G2_pred)):
            return True
        if len(T1_in.intersection(G1_pred)) != len(T2_in.intersection(G2_pred)):
            return True

    return False
741
+
742
+
743
+ def _consistent_PT(u, v, graph_params, state_params):
744
+ """Checks the consistency of extending the mapping using the current node pair.
745
+
746
+ Parameters
747
+ ----------
748
+ u, v: Graph node
749
+ The two candidate nodes being examined.
750
+
751
+ graph_params: namedtuple
752
+ Contains all the Graph-related parameters:
753
+
754
+ G1,G2: NetworkX Graph or MultiGraph instances.
755
+ The two graphs to check for isomorphism or monomorphism
756
+
757
+ G1_labels,G2_labels: dict
758
+ The label of every node in G1 and G2 respectively
759
+
760
+ state_params: namedtuple
761
+ Contains all the State-related parameters:
762
+
763
+ mapping: dict
764
+ The mapping as extended so far. Maps nodes of G1 to nodes of G2
765
+
766
+ reverse_mapping: dict
767
+ The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
768
+
769
+ T1, T2: set
770
+ Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
771
+ neighbors of nodes that are.
772
+
773
+ T1_out, T2_out: set
774
+ Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti
775
+
776
+ Returns
777
+ -------
778
+ True if the pair passes all the consistency checks successfully. False otherwise.
779
+ """
780
+ G1, G2 = graph_params.G1, graph_params.G2
781
+ mapping, reverse_mapping = state_params.mapping, state_params.reverse_mapping
782
+
783
+ for neighbor in G1[u]:
784
+ if neighbor in mapping:
785
+ if G1.number_of_edges(u, neighbor) != G2.number_of_edges(
786
+ v, mapping[neighbor]
787
+ ):
788
+ return False
789
+
790
+ for neighbor in G2[v]:
791
+ if neighbor in reverse_mapping:
792
+ if G1.number_of_edges(u, reverse_mapping[neighbor]) != G2.number_of_edges(
793
+ v, neighbor
794
+ ):
795
+ return False
796
+
797
+ if not G1.is_directed():
798
+ return True
799
+
800
+ for predecessor in G1.pred[u]:
801
+ if predecessor in mapping:
802
+ if G1.number_of_edges(predecessor, u) != G2.number_of_edges(
803
+ mapping[predecessor], v
804
+ ):
805
+ return False
806
+
807
+ for predecessor in G2.pred[v]:
808
+ if predecessor in reverse_mapping:
809
+ if G1.number_of_edges(
810
+ reverse_mapping[predecessor], u
811
+ ) != G2.number_of_edges(predecessor, v):
812
+ return False
813
+
814
+ return True
815
+
816
+
817
def _update_Tinout(new_node1, new_node2, graph_params, state_params):
    """Updates the Ti/Ti_tilde (i=1,2) when a new node pair u-v is added to the mapping.

    Notes
    -----
    This function should be called right after the feasibility checks are passed, and node1 is mapped to node2. The
    purpose of this function is to avoid brute force computing of Ti/Ti_tilde by iterating over all nodes of the graph
    and checking which nodes satisfy the necessary conditions. Instead, in every step of the algorithm we focus
    exclusively on the two nodes that are being added to the mapping, incrementally updating Ti/Ti_tilde.

    Parameters
    ----------
    new_node1, new_node2: Graph node
        The two new nodes, added to the mapping.

    graph_params: namedtuple
        Contains all the Graph-related parameters:

        G1,G2: NetworkX Graph or MultiGraph instances.
            The two graphs to check for isomorphism or monomorphism

        G1_labels,G2_labels: dict
            The label of every node in G1 and G2 respectively

    state_params: namedtuple
        Contains all the State-related parameters:

        mapping: dict
            The mapping as extended so far. Maps nodes of G1 to nodes of G2

        reverse_mapping: dict
            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed

        T1, T2: set
            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
            neighbors of nodes that are.

        T1_tilde, T2_tilde: set
            Ti_tilde contains all the nodes from Gi, that are neither in the mapping nor in Ti
    """
    G1, G2, _, _, _, _, _ = graph_params
    (
        mapping,
        reverse_mapping,
        T1,
        T1_in,
        T1_tilde,
        T1_tilde_in,
        T2,
        T2_in,
        T2_tilde,
        T2_tilde_in,
    ) = state_params

    uncovered_successors_G1 = {succ for succ in G1[new_node1] if succ not in mapping}
    uncovered_successors_G2 = {
        succ for succ in G2[new_node2] if succ not in reverse_mapping
    }

    # Add the uncovered neighbors of node1 and node2 in T1 and T2 respectively
    T1.update(uncovered_successors_G1)
    T2.update(uncovered_successors_G2)
    # The newly mapped nodes are covered now, so they leave the frontier.
    T1.discard(new_node1)
    T2.discard(new_node2)

    # Those neighbors are no longer "unseen": remove them from Ti_tilde.
    T1_tilde.difference_update(uncovered_successors_G1)
    T2_tilde.difference_update(uncovered_successors_G2)
    T1_tilde.discard(new_node1)
    T2_tilde.discard(new_node2)

    if not G1.is_directed():
        return

    # Directed graphs: maintain the incoming-edge frontier Ti_in as well.
    uncovered_predecessors_G1 = {
        pred for pred in G1.pred[new_node1] if pred not in mapping
    }
    uncovered_predecessors_G2 = {
        pred for pred in G2.pred[new_node2] if pred not in reverse_mapping
    }

    T1_in.update(uncovered_predecessors_G1)
    T2_in.update(uncovered_predecessors_G2)
    T1_in.discard(new_node1)
    T2_in.discard(new_node2)

    T1_tilde.difference_update(uncovered_predecessors_G1)
    T2_tilde.difference_update(uncovered_predecessors_G2)
    T1_tilde.discard(new_node1)
    T2_tilde.discard(new_node2)
906
+
907
+
908
def _restore_Tinout(popped_node1, popped_node2, graph_params, state_params):
    """Restores the previous version of Ti/Ti_tilde when a node pair is deleted from the mapping.

    Parameters
    ----------
    popped_node1, popped_node2: Graph node
        The two nodes deleted from the mapping.

    graph_params: namedtuple
        Contains all the Graph-related parameters:

        G1,G2: NetworkX Graph or MultiGraph instances.
            The two graphs to check for isomorphism or monomorphism

        G1_labels,G2_labels: dict
            The label of every node in G1 and G2 respectively

    state_params: namedtuple
        Contains all the State-related parameters:

        mapping: dict
            The mapping as extended so far. Maps nodes of G1 to nodes of G2

        reverse_mapping: dict
            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed

        T1, T2: set
            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
            neighbors of nodes that are.

        T1_tilde, T2_tilde: set
            Ti_tilde contains all the nodes from Gi, that are neither in the mapping nor in Ti
    """
    # If the node we want to remove from the mapping, has at least one covered neighbor, add it to T1.
    G1, G2, _, _, _, _, _ = graph_params
    (
        mapping,
        reverse_mapping,
        T1,
        T1_in,
        T1_tilde,
        T1_tilde_in,
        T2,
        T2_in,
        T2_tilde,
        T2_tilde_in,
    ) = state_params

    is_added = False
    for neighbor in G1[popped_node1]:
        if neighbor in mapping:
            # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1
            is_added = True
            T1.add(popped_node1)
        else:
            # check if its neighbor has another connection with a covered node. If not, only then exclude it from T1
            if any(nbr in mapping for nbr in G1[neighbor]):
                continue
            T1.discard(neighbor)
            T1_tilde.add(neighbor)

    # Case where the node is not present in neither the mapping nor T1. By definition, it should belong to T1_tilde
    if not is_added:
        T1_tilde.add(popped_node1)

    # Mirror the same restoration on the G2 side.
    is_added = False
    for neighbor in G2[popped_node2]:
        if neighbor in reverse_mapping:
            is_added = True
            T2.add(popped_node2)
        else:
            if any(nbr in reverse_mapping for nbr in G2[neighbor]):
                continue
            T2.discard(neighbor)
            T2_tilde.add(neighbor)

    if not is_added:
        T2_tilde.add(popped_node2)
986
+
987
+
988
def _restore_Tinout_Di(popped_node1, popped_node2, graph_params, state_params):
    """Directed variant of ``_restore_Tinout``: restores Ti/Ti_in/Ti_tilde
    (i=1,2) when a node pair is deleted from the mapping.

    Parameters
    ----------
    popped_node1, popped_node2: Graph node
        The two nodes deleted from the mapping.

    graph_params: namedtuple
        Contains all the Graph-related parameters (G1, G2, labels, groupings).

    state_params: namedtuple
        Contains all the State-related parameters (mapping, reverse_mapping,
        the frontier sets Ti/Ti_in and the unseen sets Ti_tilde).
    """
    # If the node we want to remove from the mapping, has at least one covered neighbor, add it to T1.
    G1, G2, _, _, _, _, _ = graph_params
    (
        mapping,
        reverse_mapping,
        T1,
        T1_in,
        T1_tilde,
        T1_tilde_in,
        T2,
        T2_in,
        T2_tilde,
        T2_tilde_in,
    ) = state_params

    is_added = False
    # A mapped successor means popped_node1 still has an incoming edge from a
    # covered node's perspective, so it stays in T1_in.
    for successor in G1[popped_node1]:
        if successor in mapping:
            is_added = True
            T1_in.add(popped_node1)
        else:
            # Drop the successor from the frontier sets only if it has no
            # other connection (in either direction) to a covered node.
            if not any(pred in mapping for pred in G1.pred[successor]):
                T1.discard(successor)

            if not any(succ in mapping for succ in G1[successor]):
                T1_in.discard(successor)

            if successor not in T1:
                if successor not in T1_in:
                    T1_tilde.add(successor)

    for predecessor in G1.pred[popped_node1]:
        if predecessor in mapping:
            # A mapped predecessor keeps popped_node1 in the out-frontier T1.
            is_added = True
            T1.add(popped_node1)
        else:
            # Same check as above for the predecessor side.
            if not any(pred in mapping for pred in G1.pred[predecessor]):
                T1.discard(predecessor)

            if not any(succ in mapping for succ in G1[predecessor]):
                T1_in.discard(predecessor)

            if not (predecessor in T1 or predecessor in T1_in):
                T1_tilde.add(predecessor)

    # Case where the node is not present in neither the mapping nor T1. By definition it should belong to T1_tilde
    if not is_added:
        T1_tilde.add(popped_node1)

    # Mirror the same restoration on the G2 side.
    is_added = False
    for successor in G2[popped_node2]:
        if successor in reverse_mapping:
            is_added = True
            T2_in.add(popped_node2)
        else:
            if not any(pred in reverse_mapping for pred in G2.pred[successor]):
                T2.discard(successor)

            if not any(succ in reverse_mapping for succ in G2[successor]):
                T2_in.discard(successor)

            if successor not in T2:
                if successor not in T2_in:
                    T2_tilde.add(successor)

    for predecessor in G2.pred[popped_node2]:
        if predecessor in reverse_mapping:
            is_added = True
            T2.add(popped_node2)
        else:
            if not any(pred in reverse_mapping for pred in G2.pred[predecessor]):
                T2.discard(predecessor)

            if not any(succ in reverse_mapping for succ in G2[predecessor]):
                T2_in.discard(predecessor)

            if not (predecessor in T2 or predecessor in T2_in):
                T2_tilde.add(predecessor)

    if not is_added:
        T2_tilde.add(popped_node2)
minigpt2/lib/python3.10/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py ADDED
@@ -0,0 +1,192 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Module to simplify the specification of user-defined equality functions for
3
+ node and edge attributes during isomorphism checks.
4
+
5
+ During the construction of an isomorphism, the algorithm considers two
6
+ candidate nodes n1 in G1 and n2 in G2. The graphs G1 and G2 are then
7
+ compared with respect to properties involving n1 and n2, and if the outcome
8
+ is good, then the candidate nodes are considered isomorphic. NetworkX
9
+ provides a simple mechanism for users to extend the comparisons to include
10
+ node and edge attributes.
11
+
12
+ Node attributes are handled by the node_match keyword. When considering
13
+ n1 and n2, the algorithm passes their node attribute dictionaries to
14
+ node_match, and if it returns False, then n1 and n2 cannot be
15
+ considered to be isomorphic.
16
+
17
+ Edge attributes are handled by the edge_match keyword. When considering
18
+ n1 and n2, the algorithm must verify that outgoing edges from n1 are
19
+ commensurate with the outgoing edges for n2. If the graph is directed,
20
+ then a similar check is also performed for incoming edges.
21
+
22
+ Focusing only on outgoing edges, we consider pairs of nodes (n1, v1) from
23
+ G1 and (n2, v2) from G2. For graphs and digraphs, there is only one edge
24
+ between (n1, v1) and only one edge between (n2, v2). Those edge attribute
25
+ dictionaries are passed to edge_match, and if it returns False, then
26
+ n1 and n2 cannot be considered isomorphic. For multigraphs and
27
+ multidigraphs, there can be multiple edges between (n1, v1) and also
28
+ multiple edges between (n2, v2). Now, there must exist an isomorphism
29
+ from "all the edges between (n1, v1)" to "all the edges between (n2, v2)".
30
+ So, all of the edge attribute dictionaries are passed to edge_match, and
31
+ it must determine if there is an isomorphism between the two sets of edges.
32
+ """
33
+
34
+ from . import isomorphvf2 as vf2
35
+
36
+ __all__ = ["GraphMatcher", "DiGraphMatcher", "MultiGraphMatcher", "MultiDiGraphMatcher"]
37
+
38
+
39
+ def _semantic_feasibility(self, G1_node, G2_node):
40
+ """Returns True if mapping G1_node to G2_node is semantically feasible."""
41
+ # Make sure the nodes match
42
+ if self.node_match is not None:
43
+ nm = self.node_match(self.G1.nodes[G1_node], self.G2.nodes[G2_node])
44
+ if not nm:
45
+ return False
46
+
47
+ # Make sure the edges match
48
+ if self.edge_match is not None:
49
+ # Cached lookups
50
+ G1nbrs = self.G1_adj[G1_node]
51
+ G2nbrs = self.G2_adj[G2_node]
52
+ core_1 = self.core_1
53
+ edge_match = self.edge_match
54
+
55
+ for neighbor in G1nbrs:
56
+ # G1_node is not in core_1, so we must handle R_self separately
57
+ if neighbor == G1_node:
58
+ if G2_node in G2nbrs and not edge_match(
59
+ G1nbrs[G1_node], G2nbrs[G2_node]
60
+ ):
61
+ return False
62
+ elif neighbor in core_1:
63
+ G2_nbr = core_1[neighbor]
64
+ if G2_nbr in G2nbrs and not edge_match(
65
+ G1nbrs[neighbor], G2nbrs[G2_nbr]
66
+ ):
67
+ return False
68
+ # syntactic check has already verified that neighbors are symmetric
69
+
70
+ return True
71
+
72
+
73
class GraphMatcher(vf2.GraphMatcher):
    """VF2 isomorphism checker for undirected graphs."""

    def __init__(self, G1, G2, node_match=None, edge_match=None):
        """Initialize graph matcher.

        Parameters
        ----------
        G1, G2 : graph
            The graphs to be tested.

        node_match : callable
            A function that returns True iff node n1 in G1 and n2 in G2
            should be considered equal during the isomorphism test. It is
            called as::

                node_match(G1.nodes[n1], G2.nodes[n2])

            i.e. with the node attribute dictionaries of the candidates.
            If None, node attributes are ignored.

        edge_match : callable
            A function that returns True iff the edge attribute dictionaries
            for the node pairs (u1, v1) in G1 and (u2, v2) in G2 should be
            considered equal during the isomorphism test. It is called as::

                edge_match(G1[u1][v1], G2[u2][v2])

            i.e. with the edge attribute dictionaries of the candidate
            edges. If None, edge attributes are ignored.

        """
        vf2.GraphMatcher.__init__(self, G1, G2)

        self.node_match = node_match
        self.edge_match = edge_match

        # Cached adjacency views; the directed subclass temporarily rebinds
        # these during the predecessor check to avoid duplicating code.
        self.G1_adj = self.G1.adj
        self.G2_adj = self.G2.adj

    semantic_feasibility = _semantic_feasibility
118
+
119
+
120
class DiGraphMatcher(vf2.DiGraphMatcher):
    """VF2 isomorphism checker for directed graphs."""

    def __init__(self, G1, G2, node_match=None, edge_match=None):
        """Initialize graph matcher.

        Parameters
        ----------
        G1, G2 : graph
            The graphs to be tested.

        node_match : callable
            A function that returns True iff node n1 in G1 and n2 in G2
            should be considered equal during the isomorphism test. It is
            called as::

                node_match(G1.nodes[n1], G2.nodes[n2])

            i.e. with the node attribute dictionaries of the candidates.
            If None, node attributes are ignored.

        edge_match : callable
            A function that returns True iff the edge attribute dictionaries
            for the node pairs (u1, v1) in G1 and (u2, v2) in G2 should be
            considered equal during the isomorphism test. It is called as::

                edge_match(G1[u1][v1], G2[u2][v2])

            i.e. with the edge attribute dictionaries of the candidate
            edges. If None, edge attributes are ignored.

        """
        vf2.DiGraphMatcher.__init__(self, G1, G2)

        self.node_match = node_match
        self.edge_match = edge_match

        # Cached adjacency views; rebound below while checking predecessors.
        self.G1_adj = self.G1.adj
        self.G2_adj = self.G2.adj

    def semantic_feasibility(self, G1_node, G2_node):
        """Returns True if mapping G1_node to G2_node is semantically feasible."""
        # Node attributes and successor edges first.
        if not _semantic_feasibility(self, G1_node, G2_node):
            return False

        # Then predecessor edges: point the cached adjacency views at the
        # predecessor dicts, re-run the shared check, and restore the views.
        self.G1_adj = self.G1.pred
        self.G2_adj = self.G2.pred
        result = _semantic_feasibility(self, G1_node, G2_node)
        self.G1_adj = self.G1.adj
        self.G2_adj = self.G2.adj

        return result
180
+
181
+
182
+ # The "semantics" of edge_match are different for multi(di)graphs, but
183
+ # the implementation is the same. So, technically we do not need to
184
+ # provide "multi" versions, but we do so to match NetworkX's base classes.
185
+
186
+
187
class MultiGraphMatcher(GraphMatcher):
    """VF2 isomorphism checker for undirected multigraphs.

    Identical in implementation to :class:`GraphMatcher`; ``edge_match``
    receives the full multi-edge data for each node pair.
    """
189
+
190
+
191
class MultiDiGraphMatcher(DiGraphMatcher):
    """VF2 isomorphism checker for directed multigraphs.

    Identical in implementation to :class:`DiGraphMatcher`; ``edge_match``
    receives the full multi-edge data for each node pair.
    """