diff --git a/tuning-competition-baseline/.venv/bin/pip3 b/tuning-competition-baseline/.venv/bin/pip3
new file mode 100644
index 0000000000000000000000000000000000000000..02059b8ac7c3830306a1686667c040e157c1b1fb
--- /dev/null
+++ b/tuning-competition-baseline/.venv/bin/pip3
@@ -0,0 +1,8 @@
+#!/home/koiwa/work/tuning-competition-baseline/.venv/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(main())
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/METADATA b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..56e942902a96e7f012479a582c5cf89511219f9a
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/METADATA
@@ -0,0 +1,105 @@
+Metadata-Version: 2.1
+Name: Jinja2
+Version: 3.1.3
+Summary: A very fast and expressive template engine.
+Home-page: https://palletsprojects.com/p/jinja/
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Donate, https://palletsprojects.com/donate
+Project-URL: Documentation, https://jinja.palletsprojects.com/
+Project-URL: Changes, https://jinja.palletsprojects.com/changes/
+Project-URL: Source Code, https://github.com/pallets/jinja/
+Project-URL: Issue Tracker, https://github.com/pallets/jinja/issues/
+Project-URL: Chat, https://discord.gg/pallets
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE.rst
+Requires-Dist: MarkupSafe >=2.0
+Provides-Extra: i18n
+Requires-Dist: Babel >=2.7 ; extra == 'i18n'
+
+Jinja
+=====
+
+Jinja is a fast, expressive, extensible templating engine. Special
+placeholders in the template allow writing code similar to Python
+syntax. Then the template is passed data to render the final document.
+
+It includes:
+
+- Template inheritance and inclusion.
+- Define and import macros within templates.
+- HTML templates can use autoescaping to prevent XSS from untrusted
+ user input.
+- A sandboxed environment can safely render untrusted templates.
+- AsyncIO support for generating templates and calling async
+ functions.
+- I18N support with Babel.
+- Templates are compiled to optimized Python code just-in-time and
+ cached, or can be compiled ahead-of-time.
+- Exceptions point to the correct line in templates to make debugging
+ easier.
+- Extensible filters, tests, functions, and even syntax.
+
+Jinja's philosophy is that while application logic belongs in Python if
+possible, it shouldn't make the template designer's job difficult by
+restricting functionality too much.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+ $ pip install -U Jinja2
+
+.. _pip: https://pip.pypa.io/en/stable/getting-started/
+
+
+In A Nutshell
+-------------
+
+.. code-block:: jinja
+
+ {% extends "base.html" %}
+ {% block title %}Members{% endblock %}
+ {% block content %}
+
+ {% endblock %}
+
+
+Donate
+------
+
+The Pallets organization develops and supports Jinja and other popular
+packages. In order to grow the community of contributors and users, and
+allow the maintainers to devote more time to the projects, `please
+donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+- Documentation: https://jinja.palletsprojects.com/
+- Changes: https://jinja.palletsprojects.com/changes/
+- PyPI Releases: https://pypi.org/project/Jinja2/
+- Source Code: https://github.com/pallets/jinja/
+- Issue Tracker: https://github.com/pallets/jinja/issues/
+- Chat: https://discord.gg/pallets
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/WHEEL b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..98c0d20b7a64f4f998d7913e1d38a05dba20916c
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/Jinja2-3.1.3.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.42.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/__init__.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5c0fd2d84faf7c34553e72976d94652c9babc1d5
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/__init__.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/boundary.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/boundary.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..537830a058f713c9ebbaf05c4836c5084bf06024
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/boundary.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/bridges.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/bridges.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b3c7ad9ee25f3853463b5342f7dc9057ca3efe45
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/bridges.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/chordal.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/chordal.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..44eac57a80a985c65188d09a6f965c8e596a7132
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/chordal.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/clique.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/clique.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8fae1db957f80446b0aad4a5f17c90715a5337e9
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/clique.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/cuts.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/cuts.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2908ad56a493bfceabef48095f8f7960630ffc82
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/cuts.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/cycles.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/cycles.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..65d87bd9abfb560ef4c0924dfa2c75932b270f54
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/cycles.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/dag.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/dag.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..744be01b6bc01731098ca895c2882ab21761ae59
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/dag.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/dominance.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/dominance.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c372ccdedc1031d12e132b2f7cdaa145c81e7c1f
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/dominance.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/dominating.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/dominating.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2f43b39bb13a7aee76f229e9083c62f5832eaf2e
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/dominating.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/efficiency_measures.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/efficiency_measures.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ba5601c45623e0cc3cc724fcfe758b64430e663e
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/efficiency_measures.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/euler.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/euler.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..60ea8757cc040263240f2fd2bc49769a591b189d
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/euler.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/graph_hashing.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/graph_hashing.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b1878aeeeb5fd4c933a14719f5e3362de327ae75
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/graph_hashing.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/graphical.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/graphical.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d90247670b15c771129efc4883a9509cd8e42ca5
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/graphical.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/isolate.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/isolate.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a22fd0ea12d776ce7990befc22039d80de8a0ae6
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/isolate.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/link_prediction.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/link_prediction.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d80188bfd20dff53a8f78adb8b7284fd1787cb7a
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/link_prediction.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/mis.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/mis.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e1d4584828b91709c9dde29993d223f93489a83c
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/mis.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/planar_drawing.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/planar_drawing.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ad0fdd2d92149803409e72fb7322a5dc2d8c8ec8
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/planar_drawing.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/regular.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/regular.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e7a8cb88c7124cc06a3385d70e6348ac62d9088a
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/regular.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/similarity.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/similarity.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..66a5d213d41fbf704835f8792f1c1bb5fcb52a84
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/similarity.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/structuralholes.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/structuralholes.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..084739139dd0fdc1cf2d8bfc2d9d3b63017d4708
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/structuralholes.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/summarization.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/summarization.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2a32f63ce41120034d37a354be53ff5c23e58c17
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/summarization.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/threshold.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/threshold.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b41b38f9c2539c5054ca48e256b5b4ee918d6bcd
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/threshold.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/triads.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/triads.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9f12d20f1127d0a5ce38a05c0f0772f608fbeead
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/__pycache__/triads.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6fc5c6b1e9766ca7da7f6132f94cf8ae423bc14d
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/clique.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/clique.py
new file mode 100644
index 0000000000000000000000000000000000000000..4a3d8beba6103172988f49aa1d7a91bf670f7201
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/clique.py
@@ -0,0 +1,258 @@
+"""Functions for computing large cliques and maximum independent sets."""
+import networkx as nx
+from networkx.algorithms.approximation import ramsey
+from networkx.utils import not_implemented_for
+
+__all__ = [
+ "clique_removal",
+ "max_clique",
+ "large_clique_size",
+ "maximum_independent_set",
+]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatch
+def maximum_independent_set(G):
+ """Returns an approximate maximum independent set.
+
+ Independent set or stable set is a set of vertices in a graph, no two of
+ which are adjacent. That is, it is a set I of vertices such that for every
+ two vertices in I, there is no edge connecting the two. Equivalently, each
+ edge in the graph has at most one endpoint in I. The size of an independent
+ set is the number of vertices it contains [1]_.
+
+ A maximum independent set is a largest independent set for a given graph G
+ and its size is denoted $\\alpha(G)$. The problem of finding such a set is called
+ the maximum independent set problem and is an NP-hard optimization problem.
+ As such, it is unlikely that there exists an efficient algorithm for finding
+ a maximum independent set of a graph.
+
+ The Independent Set algorithm is based on [2]_.
+
+ Parameters
+ ----------
+ G : NetworkX graph
+ Undirected graph
+
+ Returns
+ -------
+ iset : Set
+ The apx-maximum independent set
+
+ Examples
+ --------
+ >>> G = nx.path_graph(10)
+ >>> nx.approximation.maximum_independent_set(G)
+ {0, 2, 4, 6, 9}
+
+ Raises
+ ------
+ NetworkXNotImplemented
+ If the graph is directed or is a multigraph.
+
+ Notes
+ -----
+ Finds the $O(|V|/(log|V|)^2)$ apx of independent set in the worst case.
+
+ References
+ ----------
+ .. [1] `Wikipedia: Independent set
+ `_
+ .. [2] Boppana, R., & Halldórsson, M. M. (1992).
+ Approximating maximum independent sets by excluding subgraphs.
+ BIT Numerical Mathematics, 32(2), 180–196. Springer.
+ """
+ iset, _ = clique_removal(G)
+ return iset
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatch
+def max_clique(G):
+ r"""Find the Maximum Clique
+
+ Finds the $O(|V|/(log|V|)^2)$ apx of maximum clique/independent set
+ in the worst case.
+
+ Parameters
+ ----------
+ G : NetworkX graph
+ Undirected graph
+
+ Returns
+ -------
+ clique : set
+ The apx-maximum clique of the graph
+
+ Examples
+ --------
+ >>> G = nx.path_graph(10)
+ >>> nx.approximation.max_clique(G)
+ {8, 9}
+
+ Raises
+ ------
+ NetworkXNotImplemented
+ If the graph is directed or is a multigraph.
+
+ Notes
+ -----
+ A clique in an undirected graph G = (V, E) is a subset of the vertex set
+ `C \subseteq V` such that for every two vertices in C there exists an edge
+ connecting the two. This is equivalent to saying that the subgraph
+ induced by C is complete (in some cases, the term clique may also refer
+ to the subgraph).
+
+ A maximum clique is a clique of the largest possible size in a given graph.
+ The clique number `\omega(G)` of a graph G is the number of
+ vertices in a maximum clique in G. The intersection number of
+ G is the smallest number of cliques that together cover all edges of G.
+
+ https://en.wikipedia.org/wiki/Maximum_clique
+
+ References
+ ----------
+ .. [1] Boppana, R., & Halldórsson, M. M. (1992).
+ Approximating maximum independent sets by excluding subgraphs.
+ BIT Numerical Mathematics, 32(2), 180–196. Springer.
+ doi:10.1007/BF01994876
+ """
+ # finding the maximum clique in a graph is equivalent to finding
+ # the independent set in the complementary graph
+ cgraph = nx.complement(G)
+ iset, _ = clique_removal(cgraph)
+ return iset
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatch
+def clique_removal(G):
+ r"""Repeatedly remove cliques from the graph.
+
+ Results in a $O(|V|/(\log |V|)^2)$ approximation of maximum clique
+ and independent set. Returns the largest independent set found, along
+ with found maximal cliques.
+
+ Parameters
+ ----------
+ G : NetworkX graph
+ Undirected graph
+
+ Returns
+ -------
+ max_ind_cliques : (set, list) tuple
+ 2-tuple of Maximal Independent Set and list of maximal cliques (sets).
+
+ Examples
+ --------
+ >>> G = nx.path_graph(10)
+ >>> nx.approximation.clique_removal(G)
+ ({0, 2, 4, 6, 9}, [{0, 1}, {2, 3}, {4, 5}, {6, 7}, {8, 9}])
+
+ Raises
+ ------
+ NetworkXNotImplemented
+ If the graph is directed or is a multigraph.
+
+ References
+ ----------
+ .. [1] Boppana, R., & Halldórsson, M. M. (1992).
+ Approximating maximum independent sets by excluding subgraphs.
+ BIT Numerical Mathematics, 32(2), 180–196. Springer.
+ """
+ graph = G.copy()
+ c_i, i_i = ramsey.ramsey_R2(graph)
+ cliques = [c_i]
+ isets = [i_i]
+ while graph:
+ graph.remove_nodes_from(c_i)
+ c_i, i_i = ramsey.ramsey_R2(graph)
+ if c_i:
+ cliques.append(c_i)
+ if i_i:
+ isets.append(i_i)
+ # Determine the largest independent set as measured by cardinality.
+ maxiset = max(isets, key=len)
+ return maxiset, cliques
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatch
+def large_clique_size(G):
+ """Find the size of a large clique in a graph.
+
+ A *clique* is a subset of nodes in which each pair of nodes is
+ adjacent. This function is a heuristic for finding the size of a
+ large clique in the graph.
+
+ Parameters
+ ----------
+ G : NetworkX graph
+
+ Returns
+ -------
+ k: integer
+ The size of a large clique in the graph.
+
+ Examples
+ --------
+ >>> G = nx.path_graph(10)
+ >>> nx.approximation.large_clique_size(G)
+ 2
+
+ Raises
+ ------
+ NetworkXNotImplemented
+ If the graph is directed or is a multigraph.
+
+ Notes
+ -----
+ This implementation is from [1]_. Its worst case time complexity is
+ :math:`O(n d^2)`, where *n* is the number of nodes in the graph and
+ *d* is the maximum degree.
+
+ This function is a heuristic, which means it may work well in
+ practice, but there is no rigorous mathematical guarantee on the
+ ratio between the returned number and the actual largest clique size
+ in the graph.
+
+ References
+ ----------
+ .. [1] Pattabiraman, Bharath, et al.
+ "Fast Algorithms for the Maximum Clique Problem on Massive Graphs
+ with Applications to Overlapping Community Detection."
+ *Internet Mathematics* 11.4-5 (2015): 421--448.
+
+
+ See also
+ --------
+
+ :func:`networkx.algorithms.approximation.clique.max_clique`
+ A function that returns an approximate maximum clique with a
+ guarantee on the approximation ratio.
+
+ :mod:`networkx.algorithms.clique`
+ Functions for finding the exact maximum clique in a graph.
+
+ """
+ degrees = G.degree
+
+ def _clique_heuristic(G, U, size, best_size):
+ if not U:
+ return max(best_size, size)
+ u = max(U, key=degrees)
+ U.remove(u)
+ N_prime = {v for v in G[u] if degrees[v] >= best_size}
+ return _clique_heuristic(G, U & N_prime, size + 1, best_size)
+
+ best_size = 0
+ nodes = (u for u in G if degrees[u] >= best_size)
+ for u in nodes:
+ neighbors = {v for v in G[u] if degrees[v] >= best_size}
+ best_size = _clique_heuristic(G, neighbors, 1, best_size)
+ return best_size
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_kcomponents.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_kcomponents.py
new file mode 100644
index 0000000000000000000000000000000000000000..65ba802171a6b43a5157f12010c8164e5e867eb8
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_kcomponents.py
@@ -0,0 +1,303 @@
+# Test for approximation to k-components algorithm
+import pytest
+
+import networkx as nx
+from networkx.algorithms.approximation import k_components
+from networkx.algorithms.approximation.kcomponents import _AntiGraph, _same
+
+
+def build_k_number_dict(k_components):
+ k_num = {}
+ for k, comps in sorted(k_components.items()):
+ for comp in comps:
+ for node in comp:
+ k_num[node] = k
+ return k_num
+
+
+##
+# Some nice synthetic graphs
+##
+
+
+def graph_example_1():
+ G = nx.convert_node_labels_to_integers(
+ nx.grid_graph([5, 5]), label_attribute="labels"
+ )
+ rlabels = nx.get_node_attributes(G, "labels")
+ labels = {v: k for k, v in rlabels.items()}
+
+ for nodes in [
+ (labels[(0, 0)], labels[(1, 0)]),
+ (labels[(0, 4)], labels[(1, 4)]),
+ (labels[(3, 0)], labels[(4, 0)]),
+ (labels[(3, 4)], labels[(4, 4)]),
+ ]:
+ new_node = G.order() + 1
+ # Petersen graph is triconnected
+ P = nx.petersen_graph()
+ G = nx.disjoint_union(G, P)
+ # Add two edges between the grid and P
+ G.add_edge(new_node + 1, nodes[0])
+ G.add_edge(new_node, nodes[1])
+ # K5 is 4-connected
+ K = nx.complete_graph(5)
+ G = nx.disjoint_union(G, K)
+ # Add three edges between P and K5
+ G.add_edge(new_node + 2, new_node + 11)
+ G.add_edge(new_node + 3, new_node + 12)
+ G.add_edge(new_node + 4, new_node + 13)
+ # Add another K5 sharing a node
+ G = nx.disjoint_union(G, K)
+ nbrs = G[new_node + 10]
+ G.remove_node(new_node + 10)
+ for nbr in nbrs:
+ G.add_edge(new_node + 17, nbr)
+ G.add_edge(new_node + 16, new_node + 5)
+ return G
+
+
+def torrents_and_ferraro_graph():
+ G = nx.convert_node_labels_to_integers(
+ nx.grid_graph([5, 5]), label_attribute="labels"
+ )
+ rlabels = nx.get_node_attributes(G, "labels")
+ labels = {v: k for k, v in rlabels.items()}
+
+ for nodes in [(labels[(0, 4)], labels[(1, 4)]), (labels[(3, 4)], labels[(4, 4)])]:
+ new_node = G.order() + 1
+ # Petersen graph is triconnected
+ P = nx.petersen_graph()
+ G = nx.disjoint_union(G, P)
+ # Add two edges between the grid and P
+ G.add_edge(new_node + 1, nodes[0])
+ G.add_edge(new_node, nodes[1])
+ # K5 is 4-connected
+ K = nx.complete_graph(5)
+ G = nx.disjoint_union(G, K)
+ # Add three edges between P and K5
+ G.add_edge(new_node + 2, new_node + 11)
+ G.add_edge(new_node + 3, new_node + 12)
+ G.add_edge(new_node + 4, new_node + 13)
+ # Add another K5 sharing a node
+ G = nx.disjoint_union(G, K)
+ nbrs = G[new_node + 10]
+ G.remove_node(new_node + 10)
+ for nbr in nbrs:
+ G.add_edge(new_node + 17, nbr)
+ # Commenting this makes the graph not biconnected !!
+ # This stupid mistake make one reviewer very angry :P
+ G.add_edge(new_node + 16, new_node + 8)
+
+ for nodes in [(labels[(0, 0)], labels[(1, 0)]), (labels[(3, 0)], labels[(4, 0)])]:
+ new_node = G.order() + 1
+ # Petersen graph is triconnected
+ P = nx.petersen_graph()
+ G = nx.disjoint_union(G, P)
+ # Add two edges between the grid and P
+ G.add_edge(new_node + 1, nodes[0])
+ G.add_edge(new_node, nodes[1])
+ # K5 is 4-connected
+ K = nx.complete_graph(5)
+ G = nx.disjoint_union(G, K)
+ # Add three edges between P and K5
+ G.add_edge(new_node + 2, new_node + 11)
+ G.add_edge(new_node + 3, new_node + 12)
+ G.add_edge(new_node + 4, new_node + 13)
+ # Add another K5 sharing two nodes
+ G = nx.disjoint_union(G, K)
+ nbrs = G[new_node + 10]
+ G.remove_node(new_node + 10)
+ for nbr in nbrs:
+ G.add_edge(new_node + 17, nbr)
+ nbrs2 = G[new_node + 9]
+ G.remove_node(new_node + 9)
+ for nbr in nbrs2:
+ G.add_edge(new_node + 18, nbr)
+ return G
+
+
+# Helper function
+
+
+def _check_connectivity(G):
+ result = k_components(G)
+ for k, components in result.items():
+ if k < 3:
+ continue
+ for component in components:
+ C = G.subgraph(component)
+ K = nx.node_connectivity(C)
+ assert K >= k
+
+
+def test_torrents_and_ferraro_graph():
+ G = torrents_and_ferraro_graph()
+ _check_connectivity(G)
+
+
+def test_example_1():
+ G = graph_example_1()
+ _check_connectivity(G)
+
+
+def test_karate_0():
+ G = nx.karate_club_graph()
+ _check_connectivity(G)
+
+
+def test_karate_1():
+ karate_k_num = {
+ 0: 4,
+ 1: 4,
+ 2: 4,
+ 3: 4,
+ 4: 3,
+ 5: 3,
+ 6: 3,
+ 7: 4,
+ 8: 4,
+ 9: 2,
+ 10: 3,
+ 11: 1,
+ 12: 2,
+ 13: 4,
+ 14: 2,
+ 15: 2,
+ 16: 2,
+ 17: 2,
+ 18: 2,
+ 19: 3,
+ 20: 2,
+ 21: 2,
+ 22: 2,
+ 23: 3,
+ 24: 3,
+ 25: 3,
+ 26: 2,
+ 27: 3,
+ 28: 3,
+ 29: 3,
+ 30: 4,
+ 31: 3,
+ 32: 4,
+ 33: 4,
+ }
+ approx_karate_k_num = karate_k_num.copy()
+ approx_karate_k_num[24] = 2
+ approx_karate_k_num[25] = 2
+ G = nx.karate_club_graph()
+ k_comps = k_components(G)
+ k_num = build_k_number_dict(k_comps)
+ assert k_num in (karate_k_num, approx_karate_k_num)
+
+
+def test_example_1_detail_3_and_4():
+ G = graph_example_1()
+ result = k_components(G)
+ # In this example graph there are 8 3-components, 4 with 15 nodes
+ # and 4 with 5 nodes.
+ assert len(result[3]) == 8
+ assert len([c for c in result[3] if len(c) == 15]) == 4
+ assert len([c for c in result[3] if len(c) == 5]) == 4
+ # There are also 8 4-components all with 5 nodes.
+ assert len(result[4]) == 8
+ assert all(len(c) == 5 for c in result[4])
+ # Finally check that the k-components detected have actually node
+ # connectivity >= k.
+ for k, components in result.items():
+ if k < 3:
+ continue
+ for component in components:
+ K = nx.node_connectivity(G.subgraph(component))
+ assert K >= k
+
+
+def test_directed():
+ with pytest.raises(nx.NetworkXNotImplemented):
+ G = nx.gnp_random_graph(10, 0.4, directed=True)
+ kc = k_components(G)
+
+
+def test_same():
+ equal = {"A": 2, "B": 2, "C": 2}
+ slightly_different = {"A": 2, "B": 1, "C": 2}
+ different = {"A": 2, "B": 8, "C": 18}
+ assert _same(equal)
+ assert not _same(slightly_different)
+ assert _same(slightly_different, tol=1)
+ assert not _same(different)
+ assert not _same(different, tol=4)
+
+
+class TestAntiGraph:
+ @classmethod
+ def setup_class(cls):
+ cls.Gnp = nx.gnp_random_graph(20, 0.8, seed=42)
+ cls.Anp = _AntiGraph(nx.complement(cls.Gnp))
+ cls.Gd = nx.davis_southern_women_graph()
+ cls.Ad = _AntiGraph(nx.complement(cls.Gd))
+ cls.Gk = nx.karate_club_graph()
+ cls.Ak = _AntiGraph(nx.complement(cls.Gk))
+ cls.GA = [(cls.Gnp, cls.Anp), (cls.Gd, cls.Ad), (cls.Gk, cls.Ak)]
+
+ def test_size(self):
+ for G, A in self.GA:
+ n = G.order()
+ s = len(list(G.edges())) + len(list(A.edges()))
+ assert s == (n * (n - 1)) / 2
+
+ def test_degree(self):
+ for G, A in self.GA:
+ assert sorted(G.degree()) == sorted(A.degree())
+
+ def test_core_number(self):
+ for G, A in self.GA:
+ assert nx.core_number(G) == nx.core_number(A)
+
+ def test_connected_components(self):
+ # ccs are same unless isolated nodes or any node has degree=len(G)-1
+ # graphs in self.GA avoid this problem
+ for G, A in self.GA:
+ gc = [set(c) for c in nx.connected_components(G)]
+ ac = [set(c) for c in nx.connected_components(A)]
+ for comp in ac:
+ assert comp in gc
+
+ def test_adj(self):
+ for G, A in self.GA:
+ for n, nbrs in G.adj.items():
+ a_adj = sorted((n, sorted(ad)) for n, ad in A.adj.items())
+ g_adj = sorted((n, sorted(ad)) for n, ad in G.adj.items())
+ assert a_adj == g_adj
+
+ def test_adjacency(self):
+ for G, A in self.GA:
+ a_adj = list(A.adjacency())
+ for n, nbrs in G.adjacency():
+ assert (n, set(nbrs)) in a_adj
+
+ def test_neighbors(self):
+ for G, A in self.GA:
+ node = list(G.nodes())[0]
+ assert set(G.neighbors(node)) == set(A.neighbors(node))
+
+ def test_node_not_in_graph(self):
+ for G, A in self.GA:
+ node = "non_existent_node"
+ pytest.raises(nx.NetworkXError, A.neighbors, node)
+ pytest.raises(nx.NetworkXError, G.neighbors, node)
+
+ def test_degree_thingraph(self):
+ for G, A in self.GA:
+ node = list(G.nodes())[0]
+ nodes = list(G.nodes())[1:4]
+ assert G.degree(node) == A.degree(node)
+ assert sum(d for n, d in G.degree()) == sum(d for n, d in A.degree())
+ # AntiGraph is a ThinGraph, so all the weights are 1
+ assert sum(d for n, d in A.degree()) == sum(
+ d for n, d in A.degree(weight="weight")
+ )
+ assert sum(d for n, d in G.degree(nodes)) == sum(
+ d for n, d in A.degree(nodes)
+ )
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_matching.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_matching.py
new file mode 100644
index 0000000000000000000000000000000000000000..f50da3d2e07310fc19e1db2bd18fdce23223771c
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_matching.py
@@ -0,0 +1,8 @@
+import networkx as nx
+import networkx.algorithms.approximation as a
+
+
+def test_min_maximal_matching():
+    # smoke test
+    # An empty graph has an empty maximal matching.
+    G = nx.Graph()
+    assert len(a.min_maximal_matching(G)) == 0
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_ramsey.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_ramsey.py
new file mode 100644
index 0000000000000000000000000000000000000000..32fe1fb8fa917c557954d9da0d960895a6953a11
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_ramsey.py
@@ -0,0 +1,31 @@
+import networkx as nx
+import networkx.algorithms.approximation as apxa
+
+
+def test_ramsey():
+    """ramsey_R2 returns a (clique, independent-set) pair.
+
+    Validity is checked via subgraph density: 1.0 for the clique,
+    0.0 for the independent set.
+    """
+    # this should only find the complete graph
+    graph = nx.complete_graph(10)
+    c, i = apxa.ramsey_R2(graph)
+    cdens = nx.density(graph.subgraph(c))
+    assert cdens == 1.0, "clique not correctly found by ramsey!"
+    idens = nx.density(graph.subgraph(i))
+    assert idens == 0.0, "i-set not correctly found by ramsey!"
+
+    # this trivial graph has no cliques. should just find i-sets
+    graph = nx.trivial_graph()
+    c, i = apxa.ramsey_R2(graph)
+    assert c == {0}, "clique not correctly found by ramsey!"
+    assert i == {0}, "i-set not correctly found by ramsey!"
+
+    graph = nx.barbell_graph(10, 5, nx.Graph())
+    c, i = apxa.ramsey_R2(graph)
+    cdens = nx.density(graph.subgraph(c))
+    assert cdens == 1.0, "clique not correctly found by ramsey!"
+    idens = nx.density(graph.subgraph(i))
+    assert idens == 0.0, "i-set not correctly found by ramsey!"
+
+    # add self-loops and test again; the result must be unchanged.
+    graph.add_edges_from([(n, n) for n in range(0, len(graph), 2)])
+    cc, ii = apxa.ramsey_R2(graph)
+    assert cc == c
+    assert ii == i
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_treewidth.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_treewidth.py
new file mode 100644
index 0000000000000000000000000000000000000000..461b0f2ed2dd4d043902d054e10a5f39ffb069c9
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/approximation/tests/test_treewidth.py
@@ -0,0 +1,280 @@
+import itertools
+
+import networkx as nx
+from networkx.algorithms.approximation import (
+ treewidth_min_degree,
+ treewidth_min_fill_in,
+)
+from networkx.algorithms.approximation.treewidth import (
+ MinDegreeHeuristic,
+ min_fill_in_heuristic,
+)
+
+
+def is_tree_decomp(graph, decomp):
+    """Check if the given tree decomposition is valid.
+
+    Asserts the three defining properties of a tree decomposition of
+    ``graph``: every node appears in at least one bag of ``decomp``,
+    both endpoints of every edge share some bag, and the bags containing
+    any given node induce a connected subgraph of ``decomp``.
+    """
+    # Property 1: every graph node appears in at least one bag.
+    for x in graph.nodes():
+        appear_once = False
+        for bag in decomp.nodes():
+            if x in bag:
+                appear_once = True
+                break
+        assert appear_once
+
+    # Check if each connected pair of nodes are at least once together in a bag
+    for x, y in graph.edges():
+        appear_together = False
+        for bag in decomp.nodes():
+            if x in bag and y in bag:
+                appear_together = True
+                break
+        assert appear_together
+
+    # Check if the nodes associated with vertex v form a connected subset of T
+    for v in graph.nodes():
+        subset = []
+        for bag in decomp.nodes():
+            if v in bag:
+                subset.append(bag)
+        sub_graph = decomp.subgraph(subset)
+        assert nx.is_connected(sub_graph)
+
+
+class TestTreewidthMinDegree:
+ """Unit tests for the min_degree function"""
+
+ @classmethod
+ def setup_class(cls):
+ """Setup for different kinds of trees"""
+ cls.complete = nx.Graph()
+ cls.complete.add_edge(1, 2)
+ cls.complete.add_edge(2, 3)
+ cls.complete.add_edge(1, 3)
+
+ cls.small_tree = nx.Graph()
+ cls.small_tree.add_edge(1, 3)
+ cls.small_tree.add_edge(4, 3)
+ cls.small_tree.add_edge(2, 3)
+ cls.small_tree.add_edge(3, 5)
+ cls.small_tree.add_edge(5, 6)
+ cls.small_tree.add_edge(5, 7)
+ cls.small_tree.add_edge(6, 7)
+
+ cls.deterministic_graph = nx.Graph()
+ cls.deterministic_graph.add_edge(0, 1) # deg(0) = 1
+
+ cls.deterministic_graph.add_edge(1, 2) # deg(1) = 2
+
+ cls.deterministic_graph.add_edge(2, 3)
+ cls.deterministic_graph.add_edge(2, 4) # deg(2) = 3
+
+ cls.deterministic_graph.add_edge(3, 4)
+ cls.deterministic_graph.add_edge(3, 5)
+ cls.deterministic_graph.add_edge(3, 6) # deg(3) = 4
+
+ cls.deterministic_graph.add_edge(4, 5)
+ cls.deterministic_graph.add_edge(4, 6)
+ cls.deterministic_graph.add_edge(4, 7) # deg(4) = 5
+
+ cls.deterministic_graph.add_edge(5, 6)
+ cls.deterministic_graph.add_edge(5, 7)
+ cls.deterministic_graph.add_edge(5, 8)
+ cls.deterministic_graph.add_edge(5, 9) # deg(5) = 6
+
+ cls.deterministic_graph.add_edge(6, 7)
+ cls.deterministic_graph.add_edge(6, 8)
+ cls.deterministic_graph.add_edge(6, 9) # deg(6) = 6
+
+ cls.deterministic_graph.add_edge(7, 8)
+ cls.deterministic_graph.add_edge(7, 9) # deg(7) = 5
+
+ cls.deterministic_graph.add_edge(8, 9) # deg(8) = 4
+
+ def test_petersen_graph(self):
+ """Test Petersen graph tree decomposition result"""
+ G = nx.petersen_graph()
+ _, decomp = treewidth_min_degree(G)
+ is_tree_decomp(G, decomp)
+
+ def test_small_tree_treewidth(self):
+ """Test small tree
+
+ Test if the computed treewidth of the known self.small_tree is 2.
+ As we know which value we can expect from our heuristic, values other
+ than two are regressions
+ """
+ G = self.small_tree
+ # the order of removal should be [1,2,4]3[5,6,7]
+ # (with [] denoting any order of the containing nodes)
+ # resulting in treewidth 2 for the heuristic
+ treewidth, _ = treewidth_min_fill_in(G)
+ assert treewidth == 2
+
+ def test_heuristic_abort(self):
+ """Test heuristic abort condition for fully connected graph"""
+ graph = {}
+ for u in self.complete:
+ graph[u] = set()
+ for v in self.complete[u]:
+ if u != v: # ignore self-loop
+ graph[u].add(v)
+
+ deg_heuristic = MinDegreeHeuristic(graph)
+ node = deg_heuristic.best_node(graph)
+ if node is None:
+ pass
+ else:
+ assert False
+
+ def test_empty_graph(self):
+ """Test empty graph"""
+ G = nx.Graph()
+ _, _ = treewidth_min_degree(G)
+
+ def test_two_component_graph(self):
+ G = nx.Graph()
+ G.add_node(1)
+ G.add_node(2)
+ treewidth, _ = treewidth_min_degree(G)
+ assert treewidth == 0
+
+ def test_not_sortable_nodes(self):
+ G = nx.Graph([(0, "a")])
+ treewidth_min_degree(G)
+
+ def test_heuristic_first_steps(self):
+ """Test first steps of min_degree heuristic"""
+ graph = {
+ n: set(self.deterministic_graph[n]) - {n} for n in self.deterministic_graph
+ }
+ deg_heuristic = MinDegreeHeuristic(graph)
+ elim_node = deg_heuristic.best_node(graph)
+ print(f"Graph {graph}:")
+ steps = []
+
+ while elim_node is not None:
+ print(f"Removing {elim_node}:")
+ steps.append(elim_node)
+ nbrs = graph[elim_node]
+
+ for u, v in itertools.permutations(nbrs, 2):
+ if v not in graph[u]:
+ graph[u].add(v)
+
+ for u in graph:
+ if elim_node in graph[u]:
+ graph[u].remove(elim_node)
+
+ del graph[elim_node]
+ print(f"Graph {graph}:")
+ elim_node = deg_heuristic.best_node(graph)
+
+ # check only the first 5 elements for equality
+ assert steps[:5] == [0, 1, 2, 3, 4]
+
+
+class TestTreewidthMinFillIn:
+ """Unit tests for the treewidth_min_fill_in function."""
+
+ @classmethod
+ def setup_class(cls):
+ """Setup for different kinds of trees"""
+ cls.complete = nx.Graph()
+ cls.complete.add_edge(1, 2)
+ cls.complete.add_edge(2, 3)
+ cls.complete.add_edge(1, 3)
+
+ cls.small_tree = nx.Graph()
+ cls.small_tree.add_edge(1, 2)
+ cls.small_tree.add_edge(2, 3)
+ cls.small_tree.add_edge(3, 4)
+ cls.small_tree.add_edge(1, 4)
+ cls.small_tree.add_edge(2, 4)
+ cls.small_tree.add_edge(4, 5)
+ cls.small_tree.add_edge(5, 6)
+ cls.small_tree.add_edge(5, 7)
+ cls.small_tree.add_edge(6, 7)
+
+ cls.deterministic_graph = nx.Graph()
+ cls.deterministic_graph.add_edge(1, 2)
+ cls.deterministic_graph.add_edge(1, 3)
+ cls.deterministic_graph.add_edge(3, 4)
+ cls.deterministic_graph.add_edge(2, 4)
+ cls.deterministic_graph.add_edge(3, 5)
+ cls.deterministic_graph.add_edge(4, 5)
+ cls.deterministic_graph.add_edge(3, 6)
+ cls.deterministic_graph.add_edge(5, 6)
+
+ def test_petersen_graph(self):
+ """Test Petersen graph tree decomposition result"""
+ G = nx.petersen_graph()
+ _, decomp = treewidth_min_fill_in(G)
+ is_tree_decomp(G, decomp)
+
+ def test_small_tree_treewidth(self):
+ """Test if the computed treewidth of the known self.small_tree is 2"""
+ G = self.small_tree
+ # the order of removal should be [1,2,4]3[5,6,7]
+ # (with [] denoting any order of the containing nodes)
+ # resulting in treewidth 2 for the heuristic
+ treewidth, _ = treewidth_min_fill_in(G)
+ assert treewidth == 2
+
+ def test_heuristic_abort(self):
+ """Test if min_fill_in returns None for fully connected graph"""
+ graph = {}
+ for u in self.complete:
+ graph[u] = set()
+ for v in self.complete[u]:
+ if u != v: # ignore self-loop
+ graph[u].add(v)
+ next_node = min_fill_in_heuristic(graph)
+ if next_node is None:
+ pass
+ else:
+ assert False
+
+ def test_empty_graph(self):
+ """Test empty graph"""
+ G = nx.Graph()
+ _, _ = treewidth_min_fill_in(G)
+
+ def test_two_component_graph(self):
+ G = nx.Graph()
+ G.add_node(1)
+ G.add_node(2)
+ treewidth, _ = treewidth_min_fill_in(G)
+ assert treewidth == 0
+
+ def test_not_sortable_nodes(self):
+ G = nx.Graph([(0, "a")])
+ treewidth_min_fill_in(G)
+
+ def test_heuristic_first_steps(self):
+ """Test first steps of min_fill_in heuristic"""
+ graph = {
+ n: set(self.deterministic_graph[n]) - {n} for n in self.deterministic_graph
+ }
+ print(f"Graph {graph}:")
+ elim_node = min_fill_in_heuristic(graph)
+ steps = []
+
+ while elim_node is not None:
+ print(f"Removing {elim_node}:")
+ steps.append(elim_node)
+ nbrs = graph[elim_node]
+
+ for u, v in itertools.permutations(nbrs, 2):
+ if v not in graph[u]:
+ graph[u].add(v)
+
+ for u in graph:
+ if elim_node in graph[u]:
+ graph[u].remove(elim_node)
+
+ del graph[elim_node]
+ print(f"Graph {graph}:")
+ elim_node = min_fill_in_heuristic(graph)
+
+ # check only the first 2 elements for equality
+ assert steps[:2] == [6, 5]
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/kernighan_lin.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/kernighan_lin.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..87e72e6109e8086e41b306916339eb9c9a328fba
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/__pycache__/kernighan_lin.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/modularity_max.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/modularity_max.py
new file mode 100644
index 0000000000000000000000000000000000000000..aba3267c33e8dcba06e1fa6ed95d5ec0b6b45dfe
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/modularity_max.py
@@ -0,0 +1,448 @@
+"""Functions for detecting communities based on modularity."""
+
+from collections import defaultdict
+
+import networkx as nx
+from networkx.algorithms.community.quality import modularity
+from networkx.utils import not_implemented_for
+from networkx.utils.mapped_queue import MappedQueue
+
+__all__ = [
+ "greedy_modularity_communities",
+ "naive_greedy_modularity_communities",
+]
+
+
+def _greedy_modularity_communities_generator(G, weight=None, resolution=1):
+    r"""Yield community partitions of G and the modularity change at each step.
+
+    This function performs Clauset-Newman-Moore greedy modularity maximization [2]_
+    At each step of the process it yields the change in modularity that will occur in
+    the next step followed by yielding the new community partition after that step.
+
+    Greedy modularity maximization begins with each node in its own community
+    and repeatedly joins the pair of communities that lead to the largest
+    modularity until one community contains all nodes (the partition has one set).
+
+    This function maximizes the generalized modularity, where `resolution`
+    is the resolution parameter, often expressed as $\gamma$.
+    See :func:`~networkx.algorithms.community.quality.modularity`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    weight : string or None, optional (default=None)
+        The name of an edge attribute that holds the numerical value used
+        as a weight.  If None, then each edge has weight 1.
+        The degree is the sum of the edge weights adjacent to the node.
+
+    resolution : float (default=1)
+        If resolution is less than 1, modularity favors larger communities.
+        Greater than 1 favors smaller communities.
+
+    Yields
+    ------
+    Alternating yield statements produce the following two objects:
+
+    communities: dict_values
+        A dict_values of frozensets of nodes, one for each community.
+        This represents a partition of the nodes of the graph into communities.
+        The first yield is the partition with each node in its own community.
+
+    dq: float
+        The change in modularity when merging the next two communities
+        that leads to the largest modularity.
+
+    See Also
+    --------
+    modularity
+
+    References
+    ----------
+    .. [1] Newman, M. E. J. "Networks: An Introduction", page 224
+       Oxford University Press 2011.
+    .. [2] Clauset, A., Newman, M. E., & Moore, C.
+       "Finding community structure in very large networks."
+       Physical Review E 70(6), 2004.
+    .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community
+       Detection" Phys. Rev. E74, 2006.
+    .. [4] Newman, M. E. J."Analysis of weighted networks"
+       Physical Review E 70(5 Pt 2):056131, 2004.
+    """
+    directed = G.is_directed()
+    # NOTE(review): N is assigned but never used below — confirm it can be
+    # dropped upstream.
+    N = G.number_of_nodes()
+
+    # Count edges (or the sum of edge-weights for weighted graphs)
+    m = G.size(weight)
+    # NOTE(review): raises ZeroDivisionError when G has no edges (m == 0) —
+    # confirm that callers guard against edgeless graphs.
+    q0 = 1 / m
+
+    # Calculate degrees (notation from the papers)
+    # a : the fraction of (weighted) out-degree for each node
+    # b : the fraction of (weighted) in-degree for each node
+    if directed:
+        a = {node: deg_out * q0 for node, deg_out in G.out_degree(weight=weight)}
+        b = {node: deg_in * q0 for node, deg_in in G.in_degree(weight=weight)}
+    else:
+        # Undirected: in- and out-fractions coincide; the 0.5 halves the
+        # double counting of each edge endpoint.
+        a = b = {node: deg * q0 * 0.5 for node, deg in G.degree(weight=weight)}
+
+    # this preliminary step collects the edge weights for each node pair
+    # It handles multigraph and digraph and works fine for graph.
+    dq_dict = defaultdict(lambda: defaultdict(float))
+    for u, v, wt in G.edges(data=weight, default=1):
+        if u == v:
+            # Self-loops contribute nothing to a merge's modularity change.
+            continue
+        dq_dict[u][v] += wt
+        dq_dict[v][u] += wt
+
+    # now scale and subtract the expected edge-weights term
+    for u, nbrdict in dq_dict.items():
+        for v, wt in nbrdict.items():
+            dq_dict[u][v] = q0 * wt - resolution * (a[u] * b[v] + b[u] * a[v])
+
+    # Use -dq to get a max_heap instead of a min_heap
+    # dq_heap holds a heap for each node's neighbors
+    dq_heap = {u: MappedQueue({(u, v): -dq for v, dq in dq_dict[u].items()}) for u in G}
+    # H -> all_dq_heap holds a heap with the best items for each node
+    H = MappedQueue([dq_heap[n].heap[0] for n in G if len(dq_heap[n]) > 0])
+
+    # Initialize single-node communities
+    communities = {n: frozenset([n]) for n in G}
+    yield communities.values()
+
+    # Merge the two communities that lead to the largest modularity
+    while len(H) > 1:
+        # Find best merge
+        # Remove from heap of row maxes
+        # Ties will be broken by choosing the pair with lowest min community id
+        try:
+            negdq, u, v = H.pop()
+        except IndexError:
+            break
+        dq = -negdq
+        yield dq
+        # Remove best merge from row u heap
+        dq_heap[u].pop()
+        # Push new row max onto H
+        if len(dq_heap[u]) > 0:
+            H.push(dq_heap[u].heap[0])
+        # If this element was also at the root of row v, we need to remove the
+        # duplicate entry from H
+        if dq_heap[v].heap[0] == (v, u):
+            H.remove((v, u))
+            # Remove best merge from row v heap
+            dq_heap[v].remove((v, u))
+            # Push new row max onto H
+            if len(dq_heap[v]) > 0:
+                H.push(dq_heap[v].heap[0])
+        else:
+            # Duplicate wasn't in H, just remove from row v heap
+            dq_heap[v].remove((v, u))
+
+        # Perform merge — u is absorbed into v; v's id survives as the
+        # merged community's key.
+        communities[v] = frozenset(communities[u] | communities[v])
+        del communities[u]
+
+        # Get neighbor communities connected to the merged communities
+        u_nbrs = set(dq_dict[u])
+        v_nbrs = set(dq_dict[v])
+        all_nbrs = (u_nbrs | v_nbrs) - {u, v}
+        both_nbrs = u_nbrs & v_nbrs
+        # Update dq for merge of u into v
+        for w in all_nbrs:
+            # Calculate new dq value
+            if w in both_nbrs:
+                dq_vw = dq_dict[v][w] + dq_dict[u][w]
+            elif w in v_nbrs:
+                dq_vw = dq_dict[v][w] - resolution * (a[u] * b[w] + a[w] * b[u])
+            else:  # w in u_nbrs
+                dq_vw = dq_dict[u][w] - resolution * (a[v] * b[w] + a[w] * b[v])
+            # Update rows v and w
+            for row, col in [(v, w), (w, v)]:
+                dq_heap_row = dq_heap[row]
+                # Update dict for v,w only (u is removed below)
+                dq_dict[row][col] = dq_vw
+                # Save old max of per-row heap
+                if len(dq_heap_row) > 0:
+                    d_oldmax = dq_heap_row.heap[0]
+                else:
+                    d_oldmax = None
+                # Add/update heaps
+                d = (row, col)
+                d_negdq = -dq_vw
+                # Save old value for finding heap index
+                if w in v_nbrs:
+                    # Update existing element in per-row heap
+                    dq_heap_row.update(d, d, priority=d_negdq)
+                else:
+                    # We're creating a new nonzero element, add to heap
+                    dq_heap_row.push(d, priority=d_negdq)
+                # Update heap of row maxes if necessary
+                if d_oldmax is None:
+                    # No entries previously in this row, push new max
+                    H.push(d, priority=d_negdq)
+                else:
+                    # We've updated an entry in this row, has the max changed?
+                    row_max = dq_heap_row.heap[0]
+                    if d_oldmax != row_max or d_oldmax.priority != row_max.priority:
+                        H.update(d_oldmax, row_max)
+
+        # Remove row/col u from dq_dict matrix
+        for w in dq_dict[u]:
+            # Remove from dict
+            dq_old = dq_dict[w][u]
+            del dq_dict[w][u]
+            # Remove from heaps if we haven't already
+            if w != v:
+                # Remove both row and column
+                for row, col in [(w, u), (u, w)]:
+                    dq_heap_row = dq_heap[row]
+                    # Check if replaced dq is row max
+                    d_old = (row, col)
+                    if dq_heap_row.heap[0] == d_old:
+                        # Update per-row heap and heap of row maxes
+                        dq_heap_row.remove(d_old)
+                        H.remove(d_old)
+                        # Update row max
+                        if len(dq_heap_row) > 0:
+                            H.push(dq_heap_row.heap[0])
+                    else:
+                        # Only update per-row heap
+                        dq_heap_row.remove(d_old)
+
+        del dq_dict[u]
+        # Mark row u as deleted, but keep placeholder
+        dq_heap[u] = MappedQueue()
+        # Merge u into v and update a
+        a[v] += a[u]
+        a[u] = 0
+        if directed:
+            # In the directed case b is a separate dict and must be merged too
+            # (undirected graphs share one dict for a and b).
+            b[v] += b[u]
+            b[u] = 0
+
+        yield communities.values()
+
+
+@nx._dispatch(edge_attrs="weight")
+def greedy_modularity_communities(
+    G,
+    weight=None,
+    resolution=1,
+    cutoff=1,
+    best_n=None,
+):
+    r"""Find communities in G using greedy modularity maximization.
+
+    This function uses Clauset-Newman-Moore greedy modularity maximization [2]_
+    to find the community partition with the largest modularity.
+
+    Greedy modularity maximization begins with each node in its own community
+    and repeatedly joins the pair of communities that lead to the largest
+    modularity until no further increase in modularity is possible (a maximum).
+    Two keyword arguments adjust the stopping condition. `cutoff` is a lower
+    limit on the number of communities so you can stop the process before
+    reaching a maximum (used to save computation time). `best_n` is an upper
+    limit on the number of communities so you can make the process continue
+    until at most n communities remain even if the maximum modularity occurs
+    for more. To obtain exactly n communities, set both `cutoff` and `best_n` to n.
+
+    This function maximizes the generalized modularity, where `resolution`
+    is the resolution parameter, often expressed as $\gamma$.
+    See :func:`~networkx.algorithms.community.quality.modularity`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    weight : string or None, optional (default=None)
+        The name of an edge attribute that holds the numerical value used
+        as a weight.  If None, then each edge has weight 1.
+        The degree is the sum of the edge weights adjacent to the node.
+
+    resolution : float, optional (default=1)
+        If resolution is less than 1, modularity favors larger communities.
+        Greater than 1 favors smaller communities.
+
+    cutoff : int, optional (default=1)
+        A minimum number of communities below which the merging process stops.
+        The process stops at this number of communities even if modularity
+        is not maximized. The goal is to let the user stop the process early.
+        The process stops before the cutoff if it finds a maximum of modularity.
+
+    best_n : int or None, optional (default=None)
+        A maximum number of communities above which the merging process will
+        not stop. This forces community merging to continue after modularity
+        starts to decrease until `best_n` communities remain.
+        If ``None``, don't force it to continue beyond a maximum.
+
+    Raises
+    ------
+    ValueError : If the `cutoff` or `best_n` value is not in the range
+        ``[1, G.number_of_nodes()]``, or if `best_n` < `cutoff`.
+
+    Returns
+    -------
+    communities: list
+        A list of frozensets of nodes, one for each community.
+        Sorted by length with largest communities first.
+
+    Examples
+    --------
+    >>> G = nx.karate_club_graph()
+    >>> c = nx.community.greedy_modularity_communities(G)
+    >>> sorted(c[0])
+    [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]
+
+    See Also
+    --------
+    modularity
+
+    References
+    ----------
+    .. [1] Newman, M. E. J. "Networks: An Introduction", page 224
+       Oxford University Press 2011.
+    .. [2] Clauset, A., Newman, M. E., & Moore, C.
+       "Finding community structure in very large networks."
+       Physical Review E 70(6), 2004.
+    .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community
+       Detection" Phys. Rev. E74, 2006.
+    .. [4] Newman, M. E. J."Analysis of weighted networks"
+       Physical Review E 70(5 Pt 2):056131, 2004.
+    """
+    # Validate the stopping-condition parameters before any real work.
+    if (cutoff < 1) or (cutoff > G.number_of_nodes()):
+        raise ValueError(f"cutoff must be between 1 and {len(G)}. Got {cutoff}.")
+    if best_n is not None:
+        if (best_n < 1) or (best_n > G.number_of_nodes()):
+            raise ValueError(f"best_n must be between 1 and {len(G)}. Got {best_n}.")
+        if best_n < cutoff:
+            raise ValueError(f"Must have best_n >= cutoff. Got {best_n} < {cutoff}")
+        if best_n == 1:
+            # Everything merges into one community; no need to iterate.
+            return [set(G)]
+    else:
+        # No upper bound requested: treat every partition size as acceptable.
+        best_n = G.number_of_nodes()
+
+    # retrieve generator object to construct output
+    # The generator alternates yields: partition, dq, partition, dq, ...
+    community_gen = _greedy_modularity_communities_generator(
+        G, weight=weight, resolution=resolution
+    )
+
+    # construct the first best community
+    communities = next(community_gen)
+
+    # continue merging communities until one of the breaking criteria is satisfied
+    while len(communities) > cutoff:
+        try:
+            dq = next(community_gen)
+        # StopIteration occurs when communities are the connected components
+        except StopIteration:
+            communities = sorted(communities, key=len, reverse=True)
+            # if best_n requires more merging, merge big sets for highest modularity
+            while len(communities) > best_n:
+                comm1, comm2, *rest = communities
+                # comm1 and comm2 are disjoint communities, so symmetric
+                # difference (^) is equivalent to union here.
+                communities = [comm1 ^ comm2]
+                communities.extend(rest)
+            return communities
+
+        # keep going unless max_mod is reached or best_n says to merge more
+        if dq < 0 and len(communities) <= best_n:
+            break
+        # Advance the generator to obtain the partition after the accepted merge.
+        communities = next(community_gen)
+
+    return sorted(communities, key=len, reverse=True)
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatch(edge_attrs="weight")
+def naive_greedy_modularity_communities(G, resolution=1, weight=None):
+    r"""Find communities in G using greedy modularity maximization.
+
+    This implementation is O(n^4), much slower than alternatives, but it is
+    provided as an easy-to-understand reference implementation.
+
+    Greedy modularity maximization begins with each node in its own community
+    and joins the pair of communities that most increases modularity until no
+    such pair exists.
+
+    This function maximizes the generalized modularity, where `resolution`
+    is the resolution parameter, often expressed as $\gamma$.
+    See :func:`~networkx.algorithms.community.quality.modularity`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Graph must be simple and undirected.
+
+    resolution : float (default=1)
+        If resolution is less than 1, modularity favors larger communities.
+        Greater than 1 favors smaller communities.
+
+    weight : string or None, optional (default=None)
+        The name of an edge attribute that holds the numerical value used
+        as a weight.  If None, then each edge has weight 1.
+        The degree is the sum of the edge weights adjacent to the node.
+
+    Returns
+    -------
+    list
+        A list of sets of nodes, one for each community.
+        Sorted by length with largest communities first.
+
+    Examples
+    --------
+    >>> G = nx.karate_club_graph()
+    >>> c = nx.community.naive_greedy_modularity_communities(G)
+    >>> sorted(c[0])
+    [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]
+
+    See Also
+    --------
+    greedy_modularity_communities
+    modularity
+    """
+    # First create one community for each node
+    communities = [frozenset([u]) for u in G.nodes()]
+    # Track merges
+    merges = []
+    # Greedily merge communities until no improvement is possible
+    old_modularity = None
+    new_modularity = modularity(G, communities, resolution=resolution, weight=weight)
+    while old_modularity is None or new_modularity > old_modularity:
+        # Save modularity for comparison
+        old_modularity = new_modularity
+        # Find best pair to merge
+        trial_communities = list(communities)
+        to_merge = None
+        for i, u in enumerate(communities):
+            for j, v in enumerate(communities):
+                # Skip i==j and empty communities
+                # (merged-away communities are left as empty frozensets so
+                # that indices stay stable across iterations)
+                if j <= i or len(u) == 0 or len(v) == 0:
+                    continue
+                # Merge communities u and v
+                trial_communities[j] = u | v
+                trial_communities[i] = frozenset([])
+                trial_modularity = modularity(
+                    G, trial_communities, resolution=resolution, weight=weight
+                )
+                if trial_modularity >= new_modularity:
+                    # Check if strictly better or tie
+                    if trial_modularity > new_modularity:
+                        # Found new best, save modularity and group indexes
+                        new_modularity = trial_modularity
+                        to_merge = (i, j, new_modularity - old_modularity)
+                    elif to_merge and min(i, j) < min(to_merge[0], to_merge[1]):
+                        # Break ties by choosing pair with lowest min id
+                        new_modularity = trial_modularity
+                        to_merge = (i, j, new_modularity - old_modularity)
+                # Un-merge so trial_communities can be reused for the next pair
+                trial_communities[i] = u
+                trial_communities[j] = v
+        if to_merge is not None:
+            # If the best merge improves modularity, use it
+            merges.append(to_merge)
+            i, j, dq = to_merge
+            u, v = communities[i], communities[j]
+            communities[j] = u | v
+            communities[i] = frozenset([])
+    # Remove empty communities and sort
+    return sorted((c for c in communities if len(c) > 0), key=len, reverse=True)
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_centrality.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_centrality.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..02e9a339a774d4ed3a3d983a69584503ea9de9e4
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_centrality.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_louvain.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_louvain.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b9b5132aea0d380d9b6fc6fda76b106061aa4fe0
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_louvain.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_quality.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_quality.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5b317b6160e752db2411120a22620c2195cb71a8
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/community/tests/__pycache__/test_quality.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/attracting.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/attracting.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9eddaab91e95017a629612a51afd249e1fcad55d
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/attracting.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/biconnected.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/biconnected.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..846656a112d4bc8d9cfd94f2244ac5ff33dd157b
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/biconnected.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/strongly_connected.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/strongly_connected.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..050a94a5a6425bf949ab9474e79bd7b32bdde74a
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/__pycache__/strongly_connected.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/__pycache__/__init__.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..089361852168d27fd2870042a8c4d412e3e15a75
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/__pycache__/__init__.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/test_weakly_connected.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/test_weakly_connected.py
new file mode 100644
index 0000000000000000000000000000000000000000..e313263668c07ad7b7a3cb2ad8f1b74b08347a14
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/tests/test_weakly_connected.py
@@ -0,0 +1,90 @@
+import pytest
+
+import networkx as nx
+from networkx import NetworkXNotImplemented
+
+
+class TestWeaklyConnected:
+ @classmethod
+ def setup_class(cls):
+ cls.gc = []
+ G = nx.DiGraph()
+ G.add_edges_from(
+ [
+ (1, 2),
+ (2, 3),
+ (2, 8),
+ (3, 4),
+ (3, 7),
+ (4, 5),
+ (5, 3),
+ (5, 6),
+ (7, 4),
+ (7, 6),
+ (8, 1),
+ (8, 7),
+ ]
+ )
+ C = [[3, 4, 5, 7], [1, 2, 8], [6]]
+ cls.gc.append((G, C))
+
+ G = nx.DiGraph()
+ G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)])
+ C = [[2, 3, 4], [1]]
+ cls.gc.append((G, C))
+
+ G = nx.DiGraph()
+ G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)])
+ C = [[1, 2, 3]]
+ cls.gc.append((G, C))
+
+ # Eppstein's tests
+ G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []})
+ C = [[0], [1], [2], [3], [4], [5], [6]]
+ cls.gc.append((G, C))
+
+ G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]})
+ C = [[0, 1, 2], [3, 4]]
+ cls.gc.append((G, C))
+
+ def test_weakly_connected_components(self):
+ for G, C in self.gc:
+ U = G.to_undirected()
+ w = {frozenset(g) for g in nx.weakly_connected_components(G)}
+ c = {frozenset(g) for g in nx.connected_components(U)}
+ assert w == c
+
+ def test_number_weakly_connected_components(self):
+ for G, C in self.gc:
+ U = G.to_undirected()
+ w = nx.number_weakly_connected_components(G)
+ c = nx.number_connected_components(U)
+ assert w == c
+
+ def test_is_weakly_connected(self):
+ for G, C in self.gc:
+ U = G.to_undirected()
+ assert nx.is_weakly_connected(G) == nx.is_connected(U)
+
+ def test_null_graph(self):
+ G = nx.DiGraph()
+ assert list(nx.weakly_connected_components(G)) == []
+ assert nx.number_weakly_connected_components(G) == 0
+ with pytest.raises(nx.NetworkXPointlessConcept):
+ next(nx.is_weakly_connected(G))
+
+ def test_connected_raise(self):
+ G = nx.Graph()
+ with pytest.raises(NetworkXNotImplemented):
+ next(nx.weakly_connected_components(G))
+ pytest.raises(NetworkXNotImplemented, nx.number_weakly_connected_components, G)
+ pytest.raises(NetworkXNotImplemented, nx.is_weakly_connected, G)
+
+ def test_connected_mutability(self):
+ DG = nx.path_graph(5, create_using=nx.DiGraph)
+ G = nx.disjoint_union(DG, DG)
+ seen = set()
+ for component in nx.weakly_connected_components(G):
+ assert len(seen & component) == 0
+ seen.update(component)
+ component.clear()
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/weakly_connected.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/weakly_connected.py
new file mode 100644
index 0000000000000000000000000000000000000000..c8dc2350ef16f01c4b41426fd8d945a05b186511
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/components/weakly_connected.py
@@ -0,0 +1,196 @@
+"""Weakly connected components."""
+import networkx as nx
+from networkx.utils.decorators import not_implemented_for
+
+__all__ = [
+ "number_weakly_connected_components",
+ "weakly_connected_components",
+ "is_weakly_connected",
+]
+
+
+@not_implemented_for("undirected")
+@nx._dispatch
+def weakly_connected_components(G):
+ """Generate weakly connected components of G.
+
+ Parameters
+ ----------
+ G : NetworkX graph
+ A directed graph
+
+ Returns
+ -------
+ comp : generator of sets
+ A generator of sets of nodes, one for each weakly connected
+ component of G.
+
+ Raises
+ ------
+ NetworkXNotImplemented
+ If G is undirected.
+
+ Examples
+ --------
+ Generate a sorted list of weakly connected components, largest first.
+
+ >>> G = nx.path_graph(4, create_using=nx.DiGraph())
+ >>> nx.add_path(G, [10, 11, 12])
+ >>> [
+ ... len(c)
+ ... for c in sorted(nx.weakly_connected_components(G), key=len, reverse=True)
+ ... ]
+ [4, 3]
+
+ If you only want the largest component, it's more efficient to
+ use max instead of sort:
+
+ >>> largest_cc = max(nx.weakly_connected_components(G), key=len)
+
+ See Also
+ --------
+ connected_components
+ strongly_connected_components
+
+ Notes
+ -----
+ For directed graphs only.
+
+ """
+ seen = set()
+ for v in G:
+ if v not in seen:
+ c = set(_plain_bfs(G, v))
+ seen.update(c)
+ yield c
+
+
+@not_implemented_for("undirected")
+@nx._dispatch
+def number_weakly_connected_components(G):
+ """Returns the number of weakly connected components in G.
+
+ Parameters
+ ----------
+ G : NetworkX graph
+ A directed graph.
+
+ Returns
+ -------
+ n : integer
+ Number of weakly connected components
+
+ Raises
+ ------
+ NetworkXNotImplemented
+ If G is undirected.
+
+ Examples
+ --------
+ >>> G = nx.DiGraph([(0, 1), (2, 1), (3, 4)])
+ >>> nx.number_weakly_connected_components(G)
+ 2
+
+ See Also
+ --------
+ weakly_connected_components
+ number_connected_components
+ number_strongly_connected_components
+
+ Notes
+ -----
+ For directed graphs only.
+
+ """
+ return sum(1 for wcc in weakly_connected_components(G))
+
+
+@not_implemented_for("undirected")
+@nx._dispatch
+def is_weakly_connected(G):
+ """Test directed graph for weak connectivity.
+
+ A directed graph is weakly connected if and only if the graph
+ is connected when the direction of the edge between nodes is ignored.
+
+ Note that if a graph is strongly connected (i.e. the graph is connected
+ even when we account for directionality), it is by definition weakly
+ connected as well.
+
+ Parameters
+ ----------
+ G : NetworkX Graph
+ A directed graph.
+
+ Returns
+ -------
+ connected : bool
+ True if the graph is weakly connected, False otherwise.
+
+ Raises
+ ------
+ NetworkXNotImplemented
+ If G is undirected.
+
+ Examples
+ --------
+ >>> G = nx.DiGraph([(0, 1), (2, 1)])
+ >>> G.add_node(3)
+ >>> nx.is_weakly_connected(G) # node 3 is not connected to the graph
+ False
+ >>> G.add_edge(2, 3)
+ >>> nx.is_weakly_connected(G)
+ True
+
+ See Also
+ --------
+ is_strongly_connected
+ is_semiconnected
+ is_connected
+ is_biconnected
+ weakly_connected_components
+
+ Notes
+ -----
+ For directed graphs only.
+
+ """
+ if len(G) == 0:
+ raise nx.NetworkXPointlessConcept(
+ """Connectivity is undefined for the null graph."""
+ )
+
+ return len(next(weakly_connected_components(G))) == len(G)
+
+
+def _plain_bfs(G, source):
+ """A fast BFS node generator
+
+ The direction of the edge between nodes is ignored.
+
+ For directed graphs only.
+
+ """
+ n = len(G)
+ Gsucc = G._succ
+ Gpred = G._pred
+ seen = {source}
+ nextlevel = [source]
+
+ yield source
+ while nextlevel:
+ thislevel = nextlevel
+ nextlevel = []
+ for v in thislevel:
+ for w in Gsucc[v]:
+ if w not in seen:
+ seen.add(w)
+ nextlevel.append(w)
+ yield w
+ for w in Gpred[v]:
+ if w not in seen:
+ seen.add(w)
+ nextlevel.append(w)
+ yield w
+ if len(seen) == n:
+ return
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bdc7c71dc3637d8ddb9a419574c9b22cb88c8db0
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b5024907698e86a493f76f953c6c44ba9cc32b2d
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..feada30d2bdf3489970500ada7c15f75605a5b55
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1c333931e7d04e0fec571ed5557b6d6e561585c5
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/ismags.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/ismags.py
new file mode 100644
index 0000000000000000000000000000000000000000..25ce94c87ec9b76ed81a5650f67ecfb5cda130ba
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/ismags.py
@@ -0,0 +1,1169 @@
+"""
+ISMAGS Algorithm
+================
+
+Provides a Python implementation of the ISMAGS algorithm. [1]_
+
+It is capable of finding (subgraph) isomorphisms between two graphs, taking the
+symmetry of the subgraph into account. In most cases the VF2 algorithm is
+faster (at least on small graphs) than this implementation, but in some cases
+there is an exponential number of isomorphisms that are symmetrically
+equivalent. In that case, the ISMAGS algorithm will provide only one solution
+per symmetry group.
+
+>>> petersen = nx.petersen_graph()
+>>> ismags = nx.isomorphism.ISMAGS(petersen, petersen)
+>>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=False))
+>>> len(isomorphisms)
+120
+>>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=True))
+>>> answer = [{0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9}]
+>>> answer == isomorphisms
+True
+
+In addition, this implementation also provides an interface to find the
+largest common induced subgraph [2]_ between any two graphs, again taking
+symmetry into account. Given `graph` and `subgraph` the algorithm will remove
+nodes from the `subgraph` until `subgraph` is isomorphic to a subgraph of
+`graph`. Since only the symmetry of `subgraph` is taken into account it is
+worth thinking about how you provide your graphs:
+
+>>> graph1 = nx.path_graph(4)
+>>> graph2 = nx.star_graph(3)
+>>> ismags = nx.isomorphism.ISMAGS(graph1, graph2)
+>>> ismags.is_isomorphic()
+False
+>>> largest_common_subgraph = list(ismags.largest_common_subgraph())
+>>> answer = [{1: 0, 0: 1, 2: 2}, {2: 0, 1: 1, 3: 2}]
+>>> answer == largest_common_subgraph
+True
+>>> ismags2 = nx.isomorphism.ISMAGS(graph2, graph1)
+>>> largest_common_subgraph = list(ismags2.largest_common_subgraph())
+>>> answer = [
+... {1: 0, 0: 1, 2: 2},
+... {1: 0, 0: 1, 3: 2},
+... {2: 0, 0: 1, 1: 2},
+... {2: 0, 0: 1, 3: 2},
+... {3: 0, 0: 1, 1: 2},
+... {3: 0, 0: 1, 2: 2},
+... ]
+>>> answer == largest_common_subgraph
+True
+
+However, when not taking symmetry into account, it doesn't matter:
+
+>>> largest_common_subgraph = list(ismags.largest_common_subgraph(symmetry=False))
+>>> answer = [
+... {1: 0, 0: 1, 2: 2},
+... {1: 0, 2: 1, 0: 2},
+... {2: 0, 1: 1, 3: 2},
+... {2: 0, 3: 1, 1: 2},
+... {1: 0, 0: 1, 2: 3},
+... {1: 0, 2: 1, 0: 3},
+... {2: 0, 1: 1, 3: 3},
+... {2: 0, 3: 1, 1: 3},
+... {1: 0, 0: 2, 2: 3},
+... {1: 0, 2: 2, 0: 3},
+... {2: 0, 1: 2, 3: 3},
+... {2: 0, 3: 2, 1: 3},
+... ]
+>>> answer == largest_common_subgraph
+True
+>>> largest_common_subgraph = list(ismags2.largest_common_subgraph(symmetry=False))
+>>> answer = [
+... {1: 0, 0: 1, 2: 2},
+... {1: 0, 0: 1, 3: 2},
+... {2: 0, 0: 1, 1: 2},
+... {2: 0, 0: 1, 3: 2},
+... {3: 0, 0: 1, 1: 2},
+... {3: 0, 0: 1, 2: 2},
+... {1: 1, 0: 2, 2: 3},
+... {1: 1, 0: 2, 3: 3},
+... {2: 1, 0: 2, 1: 3},
+... {2: 1, 0: 2, 3: 3},
+... {3: 1, 0: 2, 1: 3},
+... {3: 1, 0: 2, 2: 3},
+... ]
+>>> answer == largest_common_subgraph
+True
+
+Notes
+-----
+- The current implementation works for undirected graphs only. The algorithm
+ in general should work for directed graphs as well though.
+- Node keys for both provided graphs need to be fully orderable as well as
+ hashable.
+- Node and edge equality is assumed to be transitive: if A is equal to B, and
+ B is equal to C, then A is equal to C.
+
+References
+----------
+.. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
+ M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
+ Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
+ Enumeration", PLoS One 9(5): e97896, 2014.
+ https://doi.org/10.1371/journal.pone.0097896
+.. [2] https://en.wikipedia.org/wiki/Maximum_common_induced_subgraph
+"""
+
+__all__ = ["ISMAGS"]
+
+import itertools
+from collections import Counter, defaultdict
+from functools import reduce, wraps
+
+
+def are_all_equal(iterable):
+ """
+ Returns ``True`` if and only if all elements in `iterable` are equal; and
+ ``False`` otherwise.
+
+ Parameters
+ ----------
+ iterable: collections.abc.Iterable
+ The container whose elements will be checked.
+
+ Returns
+ -------
+ bool
+ ``True`` iff all elements in `iterable` compare equal, ``False``
+ otherwise.
+ """
+ try:
+ shape = iterable.shape
+ except AttributeError:
+ pass
+ else:
+ if len(shape) > 1:
+ message = "The function does not works on multidimensional arrays."
+ raise NotImplementedError(message) from None
+
+ iterator = iter(iterable)
+ first = next(iterator, None)
+ return all(item == first for item in iterator)
+
+
+def make_partitions(items, test):
+ """
+ Partitions items into sets based on the outcome of ``test(item1, item2)``.
+ Pairs of items for which `test` returns `True` end up in the same set.
+
+ Parameters
+ ----------
+ items : collections.abc.Iterable[collections.abc.Hashable]
+ Items to partition
+ test : collections.abc.Callable[collections.abc.Hashable, collections.abc.Hashable]
+ A function that will be called with 2 arguments, taken from items.
+ Should return `True` if those 2 items need to end up in the same
+ partition, and `False` otherwise.
+
+ Returns
+ -------
+ list[set]
+ A list of sets, with each set containing part of the items in `items`,
+ such that ``all(test(*pair) for pair in itertools.combinations(set, 2))
+ == True``
+
+ Notes
+ -----
+ The function `test` is assumed to be transitive: if ``test(a, b)`` and
+ ``test(b, c)`` return ``True``, then ``test(a, c)`` must also be ``True``.
+ """
+ partitions = []
+ for item in items:
+ for partition in partitions:
+ p_item = next(iter(partition))
+ if test(item, p_item):
+ partition.add(item)
+ break
+ else: # No break
+ partitions.append({item})
+ return partitions
+
+
+def partition_to_color(partitions):
+ """
+ Creates a dictionary that maps each item in each partition to the index of
+ the partition to which it belongs.
+
+ Parameters
+ ----------
+ partitions: collections.abc.Sequence[collections.abc.Iterable]
+ As returned by :func:`make_partitions`.
+
+ Returns
+ -------
+ dict
+ """
+ colors = {}
+ for color, keys in enumerate(partitions):
+ for key in keys:
+ colors[key] = color
+ return colors
+
+
+def intersect(collection_of_sets):
+ """
+ Given an collection of sets, returns the intersection of those sets.
+
+ Parameters
+ ----------
+ collection_of_sets: collections.abc.Collection[set]
+ A collection of sets.
+
+ Returns
+ -------
+ set
+ An intersection of all sets in `collection_of_sets`. Will have the same
+ type as the item initially taken from `collection_of_sets`.
+ """
+ collection_of_sets = list(collection_of_sets)
+ first = collection_of_sets.pop()
+ out = reduce(set.intersection, collection_of_sets, set(first))
+ return type(first)(out)
+
+
+class ISMAGS:
+ """
+ Implements the ISMAGS subgraph matching algorithm. [1]_ ISMAGS stands for
+ "Index-based Subgraph Matching Algorithm with General Symmetries". As the
+ name implies, it is symmetry aware and will only generate non-symmetric
+ isomorphisms.
+
+ Notes
+ -----
+ The implementation imposes additional conditions compared to the VF2
+ algorithm on the graphs provided and the comparison functions
+ (:attr:`node_equality` and :attr:`edge_equality`):
+
+ - Node keys in both graphs must be orderable as well as hashable.
+ - Equality must be transitive: if A is equal to B, and B is equal to C,
+ then A must be equal to C.
+
+ Attributes
+ ----------
+ graph: networkx.Graph
+ subgraph: networkx.Graph
+ node_equality: collections.abc.Callable
+ The function called to see if two nodes should be considered equal.
+ It's signature looks like this:
+ ``f(graph1: networkx.Graph, node1, graph2: networkx.Graph, node2) -> bool``.
+ `node1` is a node in `graph1`, and `node2` a node in `graph2`.
+ Constructed from the argument `node_match`.
+ edge_equality: collections.abc.Callable
+ The function called to see if two edges should be considered equal.
+ It's signature looks like this:
+ ``f(graph1: networkx.Graph, edge1, graph2: networkx.Graph, edge2) -> bool``.
+ `edge1` is an edge in `graph1`, and `edge2` an edge in `graph2`.
+ Constructed from the argument `edge_match`.
+
+ References
+ ----------
+ .. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
+ M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
+ Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
+ Enumeration", PLoS One 9(5): e97896, 2014.
+ https://doi.org/10.1371/journal.pone.0097896
+ """
+
+ def __init__(self, graph, subgraph, node_match=None, edge_match=None, cache=None):
+ """
+ Parameters
+ ----------
+ graph: networkx.Graph
+ subgraph: networkx.Graph
+ node_match: collections.abc.Callable or None
+ Function used to determine whether two nodes are equivalent. Its
+ signature should look like ``f(n1: dict, n2: dict) -> bool``, with
+ `n1` and `n2` node property dicts. See also
+ :func:`~networkx.algorithms.isomorphism.categorical_node_match` and
+ friends.
+ If `None`, all nodes are considered equal.
+ edge_match: collections.abc.Callable or None
+ Function used to determine whether two edges are equivalent. Its
+ signature should look like ``f(e1: dict, e2: dict) -> bool``, with
+ `e1` and `e2` edge property dicts. See also
+ :func:`~networkx.algorithms.isomorphism.categorical_edge_match` and
+ friends.
+ If `None`, all edges are considered equal.
+ cache: collections.abc.Mapping
+ A cache used for caching graph symmetries.
+ """
+ # TODO: graph and subgraph setter methods that invalidate the caches.
+ # TODO: allow for precomputed partitions and colors
+ self.graph = graph
+ self.subgraph = subgraph
+ self._symmetry_cache = cache
+ # Naming conventions are taken from the original paper. For your
+ # sanity:
+ # sg: subgraph
+ # g: graph
+ # e: edge(s)
+ # n: node(s)
+ # So: sgn means "subgraph nodes".
+ self._sgn_partitions_ = None
+ self._sge_partitions_ = None
+
+ self._sgn_colors_ = None
+ self._sge_colors_ = None
+
+ self._gn_partitions_ = None
+ self._ge_partitions_ = None
+
+ self._gn_colors_ = None
+ self._ge_colors_ = None
+
+ self._node_compat_ = None
+ self._edge_compat_ = None
+
+ if node_match is None:
+ self.node_equality = self._node_match_maker(lambda n1, n2: True)
+ self._sgn_partitions_ = [set(self.subgraph.nodes)]
+ self._gn_partitions_ = [set(self.graph.nodes)]
+ self._node_compat_ = {0: 0}
+ else:
+ self.node_equality = self._node_match_maker(node_match)
+ if edge_match is None:
+ self.edge_equality = self._edge_match_maker(lambda e1, e2: True)
+ self._sge_partitions_ = [set(self.subgraph.edges)]
+ self._ge_partitions_ = [set(self.graph.edges)]
+ self._edge_compat_ = {0: 0}
+ else:
+ self.edge_equality = self._edge_match_maker(edge_match)
+
+ @property
+ def _sgn_partitions(self):
+ if self._sgn_partitions_ is None:
+
+ def nodematch(node1, node2):
+ return self.node_equality(self.subgraph, node1, self.subgraph, node2)
+
+ self._sgn_partitions_ = make_partitions(self.subgraph.nodes, nodematch)
+ return self._sgn_partitions_
+
+ @property
+ def _sge_partitions(self):
+ if self._sge_partitions_ is None:
+
+ def edgematch(edge1, edge2):
+ return self.edge_equality(self.subgraph, edge1, self.subgraph, edge2)
+
+ self._sge_partitions_ = make_partitions(self.subgraph.edges, edgematch)
+ return self._sge_partitions_
+
+ @property
+ def _gn_partitions(self):
+ if self._gn_partitions_ is None:
+
+ def nodematch(node1, node2):
+ return self.node_equality(self.graph, node1, self.graph, node2)
+
+ self._gn_partitions_ = make_partitions(self.graph.nodes, nodematch)
+ return self._gn_partitions_
+
+ @property
+ def _ge_partitions(self):
+ if self._ge_partitions_ is None:
+
+ def edgematch(edge1, edge2):
+ return self.edge_equality(self.graph, edge1, self.graph, edge2)
+
+ self._ge_partitions_ = make_partitions(self.graph.edges, edgematch)
+ return self._ge_partitions_
+
+ @property
+ def _sgn_colors(self):
+ if self._sgn_colors_ is None:
+ self._sgn_colors_ = partition_to_color(self._sgn_partitions)
+ return self._sgn_colors_
+
+ @property
+ def _sge_colors(self):
+ if self._sge_colors_ is None:
+ self._sge_colors_ = partition_to_color(self._sge_partitions)
+ return self._sge_colors_
+
+ @property
+ def _gn_colors(self):
+ if self._gn_colors_ is None:
+ self._gn_colors_ = partition_to_color(self._gn_partitions)
+ return self._gn_colors_
+
+ @property
+ def _ge_colors(self):
+ if self._ge_colors_ is None:
+ self._ge_colors_ = partition_to_color(self._ge_partitions)
+ return self._ge_colors_
+
+ @property
+ def _node_compatibility(self):
+ if self._node_compat_ is not None:
+ return self._node_compat_
+ self._node_compat_ = {}
+ for sgn_part_color, gn_part_color in itertools.product(
+ range(len(self._sgn_partitions)), range(len(self._gn_partitions))
+ ):
+ sgn = next(iter(self._sgn_partitions[sgn_part_color]))
+ gn = next(iter(self._gn_partitions[gn_part_color]))
+ if self.node_equality(self.subgraph, sgn, self.graph, gn):
+ self._node_compat_[sgn_part_color] = gn_part_color
+ return self._node_compat_
+
+ @property
+ def _edge_compatibility(self):
+ if self._edge_compat_ is not None:
+ return self._edge_compat_
+ self._edge_compat_ = {}
+ for sge_part_color, ge_part_color in itertools.product(
+ range(len(self._sge_partitions)), range(len(self._ge_partitions))
+ ):
+ sge = next(iter(self._sge_partitions[sge_part_color]))
+ ge = next(iter(self._ge_partitions[ge_part_color]))
+ if self.edge_equality(self.subgraph, sge, self.graph, ge):
+ self._edge_compat_[sge_part_color] = ge_part_color
+ return self._edge_compat_
+
+ @staticmethod
+ def _node_match_maker(cmp):
+ @wraps(cmp)
+ def comparer(graph1, node1, graph2, node2):
+ return cmp(graph1.nodes[node1], graph2.nodes[node2])
+
+ return comparer
+
+ @staticmethod
+ def _edge_match_maker(cmp):
+ @wraps(cmp)
+ def comparer(graph1, edge1, graph2, edge2):
+ return cmp(graph1.edges[edge1], graph2.edges[edge2])
+
+ return comparer
+
+ def find_isomorphisms(self, symmetry=True):
+ """Find all subgraph isomorphisms between subgraph and graph
+
+ Finds isomorphisms where :attr:`subgraph` <= :attr:`graph`.
+
+ Parameters
+ ----------
+ symmetry: bool
+ Whether symmetry should be taken into account. If False, found
+ isomorphisms may be symmetrically equivalent.
+
+ Yields
+ ------
+ dict
+ The found isomorphism mappings of {graph_node: subgraph_node}.
+ """
+ # The networkx VF2 algorithm is slightly funny in when it yields an
+ # empty dict and when not.
+ if not self.subgraph:
+ yield {}
+ return
+ elif not self.graph:
+ return
+ elif len(self.graph) < len(self.subgraph):
+ return
+
+ if symmetry:
+ _, cosets = self.analyze_symmetry(
+ self.subgraph, self._sgn_partitions, self._sge_colors
+ )
+ constraints = self._make_constraints(cosets)
+ else:
+ constraints = []
+
+ candidates = self._find_nodecolor_candidates()
+ la_candidates = self._get_lookahead_candidates()
+ for sgn in self.subgraph:
+ extra_candidates = la_candidates[sgn]
+ if extra_candidates:
+ candidates[sgn] = candidates[sgn] | {frozenset(extra_candidates)}
+
+ if any(candidates.values()):
+ start_sgn = min(candidates, key=lambda n: min(candidates[n], key=len))
+ candidates[start_sgn] = (intersect(candidates[start_sgn]),)
+ yield from self._map_nodes(start_sgn, candidates, constraints)
+ else:
+ return
+
+ @staticmethod
+ def _find_neighbor_color_count(graph, node, node_color, edge_color):
+ """
+ For `node` in `graph`, count the number of edges of a specific color
+ it has to nodes of a specific color.
+ """
+ counts = Counter()
+ neighbors = graph[node]
+ for neighbor in neighbors:
+ n_color = node_color[neighbor]
+ if (node, neighbor) in edge_color:
+ e_color = edge_color[node, neighbor]
+ else:
+ e_color = edge_color[neighbor, node]
+ counts[e_color, n_color] += 1
+ return counts
+
+ def _get_lookahead_candidates(self):
+ """
+ Returns a mapping of {subgraph node: collection of graph nodes} for
+ which the graph nodes are feasible candidates for the subgraph node, as
+ determined by looking ahead one edge.
+ """
+ g_counts = {}
+ for gn in self.graph:
+ g_counts[gn] = self._find_neighbor_color_count(
+ self.graph, gn, self._gn_colors, self._ge_colors
+ )
+ candidates = defaultdict(set)
+ for sgn in self.subgraph:
+ sg_count = self._find_neighbor_color_count(
+ self.subgraph, sgn, self._sgn_colors, self._sge_colors
+ )
+ new_sg_count = Counter()
+ for (sge_color, sgn_color), count in sg_count.items():
+ try:
+ ge_color = self._edge_compatibility[sge_color]
+ gn_color = self._node_compatibility[sgn_color]
+ except KeyError:
+ pass
+ else:
+ new_sg_count[ge_color, gn_color] = count
+
+ for gn, g_count in g_counts.items():
+ if all(new_sg_count[x] <= g_count[x] for x in new_sg_count):
+ # Valid candidate
+ candidates[sgn].add(gn)
+ return candidates
+
+ def largest_common_subgraph(self, symmetry=True):
+ """
+ Find the largest common induced subgraphs between :attr:`subgraph` and
+ :attr:`graph`.
+
+ Parameters
+ ----------
+ symmetry: bool
+ Whether symmetry should be taken into account. If False, found
+ largest common subgraphs may be symmetrically equivalent.
+
+ Yields
+ ------
+ dict
+ The found isomorphism mappings of {graph_node: subgraph_node}.
+ """
+ # The networkx VF2 algorithm is slightly funny in when it yields an
+ # empty dict and when not.
+ if not self.subgraph:
+ yield {}
+ return
+ elif not self.graph:
+ return
+
+ if symmetry:
+ _, cosets = self.analyze_symmetry(
+ self.subgraph, self._sgn_partitions, self._sge_colors
+ )
+ constraints = self._make_constraints(cosets)
+ else:
+ constraints = []
+
+ candidates = self._find_nodecolor_candidates()
+
+ if any(candidates.values()):
+ yield from self._largest_common_subgraph(candidates, constraints)
+ else:
+ return
+
+ def analyze_symmetry(self, graph, node_partitions, edge_colors):
+ """
+ Find a minimal set of permutations and corresponding co-sets that
+ describe the symmetry of `graph`, given the node and edge equalities
+ given by `node_partitions` and `edge_colors`, respectively.
+
+ Parameters
+ ----------
+ graph : networkx.Graph
+ The graph whose symmetry should be analyzed.
+ node_partitions : list of sets
+ A list of sets containing node keys. Node keys in the same set
+ are considered equivalent. Every node key in `graph` should be in
+ exactly one of the sets. If all nodes are equivalent, this should
+ be ``[set(graph.nodes)]``.
+ edge_colors : dict mapping edges to their colors
+ A dict mapping every edge in `graph` to its corresponding color.
+ Edges with the same color are considered equivalent. If all edges
+ are equivalent, this should be ``{e: 0 for e in graph.edges}``.
+
+
+ Returns
+ -------
+ set[frozenset]
+ The found permutations. This is a set of frozensets of pairs of node
+ keys which can be exchanged without changing :attr:`subgraph`.
+ dict[collections.abc.Hashable, set[collections.abc.Hashable]]
+ The found co-sets. The co-sets is a dictionary of
+ ``{node key: set of node keys}``.
+ Every key-value pair describes which ``values`` can be interchanged
+ without changing nodes less than ``key``.
+ """
+ if self._symmetry_cache is not None:
+ key = hash(
+ (
+ tuple(graph.nodes),
+ tuple(graph.edges),
+ tuple(map(tuple, node_partitions)),
+ tuple(edge_colors.items()),
+ )
+ )
+ if key in self._symmetry_cache:
+ return self._symmetry_cache[key]
+ node_partitions = list(
+ self._refine_node_partitions(graph, node_partitions, edge_colors)
+ )
+ assert len(node_partitions) == 1
+ node_partitions = node_partitions[0]
+ permutations, cosets = self._process_ordered_pair_partitions(
+ graph, node_partitions, node_partitions, edge_colors
+ )
+ if self._symmetry_cache is not None:
+ self._symmetry_cache[key] = permutations, cosets
+ return permutations, cosets
+
+ def is_isomorphic(self, symmetry=False):
+ """
+ Returns True if :attr:`graph` is isomorphic to :attr:`subgraph` and
+ False otherwise.
+
+ Returns
+ -------
+ bool
+ """
+ return len(self.subgraph) == len(self.graph) and self.subgraph_is_isomorphic(
+ symmetry
+ )
+
+ def subgraph_is_isomorphic(self, symmetry=False):
+ """
+ Returns True if a subgraph of :attr:`graph` is isomorphic to
+ :attr:`subgraph` and False otherwise.
+
+ Returns
+ -------
+ bool
+ """
+ # symmetry=False, since we only need to know whether there is any
+ # example; figuring out all symmetry elements probably costs more time
+ # than it gains.
+ isom = next(self.subgraph_isomorphisms_iter(symmetry=symmetry), None)
+ return isom is not None
+
+ def isomorphisms_iter(self, symmetry=True):
+ """
+ Does the same as :meth:`find_isomorphisms` if :attr:`graph` and
+ :attr:`subgraph` have the same number of nodes.
+ """
+ if len(self.graph) == len(self.subgraph):
+ yield from self.subgraph_isomorphisms_iter(symmetry=symmetry)
+
+ def subgraph_isomorphisms_iter(self, symmetry=True):
+ """Alternative name for :meth:`find_isomorphisms`."""
+ return self.find_isomorphisms(symmetry)
+
+ def _find_nodecolor_candidates(self):
+ """
+ Per node in subgraph find all nodes in graph that have the same color.
+ """
+ candidates = defaultdict(set)
+ for sgn in self.subgraph.nodes:
+ sgn_color = self._sgn_colors[sgn]
+ if sgn_color in self._node_compatibility:
+ gn_color = self._node_compatibility[sgn_color]
+ candidates[sgn].add(frozenset(self._gn_partitions[gn_color]))
+ else:
+ candidates[sgn].add(frozenset())
+ candidates = dict(candidates)
+ for sgn, options in candidates.items():
+ candidates[sgn] = frozenset(options)
+ return candidates
+
+ @staticmethod
+ def _make_constraints(cosets):
+ """
+ Turn cosets into constraints.
+ """
+ constraints = []
+ for node_i, node_ts in cosets.items():
+ for node_t in node_ts:
+ if node_i != node_t:
+ # Node i must be smaller than node t.
+ constraints.append((node_i, node_t))
+ return constraints
+
+ @staticmethod
+ def _find_node_edge_color(graph, node_colors, edge_colors):
+ """
+ For every node in graph, come up with a color that combines 1) the
+ color of the node, and 2) the number of edges of a color to each type
+ of node.
+ """
+ counts = defaultdict(lambda: defaultdict(int))
+ for node1, node2 in graph.edges:
+ if (node1, node2) in edge_colors:
+ # FIXME directed graphs
+ ecolor = edge_colors[node1, node2]
+ else:
+ ecolor = edge_colors[node2, node1]
+ # Count per node how many edges it has of what color to nodes of
+ # what color
+ counts[node1][ecolor, node_colors[node2]] += 1
+ counts[node2][ecolor, node_colors[node1]] += 1
+
+ node_edge_colors = {}
+ for node in graph.nodes:
+ node_edge_colors[node] = node_colors[node], set(counts[node].items())
+
+ return node_edge_colors
+
+ @staticmethod
+ def _get_permutations_by_length(items):
+ """
+ Get all permutations of items, but only permute items with the same
+ length.
+
+ >>> found = list(ISMAGS._get_permutations_by_length([[1], [2], [3, 4], [4, 5]]))
+ >>> answer = [
+ ... (([1], [2]), ([3, 4], [4, 5])),
+ ... (([1], [2]), ([4, 5], [3, 4])),
+ ... (([2], [1]), ([3, 4], [4, 5])),
+ ... (([2], [1]), ([4, 5], [3, 4])),
+ ... ]
+ >>> found == answer
+ True
+ """
+ by_len = defaultdict(list)
+ for item in items:
+ by_len[len(item)].append(item)
+
+ yield from itertools.product(
+ *(itertools.permutations(by_len[l]) for l in sorted(by_len))
+ )
+
+ @classmethod
+ def _refine_node_partitions(cls, graph, node_partitions, edge_colors, branch=False):
+ """
+ Given a partition of nodes in graph, make the partitions smaller such
+ that all nodes in a partition have 1) the same color, and 2) the same
+ number of edges to specific other partitions.
+ """
+
+ def equal_color(node1, node2):
+ return node_edge_colors[node1] == node_edge_colors[node2]
+
+ node_partitions = list(node_partitions)
+ node_colors = partition_to_color(node_partitions)
+ node_edge_colors = cls._find_node_edge_color(graph, node_colors, edge_colors)
+ if all(
+ are_all_equal(node_edge_colors[node] for node in partition)
+ for partition in node_partitions
+ ):
+ yield node_partitions
+ return
+
+ new_partitions = []
+ output = [new_partitions]
+ for partition in node_partitions:
+ if not are_all_equal(node_edge_colors[node] for node in partition):
+ refined = make_partitions(partition, equal_color)
+ if (
+ branch
+ and len(refined) != 1
+ and len({len(r) for r in refined}) != len([len(r) for r in refined])
+ ):
+ # This is where it breaks. There are multiple new cells
+ # in refined with the same length, and their order
+ # matters.
+ # So option 1) Hit it with a big hammer and simply make all
+ # orderings.
+ permutations = cls._get_permutations_by_length(refined)
+ new_output = []
+ for n_p in output:
+ for permutation in permutations:
+ new_output.append(n_p + list(permutation[0]))
+ output = new_output
+ else:
+ for n_p in output:
+ n_p.extend(sorted(refined, key=len))
+ else:
+ for n_p in output:
+ n_p.append(partition)
+ for n_p in output:
+ yield from cls._refine_node_partitions(graph, n_p, edge_colors, branch)
+
+ def _edges_of_same_color(self, sgn1, sgn2):
+ """
+ Returns all edges in :attr:`graph` that have the same colour as the
+ edge between sgn1 and sgn2 in :attr:`subgraph`.
+ """
+ if (sgn1, sgn2) in self._sge_colors:
+ # FIXME directed graphs
+ sge_color = self._sge_colors[sgn1, sgn2]
+ else:
+ sge_color = self._sge_colors[sgn2, sgn1]
+ if sge_color in self._edge_compatibility:
+ ge_color = self._edge_compatibility[sge_color]
+ g_edges = self._ge_partitions[ge_color]
+ else:
+ g_edges = []
+ return g_edges
+
    def _map_nodes(self, sgn, candidates, constraints, mapping=None, to_be_mapped=None):
        """
        Find all subgraph isomorphisms honoring constraints.

        Parameters
        ----------
        sgn : node
            The subgraph node to assign next.
        candidates : dict
            Maps each subgraph node to a frozenset of frozensets of graph
            nodes; the intersection of the inner sets is the effective
            candidate set for that node.
        constraints : list of (node, node) tuples
            Symmetry-breaking constraints: for ``(t, u)`` the graph node
            mapped to ``t`` must be smaller than the one mapped to ``u``.
        mapping : dict, optional
            Partial subgraph-node -> graph-node mapping built so far.
        to_be_mapped : set, optional
            Subgraph nodes that must appear in the final mapping; defaults to
            all subgraph nodes.

        Yields
        ------
        dict
            Complete mappings of graph node to subgraph node (note the
            inversion relative to ``mapping``).
        """
        if mapping is None:
            mapping = {}
        else:
            # Copy so sibling branches of the recursion don't see our edits.
            mapping = mapping.copy()
        if to_be_mapped is None:
            to_be_mapped = set(self.subgraph.nodes)

        # Note, we modify candidates here. Doesn't seem to affect results, but
        # remember this.
        # candidates = candidates.copy()
        sgn_candidates = intersect(candidates[sgn])
        candidates[sgn] = frozenset([sgn_candidates])
        for gn in sgn_candidates:
            # We're going to try to map sgn to gn.
            if gn in mapping.values() or sgn not in to_be_mapped:
                # gn is already mapped to something
                continue  # pragma: no cover

            # REDUCTION and COMBINATION
            mapping[sgn] = gn
            # BASECASE
            if to_be_mapped == set(mapping.keys()):
                # Every requested node is assigned; emit inverted (gn -> sgn).
                yield {v: k for k, v in mapping.items()}
                continue
            left_to_map = to_be_mapped - set(mapping.keys())

            new_candidates = candidates.copy()
            sgn_neighbours = set(self.subgraph[sgn])
            not_gn_neighbours = set(self.graph.nodes) - set(self.graph[gn])
            for sgn2 in left_to_map:
                if sgn2 not in sgn_neighbours:
                    # Non-neighbours of sgn may not map to neighbours of gn.
                    gn2_options = not_gn_neighbours
                else:
                    # Get all edges to gn of the right color:
                    g_edges = self._edges_of_same_color(sgn, sgn2)
                    # FIXME directed graphs
                    # And all nodes involved in those which are connected to gn
                    gn2_options = {n for e in g_edges for n in e if gn in e}
                    # Node color compatibility should be taken care of by the
                    # initial candidate lists made by find_subgraphs

                # Add gn2_options to the right collection. Since new_candidates
                # is a dict of frozensets of frozensets of node indices it's
                # a bit clunky. We can't do .add, and + also doesn't work. We
                # could do |, but I deem union to be clearer.
                new_candidates[sgn2] = new_candidates[sgn2].union(
                    [frozenset(gn2_options)]
                )

                # Apply the symmetry-breaking order constraints relative to gn.
                if (sgn, sgn2) in constraints:
                    gn2_options = {gn2 for gn2 in self.graph if gn2 > gn}
                elif (sgn2, sgn) in constraints:
                    gn2_options = {gn2 for gn2 in self.graph if gn2 < gn}
                else:
                    continue  # pragma: no cover
                new_candidates[sgn2] = new_candidates[sgn2].union(
                    [frozenset(gn2_options)]
                )

            # The next node is the one that is unmapped and has fewest
            # candidates
            # Pylint disables because it's a one-shot function.
            next_sgn = min(
                left_to_map, key=lambda n: min(new_candidates[n], key=len)
            )  # pylint: disable=cell-var-from-loop
            yield from self._map_nodes(
                next_sgn,
                new_candidates,
                constraints,
                mapping=mapping,
                to_be_mapped=to_be_mapped,
            )
        # Unmap sgn-gn. Strictly not necessary since it'd get overwritten
        # when making a new mapping for sgn.
        # del mapping[sgn]
+
    def _largest_common_subgraph(self, candidates, constraints, to_be_mapped=None):
        """
        Find all largest common subgraphs honoring constraints.

        Parameters
        ----------
        candidates : dict
            Maps each subgraph node to a frozenset of frozensets of graph
            nodes (see :meth:`_map_nodes`).
        constraints : list of (node, node) tuples
            Symmetry-breaking constraints.
        to_be_mapped : set of frozensets, optional
            Each frozenset is a subset of subgraph nodes for which an
            embedding is still sought; defaults to the full subgraph node set.

        Yields
        ------
        dict
            Mappings of graph node to subgraph node.
        """
        if to_be_mapped is None:
            to_be_mapped = {frozenset(self.subgraph.nodes)}

        # The LCS problem is basically a repeated subgraph isomorphism problem
        # with smaller and smaller subgraphs. We store the nodes that are
        # "part of" the subgraph in to_be_mapped, and we make it a little
        # smaller every iteration.

        # pylint disable because it's guarded against by default value
        current_size = len(
            next(iter(to_be_mapped), [])
        )  # pylint: disable=stop-iteration-return

        found_iso = False
        if current_size <= len(self.graph):
            # There's no point in trying to find isomorphisms of
            # graph >= subgraph if subgraph has more nodes than graph.

            # Try the isomorphism first with the nodes with lowest ID. So sort
            # them. Those are more likely to be part of the final
            # correspondence. This makes finding the first answer(s) faster. In
            # theory.
            for nodes in sorted(to_be_mapped, key=sorted):
                # Find the isomorphism between subgraph[to_be_mapped] <= graph
                next_sgn = min(nodes, key=lambda n: min(candidates[n], key=len))
                isomorphs = self._map_nodes(
                    next_sgn, candidates, constraints, to_be_mapped=nodes
                )

                # This is effectively `yield from isomorphs`, except that we look
                # whether an item was yielded.
                try:
                    item = next(isomorphs)
                except StopIteration:
                    pass
                else:
                    yield item
                    yield from isomorphs
                    found_iso = True

        # BASECASE
        if found_iso or current_size == 1:
            # Shrinking has no point because either 1) we end up with a smaller
            # common subgraph (and we want the largest), or 2) there'll be no
            # more subgraph.
            return

        left_to_be_mapped = set()
        for nodes in to_be_mapped:
            for sgn in nodes:
                # We're going to remove sgn from to_be_mapped, but subject to
                # symmetry constraints. We know that for every constraint we
                # have those subgraph nodes are equal. So whenever we would
                # remove the lower part of a constraint, remove the higher
                # instead. This is all dealt with by _remove_node. And because
                # left_to_be_mapped is a set, we don't do double work.

                # And finally, make the subgraph one node smaller.
                # REDUCTION
                new_nodes = self._remove_node(sgn, nodes, constraints)
                left_to_be_mapped.add(new_nodes)
        # COMBINATION
        yield from self._largest_common_subgraph(
            candidates, constraints, to_be_mapped=left_to_be_mapped
        )
+
+ @staticmethod
+ def _remove_node(node, nodes, constraints):
+ """
+ Returns a new set where node has been removed from nodes, subject to
+ symmetry constraints. We know, that for every constraint we have
+ those subgraph nodes are equal. So whenever we would remove the
+ lower part of a constraint, remove the higher instead.
+ """
+ while True:
+ for low, high in constraints:
+ if low == node and high in nodes:
+ node = high
+ break
+ else: # no break, couldn't find node in constraints
+ break
+ return frozenset(nodes - {node})
+
+ @staticmethod
+ def _find_permutations(top_partitions, bottom_partitions):
+ """
+ Return the pairs of top/bottom partitions where the partitions are
+ different. Ensures that all partitions in both top and bottom
+ partitions have size 1.
+ """
+ # Find permutations
+ permutations = set()
+ for top, bot in zip(top_partitions, bottom_partitions):
+ # top and bot have only one element
+ if len(top) != 1 or len(bot) != 1:
+ raise IndexError(
+ "Not all nodes are coupled. This is"
+ f" impossible: {top_partitions}, {bottom_partitions}"
+ )
+ if top != bot:
+ permutations.add(frozenset((next(iter(top)), next(iter(bot)))))
+ return permutations
+
+ @staticmethod
+ def _update_orbits(orbits, permutations):
+ """
+ Update orbits based on permutations. Orbits is modified in place.
+ For every pair of items in permutations their respective orbits are
+ merged.
+ """
+ for permutation in permutations:
+ node, node2 = permutation
+ # Find the orbits that contain node and node2, and replace the
+ # orbit containing node with the union
+ first = second = None
+ for idx, orbit in enumerate(orbits):
+ if first is not None and second is not None:
+ break
+ if node in orbit:
+ first = idx
+ if node2 in orbit:
+ second = idx
+ if first != second:
+ orbits[first].update(orbits[second])
+ del orbits[second]
+
    def _couple_nodes(
        self,
        top_partitions,
        bottom_partitions,
        pair_idx,
        t_node,
        b_node,
        graph,
        edge_colors,
    ):
        """
        Generate new partitions from top and bottom_partitions where t_node is
        coupled to b_node. pair_idx is the index of the partitions where t_ and
        b_node can be found.

        Yields
        ------
        (list of sets, list of sets)
            The refined top partitions paired with each refined bottom
            partition.
        """
        t_partition = top_partitions[pair_idx]
        b_partition = bottom_partitions[pair_idx]
        assert t_node in t_partition and b_node in b_partition
        # Couple node to node2. This means they get their own partition
        new_top_partitions = [top.copy() for top in top_partitions]
        new_bottom_partitions = [bot.copy() for bot in bottom_partitions]
        new_t_groups = {t_node}, t_partition - {t_node}
        new_b_groups = {b_node}, b_partition - {b_node}
        # Replace the old partitions with the coupled ones
        del new_top_partitions[pair_idx]
        del new_bottom_partitions[pair_idx]
        new_top_partitions[pair_idx:pair_idx] = new_t_groups
        new_bottom_partitions[pair_idx:pair_idx] = new_b_groups

        # Top partitions refine deterministically; bottom partitions may
        # branch over orderings of same-sized cells (branch=True).
        new_top_partitions = self._refine_node_partitions(
            graph, new_top_partitions, edge_colors
        )
        new_bottom_partitions = self._refine_node_partitions(
            graph, new_bottom_partitions, edge_colors, branch=True
        )
        new_top_partitions = list(new_top_partitions)
        # Without branching the top refinement is unique.
        assert len(new_top_partitions) == 1
        new_top_partitions = new_top_partitions[0]
        for bot in new_bottom_partitions:
            # Copy the top partitions so each yielded pair is independent.
            yield list(new_top_partitions), bot
+
    def _process_ordered_pair_partitions(
        self,
        graph,
        top_partitions,
        bottom_partitions,
        edge_colors,
        orbits=None,
        cosets=None,
    ):
        """
        Processes ordered pair partitions as per the reference paper. Finds and
        returns all permutations and cosets that leave the graph unchanged.

        Returns
        -------
        (list of sets of frozensets, dict)
            The found permutations (node pairs), and the cosets keyed by
            node (each value is a copy of that node's orbit).
        """
        if orbits is None:
            orbits = [{node} for node in graph.nodes]
        else:
            # Note that we don't copy orbits when we are given one. This means
            # we leak information between the recursive branches. This is
            # intentional!
            orbits = orbits
        if cosets is None:
            cosets = {}
        else:
            cosets = cosets.copy()

        # Top and bottom cells must stay aligned pairwise.
        assert all(
            len(t_p) == len(b_p) for t_p, b_p in zip(top_partitions, bottom_partitions)
        )

        # BASECASE
        if all(len(top) == 1 for top in top_partitions):
            # All nodes are mapped
            permutations = self._find_permutations(top_partitions, bottom_partitions)
            self._update_orbits(orbits, permutations)
            if permutations:
                return [permutations], cosets
            else:
                return [], cosets

        permutations = []
        # Candidate (node, cell-index) pairs from cells not yet singleton.
        unmapped_nodes = {
            (node, idx)
            for idx, t_partition in enumerate(top_partitions)
            for node in t_partition
            if len(t_partition) > 1
        }
        # Pick the smallest node (and its cell) to couple next.
        node, pair_idx = min(unmapped_nodes)
        b_partition = bottom_partitions[pair_idx]

        for node2 in sorted(b_partition):
            if len(b_partition) == 1:
                # Can never result in symmetry
                continue
            if node != node2 and any(
                node in orbit and node2 in orbit for orbit in orbits
            ):
                # Orbit prune branch
                continue
            # REDUCTION
            # Couple node to node2
            partitions = self._couple_nodes(
                top_partitions,
                bottom_partitions,
                pair_idx,
                node,
                node2,
                graph,
                edge_colors,
            )
            for opp in partitions:
                new_top_partitions, new_bottom_partitions = opp

                new_perms, new_cosets = self._process_ordered_pair_partitions(
                    graph,
                    new_top_partitions,
                    new_bottom_partitions,
                    edge_colors,
                    orbits,
                    cosets,
                )
                # COMBINATION
                permutations += new_perms
                cosets.update(new_cosets)

        # Nodes whose top and bottom singleton cells agree are fixed.
        mapped = {
            k
            for top, bottom in zip(top_partitions, bottom_partitions)
            for k in top
            if len(top) == 1 and top == bottom
        }
        ks = {k for k in graph.nodes if k < node}
        # Have all nodes with ID < node been mapped?
        find_coset = ks <= mapped and node not in cosets
        if find_coset:
            # Find the orbit that contains node
            for orbit in orbits:
                if node in orbit:
                    cosets[node] = orbit.copy()
        return permutations, cosets
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/isomorph.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/isomorph.py
new file mode 100644
index 0000000000000000000000000000000000000000..6f562400a3b5d8195ac0ab679e08de8b60da1c13
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/isomorph.py
@@ -0,0 +1,248 @@
+"""
+Graph isomorphism functions.
+"""
+import networkx as nx
+from networkx.exception import NetworkXError
+
+__all__ = [
+ "could_be_isomorphic",
+ "fast_could_be_isomorphic",
+ "faster_could_be_isomorphic",
+ "is_isomorphic",
+]
+
+
@nx._dispatch(graphs={"G1": 0, "G2": 1})
def could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.
    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
        The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Checks for matching degree, triangle, and number of cliques sequences.
    The triangle sequence contains the number of triangles each node is part of.
    The clique sequence contains for each node the number of maximal cliques
    involving that node.

    """
    # Cheap global invariant first: the node counts must agree.
    if G1.order() != G2.order():
        return False

    def local_props(G):
        # Sorted per-node [degree, #triangles, #maximal-cliques] triples, so
        # the sequences can be compared independent of node labels.
        triangles = nx.triangles(G)
        cliques = list(nx.find_cliques(G))
        n_cliques = {n: sum(1 for c in cliques if n in c) for n in G}
        return sorted([d, triangles[v], n_cliques[v]] for v, d in G.degree())

    return local_props(G1) == local_props(G2)
+
+
+graph_could_be_isomorphic = could_be_isomorphic
+
+
@nx._dispatch(graphs={"G1": 0, "G2": 1})
def fast_could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.

    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
        The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Checks for matching degree and triangle sequences. The triangle
    sequence contains the number of triangles each node is part of.
    """
    # Node counts must agree before any per-node comparison is meaningful.
    if G1.order() != G2.order():
        return False

    def degree_triangle_sequence(G):
        # Sorted per-node [degree, #triangles] pairs, label-independent.
        triangles = nx.triangles(G)
        return sorted([d, triangles[v]] for v, d in G.degree())

    return degree_triangle_sequence(G1) == degree_triangle_sequence(G2)
+
+
+fast_graph_could_be_isomorphic = fast_could_be_isomorphic
+
+
@nx._dispatch(graphs={"G1": 0, "G2": 1})
def faster_could_be_isomorphic(G1, G2):
    """Returns False if graphs are definitely not isomorphic.

    True does NOT guarantee isomorphism.

    Parameters
    ----------
    G1, G2 : graphs
        The two graphs G1 and G2 must be the same type.

    Notes
    -----
    Checks for matching degree sequences.
    """
    # Node counts must agree.
    if G1.order() != G2.order():
        return False

    # Label-independent comparison of the two degree sequences.
    return sorted(d for _, d in G1.degree()) == sorted(d for _, d in G2.degree())
+
+
+faster_graph_could_be_isomorphic = faster_could_be_isomorphic
+
+
@nx._dispatch(
    graphs={"G1": 0, "G2": 1},
    preserve_edge_attrs="edge_match",
    preserve_node_attrs="node_match",
)
def is_isomorphic(G1, G2, node_match=None, edge_match=None):
    """Returns True if the graphs G1 and G2 are isomorphic and False otherwise.

    Parameters
    ----------
    G1, G2: graphs
        The two graphs G1 and G2 must be the same type.

    node_match : callable
        A function that returns True if node n1 in G1 and n2 in G2 should
        be considered equal during the isomorphism test.
        If node_match is not specified then node attributes are not considered.

        The function will be called like

           node_match(G1.nodes[n1], G2.nodes[n2]).

        That is, the function will receive the node attribute dictionaries
        for n1 and n2 as inputs.

    edge_match : callable
        A function that returns True if the edge attribute dictionary
        for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should
        be considered equal during the isomorphism test. If edge_match is
        not specified then edge attributes are not considered.

        The function will be called like

           edge_match(G1[u1][v1], G2[u2][v2]).

        That is, the function will receive the edge attribute dictionaries
        of the edges under consideration.

    Notes
    -----
    Uses the vf2 algorithm [1]_.

    Examples
    --------
    >>> import networkx.algorithms.isomorphism as iso

    For digraphs G1 and G2, using 'weight' edge attribute (default: 1)

    >>> G1 = nx.DiGraph()
    >>> G2 = nx.DiGraph()
    >>> nx.add_path(G1, [1, 2, 3, 4], weight=1)
    >>> nx.add_path(G2, [10, 20, 30, 40], weight=2)
    >>> em = iso.numerical_edge_match("weight", 1)
    >>> nx.is_isomorphic(G1, G2)  # no weights considered
    True
    >>> nx.is_isomorphic(G1, G2, edge_match=em)  # match weights
    False

    For multidigraphs G1 and G2, using 'fill' node attribute (default: '')

    >>> G1 = nx.MultiDiGraph()
    >>> G2 = nx.MultiDiGraph()
    >>> G1.add_nodes_from([1, 2, 3], fill="red")
    >>> G2.add_nodes_from([10, 20, 30, 40], fill="red")
    >>> nx.add_path(G1, [1, 2, 3, 4], weight=3, linewidth=2.5)
    >>> nx.add_path(G2, [10, 20, 30, 40], weight=3)
    >>> nm = iso.categorical_node_match("fill", "red")
    >>> nx.is_isomorphic(G1, G2, node_match=nm)
    True

    For multidigraphs G1 and G2, using 'weight' edge attribute (default: 7)

    >>> G1.add_edge(1, 2, weight=7)
    1
    >>> G2.add_edge(10, 20)
    1
    >>> em = iso.numerical_multiedge_match("weight", 7, rtol=1e-6)
    >>> nx.is_isomorphic(G1, G2, edge_match=em)
    True

    For multigraphs G1 and G2, using 'weight' and 'linewidth' edge attributes
    with default values 7 and 2.5. Also using 'fill' node attribute with
    default value 'red'.

    >>> em = iso.numerical_multiedge_match(["weight", "linewidth"], [7, 2.5])
    >>> nm = iso.categorical_node_match("fill", "red")
    >>> nx.is_isomorphic(G1, G2, edge_match=em, node_match=nm)
    True

    See Also
    --------
    numerical_node_match, numerical_edge_match, numerical_multiedge_match
    categorical_node_match, categorical_edge_match, categorical_multiedge_match

    References
    ----------
    .. [1] L. P. Cordella, P. Foggia, C. Sansone, M. Vento,
       "An Improved Algorithm for Matching Large Graphs",
       3rd IAPR-TC15 Workshop  on Graph-based Representations in
       Pattern Recognition, Cuen, pp. 149-159, 2001.
       https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs
    """
    # Both graphs must agree on directedness; mixing the two is an error.
    if G1.is_directed() != G2.is_directed():
        raise NetworkXError("Graphs G1 and G2 are not of the same type.")

    matcher_cls = (
        nx.algorithms.isomorphism.DiGraphMatcher
        if G1.is_directed()
        else nx.algorithms.isomorphism.GraphMatcher
    )
    return matcher_cls(G1, G2, node_match=node_match, edge_match=edge_match).is_isomorphic()
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/matchhelpers.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/matchhelpers.py
new file mode 100644
index 0000000000000000000000000000000000000000..5239ed77e6cca40fadd538c187838b7fbec65266
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/matchhelpers.py
@@ -0,0 +1,352 @@
+"""Functions which help end users define customize node_match and
+edge_match functions to use during isomorphism checks.
+"""
+import math
+import types
+from itertools import permutations
+
+__all__ = [
+ "categorical_node_match",
+ "categorical_edge_match",
+ "categorical_multiedge_match",
+ "numerical_node_match",
+ "numerical_edge_match",
+ "numerical_multiedge_match",
+ "generic_node_match",
+ "generic_edge_match",
+ "generic_multiedge_match",
+]
+
+
def copyfunc(f, name=None):
    """Returns a deepcopy of a function."""
    # Rebuild the function from its parts. The code object is shared, which
    # is enough: the copy gets an independent __name__/__doc__ to overwrite.
    new_name = name or f.__name__
    return types.FunctionType(
        f.__code__, f.__globals__, new_name, f.__defaults__, f.__closure__
    )
+
+
def allclose(x, y, rtol=1.0000000000000001e-05, atol=1e-08):
    """Returns True if x and y are sufficiently close, elementwise.

    Parameters
    ----------
    rtol : float
        The relative error tolerance.
    atol : float
        The absolute error tolerance.

    """
    # assume finite weights, see numpy.allclose() for reference
    for xi, yi in zip(x, y):
        if not math.isclose(xi, yi, rel_tol=rtol, abs_tol=atol):
            return False
    return True
+
+
# Shared docstring template for the categorical matchers; assigned to their
# ``__doc__`` attributes below, with "node" textually swapped for "edge"
# where appropriate.
categorical_doc = """
Returns a comparison function for a categorical node attribute.

The value(s) of the attr(s) must be hashable and comparable via the ==
operator since they are placed into a set([]) object.  If the sets from
G1 and G2 are the same, then the constructed function returns True.

Parameters
----------
attr : string | list
    The categorical node attribute to compare, or a list of categorical
    node attributes to compare.
default : value | list
    The default value for the categorical node attribute, or a list of
    default values for the categorical node attributes.

Returns
-------
match : function
    The customized, categorical `node_match` function.

Examples
--------
>>> import networkx.algorithms.isomorphism as iso
>>> nm = iso.categorical_node_match("size", 1)
>>> nm = iso.categorical_node_match(["color", "size"], ["red", 2])

"""
+
+
def categorical_node_match(attr, default):
    # __doc__ is assigned at module level from ``categorical_doc``.
    # Normalise to a list of (attribute, default) pairs so one closure
    # handles both the single- and multi-attribute cases.
    if isinstance(attr, str):
        pairs = [(attr, default)]
    else:
        pairs = list(zip(attr, default))

    def match(data1, data2):
        left = tuple(data1.get(a, d) for a, d in pairs)
        right = tuple(data2.get(a, d) for a, d in pairs)
        return left == right

    return match
+
+
+categorical_edge_match = copyfunc(categorical_node_match, "categorical_edge_match")
+
+
def categorical_multiedge_match(attr, default):
    # __doc__ is assigned at module level from ``categorical_doc``.
    # Normalise to a list of (attribute, default) pairs.
    if isinstance(attr, str):
        pairs = [(attr, default)]
    else:
        pairs = list(zip(attr, default))

    def match(datasets1, datasets2):
        # Two bundles of parallel edges match categorically when the sets of
        # their attribute tuples coincide.
        values1 = {
            tuple(data.get(a, d) for a, d in pairs) for data in datasets1.values()
        }
        values2 = {
            tuple(data.get(a, d) for a, d in pairs) for data in datasets2.values()
        }
        return values1 == values2

    return match
+
+
# Docstrings for categorical functions. The multiedge variant additionally
# gets its own name substituted into the examples.
categorical_node_match.__doc__ = categorical_doc
categorical_edge_match.__doc__ = categorical_doc.replace("node", "edge")
tmpdoc = categorical_doc.replace("node", "edge")
tmpdoc = tmpdoc.replace("categorical_edge_match", "categorical_multiedge_match")
categorical_multiedge_match.__doc__ = tmpdoc
+
+
# Shared docstring template for the numerical matchers; assigned to their
# ``__doc__`` attributes below, with "node" textually swapped for "edge"
# where appropriate.
numerical_doc = """
Returns a comparison function for a numerical node attribute.

The value(s) of the attr(s) must be numerical and sortable.  If the
sorted list of values from G1 and G2 are the same within some
tolerance, then the constructed function returns True.

Parameters
----------
attr : string | list
    The numerical node attribute to compare, or a list of numerical
    node attributes to compare.
default : value | list
    The default value for the numerical node attribute, or a list of
    default values for the numerical node attributes.
rtol : float
    The relative error tolerance.
atol : float
    The absolute error tolerance.

Returns
-------
match : function
    The customized, numerical `node_match` function.

Examples
--------
>>> import networkx.algorithms.isomorphism as iso
>>> nm = iso.numerical_node_match("weight", 1.0)
>>> nm = iso.numerical_node_match(["weight", "linewidth"], [0.25, 0.5])

"""
+
+
def numerical_node_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08):
    # __doc__ is assigned at module level from ``numerical_doc``.
    if isinstance(attr, str):
        # Single attribute: one isclose comparison suffices.
        def match(data1, data2):
            v1 = data1.get(attr, default)
            v2 = data2.get(attr, default)
            return math.isclose(v1, v2, rel_tol=rtol, abs_tol=atol)

    else:
        pairs = list(zip(attr, default))

        # Multiple attributes: compare the value vectors elementwise.
        def match(data1, data2):
            v1 = [data1.get(a, d) for a, d in pairs]
            v2 = [data2.get(a, d) for a, d in pairs]
            return allclose(v1, v2, rtol=rtol, atol=atol)

    return match
+
+
+numerical_edge_match = copyfunc(numerical_node_match, "numerical_edge_match")
+
+
def numerical_multiedge_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08):
    # __doc__ is assigned at module level from ``numerical_doc``.
    if isinstance(attr, str):

        def match(datasets1, datasets2):
            # Sort so parallel edges are compared in value order.
            values1 = sorted(data.get(attr, default) for data in datasets1.values())
            values2 = sorted(data.get(attr, default) for data in datasets2.values())
            return allclose(values1, values2, rtol=rtol, atol=atol)

    else:
        pairs = list(zip(attr, default))

        def match(datasets1, datasets2):
            # Sort the attribute tuples, then compare them pairwise within
            # tolerance.
            values1 = sorted(
                tuple(data.get(a, d) for a, d in pairs) for data in datasets1.values()
            )
            values2 = sorted(
                tuple(data.get(a, d) for a, d in pairs) for data in datasets2.values()
            )
            return all(
                allclose(xi, yi, rtol=rtol, atol=atol)
                for xi, yi in zip(values1, values2)
            )

    return match
+
+
# Docstrings for numerical functions. The multiedge variant additionally
# gets its own name substituted into the examples.
numerical_node_match.__doc__ = numerical_doc
numerical_edge_match.__doc__ = numerical_doc.replace("node", "edge")
tmpdoc = numerical_doc.replace("node", "edge")
tmpdoc = tmpdoc.replace("numerical_edge_match", "numerical_multiedge_match")
numerical_multiedge_match.__doc__ = tmpdoc
+
+
# Shared docstring template for the generic matchers; assigned to their
# ``__doc__`` attributes below, with "node" textually swapped for "edge"
# where appropriate.
generic_doc = """
Returns a comparison function for a generic attribute.

The value(s) of the attr(s) are compared using the specified
operators. If all the attributes are equal, then the constructed
function returns True.

Parameters
----------
attr : string | list
    The node attribute to compare, or a list of node attributes
    to compare.
default : value | list
    The default value for the node attribute, or a list of
    default values for the node attributes.
op : callable | list
    The operator to use when comparing attribute values, or a list
    of operators to use when comparing values for each attribute.

Returns
-------
match : function
    The customized, generic `node_match` function.

Examples
--------
>>> from operator import eq
>>> from math import isclose
>>> from networkx.algorithms.isomorphism import generic_node_match
>>> nm = generic_node_match("weight", 1.0, isclose)
>>> nm = generic_node_match("color", "red", eq)
>>> nm = generic_node_match(["weight", "color"], [1.0, "red"], [isclose, eq])

"""
+
+
def generic_node_match(attr, default, op):
    # __doc__ is assigned at module level from ``generic_doc``.
    if isinstance(attr, str):

        def match(data1, data2):
            return op(data1.get(attr, default), data2.get(attr, default))

    else:
        triples = list(zip(attr, default, op))

        def match(data1, data2):
            # Every attribute must satisfy its own comparison operator.
            return all(
                compare(data1.get(a, d), data2.get(a, d))
                for a, d, compare in triples
            )

    return match
+
+
+generic_edge_match = copyfunc(generic_node_match, "generic_edge_match")
+
+
def generic_multiedge_match(attr, default, op):
    """Returns a comparison function for a generic attribute.

    The value(s) of the attr(s) are compared using the specified
    operators. If all the attributes are equal, then the constructed
    function returns True. Potentially, the constructed edge_match
    function can be slow since it must verify that no isomorphism
    exists between the multiedges before it returns False.

    Parameters
    ----------
    attr : string | list
        The edge attribute to compare, or a list of node attributes
        to compare.
    default : value | list
        The default value for the edge attribute, or a list of
        default values for the edgeattributes.
    op : callable | list
        The operator to use when comparing attribute values, or a list
        of operators to use when comparing values for each attribute.

    Returns
    -------
    match : function
        The customized, generic `edge_match` function.

    Examples
    --------
    >>> from operator import eq
    >>> from math import isclose
    >>> from networkx.algorithms.isomorphism import generic_node_match
    >>> nm = generic_node_match("weight", 1.0, isclose)
    >>> nm = generic_node_match("color", "red", eq)
    >>> nm = generic_node_match(["weight", "color"], [1.0, "red"], [isclose, eq])
    ...

    """

    # This is slow, but generic.
    # We must test every possible isomorphism between the edges.
    if isinstance(attr, str):
        # Normalise the single-attribute call to the list form.
        attr = [attr]
        default = [default]
        op = [op]
    # ``op`` is deliberately kept out of ``attrs``: each value-tuple element
    # is paired positionally with its operator in the map() below.
    attrs = list(zip(attr, default))  # Python 3

    def match(datasets1, datasets2):
        # Collect, per parallel edge, the tuple of attribute values.
        values1 = []
        for data1 in datasets1.values():
            x = tuple(data1.get(attr, d) for attr, d in attrs)
            values1.append(x)
        values2 = []
        for data2 in datasets2.values():
            x = tuple(data2.get(attr, d) for attr, d in attrs)
            values2.append(x)
        for vals2 in permutations(values2):
            for xi, yi in zip(values1, vals2):
                if not all(map(lambda x, y, z: z(x, y), xi, yi, op)):
                    # This is not an isomorphism, go to next permutation.
                    break
            else:
                # Then we found an isomorphism.
                return True
        else:
            # Then there are no isomorphisms between the multiedges.
            return False

    return match
+
+
# Docstrings for the generic functions.
generic_node_match.__doc__ = generic_doc
generic_edge_match.__doc__ = generic_doc.replace("node", "edge")
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py
new file mode 100644
index 0000000000000000000000000000000000000000..62cacc77887efa99026c117687bb9ad82cebd4dd
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py
@@ -0,0 +1,308 @@
+"""
+*****************************
+Time-respecting VF2 Algorithm
+*****************************
+
+An extension of the VF2 algorithm for time-respecting graph isomorphism
+testing in temporal graphs.
+
+A temporal graph is one in which edges contain a datetime attribute,
+denoting when interaction occurred between the incident nodes. A
+time-respecting subgraph of a temporal graph is a subgraph such that
+all interactions incident to a node occurred within a time threshold,
+delta, of each other. A directed time-respecting subgraph has the
+added constraint that incoming interactions to a node must precede
+outgoing interactions from the same node - this enforces a sense of
+directed flow.
+
+Introduction
+------------
+
+The TimeRespectingGraphMatcher and TimeRespectingDiGraphMatcher
+extend the GraphMatcher and DiGraphMatcher classes, respectively,
+to include temporal constraints on matches. This is achieved through
+a semantic check, via the semantic_feasibility() function.
+
+As well as including G1 (the graph in which to seek embeddings) and
+G2 (the subgraph structure of interest), the name of the temporal
+attribute on the edges and the time threshold, delta, must be supplied
+as arguments to the matching constructors.
+
+A delta of zero is the strictest temporal constraint on the match -
+only embeddings in which all interactions occur at the same time will
+be returned. A delta of one day will allow embeddings in which
+adjacent interactions occur up to a day apart.
+
+Examples
+--------
+
+Examples will be provided when the datetime type has been incorporated.
+
+
+Temporal Subgraph Isomorphism
+-----------------------------
+
+A brief discussion of the somewhat diverse current literature will be
+included here.
+
+References
+----------
+
+[1] Redmond, U. and Cunningham, P. Temporal subgraph isomorphism. In:
+The 2013 IEEE/ACM International Conference on Advances in Social
+Networks Analysis and Mining (ASONAM). Niagara Falls, Canada; 2013:
+pages 1451 - 1452. [65]
+
+For a discussion of the literature on temporal networks:
+
+[3] P. Holme and J. Saramaki. Temporal networks. Physics Reports,
+519(3):97–125, 2012.
+
+Notes
+-----
+
+Handles directed and undirected graphs and graphs with parallel edges.
+
+"""
+
+import networkx as nx
+
+from .isomorphvf2 import DiGraphMatcher, GraphMatcher
+
+__all__ = ["TimeRespectingGraphMatcher", "TimeRespectingDiGraphMatcher"]
+
+
+class TimeRespectingGraphMatcher(GraphMatcher):
+ def __init__(self, G1, G2, temporal_attribute_name, delta):
+ """Initialize TimeRespectingGraphMatcher.
+
+ G1 and G2 should be nx.Graph or nx.MultiGraph instances.
+
+ Examples
+ --------
+ To create a TimeRespectingGraphMatcher which checks for
+ syntactic and semantic feasibility:
+
+ >>> from networkx.algorithms import isomorphism
+ >>> from datetime import timedelta
+ >>> G1 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))
+
+ >>> G2 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))
+
+ >>> GM = isomorphism.TimeRespectingGraphMatcher(
+ ... G1, G2, "date", timedelta(days=1)
+ ... )
+ """
+ self.temporal_attribute_name = temporal_attribute_name
+ self.delta = delta
+ super().__init__(G1, G2)
+
+ def one_hop(self, Gx, Gx_node, neighbors):
+ """
+ Edges one hop out from a node in the mapping should be
+ time-respecting with respect to each other.
+ """
+ dates = []
+ for n in neighbors:
+ if isinstance(Gx, nx.Graph): # Graph G[u][v] returns the data dictionary.
+ dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
+ else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
+ for edge in Gx[Gx_node][
+ n
+ ].values(): # Iterates all edges between node pair.
+ dates.append(edge[self.temporal_attribute_name])
+ if any(x is None for x in dates):
+ raise ValueError("Datetime not supplied for at least one edge.")
+ return not dates or max(dates) - min(dates) <= self.delta
+
+ def two_hop(self, Gx, core_x, Gx_node, neighbors):
+ """
+ Paths of length 2 from Gx_node should be time-respecting.
+ """
+ return all(
+ self.one_hop(Gx, v, [n for n in Gx[v] if n in core_x] + [Gx_node])
+ for v in neighbors
+ )
+
+ def semantic_feasibility(self, G1_node, G2_node):
+ """Returns True if adding (G1_node, G2_node) is semantically
+ feasible.
+
+ Any subclass which redefines semantic_feasibility() must
+ maintain the self.tests if needed, to keep the match() method
+ functional. Implementations should consider multigraphs.
+ """
+ neighbors = [n for n in self.G1[G1_node] if n in self.core_1]
+ if not self.one_hop(self.G1, G1_node, neighbors): # Fail fast on first node.
+ return False
+ if not self.two_hop(self.G1, self.core_1, G1_node, neighbors):
+ return False
+ # Otherwise, this node is semantically feasible!
+ return True
+
+
+class TimeRespectingDiGraphMatcher(DiGraphMatcher):
+ def __init__(self, G1, G2, temporal_attribute_name, delta):
+ """Initialize TimeRespectingDiGraphMatcher.
+
+ G1 and G2 should be nx.DiGraph or nx.MultiDiGraph instances.
+
+ Examples
+ --------
+ To create a TimeRespectingDiGraphMatcher which checks for
+ syntactic and semantic feasibility:
+
+ >>> from networkx.algorithms import isomorphism
+ >>> from datetime import timedelta
+ >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
+
+ >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
+
+ >>> GM = isomorphism.TimeRespectingDiGraphMatcher(
+ ... G1, G2, "date", timedelta(days=1)
+ ... )
+ """
+ self.temporal_attribute_name = temporal_attribute_name
+ self.delta = delta
+ super().__init__(G1, G2)
+
+ def get_pred_dates(self, Gx, Gx_node, core_x, pred):
+ """
+ Get the dates of edges from predecessors.
+ """
+ pred_dates = []
+ if isinstance(Gx, nx.DiGraph): # Graph G[u][v] returns the data dictionary.
+ for n in pred:
+ pred_dates.append(Gx[n][Gx_node][self.temporal_attribute_name])
+ else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
+ for n in pred:
+ for edge in Gx[n][
+ Gx_node
+ ].values(): # Iterates all edge data between node pair.
+ pred_dates.append(edge[self.temporal_attribute_name])
+ return pred_dates
+
+ def get_succ_dates(self, Gx, Gx_node, core_x, succ):
+ """
+ Get the dates of edges to successors.
+ """
+ succ_dates = []
+ if isinstance(Gx, nx.DiGraph): # Graph G[u][v] returns the data dictionary.
+ for n in succ:
+ succ_dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
+ else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
+ for n in succ:
+ for edge in Gx[Gx_node][
+ n
+ ].values(): # Iterates all edge data between node pair.
+ succ_dates.append(edge[self.temporal_attribute_name])
+ return succ_dates
+
+ def one_hop(self, Gx, Gx_node, core_x, pred, succ):
+ """
+ The ego node.
+ """
+ pred_dates = self.get_pred_dates(Gx, Gx_node, core_x, pred)
+ succ_dates = self.get_succ_dates(Gx, Gx_node, core_x, succ)
+ return self.test_one(pred_dates, succ_dates) and self.test_two(
+ pred_dates, succ_dates
+ )
+
+ def two_hop_pred(self, Gx, Gx_node, core_x, pred):
+ """
+ The predecessors of the ego node.
+ """
+ return all(
+ self.one_hop(
+ Gx,
+ p,
+ core_x,
+ self.preds(Gx, core_x, p),
+ self.succs(Gx, core_x, p, Gx_node),
+ )
+ for p in pred
+ )
+
+ def two_hop_succ(self, Gx, Gx_node, core_x, succ):
+ """
+ The successors of the ego node.
+ """
+ return all(
+ self.one_hop(
+ Gx,
+ s,
+ core_x,
+ self.preds(Gx, core_x, s, Gx_node),
+ self.succs(Gx, core_x, s),
+ )
+ for s in succ
+ )
+
+ def preds(self, Gx, core_x, v, Gx_node=None):
+ pred = [n for n in Gx.predecessors(v) if n in core_x]
+ if Gx_node:
+ pred.append(Gx_node)
+ return pred
+
+ def succs(self, Gx, core_x, v, Gx_node=None):
+ succ = [n for n in Gx.successors(v) if n in core_x]
+ if Gx_node:
+ succ.append(Gx_node)
+ return succ
+
+ def test_one(self, pred_dates, succ_dates):
+ """
+ Edges one hop out from Gx_node in the mapping should be
+ time-respecting with respect to each other, regardless of
+ direction.
+ """
+ time_respecting = True
+ dates = pred_dates + succ_dates
+
+ if any(x is None for x in dates):
+ raise ValueError("Date or datetime not supplied for at least one edge.")
+
+ dates.sort() # Small to large.
+ if 0 < len(dates) and not (dates[-1] - dates[0] <= self.delta):
+ time_respecting = False
+ return time_respecting
+
+ def test_two(self, pred_dates, succ_dates):
+ """
+ Edges from a dual Gx_node in the mapping should be ordered in
+ a time-respecting manner.
+ """
+ time_respecting = True
+ pred_dates.sort()
+ succ_dates.sort()
+ # First out before last in; negative of the necessary condition for time-respect.
+ if (
+ 0 < len(succ_dates)
+ and 0 < len(pred_dates)
+ and succ_dates[0] < pred_dates[-1]
+ ):
+ time_respecting = False
+ return time_respecting
+
+ def semantic_feasibility(self, G1_node, G2_node):
+ """Returns True if adding (G1_node, G2_node) is semantically
+ feasible.
+
+ Any subclass which redefines semantic_feasibility() must
+ maintain the self.tests if needed, to keep the match() method
+ functional. Implementations should consider multigraphs.
+ """
+ pred, succ = (
+ [n for n in self.G1.predecessors(G1_node) if n in self.core_1],
+ [n for n in self.G1.successors(G1_node) if n in self.core_1],
+ )
+ if not self.one_hop(
+ self.G1, G1_node, self.core_1, pred, succ
+ ): # Fail fast on first node.
+ return False
+ if not self.two_hop_pred(self.G1, G1_node, self.core_1, pred):
+ return False
+ if not self.two_hop_succ(self.G1, G1_node, self.core_1, succ):
+ return False
+ # Otherwise, this node is semantically feasible!
+ return True
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/__init__.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9ff7e3db585383ecb16f3e1ea229ee4c349a658e
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/__init__.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_ismags.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_ismags.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e483b6b9d68f22850383121c6b75e8eb2ef26522
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_ismags.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_tree_isomorphism.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_tree_isomorphism.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..275db7a85db64ee6dbad18067fda0f34c2721ee5
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_tree_isomorphism.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py
new file mode 100644
index 0000000000000000000000000000000000000000..00641519978edd6676396dafcb1d1f33b5490b82
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py
@@ -0,0 +1,327 @@
+"""
+ Tests for ISMAGS isomorphism algorithm.
+"""
+
+import pytest
+
+import networkx as nx
+from networkx.algorithms import isomorphism as iso
+
+
+def _matches_to_sets(matches):
+ """
+ Helper function to facilitate comparing collections of dictionaries in
+ which order does not matter.
+ """
+ return {frozenset(m.items()) for m in matches}
+
+
+class TestSelfIsomorphism:
+ data = [
+ (
+ [
+ (0, {"name": "a"}),
+ (1, {"name": "a"}),
+ (2, {"name": "b"}),
+ (3, {"name": "b"}),
+ (4, {"name": "a"}),
+ (5, {"name": "a"}),
+ ],
+ [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5)],
+ ),
+ (range(1, 5), [(1, 2), (2, 4), (4, 3), (3, 1)]),
+ (
+ [],
+ [
+ (0, 1),
+ (1, 2),
+ (2, 3),
+ (3, 4),
+ (4, 5),
+ (5, 0),
+ (0, 6),
+ (6, 7),
+ (2, 8),
+ (8, 9),
+ (4, 10),
+ (10, 11),
+ ],
+ ),
+ ([], [(0, 1), (1, 2), (1, 4), (2, 3), (3, 5), (3, 6)]),
+ ]
+
+ def test_self_isomorphism(self):
+ """
+ For some small, symmetric graphs, make sure that 1) they are isomorphic
+ to themselves, and 2) that only the identity mapping is found.
+ """
+ for node_data, edge_data in self.data:
+ graph = nx.Graph()
+ graph.add_nodes_from(node_data)
+ graph.add_edges_from(edge_data)
+
+ ismags = iso.ISMAGS(
+ graph, graph, node_match=iso.categorical_node_match("name", None)
+ )
+ assert ismags.is_isomorphic()
+ assert ismags.subgraph_is_isomorphic()
+ assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [
+ {n: n for n in graph.nodes}
+ ]
+
+ def test_edgecase_self_isomorphism(self):
+ """
+ This edgecase is one of the cases in which it is hard to find all
+ symmetry elements.
+ """
+ graph = nx.Graph()
+ nx.add_path(graph, range(5))
+ graph.add_edges_from([(2, 5), (5, 6)])
+
+ ismags = iso.ISMAGS(graph, graph)
+ ismags_answer = list(ismags.find_isomorphisms(True))
+ assert ismags_answer == [{n: n for n in graph.nodes}]
+
+ graph = nx.relabel_nodes(graph, {0: 0, 1: 1, 2: 2, 3: 3, 4: 6, 5: 4, 6: 5})
+ ismags = iso.ISMAGS(graph, graph)
+ ismags_answer = list(ismags.find_isomorphisms(True))
+ assert ismags_answer == [{n: n for n in graph.nodes}]
+
+ def test_directed_self_isomorphism(self):
+ """
+ For some small, directed, symmetric graphs, make sure that 1) they are
+ isomorphic to themselves, and 2) that only the identity mapping is
+ found.
+ """
+ for node_data, edge_data in self.data:
+ graph = nx.Graph()
+ graph.add_nodes_from(node_data)
+ graph.add_edges_from(edge_data)
+
+ ismags = iso.ISMAGS(
+ graph, graph, node_match=iso.categorical_node_match("name", None)
+ )
+ assert ismags.is_isomorphic()
+ assert ismags.subgraph_is_isomorphic()
+ assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [
+ {n: n for n in graph.nodes}
+ ]
+
+
+class TestSubgraphIsomorphism:
+ def test_isomorphism(self):
+ g1 = nx.Graph()
+ nx.add_cycle(g1, range(4))
+
+ g2 = nx.Graph()
+ nx.add_cycle(g2, range(4))
+ g2.add_edges_from(list(zip(g2, range(4, 8))))
+ ismags = iso.ISMAGS(g2, g1)
+ assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [
+ {n: n for n in g1.nodes}
+ ]
+
+ def test_isomorphism2(self):
+ g1 = nx.Graph()
+ nx.add_path(g1, range(3))
+
+ g2 = g1.copy()
+ g2.add_edge(1, 3)
+
+ ismags = iso.ISMAGS(g2, g1)
+ matches = ismags.subgraph_isomorphisms_iter(symmetry=True)
+ expected_symmetric = [
+ {0: 0, 1: 1, 2: 2},
+ {0: 0, 1: 1, 3: 2},
+ {2: 0, 1: 1, 3: 2},
+ ]
+ assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric)
+
+ matches = ismags.subgraph_isomorphisms_iter(symmetry=False)
+ expected_asymmetric = [
+ {0: 2, 1: 1, 2: 0},
+ {0: 2, 1: 1, 3: 0},
+ {2: 2, 1: 1, 3: 0},
+ ]
+ assert _matches_to_sets(matches) == _matches_to_sets(
+ expected_symmetric + expected_asymmetric
+ )
+
+ def test_labeled_nodes(self):
+ g1 = nx.Graph()
+ nx.add_cycle(g1, range(3))
+ g1.nodes[1]["attr"] = True
+
+ g2 = g1.copy()
+ g2.add_edge(1, 3)
+ ismags = iso.ISMAGS(g2, g1, node_match=lambda x, y: x == y)
+ matches = ismags.subgraph_isomorphisms_iter(symmetry=True)
+ expected_symmetric = [{0: 0, 1: 1, 2: 2}]
+ assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric)
+
+ matches = ismags.subgraph_isomorphisms_iter(symmetry=False)
+ expected_asymmetric = [{0: 2, 1: 1, 2: 0}]
+ assert _matches_to_sets(matches) == _matches_to_sets(
+ expected_symmetric + expected_asymmetric
+ )
+
+ def test_labeled_edges(self):
+ g1 = nx.Graph()
+ nx.add_cycle(g1, range(3))
+ g1.edges[1, 2]["attr"] = True
+
+ g2 = g1.copy()
+ g2.add_edge(1, 3)
+ ismags = iso.ISMAGS(g2, g1, edge_match=lambda x, y: x == y)
+ matches = ismags.subgraph_isomorphisms_iter(symmetry=True)
+ expected_symmetric = [{0: 0, 1: 1, 2: 2}]
+ assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric)
+
+ matches = ismags.subgraph_isomorphisms_iter(symmetry=False)
+ expected_asymmetric = [{1: 2, 0: 0, 2: 1}]
+ assert _matches_to_sets(matches) == _matches_to_sets(
+ expected_symmetric + expected_asymmetric
+ )
+
+
+class TestWikipediaExample:
+ # Nodes 'a', 'b', 'c' and 'd' form a column.
+ # Nodes 'g', 'h', 'i' and 'j' form a column.
+ g1edges = [
+ ["a", "g"],
+ ["a", "h"],
+ ["a", "i"],
+ ["b", "g"],
+ ["b", "h"],
+ ["b", "j"],
+ ["c", "g"],
+ ["c", "i"],
+ ["c", "j"],
+ ["d", "h"],
+ ["d", "i"],
+ ["d", "j"],
+ ]
+
+ # Nodes 1,2,3,4 form the clockwise corners of a large square.
+ # Nodes 5,6,7,8 form the clockwise corners of a small square
+ g2edges = [
+ [1, 2],
+ [2, 3],
+ [3, 4],
+ [4, 1],
+ [5, 6],
+ [6, 7],
+ [7, 8],
+ [8, 5],
+ [1, 5],
+ [2, 6],
+ [3, 7],
+ [4, 8],
+ ]
+
+ def test_graph(self):
+ g1 = nx.Graph()
+ g2 = nx.Graph()
+ g1.add_edges_from(self.g1edges)
+ g2.add_edges_from(self.g2edges)
+ gm = iso.ISMAGS(g1, g2)
+ assert gm.is_isomorphic()
+
+
+class TestLargestCommonSubgraph:
+ def test_mcis(self):
+ # Example graphs from DOI: 10.1002/spe.588
+ graph1 = nx.Graph()
+ graph1.add_edges_from([(1, 2), (2, 3), (2, 4), (3, 4), (4, 5)])
+ graph1.nodes[1]["color"] = 0
+
+ graph2 = nx.Graph()
+ graph2.add_edges_from(
+ [(1, 2), (2, 3), (2, 4), (3, 4), (3, 5), (5, 6), (5, 7), (6, 7)]
+ )
+ graph2.nodes[1]["color"] = 1
+ graph2.nodes[6]["color"] = 2
+ graph2.nodes[7]["color"] = 2
+
+ ismags = iso.ISMAGS(
+ graph1, graph2, node_match=iso.categorical_node_match("color", None)
+ )
+ assert list(ismags.subgraph_isomorphisms_iter(True)) == []
+ assert list(ismags.subgraph_isomorphisms_iter(False)) == []
+ found_mcis = _matches_to_sets(ismags.largest_common_subgraph())
+ expected = _matches_to_sets(
+ [{2: 2, 3: 4, 4: 3, 5: 5}, {2: 4, 3: 2, 4: 3, 5: 5}]
+ )
+ assert expected == found_mcis
+
+ ismags = iso.ISMAGS(
+ graph2, graph1, node_match=iso.categorical_node_match("color", None)
+ )
+ assert list(ismags.subgraph_isomorphisms_iter(True)) == []
+ assert list(ismags.subgraph_isomorphisms_iter(False)) == []
+ found_mcis = _matches_to_sets(ismags.largest_common_subgraph())
+ # Same answer, but reversed.
+ expected = _matches_to_sets(
+ [{2: 2, 3: 4, 4: 3, 5: 5}, {4: 2, 2: 3, 3: 4, 5: 5}]
+ )
+ assert expected == found_mcis
+
+ def test_symmetry_mcis(self):
+ graph1 = nx.Graph()
+ nx.add_path(graph1, range(4))
+
+ graph2 = nx.Graph()
+ nx.add_path(graph2, range(3))
+ graph2.add_edge(1, 3)
+
+ # Only the symmetry of graph2 is taken into account here.
+ ismags1 = iso.ISMAGS(
+ graph1, graph2, node_match=iso.categorical_node_match("color", None)
+ )
+ assert list(ismags1.subgraph_isomorphisms_iter(True)) == []
+ found_mcis = _matches_to_sets(ismags1.largest_common_subgraph())
+ expected = _matches_to_sets([{0: 0, 1: 1, 2: 2}, {1: 0, 3: 2, 2: 1}])
+ assert expected == found_mcis
+
+ # Only the symmetry of graph1 is taken into account here.
+ ismags2 = iso.ISMAGS(
+ graph2, graph1, node_match=iso.categorical_node_match("color", None)
+ )
+ assert list(ismags2.subgraph_isomorphisms_iter(True)) == []
+ found_mcis = _matches_to_sets(ismags2.largest_common_subgraph())
+ expected = _matches_to_sets(
+ [
+ {3: 2, 0: 0, 1: 1},
+ {2: 0, 0: 2, 1: 1},
+ {3: 0, 0: 2, 1: 1},
+ {3: 0, 1: 1, 2: 2},
+ {0: 0, 1: 1, 2: 2},
+ {2: 0, 3: 2, 1: 1},
+ ]
+ )
+
+ assert expected == found_mcis
+
+ found_mcis1 = _matches_to_sets(ismags1.largest_common_subgraph(False))
+ found_mcis2 = ismags2.largest_common_subgraph(False)
+ found_mcis2 = [{v: k for k, v in d.items()} for d in found_mcis2]
+ found_mcis2 = _matches_to_sets(found_mcis2)
+
+ expected = _matches_to_sets(
+ [
+ {3: 2, 1: 3, 2: 1},
+ {2: 0, 0: 2, 1: 1},
+ {1: 2, 3: 3, 2: 1},
+ {3: 0, 1: 3, 2: 1},
+ {0: 2, 2: 3, 1: 1},
+ {3: 0, 1: 2, 2: 1},
+ {2: 0, 0: 3, 1: 1},
+ {0: 0, 2: 3, 1: 1},
+ {1: 0, 3: 3, 2: 1},
+ {1: 0, 3: 2, 2: 1},
+ {0: 3, 1: 1, 2: 2},
+ {0: 0, 1: 1, 2: 2},
+ ]
+ )
+ assert expected == found_mcis1
+ assert expected == found_mcis2
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py
new file mode 100644
index 0000000000000000000000000000000000000000..9484edc042ac127451b7141f9da659e3e429b679
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py
@@ -0,0 +1,192 @@
+"""
+ Module to simplify the specification of user-defined equality functions for
+ node and edge attributes during isomorphism checks.
+
+ During the construction of an isomorphism, the algorithm considers two
+ candidate nodes n1 in G1 and n2 in G2. The graphs G1 and G2 are then
+ compared with respect to properties involving n1 and n2, and if the outcome
+ is good, then the candidate nodes are considered isomorphic. NetworkX
+ provides a simple mechanism for users to extend the comparisons to include
+ node and edge attributes.
+
+ Node attributes are handled by the node_match keyword. When considering
+ n1 and n2, the algorithm passes their node attribute dictionaries to
+ node_match, and if it returns False, then n1 and n2 cannot be
+ considered to be isomorphic.
+
+ Edge attributes are handled by the edge_match keyword. When considering
+ n1 and n2, the algorithm must verify that outgoing edges from n1 are
+ commensurate with the outgoing edges for n2. If the graph is directed,
+ then a similar check is also performed for incoming edges.
+
+ Focusing only on outgoing edges, we consider pairs of nodes (n1, v1) from
+ G1 and (n2, v2) from G2. For graphs and digraphs, there is only one edge
+ between (n1, v1) and only one edge between (n2, v2). Those edge attribute
+ dictionaries are passed to edge_match, and if it returns False, then
+ n1 and n2 cannot be considered isomorphic. For multigraphs and
+ multidigraphs, there can be multiple edges between (n1, v1) and also
+ multiple edges between (n2, v2). Now, there must exist an isomorphism
+ from "all the edges between (n1, v1)" to "all the edges between (n2, v2)".
+ So, all of the edge attribute dictionaries are passed to edge_match, and
+ it must determine if there is an isomorphism between the two sets of edges.
+"""
+
+from . import isomorphvf2 as vf2
+
+__all__ = ["GraphMatcher", "DiGraphMatcher", "MultiGraphMatcher", "MultiDiGraphMatcher"]
+
+
+def _semantic_feasibility(self, G1_node, G2_node):
+ """Returns True if mapping G1_node to G2_node is semantically feasible."""
+ # Make sure the nodes match
+ if self.node_match is not None:
+ nm = self.node_match(self.G1.nodes[G1_node], self.G2.nodes[G2_node])
+ if not nm:
+ return False
+
+ # Make sure the edges match
+ if self.edge_match is not None:
+ # Cached lookups
+ G1nbrs = self.G1_adj[G1_node]
+ G2nbrs = self.G2_adj[G2_node]
+ core_1 = self.core_1
+ edge_match = self.edge_match
+
+ for neighbor in G1nbrs:
+ # G1_node is not in core_1, so we must handle R_self separately
+ if neighbor == G1_node:
+ if G2_node in G2nbrs and not edge_match(
+ G1nbrs[G1_node], G2nbrs[G2_node]
+ ):
+ return False
+ elif neighbor in core_1:
+ G2_nbr = core_1[neighbor]
+ if G2_nbr in G2nbrs and not edge_match(
+ G1nbrs[neighbor], G2nbrs[G2_nbr]
+ ):
+ return False
+ # syntactic check has already verified that neighbors are symmetric
+
+ return True
+
+
+class GraphMatcher(vf2.GraphMatcher):
+ """VF2 isomorphism checker for undirected graphs."""
+
+ def __init__(self, G1, G2, node_match=None, edge_match=None):
+ """Initialize graph matcher.
+
+ Parameters
+ ----------
+ G1, G2: graph
+ The graphs to be tested.
+
+ node_match: callable
+ A function that returns True iff node n1 in G1 and n2 in G2
+ should be considered equal during the isomorphism test. The
+ function will be called like::
+
+ node_match(G1.nodes[n1], G2.nodes[n2])
+
+ That is, the function will receive the node attribute dictionaries
+ of the nodes under consideration. If None, then no attributes are
+ considered when testing for an isomorphism.
+
+ edge_match: callable
+ A function that returns True iff the edge attribute dictionary for
+ the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should be
+ considered equal during the isomorphism test. The function will be
+ called like::
+
+ edge_match(G1[u1][v1], G2[u2][v2])
+
+ That is, the function will receive the edge attribute dictionaries
+ of the edges under consideration. If None, then no attributes are
+ considered when testing for an isomorphism.
+
+ """
+ vf2.GraphMatcher.__init__(self, G1, G2)
+
+ self.node_match = node_match
+ self.edge_match = edge_match
+
+ # These will be modified during checks to minimize code repeat.
+ self.G1_adj = self.G1.adj
+ self.G2_adj = self.G2.adj
+
+ semantic_feasibility = _semantic_feasibility
+
+
+class DiGraphMatcher(vf2.DiGraphMatcher):
+ """VF2 isomorphism checker for directed graphs."""
+
+ def __init__(self, G1, G2, node_match=None, edge_match=None):
+ """Initialize graph matcher.
+
+ Parameters
+ ----------
+ G1, G2 : graph
+ The graphs to be tested.
+
+ node_match : callable
+ A function that returns True iff node n1 in G1 and n2 in G2
+ should be considered equal during the isomorphism test. The
+ function will be called like::
+
+ node_match(G1.nodes[n1], G2.nodes[n2])
+
+ That is, the function will receive the node attribute dictionaries
+ of the nodes under consideration. If None, then no attributes are
+ considered when testing for an isomorphism.
+
+ edge_match : callable
+ A function that returns True iff the edge attribute dictionary for
+ the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should be
+ considered equal during the isomorphism test. The function will be
+ called like::
+
+ edge_match(G1[u1][v1], G2[u2][v2])
+
+ That is, the function will receive the edge attribute dictionaries
+ of the edges under consideration. If None, then no attributes are
+ considered when testing for an isomorphism.
+
+ """
+ vf2.DiGraphMatcher.__init__(self, G1, G2)
+
+ self.node_match = node_match
+ self.edge_match = edge_match
+
+ # These will be modified during checks to minimize code repeat.
+ self.G1_adj = self.G1.adj
+ self.G2_adj = self.G2.adj
+
+ def semantic_feasibility(self, G1_node, G2_node):
+ """Returns True if mapping G1_node to G2_node is semantically feasible."""
+
+ # Test node_match and also test edge_match on successors
+ feasible = _semantic_feasibility(self, G1_node, G2_node)
+ if not feasible:
+ return False
+
+ # Test edge_match on predecessors
+ self.G1_adj = self.G1.pred
+ self.G2_adj = self.G2.pred
+ feasible = _semantic_feasibility(self, G1_node, G2_node)
+ self.G1_adj = self.G1.adj
+ self.G2_adj = self.G2.adj
+
+ return feasible
+
+
+# The "semantics" of edge_match are different for multi(di)graphs, but
+# the implementation is the same. So, technically we do not need to
+# provide "multi" versions, but we do so to match NetworkX's base classes.
+
+
+class MultiGraphMatcher(GraphMatcher):
+ """VF2 isomorphism checker for undirected multigraphs."""
+
+
+class MultiDiGraphMatcher(DiGraphMatcher):
+ """VF2 isomorphism checker for directed multigraphs."""
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/operators/__pycache__/product.cpython-311.pyc b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/operators/__pycache__/product.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b50a84c191d44aaf6f67ced5785f672c877dbe8a
Binary files /dev/null and b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/operators/__pycache__/product.cpython-311.pyc differ
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/operators/binary.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/operators/binary.py
new file mode 100644
index 0000000000000000000000000000000000000000..1979e80b68aa246d919688146a91dcbe5ab84b90
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/operators/binary.py
@@ -0,0 +1,444 @@
+"""
+Operations on graphs including union, intersection, difference.
+"""
+import networkx as nx
+
+__all__ = [
+ "union",
+ "compose",
+ "disjoint_union",
+ "intersection",
+ "difference",
+ "symmetric_difference",
+ "full_join",
+]
+_G_H = {"G": 0, "H": 1}
+
+
+@nx._dispatch(graphs=_G_H, preserve_all_attrs=True)
+def union(G, H, rename=()):
+ """Combine graphs G and H. The names of nodes must be unique.
+
+ A name collision between the graphs will raise an exception.
+
+ A renaming facility is provided to avoid name collisions.
+
+
+ Parameters
+ ----------
+ G, H : graph
+ A NetworkX graph
+
+ rename : iterable , optional
+ Node names of G and H can be changed by specifying the tuple
+ rename=('G-','H-') (for example). Node "u" in G is then renamed
+ "G-u" and "v" in H is renamed "H-v".
+
+ Returns
+ -------
+ U : A union graph with the same type as G.
+
+ See Also
+ --------
+ compose
+ :func:`~networkx.Graph.update`
+ disjoint_union
+
+ Notes
+ -----
+ To combine graphs that have common nodes, consider compose(G, H)
+ or the method, Graph.update().
+
+ disjoint_union() is similar to union() except that it avoids name clashes
+ by relabeling the nodes with sequential integers.
+
+ Edge and node attributes are propagated from G and H to the union graph.
+ Graph attributes are also propagated, but if they are present in both G and H,
+ then the value from H is used.
+
+ Examples
+ --------
+ >>> G = nx.Graph([(0, 1), (0, 2), (1, 2)])
+ >>> H = nx.Graph([(0, 1), (0, 3), (1, 3), (1, 2)])
+ >>> U = nx.union(G, H, rename=("G", "H"))
+ >>> U.nodes
+ NodeView(('G0', 'G1', 'G2', 'H0', 'H1', 'H3', 'H2'))
+ >>> U.edges
+ EdgeView([('G0', 'G1'), ('G0', 'G2'), ('G1', 'G2'), ('H0', 'H1'), ('H0', 'H3'), ('H1', 'H3'), ('H1', 'H2')])
+
+
+ """
+ return nx.union_all([G, H], rename)
+
+
+@nx._dispatch(graphs=_G_H, preserve_all_attrs=True)
+def disjoint_union(G, H):
+ """Combine graphs G and H. The nodes are assumed to be unique (disjoint).
+
+ This algorithm automatically relabels nodes to avoid name collisions.
+
+ Parameters
+ ----------
+ G,H : graph
+ A NetworkX graph
+
+ Returns
+ -------
+ U : A union graph with the same type as G.
+
+ See Also
+ --------
+ union
+ compose
+ :func:`~networkx.Graph.update`
+
+ Notes
+ -----
+ A new graph is created, of the same class as G. It is recommended
+ that G and H be either both directed or both undirected.
+
+ The nodes of G are relabeled 0 to len(G)-1, and the nodes of H are
+ relabeled len(G) to len(G)+len(H)-1.
+
+ Renumbering forces G and H to be disjoint, so no exception is ever raised for a name collision.
+ To preserve the check for common nodes, use union().
+
+ Edge and node attributes are propagated from G and H to the union graph.
+ Graph attributes are also propagated, but if they are present in both G and H,
+ then the value from H is used.
+
+ To combine graphs that have common nodes, consider compose(G, H)
+ or the method, Graph.update().
+
+ Examples
+ --------
+ >>> G = nx.Graph([(0, 1), (0, 2), (1, 2)])
+ >>> H = nx.Graph([(0, 3), (1, 2), (2, 3)])
+ >>> G.nodes[0]["key1"] = 5
+ >>> H.nodes[0]["key2"] = 10
+ >>> U = nx.disjoint_union(G, H)
+ >>> U.nodes(data=True)
+ NodeDataView({0: {'key1': 5}, 1: {}, 2: {}, 3: {'key2': 10}, 4: {}, 5: {}, 6: {}})
+ >>> U.edges
+ EdgeView([(0, 1), (0, 2), (1, 2), (3, 4), (4, 6), (5, 6)])
+ """
+ return nx.disjoint_union_all([G, H])
+
+
+@nx._dispatch(graphs=_G_H)
+def intersection(G, H):
+ """Returns a new graph that contains only the nodes and the edges that exist in
+ both G and H.
+
+ Parameters
+ ----------
+ G,H : graph
+ A NetworkX graph. G and H can have different node sets but must be both graphs or both multigraphs.
+
+ Raises
+ ------
+ NetworkXError
+ If one is a MultiGraph and the other one is a graph.
+
+ Returns
+ -------
+ GH : A new graph with the same type as G.
+
+ Notes
+ -----
+ Attributes from the graph, nodes, and edges are not copied to the new
+ graph. If you want a new graph of the intersection of G and H
+ with the attributes (including edge data) from G use remove_nodes_from()
+ as follows
+
+ >>> G = nx.path_graph(3)
+ >>> H = nx.path_graph(5)
+ >>> R = G.copy()
+ >>> R.remove_nodes_from(n for n in G if n not in H)
+ >>> R.remove_edges_from(e for e in G.edges if e not in H.edges)
+
+ Examples
+ --------
+ >>> G = nx.Graph([(0, 1), (0, 2), (1, 2)])
+ >>> H = nx.Graph([(0, 3), (1, 2), (2, 3)])
+ >>> R = nx.intersection(G, H)
+ >>> R.nodes
+ NodeView((0, 1, 2))
+ >>> R.edges
+ EdgeView([(1, 2)])
+ """
+ return nx.intersection_all([G, H])
+
+
+@nx._dispatch(graphs=_G_H)
+def difference(G, H):
+ """Returns a new graph that contains the edges that exist in G but not in H.
+
+ The node sets of H and G must be the same.
+
+ Parameters
+ ----------
+ G,H : graph
+ A NetworkX graph. G and H must have the same node sets.
+
+ Returns
+ -------
+ D : A new graph with the same type as G.
+
+ Notes
+ -----
+ Attributes from the graph, nodes, and edges are not copied to the new
+ graph. If you want a new graph of the difference of G and H with
+ the attributes (including edge data) from G use remove_nodes_from()
+ as follows:
+
+ >>> G = nx.path_graph(3)
+ >>> H = nx.path_graph(5)
+ >>> R = G.copy()
+ >>> R.remove_nodes_from(n for n in G if n in H)
+
+ Examples
+ --------
+ >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3)])
+ >>> H = nx.Graph([(0, 1), (1, 2), (0, 3)])
+ >>> R = nx.difference(G, H)
+ >>> R.nodes
+ NodeView((0, 1, 2, 3))
+ >>> R.edges
+ EdgeView([(0, 2), (1, 3)])
+ """
+ # create new graph
+ if not G.is_multigraph() == H.is_multigraph():
+ raise nx.NetworkXError("G and H must both be graphs or multigraphs.")
+ R = nx.create_empty_copy(G)
+
+ if set(G) != set(H):
+ raise nx.NetworkXError("Node sets of graphs not equal")
+
+ if G.is_multigraph():
+ edges = G.edges(keys=True)
+ else:
+ edges = G.edges()
+ for e in edges:
+ if not H.has_edge(*e):
+ R.add_edge(*e)
+ return R
+
+
+@nx._dispatch(graphs=_G_H)
+def symmetric_difference(G, H):
+ """Returns new graph with edges that exist in either G or H but not both.
+
+ The node sets of H and G must be the same.
+
+ Parameters
+ ----------
+ G,H : graph
+ A NetworkX graph. G and H must have the same node sets.
+
+ Returns
+ -------
+ D : A new graph with the same type as G.
+
+ Notes
+ -----
+ Attributes from the graph, nodes, and edges are not copied to the new
+ graph.
+
+ Examples
+ --------
+ >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3)])
+ >>> H = nx.Graph([(0, 1), (1, 2), (0, 3)])
+ >>> R = nx.symmetric_difference(G, H)
+ >>> R.nodes
+ NodeView((0, 1, 2, 3))
+ >>> R.edges
+ EdgeView([(0, 2), (0, 3), (1, 3)])
+ """
+ # create new graph
+ if not G.is_multigraph() == H.is_multigraph():
+ raise nx.NetworkXError("G and H must both be graphs or multigraphs.")
+ R = nx.create_empty_copy(G)
+
+ if set(G) != set(H):
+ raise nx.NetworkXError("Node sets of graphs not equal")
+
+ gnodes = set(G) # set of nodes in G
+ hnodes = set(H) # set of nodes in H
+ nodes = gnodes.symmetric_difference(hnodes)
+ R.add_nodes_from(nodes)
+
+ if G.is_multigraph():
+ edges = G.edges(keys=True)
+ else:
+ edges = G.edges()
+ # we could copy the data here but then this function doesn't
+ # match intersection and difference
+ for e in edges:
+ if not H.has_edge(*e):
+ R.add_edge(*e)
+
+ if H.is_multigraph():
+ edges = H.edges(keys=True)
+ else:
+ edges = H.edges()
+ for e in edges:
+ if not G.has_edge(*e):
+ R.add_edge(*e)
+ return R
+
+
+@nx._dispatch(graphs=_G_H, preserve_all_attrs=True)
+def compose(G, H):
+ """Compose graph G with H by combining nodes and edges into a single graph.
+
+ The node sets and edges sets do not need to be disjoint.
+
+ Composing preserves the attributes of nodes and edges.
+ Attribute values from H take precedent over attribute values from G.
+
+ Parameters
+ ----------
+ G, H : graph
+ A NetworkX graph
+
+ Returns
+ -------
+ C: A new graph with the same type as G
+
+ See Also
+ --------
+ :func:`~networkx.Graph.update`
+ union
+ disjoint_union
+
+ Notes
+ -----
+ It is recommended that G and H be either both directed or both undirected.
+
+ For MultiGraphs, the edges are identified by incident nodes AND edge-key.
+ This can cause surprises (i.e., edge `(1, 2)` may or may not be the same
+ in two graphs) if you use MultiGraph without keeping track of edge keys.
+
+ If combining the attributes of common nodes is not desired, consider union(),
+ which raises an exception for name collisions.
+
+ Examples
+ --------
+ >>> G = nx.Graph([(0, 1), (0, 2)])
+ >>> H = nx.Graph([(0, 1), (1, 2)])
+ >>> R = nx.compose(G, H)
+ >>> R.nodes
+ NodeView((0, 1, 2))
+ >>> R.edges
+ EdgeView([(0, 1), (0, 2), (1, 2)])
+
+ By default, the attributes from `H` take precedent over attributes from `G`.
+ If you prefer another way of combining attributes, you can update them after the compose operation:
+
+ >>> G = nx.Graph([(0, 1, {'weight': 2.0}), (3, 0, {'weight': 100.0})])
+ >>> H = nx.Graph([(0, 1, {'weight': 10.0}), (1, 2, {'weight': -1.0})])
+ >>> nx.set_node_attributes(G, {0: 'dark', 1: 'light', 3: 'black'}, name='color')
+ >>> nx.set_node_attributes(H, {0: 'green', 1: 'orange', 2: 'yellow'}, name='color')
+ >>> GcomposeH = nx.compose(G, H)
+
+ Normally, color attribute values of nodes of GcomposeH come from H. We can workaround this as follows:
+
+ >>> node_data = {n: G.nodes[n]['color'] + " " + H.nodes[n]['color'] for n in G.nodes & H.nodes}
+ >>> nx.set_node_attributes(GcomposeH, node_data, 'color')
+ >>> print(GcomposeH.nodes[0]['color'])
+ dark green
+
+ >>> print(GcomposeH.nodes[3]['color'])
+ black
+
+ Similarly, we can update edge attributes after the compose operation in a way we prefer:
+
+ >>> edge_data = {e: G.edges[e]['weight'] * H.edges[e]['weight'] for e in G.edges & H.edges}
+ >>> nx.set_edge_attributes(GcomposeH, edge_data, 'weight')
+ >>> print(GcomposeH.edges[(0, 1)]['weight'])
+ 20.0
+
+ >>> print(GcomposeH.edges[(3, 0)]['weight'])
+ 100.0
+ """
+ return nx.compose_all([G, H])
+
+
+@nx._dispatch(graphs=_G_H, preserve_all_attrs=True)
+def full_join(G, H, rename=(None, None)):
+ """Returns the full join of graphs G and H.
+
+ Full join is the union of G and H in which all edges between
+ G and H are added.
+ The node sets of G and H must be disjoint,
+ otherwise an exception is raised.
+
+ Parameters
+ ----------
+ G, H : graph
+ A NetworkX graph
+
+ rename : tuple , default=(None, None)
+ Node names of G and H can be changed by specifying the tuple
+ rename=('G-','H-') (for example). Node "u" in G is then renamed
+ "G-u" and "v" in H is renamed "H-v".
+
+ Returns
+ -------
+ U : The full join graph with the same type as G.
+
+ Notes
+ -----
+ It is recommended that G and H be either both directed or both undirected.
+
+ If G is directed, then edges from G to H are added as well as from H to G.
+
+ Note that full_join() does not produce parallel edges for MultiGraphs.
+
+ The full join operation of graphs G and H is the same as getting
+ their complement, performing a disjoint union, and finally getting
+ the complement of the resulting graph.
+
+ Graph, edge, and node attributes are propagated from G and H
+ to the union graph. If a graph attribute is present in both
+ G and H the value from H is used.
+
+ Examples
+ --------
+ >>> G = nx.Graph([(0, 1), (0, 2)])
+ >>> H = nx.Graph([(3, 4)])
+ >>> R = nx.full_join(G, H, rename=("G", "H"))
+ >>> R.nodes
+ NodeView(('G0', 'G1', 'G2', 'H3', 'H4'))
+ >>> R.edges
+ EdgeView([('G0', 'G1'), ('G0', 'G2'), ('G0', 'H3'), ('G0', 'H4'), ('G1', 'H3'), ('G1', 'H4'), ('G2', 'H3'), ('G2', 'H4'), ('H3', 'H4')])
+
+ See Also
+ --------
+ union
+ disjoint_union
+ """
+ R = union(G, H, rename)
+
+ def add_prefix(graph, prefix):
+ if prefix is None:
+ return graph
+
+ def label(x):
+ return f"{prefix}{x}"
+
+ return nx.relabel_nodes(graph, label)
+
+ G = add_prefix(G, rename[0])
+ H = add_prefix(H, rename[1])
+
+ for i in G:
+ for j in H:
+ R.add_edge(i, j)
+ if R.is_directed():
+ for i in H:
+ for j in G:
+ R.add_edge(i, j)
+
+ return R
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/operators/tests/test_unary.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/operators/tests/test_unary.py
new file mode 100644
index 0000000000000000000000000000000000000000..d68e55cd9c9fa37459b497c32a7a095576c306c3
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/operators/tests/test_unary.py
@@ -0,0 +1,55 @@
+import pytest
+
+import networkx as nx
+
+
+def test_complement():
+ null = nx.null_graph()
+ empty1 = nx.empty_graph(1)
+ empty10 = nx.empty_graph(10)
+ K3 = nx.complete_graph(3)
+ K5 = nx.complete_graph(5)
+ K10 = nx.complete_graph(10)
+ P2 = nx.path_graph(2)
+ P3 = nx.path_graph(3)
+ P5 = nx.path_graph(5)
+ P10 = nx.path_graph(10)
+ # complement of the complete graph is empty
+
+ G = nx.complement(K3)
+ assert nx.is_isomorphic(G, nx.empty_graph(3))
+ G = nx.complement(K5)
+ assert nx.is_isomorphic(G, nx.empty_graph(5))
+ # for any G, G=complement(complement(G))
+ P3cc = nx.complement(nx.complement(P3))
+ assert nx.is_isomorphic(P3, P3cc)
+ nullcc = nx.complement(nx.complement(null))
+ assert nx.is_isomorphic(null, nullcc)
+ b = nx.bull_graph()
+ bcc = nx.complement(nx.complement(b))
+ assert nx.is_isomorphic(b, bcc)
+
+
+def test_complement_2():
+ G1 = nx.DiGraph()
+ G1.add_edge("A", "B")
+ G1.add_edge("A", "C")
+ G1.add_edge("A", "D")
+ G1C = nx.complement(G1)
+ assert sorted(G1C.edges()) == [
+ ("B", "A"),
+ ("B", "C"),
+ ("B", "D"),
+ ("C", "A"),
+ ("C", "B"),
+ ("C", "D"),
+ ("D", "A"),
+ ("D", "B"),
+ ("D", "C"),
+ ]
+
+
+def test_reverse1():
+ # Other tests for reverse are done by the DiGraph and MultiDigraph.
+ G1 = nx.Graph()
+ pytest.raises(nx.NetworkXError, nx.reverse, G1)
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_chains.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_chains.py
new file mode 100644
index 0000000000000000000000000000000000000000..6a1b1142caace872bdc6b779449c17cdc3089c77
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_chains.py
@@ -0,0 +1,140 @@
+"""Unit tests for the chain decomposition functions."""
+from itertools import cycle, islice
+
+import pytest
+
+import networkx as nx
+
+
+def cycles(seq):
+ """Yields cyclic permutations of the given sequence.
+
+ For example::
+
+ >>> list(cycles("abc"))
+ [('a', 'b', 'c'), ('b', 'c', 'a'), ('c', 'a', 'b')]
+
+ """
+ n = len(seq)
+ cycled_seq = cycle(seq)
+ for x in seq:
+ yield tuple(islice(cycled_seq, n))
+ next(cycled_seq)
+
+
+def cyclic_equals(seq1, seq2):
+ """Decide whether two sequences are equal up to cyclic permutations.
+
+ For example::
+
+ >>> cyclic_equals("xyz", "zxy")
+ True
+ >>> cyclic_equals("xyz", "zyx")
+ False
+
+ """
+ # Cast seq2 to a tuple since `cycles()` yields tuples.
+ seq2 = tuple(seq2)
+ return any(x == tuple(seq2) for x in cycles(seq1))
+
+
+class TestChainDecomposition:
+ """Unit tests for the chain decomposition function."""
+
+ def assertContainsChain(self, chain, expected):
+ # A cycle could be expressed in two different orientations, one
+ # forward and one backward, so we need to check for cyclic
+ # equality in both orientations.
+ reversed_chain = list(reversed([tuple(reversed(e)) for e in chain]))
+ for candidate in expected:
+ if cyclic_equals(chain, candidate):
+ break
+ if cyclic_equals(reversed_chain, candidate):
+ break
+ else:
+ self.fail("chain not found")
+
+ def test_decomposition(self):
+ edges = [
+ # DFS tree edges.
+ (1, 2),
+ (2, 3),
+ (3, 4),
+ (3, 5),
+ (5, 6),
+ (6, 7),
+ (7, 8),
+ (5, 9),
+ (9, 10),
+ # Nontree edges.
+ (1, 3),
+ (1, 4),
+ (2, 5),
+ (5, 10),
+ (6, 8),
+ ]
+ G = nx.Graph(edges)
+ expected = [
+ [(1, 3), (3, 2), (2, 1)],
+ [(1, 4), (4, 3)],
+ [(2, 5), (5, 3)],
+ [(5, 10), (10, 9), (9, 5)],
+ [(6, 8), (8, 7), (7, 6)],
+ ]
+ chains = list(nx.chain_decomposition(G, root=1))
+ assert len(chains) == len(expected)
+
+ # This chain decomposition isn't unique
+ # for chain in chains:
+ # print(chain)
+ # self.assertContainsChain(chain, expected)
+
+ def test_barbell_graph(self):
+ # The (3, 0) barbell graph has two triangles joined by a single edge.
+ G = nx.barbell_graph(3, 0)
+ chains = list(nx.chain_decomposition(G, root=0))
+ expected = [[(0, 1), (1, 2), (2, 0)], [(3, 4), (4, 5), (5, 3)]]
+ assert len(chains) == len(expected)
+ for chain in chains:
+ self.assertContainsChain(chain, expected)
+
+ def test_disconnected_graph(self):
+ """Test for a graph with multiple connected components."""
+ G = nx.barbell_graph(3, 0)
+ H = nx.barbell_graph(3, 0)
+ mapping = dict(zip(range(6), "abcdef"))
+ nx.relabel_nodes(H, mapping, copy=False)
+ G = nx.union(G, H)
+ chains = list(nx.chain_decomposition(G))
+ expected = [
+ [(0, 1), (1, 2), (2, 0)],
+ [(3, 4), (4, 5), (5, 3)],
+ [("a", "b"), ("b", "c"), ("c", "a")],
+ [("d", "e"), ("e", "f"), ("f", "d")],
+ ]
+ assert len(chains) == len(expected)
+ for chain in chains:
+ self.assertContainsChain(chain, expected)
+
+ def test_disconnected_graph_root_node(self):
+ """Test for a single component of a disconnected graph."""
+ G = nx.barbell_graph(3, 0)
+ H = nx.barbell_graph(3, 0)
+ mapping = dict(zip(range(6), "abcdef"))
+ nx.relabel_nodes(H, mapping, copy=False)
+ G = nx.union(G, H)
+ chains = list(nx.chain_decomposition(G, root="a"))
+ expected = [
+ [("a", "b"), ("b", "c"), ("c", "a")],
+ [("d", "e"), ("e", "f"), ("f", "d")],
+ ]
+ assert len(chains) == len(expected)
+ for chain in chains:
+ self.assertContainsChain(chain, expected)
+
+ def test_chain_decomposition_root_not_in_G(self):
+ """Test chain decomposition when root is not in graph"""
+ G = nx.Graph()
+ G.add_nodes_from([1, 2, 3])
+ with pytest.raises(nx.NodeNotFound):
+ nx.has_bridges(G, root=6)
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_chordal.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_chordal.py
new file mode 100644
index 0000000000000000000000000000000000000000..148b22f2632d722522483b556f11285a8e823126
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_chordal.py
@@ -0,0 +1,129 @@
+import pytest
+
+import networkx as nx
+
+
+class TestMCS:
+ @classmethod
+ def setup_class(cls):
+ # simple graph
+ connected_chordal_G = nx.Graph()
+ connected_chordal_G.add_edges_from(
+ [
+ (1, 2),
+ (1, 3),
+ (2, 3),
+ (2, 4),
+ (3, 4),
+ (3, 5),
+ (3, 6),
+ (4, 5),
+ (4, 6),
+ (5, 6),
+ ]
+ )
+ cls.connected_chordal_G = connected_chordal_G
+
+ chordal_G = nx.Graph()
+ chordal_G.add_edges_from(
+ [
+ (1, 2),
+ (1, 3),
+ (2, 3),
+ (2, 4),
+ (3, 4),
+ (3, 5),
+ (3, 6),
+ (4, 5),
+ (4, 6),
+ (5, 6),
+ (7, 8),
+ ]
+ )
+ chordal_G.add_node(9)
+ cls.chordal_G = chordal_G
+
+ non_chordal_G = nx.Graph()
+ non_chordal_G.add_edges_from([(1, 2), (1, 3), (2, 4), (2, 5), (3, 4), (3, 5)])
+ cls.non_chordal_G = non_chordal_G
+
+ self_loop_G = nx.Graph()
+ self_loop_G.add_edges_from([(1, 1)])
+ cls.self_loop_G = self_loop_G
+
+ @pytest.mark.parametrize("G", (nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()))
+ def test_is_chordal_not_implemented(self, G):
+ with pytest.raises(nx.NetworkXNotImplemented):
+ nx.is_chordal(G)
+
+ def test_is_chordal(self):
+ assert not nx.is_chordal(self.non_chordal_G)
+ assert nx.is_chordal(self.chordal_G)
+ assert nx.is_chordal(self.connected_chordal_G)
+ assert nx.is_chordal(nx.Graph())
+ assert nx.is_chordal(nx.complete_graph(3))
+ assert nx.is_chordal(nx.cycle_graph(3))
+ assert not nx.is_chordal(nx.cycle_graph(5))
+ assert nx.is_chordal(self.self_loop_G)
+
+ def test_induced_nodes(self):
+ G = nx.generators.classic.path_graph(10)
+ Induced_nodes = nx.find_induced_nodes(G, 1, 9, 2)
+ assert Induced_nodes == {1, 2, 3, 4, 5, 6, 7, 8, 9}
+ pytest.raises(
+ nx.NetworkXTreewidthBoundExceeded, nx.find_induced_nodes, G, 1, 9, 1
+ )
+ Induced_nodes = nx.find_induced_nodes(self.chordal_G, 1, 6)
+ assert Induced_nodes == {1, 2, 4, 6}
+ pytest.raises(nx.NetworkXError, nx.find_induced_nodes, self.non_chordal_G, 1, 5)
+
+ def test_graph_treewidth(self):
+ with pytest.raises(nx.NetworkXError, match="Input graph is not chordal"):
+ nx.chordal_graph_treewidth(self.non_chordal_G)
+
+ def test_chordal_find_cliques(self):
+ cliques = {
+ frozenset([9]),
+ frozenset([7, 8]),
+ frozenset([1, 2, 3]),
+ frozenset([2, 3, 4]),
+ frozenset([3, 4, 5, 6]),
+ }
+ assert set(nx.chordal_graph_cliques(self.chordal_G)) == cliques
+ with pytest.raises(nx.NetworkXError, match="Input graph is not chordal"):
+ set(nx.chordal_graph_cliques(self.non_chordal_G))
+ with pytest.raises(nx.NetworkXError, match="Input graph is not chordal"):
+ set(nx.chordal_graph_cliques(self.self_loop_G))
+
+ def test_chordal_find_cliques_path(self):
+ G = nx.path_graph(10)
+ cliqueset = nx.chordal_graph_cliques(G)
+ for u, v in G.edges():
+ assert frozenset([u, v]) in cliqueset or frozenset([v, u]) in cliqueset
+
+ def test_chordal_find_cliquesCC(self):
+ cliques = {frozenset([1, 2, 3]), frozenset([2, 3, 4]), frozenset([3, 4, 5, 6])}
+ cgc = nx.chordal_graph_cliques
+ assert set(cgc(self.connected_chordal_G)) == cliques
+
+ def test_complete_to_chordal_graph(self):
+ fgrg = nx.fast_gnp_random_graph
+ test_graphs = [
+ nx.barbell_graph(6, 2),
+ nx.cycle_graph(15),
+ nx.wheel_graph(20),
+ nx.grid_graph([10, 4]),
+ nx.ladder_graph(15),
+ nx.star_graph(5),
+ nx.bull_graph(),
+ fgrg(20, 0.3, seed=1),
+ ]
+ for G in test_graphs:
+ H, a = nx.complete_to_chordal_graph(G)
+ assert nx.is_chordal(H)
+ assert len(a) == H.number_of_nodes()
+ if nx.is_chordal(G):
+ assert G.number_of_edges() == H.number_of_edges()
+ assert set(a.values()) == {0}
+ else:
+ assert len(set(a.values())) == H.number_of_nodes()
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_cluster.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_cluster.py
new file mode 100644
index 0000000000000000000000000000000000000000..d69f036ff6cc87e179af5fde6ce7ee21b6c193c4
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_cluster.py
@@ -0,0 +1,543 @@
+import pytest
+
+import networkx as nx
+
+
+class TestTriangles:
+ def test_empty(self):
+ G = nx.Graph()
+ assert list(nx.triangles(G).values()) == []
+
+ def test_path(self):
+ G = nx.path_graph(10)
+ assert list(nx.triangles(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
+ assert nx.triangles(G) == {
+ 0: 0,
+ 1: 0,
+ 2: 0,
+ 3: 0,
+ 4: 0,
+ 5: 0,
+ 6: 0,
+ 7: 0,
+ 8: 0,
+ 9: 0,
+ }
+
+ def test_cubical(self):
+ G = nx.cubical_graph()
+ assert list(nx.triangles(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0]
+ assert nx.triangles(G, 1) == 0
+ assert list(nx.triangles(G, [1, 2]).values()) == [0, 0]
+ assert nx.triangles(G, 1) == 0
+ assert nx.triangles(G, [1, 2]) == {1: 0, 2: 0}
+
+ def test_k5(self):
+ G = nx.complete_graph(5)
+ assert list(nx.triangles(G).values()) == [6, 6, 6, 6, 6]
+ assert sum(nx.triangles(G).values()) / 3 == 10
+ assert nx.triangles(G, 1) == 6
+ G.remove_edge(1, 2)
+ assert list(nx.triangles(G).values()) == [5, 3, 3, 5, 5]
+ assert nx.triangles(G, 1) == 3
+ G.add_edge(3, 3) # ignore self-edges
+ assert list(nx.triangles(G).values()) == [5, 3, 3, 5, 5]
+ assert nx.triangles(G, 3) == 5
+
+
+class TestDirectedClustering:
+ def test_clustering(self):
+ G = nx.DiGraph()
+ assert list(nx.clustering(G).values()) == []
+ assert nx.clustering(G) == {}
+
+ def test_path(self):
+ G = nx.path_graph(10, create_using=nx.DiGraph())
+ assert list(nx.clustering(G).values()) == [
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ ]
+ assert nx.clustering(G) == {
+ 0: 0,
+ 1: 0,
+ 2: 0,
+ 3: 0,
+ 4: 0,
+ 5: 0,
+ 6: 0,
+ 7: 0,
+ 8: 0,
+ 9: 0,
+ }
+ assert nx.clustering(G, 0) == 0
+
+ def test_k5(self):
+ G = nx.complete_graph(5, create_using=nx.DiGraph())
+ assert list(nx.clustering(G).values()) == [1, 1, 1, 1, 1]
+ assert nx.average_clustering(G) == 1
+ G.remove_edge(1, 2)
+ assert list(nx.clustering(G).values()) == [
+ 11 / 12,
+ 1,
+ 1,
+ 11 / 12,
+ 11 / 12,
+ ]
+ assert nx.clustering(G, [1, 4]) == {1: 1, 4: 11 / 12}
+ G.remove_edge(2, 1)
+ assert list(nx.clustering(G).values()) == [
+ 5 / 6,
+ 1,
+ 1,
+ 5 / 6,
+ 5 / 6,
+ ]
+ assert nx.clustering(G, [1, 4]) == {1: 1, 4: 0.83333333333333337}
+ assert nx.clustering(G, 4) == 5 / 6
+
+ def test_triangle_and_edge(self):
+ G = nx.cycle_graph(3, create_using=nx.DiGraph())
+ G.add_edge(0, 4)
+ assert nx.clustering(G)[0] == 1 / 6
+
+
+class TestDirectedWeightedClustering:
+ @classmethod
+ def setup_class(cls):
+ global np
+ np = pytest.importorskip("numpy")
+
+ def test_clustering(self):
+ G = nx.DiGraph()
+ assert list(nx.clustering(G, weight="weight").values()) == []
+ assert nx.clustering(G) == {}
+
+ def test_path(self):
+ G = nx.path_graph(10, create_using=nx.DiGraph())
+ assert list(nx.clustering(G, weight="weight").values()) == [
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ ]
+ assert nx.clustering(G, weight="weight") == {
+ 0: 0,
+ 1: 0,
+ 2: 0,
+ 3: 0,
+ 4: 0,
+ 5: 0,
+ 6: 0,
+ 7: 0,
+ 8: 0,
+ 9: 0,
+ }
+
+ def test_k5(self):
+ G = nx.complete_graph(5, create_using=nx.DiGraph())
+ assert list(nx.clustering(G, weight="weight").values()) == [1, 1, 1, 1, 1]
+ assert nx.average_clustering(G, weight="weight") == 1
+ G.remove_edge(1, 2)
+ assert list(nx.clustering(G, weight="weight").values()) == [
+ 11 / 12,
+ 1,
+ 1,
+ 11 / 12,
+ 11 / 12,
+ ]
+ assert nx.clustering(G, [1, 4], weight="weight") == {1: 1, 4: 11 / 12}
+ G.remove_edge(2, 1)
+ assert list(nx.clustering(G, weight="weight").values()) == [
+ 5 / 6,
+ 1,
+ 1,
+ 5 / 6,
+ 5 / 6,
+ ]
+ assert nx.clustering(G, [1, 4], weight="weight") == {
+ 1: 1,
+ 4: 0.83333333333333337,
+ }
+
+ def test_triangle_and_edge(self):
+ G = nx.cycle_graph(3, create_using=nx.DiGraph())
+ G.add_edge(0, 4, weight=2)
+ assert nx.clustering(G)[0] == 1 / 6
+ # Relaxed comparisons to allow graphblas-algorithms to pass tests
+ np.testing.assert_allclose(nx.clustering(G, weight="weight")[0], 1 / 12)
+ np.testing.assert_allclose(nx.clustering(G, 0, weight="weight"), 1 / 12)
+
+
+class TestWeightedClustering:
+ @classmethod
+ def setup_class(cls):
+ global np
+ np = pytest.importorskip("numpy")
+
+ def test_clustering(self):
+ G = nx.Graph()
+ assert list(nx.clustering(G, weight="weight").values()) == []
+ assert nx.clustering(G) == {}
+
+ def test_path(self):
+ G = nx.path_graph(10)
+ assert list(nx.clustering(G, weight="weight").values()) == [
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ ]
+ assert nx.clustering(G, weight="weight") == {
+ 0: 0,
+ 1: 0,
+ 2: 0,
+ 3: 0,
+ 4: 0,
+ 5: 0,
+ 6: 0,
+ 7: 0,
+ 8: 0,
+ 9: 0,
+ }
+
+ def test_cubical(self):
+ G = nx.cubical_graph()
+ assert list(nx.clustering(G, weight="weight").values()) == [
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ ]
+ assert nx.clustering(G, 1) == 0
+ assert list(nx.clustering(G, [1, 2], weight="weight").values()) == [0, 0]
+ assert nx.clustering(G, 1, weight="weight") == 0
+ assert nx.clustering(G, [1, 2], weight="weight") == {1: 0, 2: 0}
+
+ def test_k5(self):
+ G = nx.complete_graph(5)
+ assert list(nx.clustering(G, weight="weight").values()) == [1, 1, 1, 1, 1]
+ assert nx.average_clustering(G, weight="weight") == 1
+ G.remove_edge(1, 2)
+ assert list(nx.clustering(G, weight="weight").values()) == [
+ 5 / 6,
+ 1,
+ 1,
+ 5 / 6,
+ 5 / 6,
+ ]
+ assert nx.clustering(G, [1, 4], weight="weight") == {
+ 1: 1,
+ 4: 0.83333333333333337,
+ }
+
+ def test_triangle_and_edge(self):
+ G = nx.cycle_graph(3)
+ G.add_edge(0, 4, weight=2)
+ assert nx.clustering(G)[0] == 1 / 3
+ np.testing.assert_allclose(nx.clustering(G, weight="weight")[0], 1 / 6)
+ np.testing.assert_allclose(nx.clustering(G, 0, weight="weight"), 1 / 6)
+
+ def test_triangle_and_signed_edge(self):
+ G = nx.cycle_graph(3)
+ G.add_edge(0, 1, weight=-1)
+ G.add_edge(3, 0, weight=0)
+ assert nx.clustering(G)[0] == 1 / 3
+ assert nx.clustering(G, weight="weight")[0] == -1 / 3
+
+
+class TestClustering:
+ @classmethod
+ def setup_class(cls):
+ pytest.importorskip("numpy")
+
+ def test_clustering(self):
+ G = nx.Graph()
+ assert list(nx.clustering(G).values()) == []
+ assert nx.clustering(G) == {}
+
+ def test_path(self):
+ G = nx.path_graph(10)
+ assert list(nx.clustering(G).values()) == [
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ ]
+ assert nx.clustering(G) == {
+ 0: 0,
+ 1: 0,
+ 2: 0,
+ 3: 0,
+ 4: 0,
+ 5: 0,
+ 6: 0,
+ 7: 0,
+ 8: 0,
+ 9: 0,
+ }
+
+ def test_cubical(self):
+ G = nx.cubical_graph()
+ assert list(nx.clustering(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0]
+ assert nx.clustering(G, 1) == 0
+ assert list(nx.clustering(G, [1, 2]).values()) == [0, 0]
+ assert nx.clustering(G, 1) == 0
+ assert nx.clustering(G, [1, 2]) == {1: 0, 2: 0}
+
+ def test_k5(self):
+ G = nx.complete_graph(5)
+ assert list(nx.clustering(G).values()) == [1, 1, 1, 1, 1]
+ assert nx.average_clustering(G) == 1
+ G.remove_edge(1, 2)
+ assert list(nx.clustering(G).values()) == [
+ 5 / 6,
+ 1,
+ 1,
+ 5 / 6,
+ 5 / 6,
+ ]
+ assert nx.clustering(G, [1, 4]) == {1: 1, 4: 0.83333333333333337}
+
+ def test_k5_signed(self):
+ G = nx.complete_graph(5)
+ assert list(nx.clustering(G).values()) == [1, 1, 1, 1, 1]
+ assert nx.average_clustering(G) == 1
+ G.remove_edge(1, 2)
+ G.add_edge(0, 1, weight=-1)
+ assert list(nx.clustering(G, weight="weight").values()) == [
+ 1 / 6,
+ -1 / 3,
+ 1,
+ 3 / 6,
+ 3 / 6,
+ ]
+
+
+class TestTransitivity:
+ def test_transitivity(self):
+ G = nx.Graph()
+ assert nx.transitivity(G) == 0
+
+ def test_path(self):
+ G = nx.path_graph(10)
+ assert nx.transitivity(G) == 0
+
+ def test_cubical(self):
+ G = nx.cubical_graph()
+ assert nx.transitivity(G) == 0
+
+ def test_k5(self):
+ G = nx.complete_graph(5)
+ assert nx.transitivity(G) == 1
+ G.remove_edge(1, 2)
+ assert nx.transitivity(G) == 0.875
+
+
+class TestSquareClustering:
+ def test_clustering(self):
+ G = nx.Graph()
+ assert list(nx.square_clustering(G).values()) == []
+ assert nx.square_clustering(G) == {}
+
+ def test_path(self):
+ G = nx.path_graph(10)
+ assert list(nx.square_clustering(G).values()) == [
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ ]
+ assert nx.square_clustering(G) == {
+ 0: 0,
+ 1: 0,
+ 2: 0,
+ 3: 0,
+ 4: 0,
+ 5: 0,
+ 6: 0,
+ 7: 0,
+ 8: 0,
+ 9: 0,
+ }
+
+ def test_cubical(self):
+ G = nx.cubical_graph()
+ assert list(nx.square_clustering(G).values()) == [
+ 1 / 3,
+ 1 / 3,
+ 1 / 3,
+ 1 / 3,
+ 1 / 3,
+ 1 / 3,
+ 1 / 3,
+ 1 / 3,
+ ]
+ assert list(nx.square_clustering(G, [1, 2]).values()) == [1 / 3, 1 / 3]
+ assert nx.square_clustering(G, [1])[1] == 1 / 3
+ assert nx.square_clustering(G, 1) == 1 / 3
+ assert nx.square_clustering(G, [1, 2]) == {1: 1 / 3, 2: 1 / 3}
+
+ def test_k5(self):
+ G = nx.complete_graph(5)
+ assert list(nx.square_clustering(G).values()) == [1, 1, 1, 1, 1]
+
+ def test_bipartite_k5(self):
+ G = nx.complete_bipartite_graph(5, 5)
+ assert list(nx.square_clustering(G).values()) == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
+
+ def test_lind_square_clustering(self):
+ """Test C4 for figure 1 Lind et al (2005)"""
+ G = nx.Graph(
+ [
+ (1, 2),
+ (1, 3),
+ (1, 6),
+ (1, 7),
+ (2, 4),
+ (2, 5),
+ (3, 4),
+ (3, 5),
+ (6, 7),
+ (7, 8),
+ (6, 8),
+ (7, 9),
+ (7, 10),
+ (6, 11),
+ (6, 12),
+ (2, 13),
+ (2, 14),
+ (3, 15),
+ (3, 16),
+ ]
+ )
+ G1 = G.subgraph([1, 2, 3, 4, 5, 13, 14, 15, 16])
+ G2 = G.subgraph([1, 6, 7, 8, 9, 10, 11, 12])
+ assert nx.square_clustering(G, [1])[1] == 3 / 43
+ assert nx.square_clustering(G1, [1])[1] == 2 / 6
+ assert nx.square_clustering(G2, [1])[1] == 1 / 5
+
+ def test_peng_square_clustering(self):
+ """Test eq2 for figure 1 Peng et al (2008)"""
+ G = nx.Graph([(1, 2), (1, 3), (2, 4), (3, 4), (3, 5), (3, 6)])
+ assert nx.square_clustering(G, [1])[1] == 1 / 3
+
+
+class TestAverageClustering:
+ @classmethod
+ def setup_class(cls):
+ pytest.importorskip("numpy")
+
+ def test_empty(self):
+ G = nx.Graph()
+ with pytest.raises(ZeroDivisionError):
+ nx.average_clustering(G)
+
+ def test_average_clustering(self):
+ G = nx.cycle_graph(3)
+ G.add_edge(2, 3)
+ assert nx.average_clustering(G) == (1 + 1 + 1 / 3) / 4
+ assert nx.average_clustering(G, count_zeros=True) == (1 + 1 + 1 / 3) / 4
+ assert nx.average_clustering(G, count_zeros=False) == (1 + 1 + 1 / 3) / 3
+ assert nx.average_clustering(G, [1, 2, 3]) == (1 + 1 / 3) / 3
+ assert nx.average_clustering(G, [1, 2, 3], count_zeros=True) == (1 + 1 / 3) / 3
+ assert nx.average_clustering(G, [1, 2, 3], count_zeros=False) == (1 + 1 / 3) / 2
+
+ def test_average_clustering_signed(self):
+ G = nx.cycle_graph(3)
+ G.add_edge(2, 3)
+ G.add_edge(0, 1, weight=-1)
+ assert nx.average_clustering(G, weight="weight") == (-1 - 1 - 1 / 3) / 4
+ assert (
+ nx.average_clustering(G, weight="weight", count_zeros=True)
+ == (-1 - 1 - 1 / 3) / 4
+ )
+ assert (
+ nx.average_clustering(G, weight="weight", count_zeros=False)
+ == (-1 - 1 - 1 / 3) / 3
+ )
+
+
+class TestDirectedAverageClustering:
+ @classmethod
+ def setup_class(cls):
+ pytest.importorskip("numpy")
+
+ def test_empty(self):
+ G = nx.DiGraph()
+ with pytest.raises(ZeroDivisionError):
+ nx.average_clustering(G)
+
+ def test_average_clustering(self):
+ G = nx.cycle_graph(3, create_using=nx.DiGraph())
+ G.add_edge(2, 3)
+ assert nx.average_clustering(G) == (1 + 1 + 1 / 3) / 8
+ assert nx.average_clustering(G, count_zeros=True) == (1 + 1 + 1 / 3) / 8
+ assert nx.average_clustering(G, count_zeros=False) == (1 + 1 + 1 / 3) / 6
+ assert nx.average_clustering(G, [1, 2, 3]) == (1 + 1 / 3) / 6
+ assert nx.average_clustering(G, [1, 2, 3], count_zeros=True) == (1 + 1 / 3) / 6
+ assert nx.average_clustering(G, [1, 2, 3], count_zeros=False) == (1 + 1 / 3) / 4
+
+
+class TestGeneralizedDegree:
+ def test_generalized_degree(self):
+ G = nx.Graph()
+ assert nx.generalized_degree(G) == {}
+
+ def test_path(self):
+ G = nx.path_graph(5)
+ assert nx.generalized_degree(G, 0) == {0: 1}
+ assert nx.generalized_degree(G, 1) == {0: 2}
+
+ def test_cubical(self):
+ G = nx.cubical_graph()
+ assert nx.generalized_degree(G, 0) == {0: 3}
+
+ def test_k5(self):
+ G = nx.complete_graph(5)
+ assert nx.generalized_degree(G, 0) == {3: 4}
+ G.remove_edge(0, 1)
+ assert nx.generalized_degree(G, 0) == {2: 3}
+ assert nx.generalized_degree(G, [1, 2]) == {1: {2: 3}, 2: {2: 2, 3: 2}}
+ assert nx.generalized_degree(G) == {
+ 0: {2: 3},
+ 1: {2: 3},
+ 2: {2: 2, 3: 2},
+ 3: {2: 2, 3: 2},
+ 4: {2: 2, 3: 2},
+ }
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_hybrid.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_hybrid.py
new file mode 100644
index 0000000000000000000000000000000000000000..6af0016498549caed58772e304c93113a8b693d9
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_hybrid.py
@@ -0,0 +1,24 @@
+import networkx as nx
+
+
+def test_2d_grid_graph():
+ # FC article claims 2d grid graph of size n is (3,3)-connected
+ # and (5,9)-connected, but I don't think it is (5,9)-connected
+ G = nx.grid_2d_graph(8, 8, periodic=True)
+ assert nx.is_kl_connected(G, 3, 3)
+ assert not nx.is_kl_connected(G, 5, 9)
+ (H, graphOK) = nx.kl_connected_subgraph(G, 5, 9, same_as_graph=True)
+ assert not graphOK
+
+
+def test_small_graph():
+ G = nx.Graph()
+ G.add_edge(1, 2)
+ G.add_edge(1, 3)
+ G.add_edge(2, 3)
+ assert nx.is_kl_connected(G, 2, 2)
+ H = nx.kl_connected_subgraph(G, 2, 2)
+ (H, graphOK) = nx.kl_connected_subgraph(
+ G, 2, 2, low_memory=True, same_as_graph=True
+ )
+ assert graphOK
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_link_prediction.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_link_prediction.py
new file mode 100644
index 0000000000000000000000000000000000000000..7fc04d20672ef13058779f55be48ddac1b1a048d
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_link_prediction.py
@@ -0,0 +1,582 @@
+import math
+from functools import partial
+
+import pytest
+
+import networkx as nx
+
+
+def _test_func(G, ebunch, expected, predict_func, **kwargs):
+ result = predict_func(G, ebunch, **kwargs)
+ exp_dict = {tuple(sorted([u, v])): score for u, v, score in expected}
+ res_dict = {tuple(sorted([u, v])): score for u, v, score in result}
+
+ assert len(exp_dict) == len(res_dict)
+ for p in exp_dict:
+ assert exp_dict[p] == pytest.approx(res_dict[p], abs=1e-7)
+
+
+class TestResourceAllocationIndex:
+ @classmethod
+ def setup_class(cls):
+ cls.func = staticmethod(nx.resource_allocation_index)
+ cls.test = partial(_test_func, predict_func=cls.func)
+
+ def test_K5(self):
+ G = nx.complete_graph(5)
+ self.test(G, [(0, 1)], [(0, 1, 0.75)])
+
+ def test_P3(self):
+ G = nx.path_graph(3)
+ self.test(G, [(0, 2)], [(0, 2, 0.5)])
+
+ def test_S4(self):
+ G = nx.star_graph(4)
+ self.test(G, [(1, 2)], [(1, 2, 0.25)])
+
+ def test_notimplemented(self):
+ assert pytest.raises(
+ nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)]
+ )
+ assert pytest.raises(
+ nx.NetworkXNotImplemented,
+ self.func,
+ nx.MultiGraph([(0, 1), (1, 2)]),
+ [(0, 2)],
+ )
+ assert pytest.raises(
+ nx.NetworkXNotImplemented,
+ self.func,
+ nx.MultiDiGraph([(0, 1), (1, 2)]),
+ [(0, 2)],
+ )
+
+ def test_no_common_neighbor(self):
+ G = nx.Graph()
+ G.add_nodes_from([0, 1])
+ self.test(G, [(0, 1)], [(0, 1, 0)])
+
+ def test_equal_nodes(self):
+ G = nx.complete_graph(4)
+ self.test(G, [(0, 0)], [(0, 0, 1)])
+
+ def test_all_nonexistent_edges(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+ self.test(G, None, [(0, 3, 0.5), (1, 2, 0.5), (1, 3, 0)])
+
+
+class TestJaccardCoefficient:
+ @classmethod
+ def setup_class(cls):
+ cls.func = staticmethod(nx.jaccard_coefficient)
+ cls.test = partial(_test_func, predict_func=cls.func)
+
+ def test_K5(self):
+ G = nx.complete_graph(5)
+ self.test(G, [(0, 1)], [(0, 1, 0.6)])
+
+ def test_P4(self):
+ G = nx.path_graph(4)
+ self.test(G, [(0, 2)], [(0, 2, 0.5)])
+
+ def test_notimplemented(self):
+ assert pytest.raises(
+ nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)]
+ )
+ assert pytest.raises(
+ nx.NetworkXNotImplemented,
+ self.func,
+ nx.MultiGraph([(0, 1), (1, 2)]),
+ [(0, 2)],
+ )
+ assert pytest.raises(
+ nx.NetworkXNotImplemented,
+ self.func,
+ nx.MultiDiGraph([(0, 1), (1, 2)]),
+ [(0, 2)],
+ )
+
+ def test_no_common_neighbor(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (2, 3)])
+ self.test(G, [(0, 2)], [(0, 2, 0)])
+
+ def test_isolated_nodes(self):
+ G = nx.Graph()
+ G.add_nodes_from([0, 1])
+ self.test(G, [(0, 1)], [(0, 1, 0)])
+
+ def test_all_nonexistent_edges(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+ self.test(G, None, [(0, 3, 0.5), (1, 2, 0.5), (1, 3, 0)])
+
+
+class TestAdamicAdarIndex:
+ @classmethod
+ def setup_class(cls):
+ cls.func = staticmethod(nx.adamic_adar_index)
+ cls.test = partial(_test_func, predict_func=cls.func)
+
+ def test_K5(self):
+ G = nx.complete_graph(5)
+ self.test(G, [(0, 1)], [(0, 1, 3 / math.log(4))])
+
+ def test_P3(self):
+ G = nx.path_graph(3)
+ self.test(G, [(0, 2)], [(0, 2, 1 / math.log(2))])
+
+ def test_S4(self):
+ G = nx.star_graph(4)
+ self.test(G, [(1, 2)], [(1, 2, 1 / math.log(4))])
+
+ def test_notimplemented(self):
+ assert pytest.raises(
+ nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)]
+ )
+ assert pytest.raises(
+ nx.NetworkXNotImplemented,
+ self.func,
+ nx.MultiGraph([(0, 1), (1, 2)]),
+ [(0, 2)],
+ )
+ assert pytest.raises(
+ nx.NetworkXNotImplemented,
+ self.func,
+ nx.MultiDiGraph([(0, 1), (1, 2)]),
+ [(0, 2)],
+ )
+
+ def test_no_common_neighbor(self):
+ G = nx.Graph()
+ G.add_nodes_from([0, 1])
+ self.test(G, [(0, 1)], [(0, 1, 0)])
+
+ def test_equal_nodes(self):
+ G = nx.complete_graph(4)
+ self.test(G, [(0, 0)], [(0, 0, 3 / math.log(3))])
+
+ def test_all_nonexistent_edges(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+ self.test(
+ G, None, [(0, 3, 1 / math.log(2)), (1, 2, 1 / math.log(2)), (1, 3, 0)]
+ )
+
+
+class TestCommonNeighborCentrality:
+ @classmethod
+ def setup_class(cls):
+ cls.func = staticmethod(nx.common_neighbor_centrality)
+ cls.test = partial(_test_func, predict_func=cls.func)
+
+ def test_K5(self):
+ G = nx.complete_graph(5)
+ self.test(G, [(0, 1)], [(0, 1, 3.0)], alpha=1)
+ self.test(G, [(0, 1)], [(0, 1, 5.0)], alpha=0)
+
+ def test_P3(self):
+ G = nx.path_graph(3)
+ self.test(G, [(0, 2)], [(0, 2, 1.25)], alpha=0.5)
+
+ def test_S4(self):
+ G = nx.star_graph(4)
+ self.test(G, [(1, 2)], [(1, 2, 1.75)], alpha=0.5)
+
+ @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph))
+ def test_notimplemented(self, graph_type):
+ assert pytest.raises(
+ nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)]
+ )
+
+ def test_no_common_neighbor(self):
+ G = nx.Graph()
+ G.add_nodes_from([0, 1])
+ self.test(G, [(0, 1)], [(0, 1, 0)])
+
+ def test_equal_nodes(self):
+ G = nx.complete_graph(4)
+ assert pytest.raises(nx.NetworkXAlgorithmError, self.test, G, [(0, 0)], [])
+
+ def test_all_nonexistent_edges(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+ self.test(G, None, [(0, 3, 1.5), (1, 2, 1.5), (1, 3, 2 / 3)], alpha=0.5)
+
+
+class TestPreferentialAttachment:
+ @classmethod
+ def setup_class(cls):
+ cls.func = staticmethod(nx.preferential_attachment)
+ cls.test = partial(_test_func, predict_func=cls.func)
+
+ def test_K5(self):
+ G = nx.complete_graph(5)
+ self.test(G, [(0, 1)], [(0, 1, 16)])
+
+ def test_P3(self):
+ G = nx.path_graph(3)
+ self.test(G, [(0, 1)], [(0, 1, 2)])
+
+ def test_S4(self):
+ G = nx.star_graph(4)
+ self.test(G, [(0, 2)], [(0, 2, 4)])
+
+ def test_notimplemented(self):
+ assert pytest.raises(
+ nx.NetworkXNotImplemented, self.func, nx.DiGraph([(0, 1), (1, 2)]), [(0, 2)]
+ )
+ assert pytest.raises(
+ nx.NetworkXNotImplemented,
+ self.func,
+ nx.MultiGraph([(0, 1), (1, 2)]),
+ [(0, 2)],
+ )
+ assert pytest.raises(
+ nx.NetworkXNotImplemented,
+ self.func,
+ nx.MultiDiGraph([(0, 1), (1, 2)]),
+ [(0, 2)],
+ )
+
+ def test_zero_degrees(self):
+ G = nx.Graph()
+ G.add_nodes_from([0, 1])
+ self.test(G, [(0, 1)], [(0, 1, 0)])
+
+ def test_all_nonexistent_edges(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+ self.test(G, None, [(0, 3, 2), (1, 2, 2), (1, 3, 1)])
+
+
+class TestCNSoundarajanHopcroft:
+ @classmethod
+ def setup_class(cls):
+ cls.func = staticmethod(nx.cn_soundarajan_hopcroft)
+ cls.test = partial(_test_func, predict_func=cls.func, community="community")
+
+ def test_K5(self):
+ G = nx.complete_graph(5)
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 0
+ G.nodes[2]["community"] = 0
+ G.nodes[3]["community"] = 0
+ G.nodes[4]["community"] = 1
+ self.test(G, [(0, 1)], [(0, 1, 5)])
+
+ def test_P3(self):
+ G = nx.path_graph(3)
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 1
+ G.nodes[2]["community"] = 0
+ self.test(G, [(0, 2)], [(0, 2, 1)])
+
+ def test_S4(self):
+ G = nx.star_graph(4)
+ G.nodes[0]["community"] = 1
+ G.nodes[1]["community"] = 1
+ G.nodes[2]["community"] = 1
+ G.nodes[3]["community"] = 0
+ G.nodes[4]["community"] = 0
+ self.test(G, [(1, 2)], [(1, 2, 2)])
+
+ def test_notimplemented(self):
+ G = nx.DiGraph([(0, 1), (1, 2)])
+ G.add_nodes_from([0, 1, 2], community=0)
+ assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])
+ G = nx.MultiGraph([(0, 1), (1, 2)])
+ G.add_nodes_from([0, 1, 2], community=0)
+ assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])
+ G = nx.MultiDiGraph([(0, 1), (1, 2)])
+ G.add_nodes_from([0, 1, 2], community=0)
+ assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])
+
+ def test_no_common_neighbor(self):
+ G = nx.Graph()
+ G.add_nodes_from([0, 1])
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 0
+ self.test(G, [(0, 1)], [(0, 1, 0)])
+
+ def test_equal_nodes(self):
+ G = nx.complete_graph(3)
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 0
+ G.nodes[2]["community"] = 0
+ self.test(G, [(0, 0)], [(0, 0, 4)])
+
+ def test_different_community(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 0
+ G.nodes[2]["community"] = 0
+ G.nodes[3]["community"] = 1
+ self.test(G, [(0, 3)], [(0, 3, 2)])
+
+ def test_no_community_information(self):
+ G = nx.complete_graph(5)
+ assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 1)]))
+
+ def test_insufficient_community_information(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 0
+ G.nodes[3]["community"] = 0
+ assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)]))
+
+ def test_sufficient_community_information(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)])
+ G.nodes[1]["community"] = 0
+ G.nodes[2]["community"] = 0
+ G.nodes[3]["community"] = 0
+ G.nodes[4]["community"] = 0
+ self.test(G, [(1, 4)], [(1, 4, 4)])
+
+ def test_custom_community_attribute_name(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
+ G.nodes[0]["cmty"] = 0
+ G.nodes[1]["cmty"] = 0
+ G.nodes[2]["cmty"] = 0
+ G.nodes[3]["cmty"] = 1
+ self.test(G, [(0, 3)], [(0, 3, 2)], community="cmty")
+
+ def test_all_nonexistent_edges(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 1
+ G.nodes[2]["community"] = 0
+ G.nodes[3]["community"] = 0
+ self.test(G, None, [(0, 3, 2), (1, 2, 1), (1, 3, 0)])
+
+
+class TestRAIndexSoundarajanHopcroft:
+ @classmethod
+ def setup_class(cls):
+ cls.func = staticmethod(nx.ra_index_soundarajan_hopcroft)
+ cls.test = partial(_test_func, predict_func=cls.func, community="community")
+
+ def test_K5(self):
+ G = nx.complete_graph(5)
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 0
+ G.nodes[2]["community"] = 0
+ G.nodes[3]["community"] = 0
+ G.nodes[4]["community"] = 1
+ self.test(G, [(0, 1)], [(0, 1, 0.5)])
+
+ def test_P3(self):
+ G = nx.path_graph(3)
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 1
+ G.nodes[2]["community"] = 0
+ self.test(G, [(0, 2)], [(0, 2, 0)])
+
+ def test_S4(self):
+ G = nx.star_graph(4)
+ G.nodes[0]["community"] = 1
+ G.nodes[1]["community"] = 1
+ G.nodes[2]["community"] = 1
+ G.nodes[3]["community"] = 0
+ G.nodes[4]["community"] = 0
+ self.test(G, [(1, 2)], [(1, 2, 0.25)])
+
+ def test_notimplemented(self):
+ G = nx.DiGraph([(0, 1), (1, 2)])
+ G.add_nodes_from([0, 1, 2], community=0)
+ assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])
+ G = nx.MultiGraph([(0, 1), (1, 2)])
+ G.add_nodes_from([0, 1, 2], community=0)
+ assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])
+ G = nx.MultiDiGraph([(0, 1), (1, 2)])
+ G.add_nodes_from([0, 1, 2], community=0)
+ assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])
+
+ def test_no_common_neighbor(self):
+ G = nx.Graph()
+ G.add_nodes_from([0, 1])
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 0
+ self.test(G, [(0, 1)], [(0, 1, 0)])
+
+ def test_equal_nodes(self):
+ G = nx.complete_graph(3)
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 0
+ G.nodes[2]["community"] = 0
+ self.test(G, [(0, 0)], [(0, 0, 1)])
+
+ def test_different_community(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 0
+ G.nodes[2]["community"] = 0
+ G.nodes[3]["community"] = 1
+ self.test(G, [(0, 3)], [(0, 3, 0)])
+
+ def test_no_community_information(self):
+ G = nx.complete_graph(5)
+ assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 1)]))
+
+ def test_insufficient_community_information(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 0
+ G.nodes[3]["community"] = 0
+ assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)]))
+
+ def test_sufficient_community_information(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)])
+ G.nodes[1]["community"] = 0
+ G.nodes[2]["community"] = 0
+ G.nodes[3]["community"] = 0
+ G.nodes[4]["community"] = 0
+ self.test(G, [(1, 4)], [(1, 4, 1)])
+
+ def test_custom_community_attribute_name(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
+ G.nodes[0]["cmty"] = 0
+ G.nodes[1]["cmty"] = 0
+ G.nodes[2]["cmty"] = 0
+ G.nodes[3]["cmty"] = 1
+ self.test(G, [(0, 3)], [(0, 3, 0)], community="cmty")
+
+ def test_all_nonexistent_edges(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 1
+ G.nodes[2]["community"] = 0
+ G.nodes[3]["community"] = 0
+ self.test(G, None, [(0, 3, 0.5), (1, 2, 0), (1, 3, 0)])
+
+
+class TestWithinInterCluster:
+ @classmethod
+ def setup_class(cls):
+ cls.delta = 0.001
+ cls.func = staticmethod(nx.within_inter_cluster)
+ cls.test = partial(
+ _test_func, predict_func=cls.func, delta=cls.delta, community="community"
+ )
+
+ def test_K5(self):
+ G = nx.complete_graph(5)
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 0
+ G.nodes[2]["community"] = 0
+ G.nodes[3]["community"] = 0
+ G.nodes[4]["community"] = 1
+ self.test(G, [(0, 1)], [(0, 1, 2 / (1 + self.delta))])
+
+ def test_P3(self):
+ G = nx.path_graph(3)
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 1
+ G.nodes[2]["community"] = 0
+ self.test(G, [(0, 2)], [(0, 2, 0)])
+
+ def test_S4(self):
+ G = nx.star_graph(4)
+ G.nodes[0]["community"] = 1
+ G.nodes[1]["community"] = 1
+ G.nodes[2]["community"] = 1
+ G.nodes[3]["community"] = 0
+ G.nodes[4]["community"] = 0
+ self.test(G, [(1, 2)], [(1, 2, 1 / self.delta)])
+
+ def test_notimplemented(self):
+ G = nx.DiGraph([(0, 1), (1, 2)])
+ G.add_nodes_from([0, 1, 2], community=0)
+ assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])
+ G = nx.MultiGraph([(0, 1), (1, 2)])
+ G.add_nodes_from([0, 1, 2], community=0)
+ assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])
+ G = nx.MultiDiGraph([(0, 1), (1, 2)])
+ G.add_nodes_from([0, 1, 2], community=0)
+ assert pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)])
+
+ def test_no_common_neighbor(self):
+ G = nx.Graph()
+ G.add_nodes_from([0, 1])
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 0
+ self.test(G, [(0, 1)], [(0, 1, 0)])
+
+ def test_equal_nodes(self):
+ G = nx.complete_graph(3)
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 0
+ G.nodes[2]["community"] = 0
+ self.test(G, [(0, 0)], [(0, 0, 2 / self.delta)])
+
+ def test_different_community(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 0
+ G.nodes[2]["community"] = 0
+ G.nodes[3]["community"] = 1
+ self.test(G, [(0, 3)], [(0, 3, 0)])
+
+ def test_no_inter_cluster_common_neighbor(self):
+ G = nx.complete_graph(4)
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 0
+ G.nodes[2]["community"] = 0
+ G.nodes[3]["community"] = 0
+ self.test(G, [(0, 3)], [(0, 3, 2 / self.delta)])
+
+ def test_no_community_information(self):
+ G = nx.complete_graph(5)
+ assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 1)]))
+
+ def test_insufficient_community_information(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 0
+ G.nodes[3]["community"] = 0
+ assert pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)]))
+
+ def test_sufficient_community_information(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)])
+ G.nodes[1]["community"] = 0
+ G.nodes[2]["community"] = 0
+ G.nodes[3]["community"] = 0
+ G.nodes[4]["community"] = 0
+ self.test(G, [(1, 4)], [(1, 4, 2 / self.delta)])
+
+ def test_invalid_delta(self):
+ G = nx.complete_graph(3)
+ G.add_nodes_from([0, 1, 2], community=0)
+ assert pytest.raises(nx.NetworkXAlgorithmError, self.func, G, [(0, 1)], 0)
+ assert pytest.raises(nx.NetworkXAlgorithmError, self.func, G, [(0, 1)], -0.5)
+
+ def test_custom_community_attribute_name(self):
+ G = nx.complete_graph(4)
+ G.nodes[0]["cmty"] = 0
+ G.nodes[1]["cmty"] = 0
+ G.nodes[2]["cmty"] = 0
+ G.nodes[3]["cmty"] = 0
+ self.test(G, [(0, 3)], [(0, 3, 2 / self.delta)], community="cmty")
+
+ def test_all_nonexistent_edges(self):
+ G = nx.Graph()
+ G.add_edges_from([(0, 1), (0, 2), (2, 3)])
+ G.nodes[0]["community"] = 0
+ G.nodes[1]["community"] = 1
+ G.nodes[2]["community"] = 0
+ G.nodes[3]["community"] = 0
+ self.test(G, None, [(0, 3, 1 / self.delta), (1, 2, 0), (1, 3, 0)])
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_matching.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_matching.py
new file mode 100644
index 0000000000000000000000000000000000000000..37853e3896c0fd6bcac1f46524a844ae2e2fb518
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_matching.py
@@ -0,0 +1,605 @@
+import math
+from itertools import permutations
+
+from pytest import raises
+
+import networkx as nx
+from networkx.algorithms.matching import matching_dict_to_set
+from networkx.utils import edges_equal
+
+
+class TestMaxWeightMatching:
+ """Unit tests for the
+ :func:`~networkx.algorithms.matching.max_weight_matching` function.
+
+ """
+
+ def test_trivial1(self):
+ """Empty graph"""
+ G = nx.Graph()
+ assert nx.max_weight_matching(G) == set()
+ assert nx.min_weight_matching(G) == set()
+
+ def test_selfloop(self):
+ G = nx.Graph()
+ G.add_edge(0, 0, weight=100)
+ assert nx.max_weight_matching(G) == set()
+ assert nx.min_weight_matching(G) == set()
+
+ def test_single_edge(self):
+ G = nx.Graph()
+ G.add_edge(0, 1)
+ assert edges_equal(
+ nx.max_weight_matching(G), matching_dict_to_set({0: 1, 1: 0})
+ )
+ assert edges_equal(
+ nx.min_weight_matching(G), matching_dict_to_set({0: 1, 1: 0})
+ )
+
+ def test_two_path(self):
+ G = nx.Graph()
+ G.add_edge("one", "two", weight=10)
+ G.add_edge("two", "three", weight=11)
+ assert edges_equal(
+ nx.max_weight_matching(G),
+ matching_dict_to_set({"three": "two", "two": "three"}),
+ )
+ assert edges_equal(
+ nx.min_weight_matching(G),
+ matching_dict_to_set({"one": "two", "two": "one"}),
+ )
+
+ def test_path(self):
+ G = nx.Graph()
+ G.add_edge(1, 2, weight=5)
+ G.add_edge(2, 3, weight=11)
+ G.add_edge(3, 4, weight=5)
+ assert edges_equal(
+ nx.max_weight_matching(G), matching_dict_to_set({2: 3, 3: 2})
+ )
+ assert edges_equal(
+ nx.max_weight_matching(G, 1), matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3})
+ )
+ assert edges_equal(
+ nx.min_weight_matching(G), matching_dict_to_set({1: 2, 3: 4})
+ )
+ assert edges_equal(
+ nx.min_weight_matching(G, 1), matching_dict_to_set({1: 2, 3: 4})
+ )
+
+ def test_square(self):
+ G = nx.Graph()
+ G.add_edge(1, 4, weight=2)
+ G.add_edge(2, 3, weight=2)
+ G.add_edge(1, 2, weight=1)
+ G.add_edge(3, 4, weight=4)
+ assert edges_equal(
+ nx.max_weight_matching(G), matching_dict_to_set({1: 2, 3: 4})
+ )
+ assert edges_equal(
+ nx.min_weight_matching(G), matching_dict_to_set({1: 4, 2: 3})
+ )
+
+ def test_edge_attribute_name(self):
+ G = nx.Graph()
+ G.add_edge("one", "two", weight=10, abcd=11)
+ G.add_edge("two", "three", weight=11, abcd=10)
+ assert edges_equal(
+ nx.max_weight_matching(G, weight="abcd"),
+ matching_dict_to_set({"one": "two", "two": "one"}),
+ )
+ assert edges_equal(
+ nx.min_weight_matching(G, weight="abcd"),
+ matching_dict_to_set({"three": "two"}),
+ )
+
+ def test_floating_point_weights(self):
+ G = nx.Graph()
+ G.add_edge(1, 2, weight=math.pi)
+ G.add_edge(2, 3, weight=math.exp(1))
+ G.add_edge(1, 3, weight=3.0)
+ G.add_edge(1, 4, weight=math.sqrt(2.0))
+ assert edges_equal(
+ nx.max_weight_matching(G), matching_dict_to_set({1: 4, 2: 3, 3: 2, 4: 1})
+ )
+ assert edges_equal(
+ nx.min_weight_matching(G), matching_dict_to_set({1: 4, 2: 3, 3: 2, 4: 1})
+ )
+
+ def test_negative_weights(self):
+ G = nx.Graph()
+ G.add_edge(1, 2, weight=2)
+ G.add_edge(1, 3, weight=-2)
+ G.add_edge(2, 3, weight=1)
+ G.add_edge(2, 4, weight=-1)
+ G.add_edge(3, 4, weight=-6)
+ assert edges_equal(
+ nx.max_weight_matching(G), matching_dict_to_set({1: 2, 2: 1})
+ )
+ assert edges_equal(
+ nx.max_weight_matching(G, maxcardinality=True),
+ matching_dict_to_set({1: 3, 2: 4, 3: 1, 4: 2}),
+ )
+ assert edges_equal(
+ nx.min_weight_matching(G), matching_dict_to_set({1: 2, 3: 4})
+ )
+
+ def test_s_blossom(self):
+ """Create S-blossom and use it for augmentation:"""
+ G = nx.Graph()
+ G.add_weighted_edges_from([(1, 2, 8), (1, 3, 9), (2, 3, 10), (3, 4, 7)])
+ answer = matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3})
+ assert edges_equal(nx.max_weight_matching(G), answer)
+ assert edges_equal(nx.min_weight_matching(G), answer)
+
+ G.add_weighted_edges_from([(1, 6, 5), (4, 5, 6)])
+ answer = matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1})
+ assert edges_equal(nx.max_weight_matching(G), answer)
+ assert edges_equal(nx.min_weight_matching(G), answer)
+
+ def test_s_t_blossom(self):
+ """Create S-blossom, relabel as T-blossom, use for augmentation:"""
+ G = nx.Graph()
+ G.add_weighted_edges_from(
+ [(1, 2, 9), (1, 3, 8), (2, 3, 10), (1, 4, 5), (4, 5, 4), (1, 6, 3)]
+ )
+ answer = matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 5, 5: 4, 6: 1})
+ assert edges_equal(nx.max_weight_matching(G), answer)
+ assert edges_equal(nx.min_weight_matching(G), answer)
+
+ G.add_edge(4, 5, weight=3)
+ G.add_edge(1, 6, weight=4)
+ assert edges_equal(nx.max_weight_matching(G), answer)
+ assert edges_equal(nx.min_weight_matching(G), answer)
+
+ G.remove_edge(1, 6)
+ G.add_edge(3, 6, weight=4)
+ answer = matching_dict_to_set({1: 2, 2: 1, 3: 6, 4: 5, 5: 4, 6: 3})
+ assert edges_equal(nx.max_weight_matching(G), answer)
+ assert edges_equal(nx.min_weight_matching(G), answer)
+
+ def test_nested_s_blossom(self):
+ """Create nested S-blossom, use for augmentation:"""
+
+ G = nx.Graph()
+ G.add_weighted_edges_from(
+ [
+ (1, 2, 9),
+ (1, 3, 9),
+ (2, 3, 10),
+ (2, 4, 8),
+ (3, 5, 8),
+ (4, 5, 10),
+ (5, 6, 6),
+ ]
+ )
+ dict_format = {1: 3, 2: 4, 3: 1, 4: 2, 5: 6, 6: 5}
+ expected = {frozenset(e) for e in matching_dict_to_set(dict_format)}
+ answer = {frozenset(e) for e in nx.max_weight_matching(G)}
+ assert answer == expected
+ answer = {frozenset(e) for e in nx.min_weight_matching(G)}
+ assert answer == expected
+
+ def test_nested_s_blossom_relabel(self):
+ """Create S-blossom, relabel as S, include in nested S-blossom:"""
+ G = nx.Graph()
+ G.add_weighted_edges_from(
+ [
+ (1, 2, 10),
+ (1, 7, 10),
+ (2, 3, 12),
+ (3, 4, 20),
+ (3, 5, 20),
+ (4, 5, 25),
+ (5, 6, 10),
+ (6, 7, 10),
+ (7, 8, 8),
+ ]
+ )
+ answer = matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3, 5: 6, 6: 5, 7: 8, 8: 7})
+ assert edges_equal(nx.max_weight_matching(G), answer)
+ assert edges_equal(nx.min_weight_matching(G), answer)
+
+ def test_nested_s_blossom_expand(self):
+ """Create nested S-blossom, augment, expand recursively:"""
+ G = nx.Graph()
+ G.add_weighted_edges_from(
+ [
+ (1, 2, 8),
+ (1, 3, 8),
+ (2, 3, 10),
+ (2, 4, 12),
+ (3, 5, 12),
+ (4, 5, 14),
+ (4, 6, 12),
+ (5, 7, 12),
+ (6, 7, 14),
+ (7, 8, 12),
+ ]
+ )
+ answer = matching_dict_to_set({1: 2, 2: 1, 3: 5, 4: 6, 5: 3, 6: 4, 7: 8, 8: 7})
+ assert edges_equal(nx.max_weight_matching(G), answer)
+ assert edges_equal(nx.min_weight_matching(G), answer)
+
+ def test_s_blossom_relabel_expand(self):
+ """Create S-blossom, relabel as T, expand:"""
+ G = nx.Graph()
+ G.add_weighted_edges_from(
+ [
+ (1, 2, 23),
+ (1, 5, 22),
+ (1, 6, 15),
+ (2, 3, 25),
+ (3, 4, 22),
+ (4, 5, 25),
+ (4, 8, 14),
+ (5, 7, 13),
+ ]
+ )
+ answer = matching_dict_to_set({1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4})
+ assert edges_equal(nx.max_weight_matching(G), answer)
+ assert edges_equal(nx.min_weight_matching(G), answer)
+
+ def test_nested_s_blossom_relabel_expand(self):
+ """Create nested S-blossom, relabel as T, expand:"""
+ G = nx.Graph()
+ G.add_weighted_edges_from(
+ [
+ (1, 2, 19),
+ (1, 3, 20),
+ (1, 8, 8),
+ (2, 3, 25),
+ (2, 4, 18),
+ (3, 5, 18),
+ (4, 5, 13),
+ (4, 7, 7),
+ (5, 6, 7),
+ ]
+ )
+ answer = matching_dict_to_set({1: 8, 2: 3, 3: 2, 4: 7, 5: 6, 6: 5, 7: 4, 8: 1})
+ assert edges_equal(nx.max_weight_matching(G), answer)
+ assert edges_equal(nx.min_weight_matching(G), answer)
+
+ def test_nasty_blossom1(self):
+ """Create blossom, relabel as T in more than one way, expand,
+ augment:
+ """
+ G = nx.Graph()
+ G.add_weighted_edges_from(
+ [
+ (1, 2, 45),
+ (1, 5, 45),
+ (2, 3, 50),
+ (3, 4, 45),
+ (4, 5, 50),
+ (1, 6, 30),
+ (3, 9, 35),
+ (4, 8, 35),
+ (5, 7, 26),
+ (9, 10, 5),
+ ]
+ )
+ ansdict = {1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4, 9: 10, 10: 9}
+ answer = matching_dict_to_set(ansdict)
+ assert edges_equal(nx.max_weight_matching(G), answer)
+ assert edges_equal(nx.min_weight_matching(G), answer)
+
+ def test_nasty_blossom2(self):
+ """Again but slightly different:"""
+ G = nx.Graph()
+ G.add_weighted_edges_from(
+ [
+ (1, 2, 45),
+ (1, 5, 45),
+ (2, 3, 50),
+ (3, 4, 45),
+ (4, 5, 50),
+ (1, 6, 30),
+ (3, 9, 35),
+ (4, 8, 26),
+ (5, 7, 40),
+ (9, 10, 5),
+ ]
+ )
+ ans = {1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4, 9: 10, 10: 9}
+ answer = matching_dict_to_set(ans)
+ assert edges_equal(nx.max_weight_matching(G), answer)
+ assert edges_equal(nx.min_weight_matching(G), answer)
+
+ def test_nasty_blossom_least_slack(self):
+ """Create blossom, relabel as T, expand such that a new
+ least-slack S-to-free dge is produced, augment:
+ """
+ G = nx.Graph()
+ G.add_weighted_edges_from(
+ [
+ (1, 2, 45),
+ (1, 5, 45),
+ (2, 3, 50),
+ (3, 4, 45),
+ (4, 5, 50),
+ (1, 6, 30),
+ (3, 9, 35),
+ (4, 8, 28),
+ (5, 7, 26),
+ (9, 10, 5),
+ ]
+ )
+ ans = {1: 6, 2: 3, 3: 2, 4: 8, 5: 7, 6: 1, 7: 5, 8: 4, 9: 10, 10: 9}
+ answer = matching_dict_to_set(ans)
+ assert edges_equal(nx.max_weight_matching(G), answer)
+ assert edges_equal(nx.min_weight_matching(G), answer)
+
+ def test_nasty_blossom_augmenting(self):
+ """Create nested blossom, relabel as T in more than one way"""
+ # expand outer blossom such that inner blossom ends up on an
+ # augmenting path:
+ G = nx.Graph()
+ G.add_weighted_edges_from(
+ [
+ (1, 2, 45),
+ (1, 7, 45),
+ (2, 3, 50),
+ (3, 4, 45),
+ (4, 5, 95),
+ (4, 6, 94),
+ (5, 6, 94),
+ (6, 7, 50),
+ (1, 8, 30),
+ (3, 11, 35),
+ (5, 9, 36),
+ (7, 10, 26),
+ (11, 12, 5),
+ ]
+ )
+ ans = {
+ 1: 8,
+ 2: 3,
+ 3: 2,
+ 4: 6,
+ 5: 9,
+ 6: 4,
+ 7: 10,
+ 8: 1,
+ 9: 5,
+ 10: 7,
+ 11: 12,
+ 12: 11,
+ }
+ answer = matching_dict_to_set(ans)
+ assert edges_equal(nx.max_weight_matching(G), answer)
+ assert edges_equal(nx.min_weight_matching(G), answer)
+
+ def test_nasty_blossom_expand_recursively(self):
+ """Create nested S-blossom, relabel as S, expand recursively:"""
+ G = nx.Graph()
+ G.add_weighted_edges_from(
+ [
+ (1, 2, 40),
+ (1, 3, 40),
+ (2, 3, 60),
+ (2, 4, 55),
+ (3, 5, 55),
+ (4, 5, 50),
+ (1, 8, 15),
+ (5, 7, 30),
+ (7, 6, 10),
+ (8, 10, 10),
+ (4, 9, 30),
+ ]
+ )
+ ans = {1: 2, 2: 1, 3: 5, 4: 9, 5: 3, 6: 7, 7: 6, 8: 10, 9: 4, 10: 8}
+ answer = matching_dict_to_set(ans)
+ assert edges_equal(nx.max_weight_matching(G), answer)
+ assert edges_equal(nx.min_weight_matching(G), answer)
+
+ def test_wrong_graph_type(self):
+ error = nx.NetworkXNotImplemented
+ raises(error, nx.max_weight_matching, nx.MultiGraph())
+ raises(error, nx.max_weight_matching, nx.MultiDiGraph())
+ raises(error, nx.max_weight_matching, nx.DiGraph())
+ raises(error, nx.min_weight_matching, nx.DiGraph())
+
+
+class TestIsMatching:
+ """Unit tests for the
+ :func:`~networkx.algorithms.matching.is_matching` function.
+
+ """
+
+ def test_dict(self):
+ G = nx.path_graph(4)
+ assert nx.is_matching(G, {0: 1, 1: 0, 2: 3, 3: 2})
+
+ def test_empty_matching(self):
+ G = nx.path_graph(4)
+ assert nx.is_matching(G, set())
+
+ def test_single_edge(self):
+ G = nx.path_graph(4)
+ assert nx.is_matching(G, {(1, 2)})
+
+ def test_edge_order(self):
+ G = nx.path_graph(4)
+ assert nx.is_matching(G, {(0, 1), (2, 3)})
+ assert nx.is_matching(G, {(1, 0), (2, 3)})
+ assert nx.is_matching(G, {(0, 1), (3, 2)})
+ assert nx.is_matching(G, {(1, 0), (3, 2)})
+
+ def test_valid_matching(self):
+ G = nx.path_graph(4)
+ assert nx.is_matching(G, {(0, 1), (2, 3)})
+
+ def test_invalid_input(self):
+ error = nx.NetworkXError
+ G = nx.path_graph(4)
+ # edge to node not in G
+ raises(error, nx.is_matching, G, {(0, 5), (2, 3)})
+ # edge not a 2-tuple
+ raises(error, nx.is_matching, G, {(0, 1, 2), (2, 3)})
+ raises(error, nx.is_matching, G, {(0,), (2, 3)})
+
+ def test_selfloops(self):
+ error = nx.NetworkXError
+ G = nx.path_graph(4)
+ # selfloop for node not in G
+ raises(error, nx.is_matching, G, {(5, 5), (2, 3)})
+ # selfloop edge not in G
+ assert not nx.is_matching(G, {(0, 0), (1, 2), (2, 3)})
+ # selfloop edge in G
+ G.add_edge(0, 0)
+ assert not nx.is_matching(G, {(0, 0), (1, 2)})
+
+ def test_invalid_matching(self):
+ G = nx.path_graph(4)
+ assert not nx.is_matching(G, {(0, 1), (1, 2), (2, 3)})
+
+ def test_invalid_edge(self):
+ G = nx.path_graph(4)
+ assert not nx.is_matching(G, {(0, 3), (1, 2)})
+ raises(nx.NetworkXError, nx.is_matching, G, {(0, 55)})
+
+ G = nx.DiGraph(G.edges)
+ assert nx.is_matching(G, {(0, 1)})
+ assert not nx.is_matching(G, {(1, 0)})
+
+
+class TestIsMaximalMatching:
+ """Unit tests for the
+ :func:`~networkx.algorithms.matching.is_maximal_matching` function.
+
+ """
+
+ def test_dict(self):
+ G = nx.path_graph(4)
+ assert nx.is_maximal_matching(G, {0: 1, 1: 0, 2: 3, 3: 2})
+
+ def test_invalid_input(self):
+ error = nx.NetworkXError
+ G = nx.path_graph(4)
+ # edge to node not in G
+ raises(error, nx.is_maximal_matching, G, {(0, 5)})
+ raises(error, nx.is_maximal_matching, G, {(5, 0)})
+ # edge not a 2-tuple
+ raises(error, nx.is_maximal_matching, G, {(0, 1, 2), (2, 3)})
+ raises(error, nx.is_maximal_matching, G, {(0,), (2, 3)})
+
+ def test_valid(self):
+ G = nx.path_graph(4)
+ assert nx.is_maximal_matching(G, {(0, 1), (2, 3)})
+
+ def test_not_matching(self):
+ G = nx.path_graph(4)
+ assert not nx.is_maximal_matching(G, {(0, 1), (1, 2), (2, 3)})
+ assert not nx.is_maximal_matching(G, {(0, 3)})
+ G.add_edge(0, 0)
+ assert not nx.is_maximal_matching(G, {(0, 0)})
+
+ def test_not_maximal(self):
+ G = nx.path_graph(4)
+ assert not nx.is_maximal_matching(G, {(0, 1)})
+
+
+class TestIsPerfectMatching:
+ """Unit tests for the
+ :func:`~networkx.algorithms.matching.is_perfect_matching` function.
+
+ """
+
+ def test_dict(self):
+ G = nx.path_graph(4)
+ assert nx.is_perfect_matching(G, {0: 1, 1: 0, 2: 3, 3: 2})
+
+ def test_valid(self):
+ G = nx.path_graph(4)
+ assert nx.is_perfect_matching(G, {(0, 1), (2, 3)})
+
+ def test_valid_not_path(self):
+ G = nx.cycle_graph(4)
+ G.add_edge(0, 4)
+ G.add_edge(1, 4)
+ G.add_edge(5, 2)
+
+ assert nx.is_perfect_matching(G, {(1, 4), (0, 3), (5, 2)})
+
+ def test_invalid_input(self):
+ error = nx.NetworkXError
+ G = nx.path_graph(4)
+ # edge to node not in G
+ raises(error, nx.is_perfect_matching, G, {(0, 5)})
+ raises(error, nx.is_perfect_matching, G, {(5, 0)})
+ # edge not a 2-tuple
+ raises(error, nx.is_perfect_matching, G, {(0, 1, 2), (2, 3)})
+ raises(error, nx.is_perfect_matching, G, {(0,), (2, 3)})
+
+ def test_selfloops(self):
+ error = nx.NetworkXError
+ G = nx.path_graph(4)
+ # selfloop for node not in G
+ raises(error, nx.is_perfect_matching, G, {(5, 5), (2, 3)})
+ # selfloop edge not in G
+ assert not nx.is_perfect_matching(G, {(0, 0), (1, 2), (2, 3)})
+ # selfloop edge in G
+ G.add_edge(0, 0)
+ assert not nx.is_perfect_matching(G, {(0, 0), (1, 2)})
+
+ def test_not_matching(self):
+ G = nx.path_graph(4)
+ assert not nx.is_perfect_matching(G, {(0, 3)})
+ assert not nx.is_perfect_matching(G, {(0, 1), (1, 2), (2, 3)})
+
+ def test_maximal_but_not_perfect(self):
+ G = nx.cycle_graph(4)
+ G.add_edge(0, 4)
+ G.add_edge(1, 4)
+
+ assert not nx.is_perfect_matching(G, {(1, 4), (0, 3)})
+
+
+class TestMaximalMatching:
+ """Unit tests for the
+ :func:`~networkx.algorithms.matching.maximal_matching`.
+
+ """
+
+ def test_valid_matching(self):
+ edges = [(1, 2), (1, 5), (2, 3), (2, 5), (3, 4), (3, 6), (5, 6)]
+ G = nx.Graph(edges)
+ matching = nx.maximal_matching(G)
+ assert nx.is_maximal_matching(G, matching)
+
+ def test_single_edge_matching(self):
+ # In the star graph, any maximal matching has just one edge.
+ G = nx.star_graph(5)
+ matching = nx.maximal_matching(G)
+ assert 1 == len(matching)
+ assert nx.is_maximal_matching(G, matching)
+
+ def test_self_loops(self):
+ # Create the path graph with two self-loops.
+ G = nx.path_graph(3)
+ G.add_edges_from([(0, 0), (1, 1)])
+ matching = nx.maximal_matching(G)
+ assert len(matching) == 1
+ # The matching should never include self-loops.
+ assert not any(u == v for u, v in matching)
+ assert nx.is_maximal_matching(G, matching)
+
+ def test_ordering(self):
+ """Tests that a maximal matching is computed correctly
+ regardless of the order in which nodes are added to the graph.
+
+ """
+ for nodes in permutations(range(3)):
+ G = nx.Graph()
+ G.add_nodes_from(nodes)
+ G.add_edges_from([(0, 1), (0, 2)])
+ matching = nx.maximal_matching(G)
+ assert len(matching) == 1
+ assert nx.is_maximal_matching(G, matching)
+
+ def test_wrong_graph_type(self):
+ error = nx.NetworkXNotImplemented
+ raises(error, nx.maximal_matching, nx.MultiGraph())
+ raises(error, nx.maximal_matching, nx.MultiDiGraph())
+ raises(error, nx.maximal_matching, nx.DiGraph())
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_mis.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_mis.py
new file mode 100644
index 0000000000000000000000000000000000000000..379c5c07c7a050ac2ab799355070eb5e674fc621
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_mis.py
@@ -0,0 +1,62 @@
+"""
+Tests for maximal (not maximum) independent sets.
+
+"""
+
+import random
+
+import pytest
+
+import networkx as nx
+
+
def test_random_seed():
    # With a fixed seed the greedy MIS is deterministic, so the exact
    # node ordering can be pinned down.
    G = nx.empty_graph(5)
    assert nx.maximal_independent_set(G, seed=1) == [1, 0, 3, 2, 4]
+
+
@pytest.mark.parametrize("graph", [nx.complete_graph(5), nx.complete_graph(55)])
def test_K5(graph):
    """Maximal independent set for complete graphs"""
    # In a complete graph every single node is by itself a maximal
    # independent set, so seeding with [n] must return exactly [n].
    assert all(nx.maximal_independent_set(graph, [n]) == [n] for n in graph)
+
+
def test_exceptions():
    """Bad input should raise exception."""
    G = nx.florentine_families_graph()
    # A seed node that is not in the graph is unfeasible.
    pytest.raises(nx.NetworkXUnfeasible, nx.maximal_independent_set, G, ["Smith"])
    # Adjacent seed nodes cannot both belong to an independent set.
    pytest.raises(
        nx.NetworkXUnfeasible, nx.maximal_independent_set, G, ["Salviati", "Pazzi"]
    )
    # MaximalIndependentSet is not implemented for directed graphs
    pytest.raises(nx.NetworkXNotImplemented, nx.maximal_independent_set, nx.DiGraph(G))
+
+
def test_florentine_family():
    """A seeded MIS on the Florentine families graph is the known set."""
    G = nx.florentine_families_graph()
    expected = {
        "Medici",
        "Bischeri",
        "Castellani",
        "Pazzi",
        "Ginori",
        "Lamberteschi",
    }
    result = nx.maximal_independent_set(G, ["Medici", "Bischeri"])
    assert set(result) == expected
+
+
def test_bipartite():
    """Seeding with nodes from the small side yields that entire side."""
    graph = nx.complete_bipartite_graph(12, 34)
    seed_nodes = [4, 5, 9, 10]
    independent = nx.maximal_independent_set(graph, seed_nodes)
    assert sorted(independent) == list(range(12))
+
+
def test_random_graphs():
    """Generate 5 random graphs of different types and sizes and
    make sure that all sets are independent and maximal."""
    # NOTE(review): random.random() is unseeded here, so each run tests a
    # different edge probability — confirm this flakiness is intended upstream.
    for i in range(0, 50, 10):
        G = nx.erdos_renyi_graph(i * 10 + 1, random.random())
        IS = nx.maximal_independent_set(G)
        # Independence: the induced subgraph has no edges.
        assert G.subgraph(IS).number_of_edges() == 0
        # Maximality: every node outside the set neighbors some set member.
        neighbors_of_MIS = set.union(*(set(G.neighbors(v)) for v in IS))
        assert all(v in neighbors_of_MIS for v in set(G.nodes()).difference(IS))
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_planarity.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_planarity.py
new file mode 100644
index 0000000000000000000000000000000000000000..470b1d23bb806e9e1297b8cb52f5c03a569a47ff
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_planarity.py
@@ -0,0 +1,442 @@
+import pytest
+
+import networkx as nx
+from networkx.algorithms.planarity import (
+ check_planarity_recursive,
+ get_counterexample,
+ get_counterexample_recursive,
+)
+
+
class TestLRPlanarity:
    """Unit tests for the :mod:`networkx.algorithms.planarity` module.

    Tests three things:
    1. Check that the result is correct
        (returns planar if and only if the graph is actually planar)
    2. In case a counter example is returned: Check if it is correct
    3. In case an embedding is returned: Check if its actually an embedding
    """

    @staticmethod
    def check_graph(G, is_planar=None):
        """Raises an exception if the lr_planarity check returns a wrong result

        Parameters
        ----------
        G : NetworkX graph
        is_planar : bool
            The expected result of the planarity check.
            If set to None only counter example or embedding are verified.

        """

        # obtain results of planarity check from both the iterative and
        # the recursive implementation; they must agree.
        is_planar_lr, result = nx.check_planarity(G, True)
        is_planar_lr_rec, result_rec = check_planarity_recursive(G, True)

        if is_planar is not None:
            # set a message for the assert
            if is_planar:
                msg = "Wrong planarity check result. Should be planar."
            else:
                msg = "Wrong planarity check result. Should be non-planar."

            # check if the result is as expected
            assert is_planar == is_planar_lr, msg
            assert is_planar == is_planar_lr_rec, msg

        if is_planar_lr:
            # check embedding
            check_embedding(G, result)
            check_embedding(G, result_rec)
        else:
            # check counter example
            check_counterexample(G, result)
            check_counterexample(G, result_rec)

    def test_simple_planar_graph(self):
        e = [
            (1, 2),
            (2, 3),
            (3, 4),
            (4, 6),
            (6, 7),
            (7, 1),
            (1, 5),
            (5, 2),
            (2, 4),
            (4, 5),
            (5, 7),
        ]
        self.check_graph(nx.Graph(e), is_planar=True)

    def test_planar_with_selfloop(self):
        # Self-loops must not affect the planarity verdict.
        e = [
            (1, 1),
            (2, 2),
            (3, 3),
            (4, 4),
            (5, 5),
            (1, 2),
            (1, 3),
            (1, 5),
            (2, 5),
            (2, 4),
            (3, 4),
            (3, 5),
            (4, 5),
        ]
        self.check_graph(nx.Graph(e), is_planar=True)

    def test_k3_3(self):
        # K3,3 is one of the two Kuratowski graphs and is non-planar.
        self.check_graph(nx.complete_bipartite_graph(3, 3), is_planar=False)

    def test_k5(self):
        # K5 is the other Kuratowski graph and is non-planar.
        self.check_graph(nx.complete_graph(5), is_planar=False)

    def test_multiple_components_planar(self):
        e = [(1, 2), (2, 3), (3, 1), (4, 5), (5, 6), (6, 4)]
        self.check_graph(nx.Graph(e), is_planar=True)

    def test_multiple_components_non_planar(self):
        G = nx.complete_graph(5)
        # add another planar component to the non planar component
        # G stays non planar
        G.add_edges_from([(6, 7), (7, 8), (8, 6)])
        self.check_graph(G, is_planar=False)

    def test_non_planar_with_selfloop(self):
        G = nx.complete_graph(5)
        # add self loops
        for i in range(5):
            G.add_edge(i, i)
        self.check_graph(G, is_planar=False)

    def test_non_planar1(self):
        # tests a graph that has no subgraph directly isomorph to K5 or K3_3
        e = [
            (1, 5),
            (1, 6),
            (1, 7),
            (2, 6),
            (2, 3),
            (3, 5),
            (3, 7),
            (4, 5),
            (4, 6),
            (4, 7),
        ]
        self.check_graph(nx.Graph(e), is_planar=False)

    def test_loop(self):
        # test a graph with a selfloop
        e = [(1, 2), (2, 2)]
        G = nx.Graph(e)
        self.check_graph(G, is_planar=True)

    def test_comp(self):
        # test multiple component graph
        e = [(1, 2), (3, 4)]
        G = nx.Graph(e)
        G.remove_edge(1, 2)
        self.check_graph(G, is_planar=True)

    def test_goldner_harary(self):
        # test goldner-harary graph (a maximal planar graph)
        e = [
            (1, 2),
            (1, 3),
            (1, 4),
            (1, 5),
            (1, 7),
            (1, 8),
            (1, 10),
            (1, 11),
            (2, 3),
            (2, 4),
            (2, 6),
            (2, 7),
            (2, 9),
            (2, 10),
            (2, 11),
            (3, 4),
            (4, 5),
            (4, 6),
            (4, 7),
            (5, 7),
            (6, 7),
            (7, 8),
            (7, 9),
            (7, 10),
            (8, 10),
            (9, 10),
            (10, 11),
        ]
        G = nx.Graph(e)
        self.check_graph(G, is_planar=True)

    def test_planar_multigraph(self):
        # Parallel edges must not affect planarity.
        G = nx.MultiGraph([(1, 2), (1, 2), (1, 2), (1, 2), (2, 3), (3, 1)])
        self.check_graph(G, is_planar=True)

    def test_non_planar_multigraph(self):
        G = nx.MultiGraph(nx.complete_graph(5))
        G.add_edges_from([(1, 2)] * 5)
        self.check_graph(G, is_planar=False)

    def test_planar_digraph(self):
        # Edge directions are ignored for planarity.
        G = nx.DiGraph([(1, 2), (2, 3), (2, 4), (4, 1), (4, 2), (1, 4), (3, 2)])
        self.check_graph(G, is_planar=True)

    def test_non_planar_digraph(self):
        G = nx.DiGraph(nx.complete_graph(5))
        G.remove_edge(1, 2)
        G.remove_edge(4, 1)
        self.check_graph(G, is_planar=False)

    def test_single_component(self):
        # Test a graph with only a single node
        G = nx.Graph()
        G.add_node(1)
        self.check_graph(G, is_planar=True)

    def test_graph1(self):
        G = nx.Graph(
            [
                (3, 10),
                (2, 13),
                (1, 13),
                (7, 11),
                (0, 8),
                (8, 13),
                (0, 2),
                (0, 7),
                (0, 10),
                (1, 7),
            ]
        )
        self.check_graph(G, is_planar=True)

    def test_graph2(self):
        G = nx.Graph(
            [
                (1, 2),
                (4, 13),
                (0, 13),
                (4, 5),
                (7, 10),
                (1, 7),
                (0, 3),
                (2, 6),
                (5, 6),
                (7, 13),
                (4, 8),
                (0, 8),
                (0, 9),
                (2, 13),
                (6, 7),
                (3, 6),
                (2, 8),
            ]
        )
        self.check_graph(G, is_planar=False)

    def test_graph3(self):
        G = nx.Graph(
            [
                (0, 7),
                (3, 11),
                (3, 4),
                (8, 9),
                (4, 11),
                (1, 7),
                (1, 13),
                (1, 11),
                (3, 5),
                (5, 7),
                (1, 3),
                (0, 4),
                (5, 11),
                (5, 13),
            ]
        )
        self.check_graph(G, is_planar=False)

    def test_counterexample_planar(self):
        with pytest.raises(nx.NetworkXException):
            # Try to get a counterexample of a planar graph
            G = nx.Graph()
            G.add_node(1)
            get_counterexample(G)

    def test_counterexample_planar_recursive(self):
        with pytest.raises(nx.NetworkXException):
            # Try to get a counterexample of a planar graph
            G = nx.Graph()
            G.add_node(1)
            get_counterexample_recursive(G)
+
+
def check_embedding(G, embedding):
    """Raises an exception if the combinatorial embedding is not correct

    Parameters
    ----------
    G : NetworkX graph
    embedding : a dict mapping nodes to a list of edges
        This specifies the ordering of the outgoing edges from a node for
        a combinatorial embedding

    Notes
    -----
    Checks the following things:
        - The type of the embedding is correct
        - The nodes and edges match the original graph
        - Every half edge has its matching opposite half edge
        - No intersections of edges (checked by Euler's formula)
    """

    if not isinstance(embedding, nx.PlanarEmbedding):
        raise nx.NetworkXException("Bad embedding. Not of type nx.PlanarEmbedding")

    # Check structure (half-edge consistency and Euler's formula).
    embedding.check_structure()

    # Check that graphs are equivalent

    assert set(G.nodes) == set(
        embedding.nodes
    ), "Bad embedding. Nodes don't match the original graph."

    # Check that the edges are equal. The embedding stores each edge as two
    # directed half-edges, so both orientations are added; self-loops are
    # excluded because an embedding does not contain them.
    g_edges = set()
    for edge in G.edges:
        if edge[0] != edge[1]:
            g_edges.add((edge[0], edge[1]))
            g_edges.add((edge[1], edge[0]))
    assert g_edges == set(
        embedding.edges
    ), "Bad embedding. Edges don't match the original graph."
+
+
def check_counterexample(G, sub_graph):
    """Raises an exception if the counterexample is wrong.

    Parameters
    ----------
    G : NetworkX graph
    sub_graph : NetworkX graph
        A subgraph of G claimed as a Kuratowski counterexample.
        After contracting degree-2 nodes it must be isomorphic to
        K5 or K3,3.
    """
    # 1. Create the sub graph
    sub_graph = nx.Graph(sub_graph)

    # 2. Remove self loops
    for u in sub_graph:
        if sub_graph.has_edge(u, u):
            sub_graph.remove_edge(u, u)

    # keep track of nodes we might need to contract
    contract = list(sub_graph)

    # 3. Contract Edges
    while len(contract) > 0:
        contract_node = contract.pop()
        if contract_node not in sub_graph:
            # Node was already contracted
            continue
        degree = sub_graph.degree[contract_node]
        # Check if we can remove the node: only degree-2 nodes are
        # subdivision points and may be contracted away.
        if degree == 2:
            # Get the two neighbors
            neighbors = iter(sub_graph[contract_node])
            u = next(neighbors)
            v = next(neighbors)
            # Save nodes for later: contracting may make them degree 2.
            contract.append(u)
            contract.append(v)
            # Contract edge
            sub_graph.remove_node(contract_node)
            sub_graph.add_edge(u, v)

    # 4. Check for isomorphism with K5 or K3_3 graphs
    if len(sub_graph) == 5:
        if not nx.is_isomorphic(nx.complete_graph(5), sub_graph):
            raise nx.NetworkXException("Bad counter example.")
    elif len(sub_graph) == 6:
        if not nx.is_isomorphic(nx.complete_bipartite_graph(3, 3), sub_graph):
            raise nx.NetworkXException("Bad counter example.")
    else:
        raise nx.NetworkXException("Bad counter example.")
+
+
class TestPlanarEmbeddingClass:
    """Unit tests for the :class:`nx.PlanarEmbedding` data structure."""

    def test_get_data(self):
        embedding = self.get_star_embedding(3)
        data = embedding.get_data()
        data_cmp = {0: [2, 1], 1: [0], 2: [0]}
        assert data == data_cmp

    def test_missing_edge_orientation(self):
        with pytest.raises(nx.NetworkXException):
            embedding = nx.PlanarEmbedding()
            embedding.add_edge(1, 2)
            embedding.add_edge(2, 1)
            # Invalid structure because the orientation of the edge was not set
            embedding.check_structure()

    def test_invalid_edge_orientation(self):
        with pytest.raises(nx.NetworkXException):
            embedding = nx.PlanarEmbedding()
            embedding.add_half_edge_first(1, 2)
            embedding.add_half_edge_first(2, 1)
            # (1, 3) is added without orientation, so the structure is invalid.
            embedding.add_edge(1, 3)
            embedding.check_structure()

    def test_missing_half_edge(self):
        with pytest.raises(nx.NetworkXException):
            embedding = nx.PlanarEmbedding()
            embedding.add_half_edge_first(1, 2)
            # Invalid structure because other half edge is missing
            embedding.check_structure()

    def test_not_fulfilling_euler_formula(self):
        # K5 half-edges cannot satisfy Euler's formula for planar graphs.
        with pytest.raises(nx.NetworkXException):
            embedding = nx.PlanarEmbedding()
            for i in range(5):
                for j in range(5):
                    if i != j:
                        embedding.add_half_edge_first(i, j)
            embedding.check_structure()

    def test_missing_reference(self):
        # The cw reference half-edge (1, 3) does not exist yet.
        with pytest.raises(nx.NetworkXException):
            embedding = nx.PlanarEmbedding()
            embedding.add_half_edge_cw(1, 2, 3)

    def test_connect_components(self):
        # Smoke test: connecting two isolated components must not raise.
        embedding = nx.PlanarEmbedding()
        embedding.connect_components(1, 2)

    def test_successful_face_traversal(self):
        embedding = nx.PlanarEmbedding()
        embedding.add_half_edge_first(1, 2)
        embedding.add_half_edge_first(2, 1)
        face = embedding.traverse_face(1, 2)
        assert face == [1, 2]

    def test_unsuccessful_face_traversal(self):
        # Edges with inconsistent cw/ccw references make traversal fail.
        with pytest.raises(nx.NetworkXException):
            embedding = nx.PlanarEmbedding()
            embedding.add_edge(1, 2, ccw=2, cw=3)
            embedding.add_edge(2, 1, ccw=1, cw=3)
            embedding.traverse_face(1, 2)

    @staticmethod
    def get_star_embedding(n):
        # Build a planar embedding of the star graph on nodes 0..n-1.
        embedding = nx.PlanarEmbedding()
        for i in range(1, n):
            embedding.add_half_edge_first(0, i)
            embedding.add_half_edge_first(i, 0)
        return embedding
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_polynomials.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_polynomials.py
new file mode 100644
index 0000000000000000000000000000000000000000..a81d6a69551ead74d3335fda408111a0b580bf6a
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_polynomials.py
@@ -0,0 +1,57 @@
+"""Unit tests for the :mod:`networkx.algorithms.polynomials` module."""
+
+import pytest
+
+import networkx as nx
+
+sympy = pytest.importorskip("sympy")
+
+
# Mapping of input graphs to a string representation of their tutte polynomials
_test_tutte_graphs = {
    nx.complete_graph(1): "1",
    nx.complete_graph(4): "x**3 + 3*x**2 + 4*x*y + 2*x + y**3 + 3*y**2 + 2*y",
    nx.cycle_graph(5): "x**4 + x**3 + x**2 + x + y",
    nx.diamond_graph(): "x**3 + 2*x**2 + 2*x*y + x + y**2 + y",
}

# Mapping of input graphs to a string representation of their chromatic
# polynomials
_test_chromatic_graphs = {
    nx.complete_graph(1): "x",
    nx.complete_graph(4): "x**4 - 6*x**3 + 11*x**2 - 6*x",
    nx.cycle_graph(5): "x**5 - 5*x**4 + 10*x**3 - 10*x**2 + 4*x",
    nx.diamond_graph(): "x**4 - 5*x**3 + 8*x**2 - 4*x",
    nx.path_graph(5): "x**5 - 4*x**4 + 6*x**3 - 4*x**2 + x",
}
+
+
@pytest.mark.parametrize(("G", "expected"), _test_tutte_graphs.items())
def test_tutte_polynomial(G, expected):
    # sympy's .equals compares the expressions symbolically, so the exact
    # string form of the polynomial does not matter.
    assert nx.tutte_polynomial(G).equals(expected)
+
+
@pytest.mark.parametrize("G", _test_tutte_graphs.keys())
def test_tutte_polynomial_disjoint(G):
    """Tutte polynomial factors into the Tutte polynomials of its components.
    Verify this property with the disjoint union of two copies of the input graph.
    """
    t_g = nx.tutte_polynomial(G)
    H = nx.disjoint_union(G, G)
    t_h = nx.tutte_polynomial(H)
    # T(G + G) == T(G) * T(G)
    assert sympy.simplify(t_g * t_g).equals(t_h)
+
+
@pytest.mark.parametrize(("G", "expected"), _test_chromatic_graphs.items())
def test_chromatic_polynomial(G, expected):
    # Symbolic comparison against the known chromatic polynomial.
    assert nx.chromatic_polynomial(G).equals(expected)
+
+
@pytest.mark.parametrize("G", _test_chromatic_graphs.keys())
def test_chromatic_polynomial_disjoint(G):
    """Chromatic polynomial factors into the Chromatic polynomials of its
    components. Verify this property with the disjoint union of two copies of
    the input graph.
    """
    x_g = nx.chromatic_polynomial(G)
    H = nx.disjoint_union(G, G)
    x_h = nx.chromatic_polynomial(H)
    # X(G + G) == X(G) * X(G)
    assert sympy.simplify(x_g * x_g).equals(x_h)
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_reciprocity.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_reciprocity.py
new file mode 100644
index 0000000000000000000000000000000000000000..e713bc4303f9bfea1199f01d8369c6bdab1a221f
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_reciprocity.py
@@ -0,0 +1,37 @@
+import pytest
+
+import networkx as nx
+
+
class TestReciprocity:
    """Unit tests for :func:`nx.reciprocity` and :func:`nx.overall_reciprocity`."""

    # test overall reciprocity by passing whole graph
    def test_reciprocity_digraph(self):
        DG = nx.DiGraph([(1, 2), (2, 1)])
        reciprocity = nx.reciprocity(DG)
        assert reciprocity == 1.0

    # test empty graph's overall reciprocity which will throw an error
    def test_overall_reciprocity_empty_graph(self):
        with pytest.raises(nx.NetworkXError):
            DG = nx.DiGraph()
            nx.overall_reciprocity(DG)

    # test for reciprocity for a list of nodes
    def test_reciprocity_graph_nodes(self):
        DG = nx.DiGraph([(1, 2), (2, 3), (3, 2)])
        reciprocity = nx.reciprocity(DG, [1, 2])
        # Node 2 has 3 incident directed edges, 2 of them reciprocated -> 2/3.
        expected_reciprocity = {1: 0.0, 2: 0.6666666666666666}
        assert reciprocity == expected_reciprocity

    # test for reciprocity for a single node
    def test_reciprocity_graph_node(self):
        DG = nx.DiGraph([(1, 2), (2, 3), (3, 2)])
        reciprocity = nx.reciprocity(DG, 2)
        assert reciprocity == 0.6666666666666666

    # test for reciprocity for an isolated node
    def test_reciprocity_graph_isolated_nodes(self):
        with pytest.raises(nx.NetworkXError):
            DG = nx.DiGraph([(1, 2)])
            DG.add_node(4)
            nx.reciprocity(DG, 4)
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_smallworld.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_smallworld.py
new file mode 100644
index 0000000000000000000000000000000000000000..d115dd99b796fc256341f1e8ff75fd4bc01b9b17
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_smallworld.py
@@ -0,0 +1,78 @@
+import pytest
+
+pytest.importorskip("numpy")
+
+import random
+
+import networkx as nx
+from networkx import lattice_reference, omega, random_reference, sigma
+
+rng = 42
+
+
def test_random_reference():
    # Rewiring a small-world graph at random should reduce clustering.
    G = nx.connected_watts_strogatz_graph(50, 6, 0.1, seed=rng)
    Gr = random_reference(G, niter=1, seed=rng)
    C = nx.average_clustering(G)
    Cr = nx.average_clustering(Gr)
    assert C > Cr

    # Graphs with fewer than 2 edges or directed graphs are rejected.
    with pytest.raises(nx.NetworkXError):
        next(random_reference(nx.Graph()))
    with pytest.raises(nx.NetworkXNotImplemented):
        next(random_reference(nx.DiGraph()))

    # Smoke test: a disconnected graph must not raise.
    H = nx.Graph(((0, 1), (2, 3)))
    Hl = random_reference(H, niter=1, seed=rng)
+
+
def test_lattice_reference():
    # Latticizing a random graph should increase average path length.
    G = nx.connected_watts_strogatz_graph(50, 6, 1, seed=rng)
    Gl = lattice_reference(G, niter=1, seed=rng)
    L = nx.average_shortest_path_length(G)
    Ll = nx.average_shortest_path_length(Gl)
    assert Ll > L

    # Graphs with fewer than 2 edges or directed graphs are rejected.
    pytest.raises(nx.NetworkXError, lattice_reference, nx.Graph())
    pytest.raises(nx.NetworkXNotImplemented, lattice_reference, nx.DiGraph())

    # Smoke test: a disconnected graph must not raise.
    H = nx.Graph(((0, 1), (2, 3)))
    Hl = lattice_reference(H, niter=1)
+
+
def test_sigma():
    # Small-world coefficient sigma should be larger for a small-world
    # graph (low rewiring p) than for a fully random one (p = 1).
    Gs = nx.connected_watts_strogatz_graph(50, 6, 0.1, seed=rng)
    Gr = nx.connected_watts_strogatz_graph(50, 6, 1, seed=rng)
    sigmas = sigma(Gs, niter=1, nrand=2, seed=rng)
    sigmar = sigma(Gr, niter=1, nrand=2, seed=rng)
    assert sigmar < sigmas
+
+
def test_omega():
    """Omega orders lattice < small-world < random and stays in [-1, 1]."""
    G_lattice = nx.connected_watts_strogatz_graph(50, 6, 0, seed=rng)
    G_random = nx.connected_watts_strogatz_graph(50, 6, 1, seed=rng)
    G_small = nx.connected_watts_strogatz_graph(50, 6, 0.1, seed=rng)
    omega_lattice = omega(G_lattice, niter=1, nrand=1, seed=rng)
    omega_random = omega(G_random, niter=1, nrand=1, seed=rng)
    omega_small = omega(G_small, niter=1, nrand=1, seed=rng)
    assert omega_lattice < omega_small < omega_random

    # Test that omega lies within the [-1, 1] bounds
    G_barbell = nx.barbell_graph(5, 1)
    G_karate = nx.karate_club_graph()

    omega_barbell = nx.omega(G_barbell)
    omega_karate = nx.omega(G_karate, nrand=2)

    all_omegas = (omega_lattice, omega_random, omega_small, omega_barbell, omega_karate)

    for value in all_omegas:
        assert -1 <= value <= 1
+
+
@pytest.mark.parametrize("f", (nx.random_reference, nx.lattice_reference))
def test_graph_no_edges(f):
    # Both reference generators require at least 2 edges.
    G = nx.Graph()
    G.add_nodes_from([0, 1, 2, 3])
    # NOTE: "fewer that" mirrors the exact (typo'd) library message.
    with pytest.raises(nx.NetworkXError, match="Graph has fewer that 2 edges"):
        f(G)
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_sparsifiers.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_sparsifiers.py
new file mode 100644
index 0000000000000000000000000000000000000000..78cabceed0102bf2ffe01d8675102c1ae85efac2
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_sparsifiers.py
@@ -0,0 +1,137 @@
+"""Unit tests for the sparsifier computation functions."""
+import pytest
+
+import networkx as nx
+from networkx.utils import py_random_state
+
+_seed = 2
+
+
def _test_spanner(G, spanner, stretch, weight=None):
    """Test whether a spanner is valid.

    This function tests whether the given spanner is a subgraph of the
    given graph G with the same node set. It also tests for all shortest
    paths whether they adhere to the given stretch.

    Parameters
    ----------
    G : NetworkX graph
        The original graph for which the spanner was constructed.

    spanner : NetworkX graph
        The spanner to be tested.

    stretch : float
        The proclaimed stretch of the spanner.

    weight : object
        The edge attribute to use as distance.
    """
    # check node set
    assert set(G.nodes()) == set(spanner.nodes())

    # check edge set and weights: every spanner edge exists in G with
    # the same weight.
    for u, v in spanner.edges():
        assert G.has_edge(u, v)
        if weight:
            assert spanner[u][v][weight] == G[u][v][weight]

    # check connectivity and stretch: each spanner distance is at most
    # `stretch` times the original distance, for every reachable pair.
    original_length = dict(nx.shortest_path_length(G, weight=weight))
    spanner_length = dict(nx.shortest_path_length(spanner, weight=weight))
    for u in G.nodes():
        for v in G.nodes():
            if u in original_length and v in original_length[u]:
                assert spanner_length[u][v] <= stretch * original_length[u][v]
+
+
@py_random_state(1)
def _assign_random_weights(G, seed=None):
    """Assigns random weights to the edges of a graph.

    Parameters
    ----------

    G : NetworkX graph
        The graph whose edges receive a random "weight" attribute in [0, 1).

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness`.
    """
    # `seed` is converted to a random.Random instance by @py_random_state.
    for u, v in G.edges():
        G[u][v]["weight"] = seed.random()
+
+
def test_spanner_trivial():
    """Test a trivial spanner with stretch 1."""
    G = nx.complete_graph(20)
    spanner = nx.spanner(G, 1, seed=_seed)

    # With stretch 1 the spanner must keep every edge of the graph.
    for u, v in G.edges:
        assert spanner.has_edge(u, v)
+
+
def test_spanner_unweighted_complete_graph():
    """Test spanner construction on a complete unweighted graph."""
    G = nx.complete_graph(20)

    spanner = nx.spanner(G, 4, seed=_seed)
    _test_spanner(G, spanner, 4)

    spanner = nx.spanner(G, 10, seed=_seed)
    _test_spanner(G, spanner, 10)
+
+
def test_spanner_weighted_complete_graph():
    """Test spanner construction on a complete weighted graph."""
    G = nx.complete_graph(20)
    _assign_random_weights(G, seed=_seed)

    spanner = nx.spanner(G, 4, weight="weight", seed=_seed)
    _test_spanner(G, spanner, 4, weight="weight")

    spanner = nx.spanner(G, 10, weight="weight", seed=_seed)
    _test_spanner(G, spanner, 10, weight="weight")
+
+
def test_spanner_unweighted_gnp_graph():
    """Test spanner construction on an unweighted gnp graph."""
    G = nx.gnp_random_graph(20, 0.4, seed=_seed)

    spanner = nx.spanner(G, 4, seed=_seed)
    _test_spanner(G, spanner, 4)

    spanner = nx.spanner(G, 10, seed=_seed)
    _test_spanner(G, spanner, 10)
+
+
def test_spanner_weighted_gnp_graph():
    """Test spanner construction on a weighted gnp graph."""
    G = nx.gnp_random_graph(20, 0.4, seed=_seed)
    _assign_random_weights(G, seed=_seed)

    spanner = nx.spanner(G, 4, weight="weight", seed=_seed)
    _test_spanner(G, spanner, 4, weight="weight")

    spanner = nx.spanner(G, 10, weight="weight", seed=_seed)
    _test_spanner(G, spanner, 10, weight="weight")
+
+
def test_spanner_unweighted_disconnected_graph():
    """Test spanner construction on a disconnected graph."""
    G = nx.disjoint_union(nx.complete_graph(10), nx.complete_graph(10))

    spanner = nx.spanner(G, 4, seed=_seed)
    _test_spanner(G, spanner, 4)

    spanner = nx.spanner(G, 10, seed=_seed)
    _test_spanner(G, spanner, 10)
+
+
def test_spanner_invalid_stretch():
    """Check whether an invalid stretch is caught."""
    # A stretch below 1 is meaningless and must raise ValueError.
    with pytest.raises(ValueError):
        G = nx.empty_graph()
        nx.spanner(G, 0)
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_summarization.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_summarization.py
new file mode 100644
index 0000000000000000000000000000000000000000..823a645d34b14edd2db199d630df397290c543fb
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_summarization.py
@@ -0,0 +1,641 @@
+"""
+Unit tests for dedensification and graph summarization
+"""
+import pytest
+
+import networkx as nx
+
+
class TestDirectedDedensification:
    """Unit tests for :func:`nx.dedensify` on directed graphs."""

    def build_original_graph(self):
        # Each entry maps a source node to an iterable of single-character
        # target nodes (a string iterates per character).
        original_matrix = [
            ("1", "BC"),
            ("2", "ABC"),
            ("3", ["A", "B", "6"]),
            ("4", "ABC"),
            ("5", "AB"),
            ("6", ["5"]),
            ("A", ["6"]),
        ]
        graph = nx.DiGraph()
        for source, targets in original_matrix:
            for target in targets:
                graph.add_edge(source, target)
        return graph

    def build_compressed_graph(self):
        # Expected dedensified graph: "ABC" is the compressor node.
        compressed_matrix = [
            ("1", "BC"),
            ("2", ["ABC"]),
            ("3", ["A", "B", "6"]),
            ("4", ["ABC"]),
            ("5", "AB"),
            ("6", ["5"]),
            ("A", ["6"]),
            ("ABC", "ABC"),
        ]
        compressed_graph = nx.DiGraph()
        for source, targets in compressed_matrix:
            for target in targets:
                compressed_graph.add_edge(source, target)
        return compressed_graph

    def test_empty(self):
        """
        Verify that an empty directed graph results in no compressor nodes
        """
        G = nx.DiGraph()
        compressed_graph, c_nodes = nx.dedensify(G, threshold=2)
        assert c_nodes == set()

    @staticmethod
    def densify(G, compressor_nodes, copy=True):
        """
        Reconstructs the original graph from a dedensified, directed graph

        Parameters
        ----------
        G: dedensified graph
           A networkx graph
        compressor_nodes: iterable
           Iterable of compressor nodes in the dedensified graph
        copy: bool, optional (default: True)
           If True, operate on a copy of G instead of mutating it in place

        Returns
        -------
        G: graph
           A densified networkx graph
        """
        if copy:
            G = G.copy()
        for compressor_node in compressor_nodes:
            # Re-expand the compressor: connect each in-neighbor directly
            # to every out-neighbor, then drop the compressor node.
            all_neighbors = set(nx.all_neighbors(G, compressor_node))
            out_neighbors = set(G.neighbors(compressor_node))
            for out_neighbor in out_neighbors:
                G.remove_edge(compressor_node, out_neighbor)
            in_neighbors = all_neighbors - out_neighbors
            for in_neighbor in in_neighbors:
                G.remove_edge(in_neighbor, compressor_node)
                for out_neighbor in out_neighbors:
                    G.add_edge(in_neighbor, out_neighbor)
            G.remove_node(compressor_node)
        return G

    def setup_method(self):
        # Expected compressor node labels for the directed fixture.
        self.c_nodes = ("ABC",)

    def test_dedensify_edges(self):
        """
        Verifies that dedensify produced the correct edges to/from compressor
        nodes in a directed graph
        """
        G = self.build_original_graph()
        compressed_G = self.build_compressed_graph()
        compressed_graph, c_nodes = nx.dedensify(G, threshold=2)
        for s, t in compressed_graph.edges():
            # Compressor node labels are order-insensitive; normalize by
            # sorting the characters before comparing against the fixture.
            o_s = "".join(sorted(s))
            o_t = "".join(sorted(t))
            compressed_graph_exists = compressed_graph.has_edge(s, t)
            verified_compressed_exists = compressed_G.has_edge(o_s, o_t)
            assert compressed_graph_exists == verified_compressed_exists
        assert len(c_nodes) == len(self.c_nodes)

    def test_dedensify_edge_count(self):
        """
        Verifies that dedensify produced the correct number of compressor nodes
        in a directed graph
        """
        G = self.build_original_graph()
        original_edge_count = len(G.edges())
        c_G, c_nodes = nx.dedensify(G, threshold=2)
        compressed_edge_count = len(c_G.edges())
        assert compressed_edge_count <= original_edge_count
        compressed_G = self.build_compressed_graph()
        assert compressed_edge_count == len(compressed_G.edges())

    def test_densify_edges(self):
        """
        Verifies that densification produces the correct edges from the
        original directed graph
        """
        compressed_G = self.build_compressed_graph()
        original_graph = self.densify(compressed_G, self.c_nodes, copy=True)
        G = self.build_original_graph()
        for s, t in G.edges():
            assert G.has_edge(s, t) == original_graph.has_edge(s, t)

    def test_densify_edge_count(self):
        """
        Verifies that densification produces the correct number of edges in the
        original directed graph
        """
        compressed_G = self.build_compressed_graph()
        compressed_edge_count = len(compressed_G.edges())
        original_graph = self.densify(compressed_G, self.c_nodes)
        original_edge_count = len(original_graph.edges())
        assert compressed_edge_count <= original_edge_count
        G = self.build_original_graph()
        assert original_edge_count == len(G.edges())
+
+
class TestUnDirectedDedensification:
    """Unit tests for :func:`nx.dedensify` on undirected graphs."""

    def build_original_graph(self):
        """
        Builds graph shown in the original research paper
        """
        original_matrix = [
            ("1", "CB"),
            ("2", "ABC"),
            ("3", ["A", "B", "6"]),
            ("4", "ABC"),
            ("5", "AB"),
            ("6", ["5"]),
            ("A", ["6"]),
        ]
        graph = nx.Graph()
        for source, targets in original_matrix:
            for target in targets:
                graph.add_edge(source, target)
        return graph

    def test_empty(self):
        """
        Verify that an empty undirected graph results in no compressor nodes
        """
        G = nx.Graph()
        compressed_G, c_nodes = nx.dedensify(G, threshold=2)
        assert c_nodes == set()

    def setup_method(self):
        # Expected compressor node labels for the undirected fixture.
        self.c_nodes = ("6AB", "ABC")

    def build_compressed_graph(self):
        # Expected dedensified graph with two compressor nodes.
        compressed_matrix = [
            ("1", ["B", "C"]),
            ("2", ["ABC"]),
            ("3", ["6AB"]),
            ("4", ["ABC"]),
            ("5", ["6AB"]),
            ("6", ["6AB", "A"]),
            ("A", ["6AB", "ABC"]),
            ("B", ["ABC", "6AB"]),
            ("C", ["ABC"]),
        ]
        compressed_graph = nx.Graph()
        for source, targets in compressed_matrix:
            for target in targets:
                compressed_graph.add_edge(source, target)
        return compressed_graph

    def test_dedensify_edges(self):
        """
        Verifies that dedensify produced correct compressor nodes and the
        correct edges to/from the compressor nodes in an undirected graph
        """
        G = self.build_original_graph()
        c_G, c_nodes = nx.dedensify(G, threshold=2)
        v_compressed_G = self.build_compressed_graph()
        for s, t in c_G.edges():
            # Normalize compressor labels (character order is arbitrary).
            o_s = "".join(sorted(s))
            o_t = "".join(sorted(t))
            has_compressed_edge = c_G.has_edge(s, t)
            verified_has_compressed_edge = v_compressed_G.has_edge(o_s, o_t)
            assert has_compressed_edge == verified_has_compressed_edge
        assert len(c_nodes) == len(self.c_nodes)

    def test_dedensify_edge_count(self):
        """
        Verifies that dedensify produced the correct number of edges in an
        undirected graph
        """
        G = self.build_original_graph()
        c_G, c_nodes = nx.dedensify(G, threshold=2, copy=True)
        compressed_edge_count = len(c_G.edges())
        verified_original_edge_count = len(G.edges())
        assert compressed_edge_count <= verified_original_edge_count
        verified_compressed_G = self.build_compressed_graph()
        verified_compressed_edge_count = len(verified_compressed_G.edges())
        assert compressed_edge_count == verified_compressed_edge_count
+
+
@pytest.mark.parametrize(
    "graph_type", [nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph]
)
def test_summarization_empty(graph_type):
    # SNAP aggregation of an empty graph of any type yields an empty summary.
    G = graph_type()
    summary_graph = nx.snap_aggregation(G, node_attributes=("color",))
    assert nx.is_isomorphic(summary_graph, G)
+
+
class AbstractSNAP:
    """Shared scaffolding for SNAP aggregation tests.

    Subclasses override :meth:`build_original_graph` and
    :meth:`build_summary_graph` to supply fixture graphs.
    """

    # Node attributes used to group nodes into supernodes.
    node_attributes = ("color",)

    def build_original_graph(self):
        pass

    def build_summary_graph(self):
        pass

    def test_summary_graph(self):
        original_graph = self.build_original_graph()
        summary_graph = self.build_summary_graph()

        relationship_attributes = ("type",)
        generated_summary_graph = nx.snap_aggregation(
            original_graph, self.node_attributes, relationship_attributes
        )
        relabeled_summary_graph = self.deterministic_labels(generated_summary_graph)
        assert nx.is_isomorphic(summary_graph, relabeled_summary_graph)

    def deterministic_labels(self, G):
        # Give supernodes stable "Supernode-N" labels so the generated
        # summary can be compared against the hand-built fixture.
        node_labels = list(G.nodes)
        node_labels = sorted(node_labels, key=lambda n: sorted(G.nodes[n]["group"])[0])
        # NOTE(review): this plain sort() re-sorts by label, which makes the
        # group-keyed sort above redundant — confirm intent upstream.
        node_labels.sort()

        label_mapping = {}
        for index, node in enumerate(node_labels):
            label = "Supernode-%s" % index
            label_mapping[node] = label

        return nx.relabel_nodes(G, label_mapping)
+
+
class TestSNAPNoEdgeTypes(AbstractSNAP):
    """SNAP aggregation using node attributes only (no edge types)."""

    relationship_attributes = ()

    def test_summary_graph(self):
        """Aggregation without relationship attributes matches the summary.

        Overrides the base test because no relationship attributes are
        passed to ``snap_aggregation`` here.
        """
        original_graph = self.build_original_graph()
        summary_graph = self.build_summary_graph()

        # Fix: dropped an unused local ``relationship_attributes = ("type",)``
        # left over from the base-class version of this test.
        generated_summary_graph = nx.snap_aggregation(
            original_graph, self.node_attributes
        )
        relabeled_summary_graph = self.deterministic_labels(generated_summary_graph)
        assert nx.is_isomorphic(summary_graph, relabeled_summary_graph)

    def build_original_graph(self):
        """Build the 12-node colored graph the aggregation runs on."""
        nodes = {
            "A": {"color": "Red"},
            "B": {"color": "Red"},
            "C": {"color": "Red"},
            "D": {"color": "Red"},
            "E": {"color": "Blue"},
            "F": {"color": "Blue"},
            "G": {"color": "Blue"},
            "H": {"color": "Blue"},
            "I": {"color": "Yellow"},
            "J": {"color": "Yellow"},
            "K": {"color": "Yellow"},
            "L": {"color": "Yellow"},
        }
        edges = [
            ("A", "B"),
            ("A", "C"),
            ("A", "E"),
            ("A", "I"),
            ("B", "D"),
            ("B", "J"),
            ("B", "F"),
            ("C", "G"),
            ("D", "H"),
            ("I", "J"),
            ("J", "K"),
            ("I", "L"),
        ]
        G = nx.Graph()
        for node in nodes:
            attributes = nodes[node]
            G.add_node(node, **attributes)

        for source, target in edges:
            G.add_edge(source, target)

        return G

    def build_summary_graph(self):
        """Build the expected summary graph (6 supernodes, untyped edges)."""
        nodes = {
            "Supernode-0": {"color": "Red"},
            "Supernode-1": {"color": "Red"},
            "Supernode-2": {"color": "Blue"},
            "Supernode-3": {"color": "Blue"},
            "Supernode-4": {"color": "Yellow"},
            "Supernode-5": {"color": "Yellow"},
        }
        edges = [
            ("Supernode-0", "Supernode-0"),
            ("Supernode-0", "Supernode-1"),
            ("Supernode-0", "Supernode-2"),
            ("Supernode-0", "Supernode-4"),
            ("Supernode-1", "Supernode-3"),
            ("Supernode-4", "Supernode-4"),
            ("Supernode-4", "Supernode-5"),
        ]
        G = nx.Graph()
        for node in nodes:
            attributes = nodes[node]
            G.add_node(node, **attributes)

        for source, target in edges:
            G.add_edge(source, target)

        # Each supernode records the original nodes it aggregates.
        supernodes = {
            "Supernode-0": {"A", "B"},
            "Supernode-1": {"C", "D"},
            "Supernode-2": {"E", "F"},
            "Supernode-3": {"G", "H"},
            "Supernode-4": {"I", "J"},
            "Supernode-5": {"K", "L"},
        }
        nx.set_node_attributes(G, supernodes, "group")
        return G
+
+
class TestSNAPUndirected(AbstractSNAP):
    """SNAP aggregation on an undirected graph with typed edges."""

    def build_original_graph(self):
        """Build the 12-node colored graph with Strong/Weak edge types."""
        nodes = {
            "A": {"color": "Red"},
            "B": {"color": "Red"},
            "C": {"color": "Red"},
            "D": {"color": "Red"},
            "E": {"color": "Blue"},
            "F": {"color": "Blue"},
            "G": {"color": "Blue"},
            "H": {"color": "Blue"},
            "I": {"color": "Yellow"},
            "J": {"color": "Yellow"},
            "K": {"color": "Yellow"},
            "L": {"color": "Yellow"},
        }
        edges = [
            ("A", "B", "Strong"),
            ("A", "C", "Weak"),
            ("A", "E", "Strong"),
            ("A", "I", "Weak"),
            ("B", "D", "Weak"),
            ("B", "J", "Weak"),
            ("B", "F", "Strong"),
            ("C", "G", "Weak"),
            ("D", "H", "Weak"),
            ("I", "J", "Strong"),
            ("J", "K", "Strong"),
            ("I", "L", "Strong"),
        ]
        G = nx.Graph()
        G.add_nodes_from(nodes.items())
        for u, v, kind in edges:
            G.add_edge(u, v, type=kind)
        return G

    def build_summary_graph(self):
        """Build the expected summary graph (6 supernodes, typed superedges)."""
        nodes = {
            "Supernode-0": {"color": "Red"},
            "Supernode-1": {"color": "Red"},
            "Supernode-2": {"color": "Blue"},
            "Supernode-3": {"color": "Blue"},
            "Supernode-4": {"color": "Yellow"},
            "Supernode-5": {"color": "Yellow"},
        }
        edges = [
            ("Supernode-0", "Supernode-0", "Strong"),
            ("Supernode-0", "Supernode-1", "Weak"),
            ("Supernode-0", "Supernode-2", "Strong"),
            ("Supernode-0", "Supernode-4", "Weak"),
            ("Supernode-1", "Supernode-3", "Weak"),
            ("Supernode-4", "Supernode-4", "Strong"),
            ("Supernode-4", "Supernode-5", "Strong"),
        ]
        G = nx.Graph()
        G.add_nodes_from(nodes.items())
        # Superedges carry a list of type dicts under the "types" key.
        for u, v, kind in edges:
            G.add_edge(u, v, types=[{"type": kind}])

        supernodes = {
            "Supernode-0": {"A", "B"},
            "Supernode-1": {"C", "D"},
            "Supernode-2": {"E", "F"},
            "Supernode-3": {"G", "H"},
            "Supernode-4": {"I", "J"},
            "Supernode-5": {"K", "L"},
        }
        nx.set_node_attributes(G, supernodes, "group")
        return G
+
+
class TestSNAPDirected(AbstractSNAP):
    """SNAP aggregation on a directed graph with typed edges."""

    def build_original_graph(self):
        """Build the 8-node colored digraph with Strong/Weak edge types."""
        nodes = {
            "A": {"color": "Red"},
            "B": {"color": "Red"},
            "C": {"color": "Green"},
            "D": {"color": "Green"},
            "E": {"color": "Blue"},
            "F": {"color": "Blue"},
            "G": {"color": "Yellow"},
            "H": {"color": "Yellow"},
        }
        edges = [
            ("A", "C", "Strong"),
            ("A", "E", "Strong"),
            ("A", "F", "Weak"),
            ("B", "D", "Strong"),
            ("B", "E", "Weak"),
            ("B", "F", "Strong"),
            ("C", "G", "Strong"),
            ("C", "F", "Strong"),
            ("D", "E", "Strong"),
            ("D", "H", "Strong"),
            ("G", "E", "Strong"),
            ("H", "F", "Strong"),
        ]
        G = nx.DiGraph()
        for node in nodes:
            attributes = nodes[node]
            G.add_node(node, **attributes)

        for source, target, type in edges:
            G.add_edge(source, target, type=type)

        return G

    def build_summary_graph(self):
        """Build the expected summary digraph (4 supernodes)."""
        nodes = {
            "Supernode-0": {"color": "Red"},
            "Supernode-1": {"color": "Green"},
            "Supernode-2": {"color": "Blue"},
            "Supernode-3": {"color": "Yellow"},
        }
        edges = [
            ("Supernode-0", "Supernode-1", [{"type": "Strong"}]),
            ("Supernode-0", "Supernode-2", [{"type": "Weak"}, {"type": "Strong"}]),
            ("Supernode-1", "Supernode-2", [{"type": "Strong"}]),
            ("Supernode-1", "Supernode-3", [{"type": "Strong"}]),
            ("Supernode-3", "Supernode-2", [{"type": "Strong"}]),
        ]
        G = nx.DiGraph()
        for node in nodes:
            attributes = nodes[node]
            G.add_node(node, **attributes)

        for source, target, types in edges:
            G.add_edge(source, target, types=types)

        # Fix: removed leftover entries for "Supernode-4"/"Supernode-5" —
        # those nodes do not exist in this 4-supernode summary graph, so
        # set_node_attributes silently ignored them (copy-paste residue
        # from the undirected 6-supernode fixtures).
        supernodes = {
            "Supernode-0": {"A", "B"},
            "Supernode-1": {"C", "D"},
            "Supernode-2": {"E", "F"},
            "Supernode-3": {"G", "H"},
        }
        nx.set_node_attributes(G, supernodes, "group")
        return G
+
+
class TestSNAPUndirectedMulti(AbstractSNAP):
    """SNAP aggregation on an undirected multigraph with typed edges."""

    def build_original_graph(self):
        """Build the 9-node colored multigraph; parallel edges per type."""
        nodes = {
            "A": {"color": "Red"},
            "B": {"color": "Red"},
            "C": {"color": "Red"},
            "D": {"color": "Blue"},
            "E": {"color": "Blue"},
            "F": {"color": "Blue"},
            "G": {"color": "Yellow"},
            "H": {"color": "Yellow"},
            "I": {"color": "Yellow"},
        }
        edges = [
            ("A", "D", ["Weak", "Strong"]),
            ("B", "E", ["Weak", "Strong"]),
            ("D", "I", ["Strong"]),
            ("E", "H", ["Strong"]),
            ("F", "G", ["Weak"]),
            ("I", "G", ["Weak", "Strong"]),
            ("I", "H", ["Weak", "Strong"]),
            ("G", "H", ["Weak", "Strong"]),
        ]
        G = nx.MultiGraph()
        G.add_nodes_from(nodes.items())
        # One parallel edge per listed type.
        for u, v, kinds in edges:
            for kind in kinds:
                G.add_edge(u, v, type=kind)
        return G

    def build_summary_graph(self):
        """Build the expected summary multigraph (6 supernodes)."""
        nodes = {
            "Supernode-0": {"color": "Red"},
            "Supernode-1": {"color": "Blue"},
            "Supernode-2": {"color": "Yellow"},
            "Supernode-3": {"color": "Blue"},
            "Supernode-4": {"color": "Yellow"},
            "Supernode-5": {"color": "Red"},
        }
        edges = [
            ("Supernode-1", "Supernode-2", [{"type": "Weak"}]),
            ("Supernode-2", "Supernode-4", [{"type": "Weak"}, {"type": "Strong"}]),
            ("Supernode-3", "Supernode-4", [{"type": "Strong"}]),
            ("Supernode-3", "Supernode-5", [{"type": "Weak"}, {"type": "Strong"}]),
            ("Supernode-4", "Supernode-4", [{"type": "Weak"}, {"type": "Strong"}]),
        ]
        G = nx.MultiGraph()
        G.add_nodes_from(nodes.items())
        # One parallel edge per entry; each entry (a dict) is stored whole
        # under the edge's "type" attribute, mirroring the fixture layout.
        for u, v, kinds in edges:
            for kind in kinds:
                G.add_edge(u, v, type=kind)

        supernodes = {
            "Supernode-0": {"A", "B"},
            "Supernode-1": {"C", "D"},
            "Supernode-2": {"E", "F"},
            "Supernode-3": {"G", "H"},
            "Supernode-4": {"I", "J"},
            "Supernode-5": {"K", "L"},
        }
        nx.set_node_attributes(G, supernodes, "group")
        return G
+
+
class TestSNAPDirectedMulti(AbstractSNAP):
    """SNAP aggregation on a directed multigraph with typed edges."""

    def build_original_graph(self):
        """Build the 8-node colored multidigraph; parallel edges per type."""
        nodes = {
            "A": {"color": "Red"},
            "B": {"color": "Red"},
            "C": {"color": "Green"},
            "D": {"color": "Green"},
            "E": {"color": "Blue"},
            "F": {"color": "Blue"},
            "G": {"color": "Yellow"},
            "H": {"color": "Yellow"},
        }
        edges = [
            ("A", "C", ["Weak", "Strong"]),
            ("A", "E", ["Strong"]),
            ("A", "F", ["Weak"]),
            ("B", "D", ["Weak", "Strong"]),
            ("B", "E", ["Weak"]),
            ("B", "F", ["Strong"]),
            ("C", "G", ["Weak", "Strong"]),
            ("C", "F", ["Strong"]),
            ("D", "E", ["Strong"]),
            ("D", "H", ["Weak", "Strong"]),
            ("G", "E", ["Strong"]),
            ("H", "F", ["Strong"]),
        ]
        G = nx.MultiDiGraph()
        G.add_nodes_from(nodes.items())
        for u, v, kinds in edges:
            for kind in kinds:
                G.add_edge(u, v, type=kind)
        return G

    def build_summary_graph(self):
        """Build the expected summary multidigraph (4 supernodes)."""
        nodes = {
            "Supernode-0": {"color": "Red"},
            "Supernode-1": {"color": "Blue"},
            "Supernode-2": {"color": "Yellow"},
            "Supernode-3": {"color": "Blue"},
        }
        edges = [
            ("Supernode-0", "Supernode-1", ["Weak", "Strong"]),
            ("Supernode-0", "Supernode-2", ["Weak", "Strong"]),
            ("Supernode-1", "Supernode-2", ["Strong"]),
            ("Supernode-1", "Supernode-3", ["Weak", "Strong"]),
            ("Supernode-3", "Supernode-2", ["Strong"]),
        ]
        G = nx.MultiDiGraph()
        G.add_nodes_from(nodes.items())
        for u, v, kinds in edges:
            for kind in kinds:
                G.add_edge(u, v, type=kind)

        supernodes = {
            "Supernode-0": {"A", "B"},
            "Supernode-1": {"C", "D"},
            "Supernode-2": {"E", "F"},
            "Supernode-3": {"G", "H"},
        }
        nx.set_node_attributes(G, supernodes, "group")
        return G
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_threshold.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_threshold.py
new file mode 100644
index 0000000000000000000000000000000000000000..07aad44bb268a42944260b4217bce15b1278ebfd
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/algorithms/tests/test_threshold.py
@@ -0,0 +1,269 @@
+"""
+Threshold Graphs
+================
+"""
+
+import pytest
+
+import networkx as nx
+import networkx.algorithms.threshold as nxt
+from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic
+
+cnlti = nx.convert_node_labels_to_integers
+
+
class TestGeneratorThreshold:
    """Tests for the threshold-graph routines in networkx.algorithms.threshold."""

    def test_threshold_sequence_graph_test(self):
        """Star/complete graphs are threshold graphs; check degree sequences."""
        G = nx.star_graph(10)
        assert nxt.is_threshold_graph(G)
        assert nxt.is_threshold_sequence([d for n, d in G.degree()])

        G = nx.complete_graph(10)
        assert nxt.is_threshold_graph(G)
        assert nxt.is_threshold_sequence([d for n, d in G.degree()])

        deg = [3, 2, 2, 1, 1, 1]
        assert not nxt.is_threshold_sequence(deg)

        deg = [3, 2, 2, 1]
        assert nxt.is_threshold_sequence(deg)

        G = nx.generators.havel_hakimi_graph(deg)
        assert nxt.is_threshold_graph(G)

    def test_creation_sequences(self):
        """All three creation-sequence formats agree and rebuild the graph."""
        deg = [3, 2, 2, 1]
        G = nx.generators.havel_hakimi_graph(deg)

        # with_labels and compact are mutually exclusive options.
        with pytest.raises(ValueError):
            nxt.creation_sequence(deg, with_labels=True, compact=True)

        cs0 = nxt.creation_sequence(deg)
        H0 = nxt.threshold_graph(cs0)
        assert "".join(cs0) == "ddid"

        cs1 = nxt.creation_sequence(deg, with_labels=True)
        H1 = nxt.threshold_graph(cs1)
        assert cs1 == [(1, "d"), (2, "d"), (3, "i"), (0, "d")]

        cs2 = nxt.creation_sequence(deg, compact=True)
        H2 = nxt.threshold_graph(cs2)
        assert cs2 == [2, 1, 1]
        assert "".join(nxt.uncompact(cs2)) == "ddid"
        assert graph_could_be_isomorphic(H0, G)
        assert graph_could_be_isomorphic(H0, H1)
        assert graph_could_be_isomorphic(H0, H2)

    def test_make_compact(self):
        """Compact run-length form; floats are rejected."""
        assert nxt.make_compact(["d", "d", "d", "i", "d", "d"]) == [3, 1, 2]
        assert nxt.make_compact([3, 1, 2]) == [3, 1, 2]
        assert pytest.raises(TypeError, nxt.make_compact, [3.0, 1.0, 2.0])

    def test_uncompact(self):
        """Expanding a compact sequence; non-compact input passes through."""
        assert nxt.uncompact([3, 1, 2]) == ["d", "d", "d", "i", "d", "d"]
        assert nxt.uncompact(["d", "d", "i", "d"]) == ["d", "d", "i", "d"]
        # uncompact is idempotent on labeled sequences.
        assert nxt.uncompact(
            nxt.uncompact([(1, "d"), (2, "d"), (3, "i"), (0, "d")])
        ) == nxt.uncompact([(1, "d"), (2, "d"), (3, "i"), (0, "d")])
        assert pytest.raises(TypeError, nxt.uncompact, [3.0, 1.0, 2.0])

    def test_creation_sequence_to_weights(self):
        """Weights derived from a compact creation sequence; floats rejected."""
        assert nxt.creation_sequence_to_weights([3, 1, 2]) == [
            0.5,
            0.5,
            0.5,
            0.25,
            0.75,
            0.75,
        ]
        assert pytest.raises(
            TypeError, nxt.creation_sequence_to_weights, [3.0, 1.0, 2.0]
        )

    def test_weights_to_creation_sequence(self):
        """Labeled and compact output formats; exclusive options rejected."""
        deg = [3, 2, 2, 1]
        with pytest.raises(ValueError):
            nxt.weights_to_creation_sequence(deg, with_labels=True, compact=True)
        assert nxt.weights_to_creation_sequence(deg, with_labels=True) == [
            (3, "d"),
            (1, "d"),
            (2, "d"),
            (0, "d"),
        ]
        assert nxt.weights_to_creation_sequence(deg, compact=True) == [4]

    def test_find_alternating_4_cycle(self):
        """A single edge has no alternating 4-cycle."""
        G = nx.Graph()
        G.add_edge(1, 2)
        assert not nxt.find_alternating_4_cycle(G)

    def test_shortest_path(self):
        """Threshold-graph shortest paths agree with the generic algorithm."""
        deg = [3, 2, 2, 1]
        G = nx.generators.havel_hakimi_graph(deg)
        cs1 = nxt.creation_sequence(deg, with_labels=True)
        for n, m in [(3, 0), (0, 3), (0, 2), (0, 1), (1, 3), (3, 1), (1, 2), (2, 3)]:
            assert nxt.shortest_path(cs1, n, m) == nx.shortest_path(G, n, m)

        spl = nxt.shortest_path_length(cs1, 3)
        spl2 = nxt.shortest_path_length([t for v, t in cs1], 2)
        assert spl == spl2

        # Map positional lengths back to node labels and compare with BFS.
        spld = {}
        for j, pl in enumerate(spl):
            n = cs1[j][0]
            spld[n] = pl
        assert spld == nx.single_source_shortest_path_length(G, 3)

        assert nxt.shortest_path(["d", "d", "d", "i", "d", "d"], 1, 2) == [1, 2]
        assert nxt.shortest_path([3, 1, 2], 1, 2) == [1, 2]
        assert pytest.raises(TypeError, nxt.shortest_path, [3.0, 1.0, 2.0], 1, 2)
        assert pytest.raises(ValueError, nxt.shortest_path, [3, 1, 2], "a", 2)
        assert pytest.raises(ValueError, nxt.shortest_path, [3, 1, 2], 1, "b")
        assert nxt.shortest_path([3, 1, 2], 1, 1) == [1]

    def test_shortest_path_length(self):
        """Path lengths from a source, for all creation-sequence formats."""
        assert nxt.shortest_path_length([3, 1, 2], 1) == [1, 0, 1, 2, 1, 1]
        assert nxt.shortest_path_length(["d", "d", "d", "i", "d", "d"], 1) == [
            1,
            0,
            1,
            2,
            1,
            1,
        ]
        assert nxt.shortest_path_length(("d", "d", "d", "i", "d", "d"), 1) == [
            1,
            0,
            1,
            2,
            1,
            1,
        ]
        # Fix: this previously called nxt.shortest_path with a single node,
        # so the TypeError came from the missing second node argument rather
        # than from the float creation sequence this test means to reject.
        assert pytest.raises(TypeError, nxt.shortest_path_length, [3.0, 1.0, 2.0], 1)

    def test_random_threshold_sequence(self):
        """Random sequences have the right length and are seed-reproducible."""
        assert len(nxt.random_threshold_sequence(10, 0.5)) == 10
        assert nxt.random_threshold_sequence(10, 0.5, seed=42) == [
            "d",
            "i",
            "d",
            "d",
            "d",
            "i",
            "i",
            "i",
            "d",
            "d",
        ]
        assert pytest.raises(ValueError, nxt.random_threshold_sequence, 10, 1.5)

    def test_right_d_threshold_sequence(self):
        assert nxt.right_d_threshold_sequence(3, 2) == ["d", "i", "d"]
        # More edges than a 2-node graph can hold.
        assert pytest.raises(ValueError, nxt.right_d_threshold_sequence, 2, 3)

    def test_left_d_threshold_sequence(self):
        assert nxt.left_d_threshold_sequence(3, 2) == ["d", "i", "d"]
        # More edges than a 2-node graph can hold.
        assert pytest.raises(ValueError, nxt.left_d_threshold_sequence, 2, 3)

    def test_weights_thresholds(self):
        """Weight <-> creation-sequence conversions round-trip."""
        wseq = [3, 4, 3, 3, 5, 6, 5, 4, 5, 6]
        cs = nxt.weights_to_creation_sequence(wseq, threshold=10)
        wseq = nxt.creation_sequence_to_weights(cs)
        cs2 = nxt.weights_to_creation_sequence(wseq)
        assert cs == cs2

        wseq = nxt.creation_sequence_to_weights(nxt.uncompact([3, 1, 2, 3, 3, 2, 3]))
        assert wseq == [
            s * 0.125 for s in [4, 4, 4, 3, 5, 5, 2, 2, 2, 6, 6, 6, 1, 1, 7, 7, 7]
        ]

        wseq = nxt.creation_sequence_to_weights([3, 1, 2, 3, 3, 2, 3])
        assert wseq == [
            s * 0.125 for s in [4, 4, 4, 3, 5, 5, 2, 2, 2, 6, 6, 6, 1, 1, 7, 7, 7]
        ]

        wseq = nxt.creation_sequence_to_weights(list(enumerate("ddidiiidididi")))
        assert wseq == [s * 0.1 for s in [5, 5, 4, 6, 3, 3, 3, 7, 2, 8, 1, 9, 0]]

        wseq = nxt.creation_sequence_to_weights("ddidiiidididi")
        assert wseq == [s * 0.1 for s in [5, 5, 4, 6, 3, 3, 3, 7, 2, 8, 1, 9, 0]]

        wseq = nxt.creation_sequence_to_weights("ddidiiidididid")
        ws = [s / 12 for s in [6, 6, 5, 7, 4, 4, 4, 8, 3, 9, 2, 10, 1, 11]]
        # Elementwise float comparison with tolerance.
        assert sum(abs(c - d) for c, d in zip(wseq, ws)) < 1e-14

    def test_finding_routines(self):
        G = nx.Graph({1: [2], 2: [3], 3: [4], 4: [5], 5: [6]})
        G.add_edge(2, 4)
        G.add_edge(2, 5)
        G.add_edge(2, 7)
        G.add_edge(3, 6)
        G.add_edge(4, 6)

        # Alternating 4 cycle
        assert nxt.find_alternating_4_cycle(G) == [1, 2, 3, 6]

        # Threshold graph
        TG = nxt.find_threshold_graph(G)
        assert nxt.is_threshold_graph(TG)
        assert sorted(TG.nodes()) == [1, 2, 3, 4, 5, 7]

        cs = nxt.creation_sequence(dict(TG.degree()), with_labels=True)
        assert nxt.find_creation_sequence(G) == cs

    def test_fast_versions_properties_threshold_graphs(self):
        """Fast sequence-based properties match the generic graph algorithms."""
        cs = "ddiiddid"
        G = nxt.threshold_graph(cs)
        assert nxt.density("ddiiddid") == nx.density(G)
        assert sorted(nxt.degree_sequence(cs)) == sorted(d for n, d in G.degree())

        ts = nxt.triangle_sequence(cs)
        assert ts == list(nx.triangles(G).values())
        assert sum(ts) // 3 == nxt.triangles(cs)

        c1 = nxt.cluster_sequence(cs)
        c2 = list(nx.clustering(G).values())
        assert sum(abs(c - d) for c, d in zip(c1, c2)) == pytest.approx(0, abs=1e-7)

        b1 = nx.betweenness_centrality(G).values()
        b2 = nxt.betweenness_sequence(cs)
        assert sum(abs(c - d) for c, d in zip(b1, b2)) < 1e-7

        assert nxt.eigenvalues(cs) == [0, 1, 3, 3, 5, 7, 7, 8]

        # Degree Correlation
        assert abs(nxt.degree_correlation(cs) + 0.593038821954) < 1e-12
        assert nxt.degree_correlation("diiiddi") == -0.8
        assert nxt.degree_correlation("did") == -1.0
        assert nxt.degree_correlation("ddd") == 1.0
        assert nxt.eigenvalues("dddiii") == [0, 0, 0, 0, 3, 3]
        assert nxt.eigenvalues("dddiiid") == [0, 1, 1, 1, 4, 4, 7]

    def test_tg_creation_routines(self):
        """Smoke tests: generation and swap routines run without error."""
        nxt.left_d_threshold_sequence(5, 7)
        s = nxt.right_d_threshold_sequence(5, 7)
        nxt.swap_d(s, 1.0, 1.0)
        nxt.swap_d(s, 1.0, 1.0, seed=1)

    def test_eigenvectors(self):
        """Eigenvectors from the creation sequence are unit-normalized."""
        np = pytest.importorskip("numpy")
        eigenval = np.linalg.eigvals
        pytest.importorskip("scipy")

        cs = "ddiiddid"
        G = nxt.threshold_graph(cs)
        (tgeval, tgevec) = nxt.eigenvectors(cs)
        np.testing.assert_allclose([np.dot(lv, lv) for lv in tgevec], 1.0, rtol=1e-9)
        # Smoke check: the Laplacian of a threshold graph can be built.
        # (Result was previously bound to an unused local.)
        nx.laplacian_matrix(G)

    def test_create_using(self):
        """threshold_graph honors create_using; directed targets are rejected."""
        cs = "ddiiddid"
        G = nxt.threshold_graph(cs)
        assert pytest.raises(
            nx.exception.NetworkXError,
            nxt.threshold_graph,
            cs,
            create_using=nx.DiGraph(),
        )
        MG = nxt.threshold_graph(cs, create_using=nx.MultiGraph())
        assert sorted(MG.edges()) == sorted(G.edges())
diff --git a/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/laplacianmatrix.py b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/laplacianmatrix.py
new file mode 100644
index 0000000000000000000000000000000000000000..13763828131825824a73b121d36d2be8892fb63a
--- /dev/null
+++ b/tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/linalg/laplacianmatrix.py
@@ -0,0 +1,428 @@
+"""Laplacian matrix of graphs.
+"""
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = [
+ "laplacian_matrix",
+ "normalized_laplacian_matrix",
+ "total_spanning_tree_weight",
+ "directed_laplacian_matrix",
+ "directed_combinatorial_laplacian_matrix",
+]
+
+
@not_implemented_for("directed")
@nx._dispatch(edge_attrs="weight")
def laplacian_matrix(G, nodelist=None, weight="weight"):
    """Returns the Laplacian matrix of G.

    The graph Laplacian is ``L = D - A``, where ``A`` is the adjacency
    matrix and ``D`` the diagonal matrix of node degrees. For graphs with
    multiple connected components, ``L`` is permutation-similar to a block
    diagonal matrix of the per-component Laplacians.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    nodelist : list, optional
        The rows and columns are ordered according to the nodes in nodelist.
        If nodelist is None, then the ordering is produced by G.nodes().

    weight : string or None, optional (default='weight')
        The edge data key used to compute each value in the matrix.
        If None, then each edge has weight 1.

    Returns
    -------
    L : SciPy sparse array
        The Laplacian matrix of G.

    Notes
    -----
    For MultiGraph, the edges weights are summed.

    See Also
    --------
    :func:`~networkx.convert_matrix.to_numpy_array`
    normalized_laplacian_matrix
    :func:`~networkx.linalg.spectrum.laplacian_spectrum`
    """
    import scipy as sp

    nodes = list(G) if nodelist is None else nodelist
    adjacency = nx.to_scipy_sparse_array(
        G, nodelist=nodes, weight=weight, format="csr"
    )
    rows, cols = adjacency.shape
    degrees = adjacency.sum(axis=1)
    # TODO: drop the csr_array wrapper once spdiags can produce arrays
    degree_diag = sp.sparse.csr_array(
        sp.sparse.spdiags(degrees, 0, cols, rows, format="csr")
    )
    return degree_diag - adjacency
+
+
@not_implemented_for("directed")
@nx._dispatch(edge_attrs="weight")
def normalized_laplacian_matrix(G, nodelist=None, weight="weight"):
    r"""Returns the normalized Laplacian matrix of G.

    The normalized graph Laplacian is

    .. math::

        N = D^{-1/2} L D^{-1/2}

    where `L` is the graph Laplacian and `D` is the diagonal matrix of
    node degrees [1]_.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    nodelist : list, optional
        The rows and columns are ordered according to the nodes in nodelist.
        If nodelist is None, then the ordering is produced by G.nodes().

    weight : string or None, optional (default='weight')
        The edge data key used to compute each value in the matrix.
        If None, then each edge has weight 1.

    Returns
    -------
    N : SciPy sparse array
        The normalized Laplacian matrix of G.

    Notes
    -----
    For MultiGraph, the edges weights are summed.
    If the Graph contains selfloops, D is defined as ``diag(sum(A, 1))``,
    where A is the adjacency matrix [2]_.

    See Also
    --------
    laplacian_matrix
    normalized_laplacian_spectrum

    References
    ----------
    .. [1] Fan Chung-Graham, Spectral Graph Theory,
       CBMS Regional Conference Series in Mathematics, Number 92, 1997.
    .. [2] Steve Butler, Interlacing For Weighted Graphs Using The Normalized
       Laplacian, Electronic Journal of Linear Algebra, Volume 16, pp. 90-98,
       March 2007.
    """
    import numpy as np
    import scipy as sp

    nodes = list(G) if nodelist is None else nodelist
    adjacency = nx.to_scipy_sparse_array(
        G, nodelist=nodes, weight=weight, format="csr"
    )
    rows, cols = adjacency.shape
    degrees = adjacency.sum(axis=1)
    # TODO: drop the csr_array wrapper once spdiags can produce arrays
    degree_diag = sp.sparse.csr_array(
        sp.sparse.spdiags(degrees, 0, cols, rows, format="csr")
    )
    laplacian = degree_diag - adjacency
    # Isolated nodes get 1/sqrt(0) == inf; zero them so N has empty
    # rows/columns for those nodes instead of infinities.
    with np.errstate(divide="ignore"):
        inv_sqrt_deg = 1.0 / np.sqrt(degrees)
    inv_sqrt_deg[np.isinf(inv_sqrt_deg)] = 0
    # TODO: drop the csr_array wrapper once spdiags can produce arrays
    half_scale = sp.sparse.csr_array(
        sp.sparse.spdiags(inv_sqrt_deg, 0, cols, rows, format="csr")
    )
    return half_scale @ (laplacian @ half_scale)
+
+
@nx._dispatch(edge_attrs="weight")
def total_spanning_tree_weight(G, weight=None):
    """
    Returns the total weight of all spanning trees of `G`.

    By Kirchhoff's matrix-tree theorem, the determinant of any cofactor of
    the graph Laplacian equals the number of spanning trees; with a weighted
    Laplacian it equals the sum over all spanning trees of the product of
    each tree's edge weights.

    Parameters
    ----------
    G : NetworkX Graph
        The graph to use Kirchhoff's theorem on.

    weight : string or None
        The key for the edge attribute holding the edge weight. If `None`, then
        each edge is assumed to have a weight of 1 and this function returns the
        total number of spanning trees in `G`.

    Returns
    -------
    float
        The sum of the total multiplicative weights for all spanning trees in `G`
    """
    import numpy as np

    laplacian = nx.laplacian_matrix(G, weight=weight).toarray()
    # Cofactor obtained by deleting the first row and column.
    cofactor = laplacian[1:, 1:]
    return abs(np.linalg.det(cofactor))
+
+
+###############################################################################
+# Code based on work from https://github.com/bjedwards
+
+
@not_implemented_for("undirected")
@not_implemented_for("multigraph")
@nx._dispatch(edge_attrs="weight")
def directed_laplacian_matrix(
    G, nodelist=None, weight="weight", walk_type=None, alpha=0.95
):
    r"""Returns the directed Laplacian matrix of G.

    The graph directed Laplacian is the matrix

    .. math::

        L = I - (\Phi^{1/2} P \Phi^{-1/2} + \Phi^{-1/2} P^T \Phi^{1/2} ) / 2

    where `I` is the identity matrix, `P` is the transition matrix of the
    graph, and `\Phi` a matrix with the Perron vector of `P` in the diagonal
    and zeros elsewhere [1]_.

    Depending on the value of walk_type, `P` can be the transition matrix
    induced by a random walk, a lazy random walk, or a random walk with
    teleportation (PageRank).

    Parameters
    ----------
    G : DiGraph
        A NetworkX graph

    nodelist : list, optional
        The rows and columns are ordered according to the nodes in nodelist.
        If nodelist is None, then the ordering is produced by G.nodes().

    weight : string or None, optional (default='weight')
        The edge data key used to compute each value in the matrix.
        If None, then each edge has weight 1.

    walk_type : string or None, optional (default=None)
        If None, `P` is selected depending on the properties of the
        graph. Otherwise is one of 'random', 'lazy', or 'pagerank'

    alpha : real
        (1 - alpha) is the teleportation probability used with pagerank

    Returns
    -------
    L : NumPy matrix
        Normalized Laplacian of G.

    Notes
    -----
    Only implemented for DiGraphs

    See Also
    --------
    laplacian_matrix

    References
    ----------
    .. [1] Fan Chung (2005).
       Laplacians and the Cheeger inequality for directed graphs.
       Annals of Combinatorics, 9(1), 2005
    """
    import numpy as np
    import scipy as sp

    # NOTE: P has type ndarray if walk_type=="pagerank", else csr_array
    P = _transition_matrix(
        G, nodelist=nodelist, weight=weight, walk_type=walk_type, alpha=alpha
    )
    n, m = P.shape

    # Dominant left eigenvector of P, normalized to a probability vector.
    evals, evecs = sp.sparse.linalg.eigs(P.T, k=1)
    perron = evecs.flatten().real
    p = perron / perron.sum()
    # p >= 0 by the Perron-Frobenius theorem; abs() guards against roundoff
    # crossing zero (gh-6865).
    sqrtp = np.sqrt(np.abs(p))

    # TODO: drop the csr_array wrappers when spdiags creates arrays
    scale = sp.sparse.csr_array(sp.sparse.spdiags(sqrtp, 0, n, n))
    unscale = sp.sparse.csr_array(sp.sparse.spdiags(1.0 / sqrtp, 0, n, n))
    Q = scale @ P @ unscale

    # NOTE: This could be sparsified for the non-pagerank cases
    eye = np.identity(len(G))
    return eye - (Q + Q.T) / 2.0
+
+
@not_implemented_for("undirected")
@not_implemented_for("multigraph")
@nx._dispatch(edge_attrs="weight")
def directed_combinatorial_laplacian_matrix(
    G, nodelist=None, weight="weight", walk_type=None, alpha=0.95
):
    r"""Return the directed combinatorial Laplacian matrix of G.

    The graph directed combinatorial Laplacian is the matrix

    .. math::

        L = \Phi - (\Phi P + P^T \Phi) / 2

    where `P` is the transition matrix of the graph and `\Phi` a matrix
    with the Perron vector of `P` in the diagonal and zeros elsewhere [1]_.

    Depending on the value of walk_type, `P` can be the transition matrix
    induced by a random walk, a lazy random walk, or a random walk with
    teleportation (PageRank).

    Parameters
    ----------
    G : DiGraph
        A NetworkX graph

    nodelist : list, optional
        The rows and columns are ordered according to the nodes in nodelist.
        If nodelist is None, then the ordering is produced by G.nodes().

    weight : string or None, optional (default='weight')
        The edge data key used to compute each value in the matrix.
        If None, then each edge has weight 1.

    walk_type : string or None, optional (default=None)
        If None, `P` is selected depending on the properties of the
        graph. Otherwise is one of 'random', 'lazy', or 'pagerank'

    alpha : real
        (1 - alpha) is the teleportation probability used with pagerank

    Returns
    -------
    L : NumPy matrix
        Combinatorial Laplacian of G.

    Notes
    -----
    Only implemented for DiGraphs

    See Also
    --------
    laplacian_matrix

    References
    ----------
    .. [1] Fan Chung (2005).
       Laplacians and the Cheeger inequality for directed graphs.
       Annals of Combinatorics, 9(1), 2005
    """
    import scipy as sp

    P = _transition_matrix(
        G, nodelist=nodelist, weight=weight, walk_type=walk_type, alpha=alpha
    )
    n, m = P.shape

    # Dominant left eigenvector of P, normalized to a probability vector.
    evals, evecs = sp.sparse.linalg.eigs(P.T, k=1)
    perron = evecs.flatten().real
    p = perron / perron.sum()

    # NOTE: could be improved by not densifying
    # TODO: drop the csr_array wrapper when spdiags array creation is available
    Phi = sp.sparse.csr_array(sp.sparse.spdiags(p, 0, n, n)).toarray()
    return Phi - (Phi @ P + P.T @ Phi) / 2.0
+
+
def _transition_matrix(G, nodelist=None, weight="weight", walk_type=None, alpha=0.95):
    """Returns the transition matrix of G.

    This is a row-stochastic matrix giving the transition probabilities of a
    random walk on the graph. Depending on the value of walk_type, P can be
    the transition matrix induced by a random walk, a lazy random walk, or a
    random walk with teleportation (PageRank).

    Parameters
    ----------
    G : DiGraph
        A NetworkX graph

    nodelist : list, optional
        The rows and columns are ordered according to the nodes in nodelist.
        If nodelist is None, then the ordering is produced by G.nodes().

    weight : string or None, optional (default='weight')
        The edge data key used to compute each value in the matrix.
        If None, then each edge has weight 1.

    walk_type : string or None, optional (default=None)
        If None, `P` is selected depending on the properties of the
        graph. Otherwise is one of 'random', 'lazy', or 'pagerank'

    alpha : real
        (1 - alpha) is the teleportation probability used with pagerank

    Returns
    -------
    P : numpy.ndarray
        transition matrix of G.

    Raises
    ------
    NetworkXError
        If walk_type not specified or alpha not in valid range
    """
    import numpy as np
    import scipy as sp

    if walk_type is None:
        # Pick the weakest walk that still has a well-defined stationary
        # distribution for this graph.
        if not nx.is_strongly_connected(G):
            walk_type = "pagerank"
        elif nx.is_aperiodic(G):
            walk_type = "random"
        else:
            walk_type = "lazy"

    A = nx.to_scipy_sparse_array(G, nodelist=nodelist, weight=weight, dtype=float)
    n, m = A.shape

    if walk_type in ["random", "lazy"]:
        # Inverse-degree diagonal; row-normalizes A.
        # TODO: drop the csr_array wrapper when spdiags array creation is available
        DI = sp.sparse.csr_array(sp.sparse.spdiags(1.0 / A.sum(axis=1), 0, n, n))
        if walk_type == "random":
            return DI @ A
        # Lazy walk: stay put with probability 1/2.
        # TODO: drop the csr_array wrapper when identity array creation is available
        eye = sp.sparse.csr_array(sp.sparse.identity(n))
        return (eye + DI @ A) / 2.0

    if walk_type == "pagerank":
        if not (0 < alpha < 1):
            raise nx.NetworkXError("alpha must be between 0 and 1")
        # this is using a dense representation. NOTE: This should be sparsified!
        dense = A.toarray()
        # Dangling nodes (zero out-degree) teleport uniformly.
        dense[dense.sum(axis=1) == 0, :] = 1 / n
        # Row-normalize, then mix with uniform teleportation.
        dense = dense / dense.sum(axis=1)[np.newaxis, :].T
        return alpha * dense + (1 - alpha) / n

    raise nx.NetworkXError("walk_type must be random, lazy, or pagerank")