Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/__init__.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/allowed_functions.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/bytecode_analysis.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/bytecode_transformation.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/comptime.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/config_utils.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/convert_frame.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/debug_utils.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/eval_frame.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/exc.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/hooks.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/logging.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/output_graph.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/profiler.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/resume_execution.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/symbolic_convert.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/test_case.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/testing.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/utils.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/backends/__pycache__/common.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/backends/__pycache__/tensorrt.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/variables/__init__.py +89 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/variables/__pycache__/builder.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/variables/__pycache__/builtin.cpython-310.pyc +0 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/variables/base.py +296 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/variables/dicts.py +440 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/variables/functions.py +476 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/variables/nn_module.py +636 -0
- openflamingo/lib/python3.10/site-packages/torch/_dynamo/variables/user_defined.py +410 -0
- openflamingo/lib/python3.10/site-packages/torch/_subclasses/meta_utils.py +522 -0
- phi4/lib/python3.10/site-packages/networkx/__init__.py +53 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/__init__.py +133 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/bridges.py +205 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/broadcasting.py +155 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/chains.py +172 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/chordal.py +443 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/clique.py +755 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/core.py +649 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/covering.py +142 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/cycles.py +1230 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/dag.py +1418 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/distance_measures.py +1022 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/distance_regular.py +238 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/dominance.py +135 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/dominating.py +95 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/graph_hashing.py +328 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/graphical.py +483 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/hierarchy.py +57 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/hybrid.py +196 -0
- phi4/lib/python3.10/site-packages/networkx/algorithms/lowest_common_ancestors.py +269 -0
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (3.18 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/allowed_functions.cpython-310.pyc
ADDED
|
Binary file (8.73 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/bytecode_analysis.cpython-310.pyc
ADDED
|
Binary file (6.15 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/bytecode_transformation.cpython-310.pyc
ADDED
|
Binary file (14.5 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/comptime.cpython-310.pyc
ADDED
|
Binary file (11.6 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/config_utils.cpython-310.pyc
ADDED
|
Binary file (7.21 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/convert_frame.cpython-310.pyc
ADDED
|
Binary file (11.5 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/debug_utils.cpython-310.pyc
ADDED
|
Binary file (28.1 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/eval_frame.cpython-310.pyc
ADDED
|
Binary file (23.3 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/exc.cpython-310.pyc
ADDED
|
Binary file (6.14 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/hooks.cpython-310.pyc
ADDED
|
Binary file (613 Bytes). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/logging.cpython-310.pyc
ADDED
|
Binary file (2.56 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/output_graph.cpython-310.pyc
ADDED
|
Binary file (21.6 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/profiler.cpython-310.pyc
ADDED
|
Binary file (5.92 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/resume_execution.cpython-310.pyc
ADDED
|
Binary file (7.64 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/symbolic_convert.cpython-310.pyc
ADDED
|
Binary file (56.8 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/test_case.cpython-310.pyc
ADDED
|
Binary file (2.09 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/testing.cpython-310.pyc
ADDED
|
Binary file (9.52 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/__pycache__/utils.cpython-310.pyc
ADDED
|
Binary file (38.3 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/backends/__pycache__/common.cpython-310.pyc
ADDED
|
Binary file (3.64 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/backends/__pycache__/tensorrt.cpython-310.pyc
ADDED
|
Binary file (256 Bytes). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/variables/__init__.py
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .base import VariableTracker
|
| 2 |
+
from .builtin import BuiltinVariable
|
| 3 |
+
from .constant import ConstantVariable, EnumVariable
|
| 4 |
+
from .dicts import ConstDictVariable, DataClassVariable, DefaultDictVariable
|
| 5 |
+
from .functions import (
|
| 6 |
+
NestedUserFunctionVariable,
|
| 7 |
+
UserFunctionVariable,
|
| 8 |
+
UserMethodVariable,
|
| 9 |
+
)
|
| 10 |
+
from .lists import (
|
| 11 |
+
BaseListVariable,
|
| 12 |
+
ListIteratorVariable,
|
| 13 |
+
ListVariable,
|
| 14 |
+
NamedTupleVariable,
|
| 15 |
+
RangeVariable,
|
| 16 |
+
SliceVariable,
|
| 17 |
+
TupleVariable,
|
| 18 |
+
)
|
| 19 |
+
from .misc import (
|
| 20 |
+
AutogradFunctionVariable,
|
| 21 |
+
BlackHoleVariable,
|
| 22 |
+
ClosureVariable,
|
| 23 |
+
ContextWrappingVariable,
|
| 24 |
+
CUDAStreamContextVariable,
|
| 25 |
+
CUDAStreamVariable,
|
| 26 |
+
GetAttrVariable,
|
| 27 |
+
GradModeVariable,
|
| 28 |
+
InspectSignatureVariable,
|
| 29 |
+
LambdaVariable,
|
| 30 |
+
NewCellVariable,
|
| 31 |
+
NewGlobalVariable,
|
| 32 |
+
NumpyVariable,
|
| 33 |
+
PythonModuleVariable,
|
| 34 |
+
SuperVariable,
|
| 35 |
+
UnknownVariable,
|
| 36 |
+
WithExitFunctionVariable,
|
| 37 |
+
)
|
| 38 |
+
from .nn_module import NNModuleVariable, UnspecializedNNModuleVariable
|
| 39 |
+
from .tensor import (
|
| 40 |
+
FakeItemVariable,
|
| 41 |
+
SymNodeVariable,
|
| 42 |
+
TensorVariable,
|
| 43 |
+
UnspecializedPythonVariable,
|
| 44 |
+
)
|
| 45 |
+
from .torch import TorchVariable
|
| 46 |
+
from .user_defined import UserDefinedClassVariable, UserDefinedObjectVariable
|
| 47 |
+
|
| 48 |
+
__all__ = [
|
| 49 |
+
"AutogradFunctionVariable",
|
| 50 |
+
"BaseListVariable",
|
| 51 |
+
"BlackHoleVariable",
|
| 52 |
+
"BuiltinVariable",
|
| 53 |
+
"ClosureVariable",
|
| 54 |
+
"ConstantVariable",
|
| 55 |
+
"ConstDictVariable",
|
| 56 |
+
"ContextWrappingVariable",
|
| 57 |
+
"DataClassVariable",
|
| 58 |
+
"DefaultDictVariable",
|
| 59 |
+
"EnumVariable",
|
| 60 |
+
"FakeItemVariable",
|
| 61 |
+
"GetAttrVariable",
|
| 62 |
+
"GradModeVariable",
|
| 63 |
+
"InspectSignatureVariable",
|
| 64 |
+
"LambdaVariable",
|
| 65 |
+
"ListIteratorVariable",
|
| 66 |
+
"ListVariable",
|
| 67 |
+
"NamedTupleVariable",
|
| 68 |
+
"NestedUserFunctionVariable",
|
| 69 |
+
"NewCellVariable",
|
| 70 |
+
"NewGlobalVariable",
|
| 71 |
+
"NNModuleVariable",
|
| 72 |
+
"NumpyVariable",
|
| 73 |
+
"PythonModuleVariable",
|
| 74 |
+
"RangeVariable",
|
| 75 |
+
"SliceVariable",
|
| 76 |
+
"SuperVariable",
|
| 77 |
+
"TensorVariable",
|
| 78 |
+
"TorchVariable",
|
| 79 |
+
"TupleVariable",
|
| 80 |
+
"UnknownVariable",
|
| 81 |
+
"UnspecializedNNModuleVariable",
|
| 82 |
+
"UnspecializedPythonVariable",
|
| 83 |
+
"UserDefinedClassVariable",
|
| 84 |
+
"UserDefinedObjectVariable",
|
| 85 |
+
"UserFunctionVariable",
|
| 86 |
+
"UserMethodVariable",
|
| 87 |
+
"VariableTracker",
|
| 88 |
+
"WithExitFunctionVariable",
|
| 89 |
+
]
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/variables/__pycache__/builder.cpython-310.pyc
ADDED
|
Binary file (20.8 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/variables/__pycache__/builtin.cpython-310.pyc
ADDED
|
Binary file (29.2 kB). View file
|
|
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/variables/base.py
ADDED
|
@@ -0,0 +1,296 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
from typing import Any, Callable, Dict, List, Optional, Set
|
| 3 |
+
|
| 4 |
+
from .. import variables
|
| 5 |
+
from ..exc import unimplemented
|
| 6 |
+
from ..source import AttrSource, Source
|
| 7 |
+
from ..utils import dict_values, identity, istype, odict_values
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class MutableLocal:
|
| 11 |
+
"""
|
| 12 |
+
Marker used to indicate this (list, iter, etc) was constructed in
|
| 13 |
+
local scope and can be mutated safely in analysis without leaking
|
| 14 |
+
state.
|
| 15 |
+
"""
|
| 16 |
+
|
| 17 |
+
def __hash__(self):
|
| 18 |
+
return id(self)
|
| 19 |
+
|
| 20 |
+
def __eq__(self, other):
|
| 21 |
+
return self is other
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
# metaclass to call post_init
|
| 25 |
+
class HasPostInit(type):
|
| 26 |
+
def __call__(cls, *args, **kwargs):
|
| 27 |
+
obj = type.__call__(cls, *args, **kwargs)
|
| 28 |
+
obj.__post_init__(*args, **kwargs)
|
| 29 |
+
return obj
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class VariableTracker(metaclass=HasPostInit):
|
| 33 |
+
"""
|
| 34 |
+
Base class for tracked locals and stack values
|
| 35 |
+
|
| 36 |
+
VariableTracker instances are immutable and should be copied in
|
| 37 |
+
order to change them.
|
| 38 |
+
"""
|
| 39 |
+
|
| 40 |
+
# fields to leave unmodified in apply()
|
| 41 |
+
_nonvar_fields = ["value"]
|
| 42 |
+
|
| 43 |
+
@staticmethod
|
| 44 |
+
def propagate(*vars: List[List["VariableTracker"]]):
|
| 45 |
+
"""Combine the guards from many VariableTracker into **kwargs for a new instance"""
|
| 46 |
+
guards = set()
|
| 47 |
+
|
| 48 |
+
def visit(var):
|
| 49 |
+
if type(var) in (list, tuple, dict_values, odict_values):
|
| 50 |
+
for i in var:
|
| 51 |
+
visit(i)
|
| 52 |
+
else:
|
| 53 |
+
assert isinstance(var, VariableTracker), typestr(var)
|
| 54 |
+
guards.update(var.guards)
|
| 55 |
+
|
| 56 |
+
visit(vars)
|
| 57 |
+
return {
|
| 58 |
+
"guards": guards,
|
| 59 |
+
}
|
| 60 |
+
|
| 61 |
+
def clone(self, **kwargs):
|
| 62 |
+
"""Shallow copy with some (optional) changes"""
|
| 63 |
+
args = dict(self.__dict__)
|
| 64 |
+
args.update(kwargs)
|
| 65 |
+
return self.__class__(**args)
|
| 66 |
+
|
| 67 |
+
@classmethod
|
| 68 |
+
def copy(cls, value):
|
| 69 |
+
"""Deeper (but not full) copy, leaving FX and user objects alone"""
|
| 70 |
+
return cls.apply(identity, value)
|
| 71 |
+
|
| 72 |
+
@classmethod
|
| 73 |
+
def apply(
|
| 74 |
+
cls,
|
| 75 |
+
fn: Callable[["VariableTracker"], "VariableTracker"],
|
| 76 |
+
value,
|
| 77 |
+
cache=None,
|
| 78 |
+
skip_fn=lambda _: False, # Whether we should skip applying to this var
|
| 79 |
+
):
|
| 80 |
+
"""
|
| 81 |
+
Walk this object and call fn on all the VariableTracker
|
| 82 |
+
instances to produce a new VariableTracker with the results.
|
| 83 |
+
"""
|
| 84 |
+
if cache is None:
|
| 85 |
+
cache = dict()
|
| 86 |
+
|
| 87 |
+
idx = id(value)
|
| 88 |
+
if idx in cache:
|
| 89 |
+
return cache[idx][0]
|
| 90 |
+
|
| 91 |
+
if isinstance(value, VariableTracker):
|
| 92 |
+
if not skip_fn(value):
|
| 93 |
+
updated_dict = dict(value.__dict__)
|
| 94 |
+
for key in updated_dict.keys():
|
| 95 |
+
if key not in value._nonvar_fields:
|
| 96 |
+
updated_dict[key] = cls.apply(
|
| 97 |
+
fn, updated_dict[key], cache, skip_fn
|
| 98 |
+
)
|
| 99 |
+
result = fn(value.clone(**updated_dict))
|
| 100 |
+
else:
|
| 101 |
+
result = fn(value)
|
| 102 |
+
|
| 103 |
+
elif istype(value, list):
|
| 104 |
+
result = [cls.apply(fn, v, cache, skip_fn) for v in value]
|
| 105 |
+
elif istype(value, tuple):
|
| 106 |
+
result = tuple(cls.apply(fn, v, cache, skip_fn) for v in value)
|
| 107 |
+
elif istype(value, collections.OrderedDict):
|
| 108 |
+
result = collections.OrderedDict(
|
| 109 |
+
cls.apply(fn, v, cache, skip_fn) for v in value.items()
|
| 110 |
+
)
|
| 111 |
+
elif istype(value, dict):
|
| 112 |
+
result = {
|
| 113 |
+
k: cls.apply(fn, v, cache, skip_fn) for k, v in list(value.items())
|
| 114 |
+
}
|
| 115 |
+
else:
|
| 116 |
+
result = value
|
| 117 |
+
|
| 118 |
+
# save `value` to keep it alive and ensure id() isn't reused
|
| 119 |
+
cache[idx] = (result, value)
|
| 120 |
+
return result
|
| 121 |
+
|
| 122 |
+
def add_guard(self, guard):
|
| 123 |
+
return self.clone(guards=set.union(self.guards, {guard}))
|
| 124 |
+
|
| 125 |
+
def add_guards(self, guards):
|
| 126 |
+
if guards is None:
|
| 127 |
+
return self
|
| 128 |
+
assert isinstance(guards, set)
|
| 129 |
+
return self.clone(guards=set.union(self.guards, guards))
|
| 130 |
+
|
| 131 |
+
def add_options(self, options, *more):
|
| 132 |
+
if more:
|
| 133 |
+
return self.add_options(options).add_options(*more)
|
| 134 |
+
if isinstance(options, VariableTracker):
|
| 135 |
+
return self.add_guards(options.guards)
|
| 136 |
+
assert isinstance(options, dict)
|
| 137 |
+
return self.add_guards(options.get("guards", set()))
|
| 138 |
+
|
| 139 |
+
def __str__(self):
|
| 140 |
+
return f"{self.__class__.__name__}()"
|
| 141 |
+
|
| 142 |
+
def __repr__(self):
|
| 143 |
+
return str(self)
|
| 144 |
+
|
| 145 |
+
def python_type(self):
|
| 146 |
+
raise NotImplementedError(f"{self} has no type")
|
| 147 |
+
|
| 148 |
+
def as_python_constant(self):
|
| 149 |
+
"""For constants"""
|
| 150 |
+
raise NotImplementedError(f"{self} is not a constant")
|
| 151 |
+
|
| 152 |
+
def is_python_constant(self):
|
| 153 |
+
try:
|
| 154 |
+
self.as_python_constant()
|
| 155 |
+
return True
|
| 156 |
+
except NotImplementedError:
|
| 157 |
+
return False
|
| 158 |
+
|
| 159 |
+
def as_specialized(self, tx):
|
| 160 |
+
"""
|
| 161 |
+
For specialized variables, return itself,
|
| 162 |
+
For unspecialized variables, convert to constant variable and return.
|
| 163 |
+
"""
|
| 164 |
+
return self
|
| 165 |
+
|
| 166 |
+
def can_make_guard(self):
|
| 167 |
+
try:
|
| 168 |
+
self.make_guard(None)
|
| 169 |
+
return True
|
| 170 |
+
except NotImplementedError:
|
| 171 |
+
return False
|
| 172 |
+
|
| 173 |
+
def make_guard(self, fn):
|
| 174 |
+
if self.source:
|
| 175 |
+
return self.source.make_guard(fn)
|
| 176 |
+
raise NotImplementedError()
|
| 177 |
+
|
| 178 |
+
def replace_guards(self, guards, *fns):
|
| 179 |
+
name = self.source.name()
|
| 180 |
+
new_guards = {g for g in (guards or []) if g.name != name}
|
| 181 |
+
new_guards.update(self.source.make_guard(fn) for fn in fns)
|
| 182 |
+
return new_guards
|
| 183 |
+
|
| 184 |
+
def const_getattr(self, tx, name: str) -> Any:
|
| 185 |
+
"""getattr(self, name) returning a python constant"""
|
| 186 |
+
raise NotImplementedError()
|
| 187 |
+
|
| 188 |
+
def var_getattr(self, tx, name: str) -> "VariableTracker":
|
| 189 |
+
"""getattr(self, name) returning a new variable"""
|
| 190 |
+
options = VariableTracker.propagate(self)
|
| 191 |
+
value = self.const_getattr(tx, name)
|
| 192 |
+
if not variables.ConstantVariable.is_literal(value):
|
| 193 |
+
raise NotImplementedError()
|
| 194 |
+
if self.source:
|
| 195 |
+
options["source"] = AttrSource(self.source, name)
|
| 196 |
+
return variables.ConstantVariable(value, **options)
|
| 197 |
+
|
| 198 |
+
def is_proxy(self):
|
| 199 |
+
try:
|
| 200 |
+
self.as_proxy()
|
| 201 |
+
return True
|
| 202 |
+
except NotImplementedError:
|
| 203 |
+
return False
|
| 204 |
+
|
| 205 |
+
def as_proxy(self):
|
| 206 |
+
raise NotImplementedError(str(self))
|
| 207 |
+
|
| 208 |
+
def reconstruct(self, codegen):
|
| 209 |
+
raise NotImplementedError()
|
| 210 |
+
|
| 211 |
+
def unpack_var_sequence(self, tx):
|
| 212 |
+
raise NotImplementedError()
|
| 213 |
+
|
| 214 |
+
def has_unpack_var_sequence(self, tx):
|
| 215 |
+
try:
|
| 216 |
+
self.unpack_var_sequence(tx)
|
| 217 |
+
return True
|
| 218 |
+
except NotImplementedError:
|
| 219 |
+
return False
|
| 220 |
+
|
| 221 |
+
def num_parameters(self):
|
| 222 |
+
unimplemented(f"num_parameters: {self}")
|
| 223 |
+
|
| 224 |
+
def call_hasattr(self, tx, name: str) -> "VariableTracker":
|
| 225 |
+
unimplemented(f"hasattr: {repr(self)}")
|
| 226 |
+
|
| 227 |
+
def call_function(
|
| 228 |
+
self, tx, args: "List[VariableTracker]", kwargs: "Dict[str, VariableTracker]"
|
| 229 |
+
) -> "VariableTracker":
|
| 230 |
+
unimplemented(f"call_function {self} {args} {kwargs}")
|
| 231 |
+
|
| 232 |
+
def call_method(
|
| 233 |
+
self,
|
| 234 |
+
tx,
|
| 235 |
+
name,
|
| 236 |
+
args: "List[VariableTracker]",
|
| 237 |
+
kwargs: "Dict[str, VariableTracker]",
|
| 238 |
+
) -> "VariableTracker":
|
| 239 |
+
if name == "__len__" and self.has_unpack_var_sequence(tx):
|
| 240 |
+
assert not (args or kwargs)
|
| 241 |
+
return variables.ConstantVariable(
|
| 242 |
+
len(self.unpack_var_sequence(tx)), **VariableTracker.propagate(self)
|
| 243 |
+
)
|
| 244 |
+
elif (
|
| 245 |
+
name == "__getattr__"
|
| 246 |
+
and len(args) == 1
|
| 247 |
+
and args[0].is_python_constant()
|
| 248 |
+
and not kwargs
|
| 249 |
+
):
|
| 250 |
+
return self.var_getattr(tx, args[0].as_python_constant()).add_options(
|
| 251 |
+
self, args[0]
|
| 252 |
+
)
|
| 253 |
+
raise unimplemented(f"call_method {self} {name} {args} {kwargs}")
|
| 254 |
+
|
| 255 |
+
def __init__(
|
| 256 |
+
self,
|
| 257 |
+
guards: Optional[Set] = None,
|
| 258 |
+
source: Source = None,
|
| 259 |
+
mutable_local: MutableLocal = None,
|
| 260 |
+
recursively_contains: Optional[Set] = None,
|
| 261 |
+
):
|
| 262 |
+
super().__init__()
|
| 263 |
+
self.guards = guards or set()
|
| 264 |
+
self.source = source
|
| 265 |
+
self.mutable_local = mutable_local
|
| 266 |
+
self.recursively_contains = (
|
| 267 |
+
recursively_contains # provides hint to replace_all when replacing vars
|
| 268 |
+
)
|
| 269 |
+
|
| 270 |
+
def __post_init__(self, *args, **kwargs):
|
| 271 |
+
if self.recursively_contains is None:
|
| 272 |
+
self.recursively_contains = set()
|
| 273 |
+
|
| 274 |
+
def aggregate_mutables(var):
|
| 275 |
+
self.recursively_contains.update(var.recursively_contains)
|
| 276 |
+
if var.mutable_local is not None:
|
| 277 |
+
self.recursively_contains.add(var.mutable_local)
|
| 278 |
+
|
| 279 |
+
return var
|
| 280 |
+
|
| 281 |
+
VariableTracker.apply(
|
| 282 |
+
aggregate_mutables, self, skip_fn=lambda var: var is not self
|
| 283 |
+
)
|
| 284 |
+
|
| 285 |
+
assert None not in self.recursively_contains
|
| 286 |
+
|
| 287 |
+
|
| 288 |
+
def typestr(*objs):
|
| 289 |
+
if len(objs) == 1:
|
| 290 |
+
(obj,) = objs
|
| 291 |
+
if isinstance(obj, VariableTracker):
|
| 292 |
+
return str(obj)
|
| 293 |
+
else:
|
| 294 |
+
return type(obj).__name__
|
| 295 |
+
else:
|
| 296 |
+
return " ".join(map(typestr, objs))
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/variables/dicts.py
ADDED
|
@@ -0,0 +1,440 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import dataclasses
|
| 3 |
+
import functools
|
| 4 |
+
import inspect
|
| 5 |
+
from typing import Dict, List
|
| 6 |
+
|
| 7 |
+
from .. import variables
|
| 8 |
+
from ..bytecode_transformation import create_instruction
|
| 9 |
+
from ..eval_frame import skip_code
|
| 10 |
+
from ..exc import unimplemented
|
| 11 |
+
from ..source import AttrSource, GlobalWeakRefSource
|
| 12 |
+
from ..utils import global_key_name, istensor
|
| 13 |
+
from .base import MutableLocal, VariableTracker
|
| 14 |
+
from .constant import ConstantVariable
|
| 15 |
+
from .tensor import TensorVariable
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class ConstDictVariable(VariableTracker):
|
| 19 |
+
def __init__(self, items, user_cls, recursively_contains=None, **kwargs):
|
| 20 |
+
super().__init__(recursively_contains=recursively_contains, **kwargs)
|
| 21 |
+
|
| 22 |
+
self.guards.update(VariableTracker.propagate(items.values())["guards"])
|
| 23 |
+
self.items = items
|
| 24 |
+
self.user_cls = user_cls
|
| 25 |
+
|
| 26 |
+
def as_proxy(self):
|
| 27 |
+
return {k: v.as_proxy() for k, v in self.items.items()}
|
| 28 |
+
|
| 29 |
+
def as_python_constant(self):
|
| 30 |
+
return {k: v.as_python_constant() for k, v in self.items.items()}
|
| 31 |
+
|
| 32 |
+
def python_type(self):
|
| 33 |
+
return self.user_cls
|
| 34 |
+
|
| 35 |
+
def reconstruct(self, codegen):
|
| 36 |
+
for key, value in self.items.items():
|
| 37 |
+
if istensor(key):
|
| 38 |
+
codegen.extend_output(
|
| 39 |
+
[
|
| 40 |
+
codegen.create_load_global(global_key_name(key), add=True),
|
| 41 |
+
create_instruction("CALL_FUNCTION", 0),
|
| 42 |
+
]
|
| 43 |
+
)
|
| 44 |
+
else:
|
| 45 |
+
codegen.append_output(codegen.create_load_const(key))
|
| 46 |
+
codegen(self.items[key])
|
| 47 |
+
|
| 48 |
+
return [create_instruction("BUILD_MAP", len(self.items))]
|
| 49 |
+
|
| 50 |
+
def getitem_const(self, arg: VariableTracker):
|
| 51 |
+
return self.items[ConstDictVariable.get_key(arg)].add_options(self, arg)
|
| 52 |
+
|
| 53 |
+
def call_method(
|
| 54 |
+
self,
|
| 55 |
+
tx,
|
| 56 |
+
name,
|
| 57 |
+
args: "List[VariableTracker]",
|
| 58 |
+
kwargs: "Dict[str, VariableTracker]",
|
| 59 |
+
) -> "VariableTracker":
|
| 60 |
+
from . import ConstantVariable, TupleVariable
|
| 61 |
+
|
| 62 |
+
options = VariableTracker.propagate(self, args, kwargs.values())
|
| 63 |
+
val = self.items
|
| 64 |
+
|
| 65 |
+
if name == "__getitem__":
|
| 66 |
+
return self.getitem_const(args[0])
|
| 67 |
+
|
| 68 |
+
elif name == "items":
|
| 69 |
+
assert not (args or kwargs)
|
| 70 |
+
return TupleVariable(
|
| 71 |
+
[
|
| 72 |
+
TupleVariable(
|
| 73 |
+
[
|
| 74 |
+
ConstDictVariable._key_to_var(
|
| 75 |
+
tx,
|
| 76 |
+
k,
|
| 77 |
+
**options,
|
| 78 |
+
),
|
| 79 |
+
v,
|
| 80 |
+
],
|
| 81 |
+
**options,
|
| 82 |
+
)
|
| 83 |
+
for k, v in val.items()
|
| 84 |
+
],
|
| 85 |
+
**options,
|
| 86 |
+
)
|
| 87 |
+
elif name == "keys":
|
| 88 |
+
assert not (args or kwargs)
|
| 89 |
+
return TupleVariable(
|
| 90 |
+
[
|
| 91 |
+
ConstDictVariable._key_to_var(
|
| 92 |
+
tx,
|
| 93 |
+
k,
|
| 94 |
+
**options,
|
| 95 |
+
)
|
| 96 |
+
for k in val.keys()
|
| 97 |
+
],
|
| 98 |
+
**options,
|
| 99 |
+
)
|
| 100 |
+
|
| 101 |
+
elif name == "values":
|
| 102 |
+
assert not (args or kwargs)
|
| 103 |
+
return TupleVariable(list(val.values()), **options)
|
| 104 |
+
elif name == "__len__":
|
| 105 |
+
assert not (args or kwargs)
|
| 106 |
+
return ConstantVariable(len(self.items), **options)
|
| 107 |
+
elif (
|
| 108 |
+
name == "__setitem__"
|
| 109 |
+
and args
|
| 110 |
+
and ConstDictVariable.is_valid_key(args[0])
|
| 111 |
+
and self.mutable_local
|
| 112 |
+
):
|
| 113 |
+
assert not kwargs and len(args) == 2
|
| 114 |
+
k = ConstDictVariable.get_key(args[0])
|
| 115 |
+
|
| 116 |
+
if istensor(k):
|
| 117 |
+
tx.store_dict_key(global_key_name(k), k)
|
| 118 |
+
newval = collections.OrderedDict(val)
|
| 119 |
+
newval[k] = args[1]
|
| 120 |
+
|
| 121 |
+
new_rec_contains = self.recursively_contains.union(
|
| 122 |
+
args[1].recursively_contains
|
| 123 |
+
)
|
| 124 |
+
if args[1].mutable_local is not None:
|
| 125 |
+
new_rec_contains.add(args[1].mutable_local)
|
| 126 |
+
|
| 127 |
+
return tx.replace_all(
|
| 128 |
+
self,
|
| 129 |
+
self.modifed(newval, new_rec_contains, **options),
|
| 130 |
+
)
|
| 131 |
+
elif (
|
| 132 |
+
name in ("pop", "get")
|
| 133 |
+
and args
|
| 134 |
+
and ConstDictVariable.is_valid_key(args[0])
|
| 135 |
+
and ConstDictVariable.get_key(args[0]) not in self.items
|
| 136 |
+
and len(args) == 2
|
| 137 |
+
):
|
| 138 |
+
# missing item, return the default value
|
| 139 |
+
return args[1].add_options(options)
|
| 140 |
+
elif (
|
| 141 |
+
name == "pop"
|
| 142 |
+
and args
|
| 143 |
+
and ConstDictVariable.is_valid_key(args[0])
|
| 144 |
+
and self.mutable_local
|
| 145 |
+
):
|
| 146 |
+
newval = collections.OrderedDict(val)
|
| 147 |
+
result = newval.pop(ConstDictVariable.get_key(args[0]))
|
| 148 |
+
tx.replace_all(self, self.modifed(newval, None, **options))
|
| 149 |
+
return result.add_options(options)
|
| 150 |
+
elif (
|
| 151 |
+
name == "update"
|
| 152 |
+
and args
|
| 153 |
+
and isinstance(args[0], ConstDictVariable)
|
| 154 |
+
and self.mutable_local
|
| 155 |
+
):
|
| 156 |
+
newval = collections.OrderedDict(val)
|
| 157 |
+
newval.update(args[0].items)
|
| 158 |
+
new_rec_contains = self.recursively_contains.union(
|
| 159 |
+
args[0].recursively_contains
|
| 160 |
+
)
|
| 161 |
+
result = self.modifed(
|
| 162 |
+
newval, recursively_contains=new_rec_contains, **options
|
| 163 |
+
)
|
| 164 |
+
return tx.replace_all(self, result)
|
| 165 |
+
elif (
|
| 166 |
+
name in ("get", "__getattr__")
|
| 167 |
+
and args
|
| 168 |
+
and ConstDictVariable.is_valid_key(args[0])
|
| 169 |
+
and ConstDictVariable.get_key(args[0]) in self.items
|
| 170 |
+
):
|
| 171 |
+
result = self.items[ConstDictVariable.get_key(args[0])]
|
| 172 |
+
return result.add_options(options)
|
| 173 |
+
elif (
|
| 174 |
+
name == "__contains__" and args and ConstDictVariable.is_valid_key(args[0])
|
| 175 |
+
):
|
| 176 |
+
return ConstantVariable(
|
| 177 |
+
ConstDictVariable.get_key(args[0]) in self.items, **options
|
| 178 |
+
)
|
| 179 |
+
else:
|
| 180 |
+
return super().call_method(tx, name, args, kwargs)
|
| 181 |
+
|
| 182 |
+
def modifed(self, items, recursively_contains, **options):
|
| 183 |
+
"""a copy of self with different items"""
|
| 184 |
+
return self.clone(
|
| 185 |
+
items=items, recursively_contains=recursively_contains, **options
|
| 186 |
+
)
|
| 187 |
+
|
| 188 |
+
def unpack_var_sequence(self, tx):
|
| 189 |
+
options = VariableTracker.propagate([self])
|
| 190 |
+
val = self.items
|
| 191 |
+
result = [ConstDictVariable._key_to_var(tx, k, **options) for k in val.keys()]
|
| 192 |
+
return result
|
| 193 |
+
|
| 194 |
+
@classmethod
|
| 195 |
+
def get_key(cls, arg: VariableTracker):
|
| 196 |
+
if isinstance(arg, TensorVariable) and arg.specialized_value is not None:
|
| 197 |
+
return arg.specialized_value
|
| 198 |
+
else:
|
| 199 |
+
return arg.as_python_constant()
|
| 200 |
+
|
| 201 |
+
@classmethod
|
| 202 |
+
def is_valid_key(cls, key):
|
| 203 |
+
return (
|
| 204 |
+
key.is_python_constant()
|
| 205 |
+
or isinstance(key, TensorVariable)
|
| 206 |
+
and key.specialized_value is not None
|
| 207 |
+
)
|
| 208 |
+
|
| 209 |
+
@classmethod
|
| 210 |
+
def _key_to_var(cls, tx, key, **options):
|
| 211 |
+
from .builder import VariableBuilder
|
| 212 |
+
|
| 213 |
+
if istensor(key):
|
| 214 |
+
return VariableBuilder(tx, GlobalWeakRefSource(global_key_name(key)))(key)
|
| 215 |
+
else:
|
| 216 |
+
assert ConstantVariable.is_literal(key)
|
| 217 |
+
return ConstantVariable(key, **options)
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
class DefaultDictVariable(ConstDictVariable):
|
| 221 |
+
def __init__(self, items, user_cls, default_factory=None, **kwargs):
|
| 222 |
+
super().__init__(items, user_cls, **kwargs)
|
| 223 |
+
assert user_cls is collections.defaultdict
|
| 224 |
+
self.default_factory = default_factory
|
| 225 |
+
|
| 226 |
+
def call_method(
|
| 227 |
+
self,
|
| 228 |
+
tx,
|
| 229 |
+
name,
|
| 230 |
+
args: "List[VariableTracker]",
|
| 231 |
+
kwargs: "Dict[str, VariableTracker]",
|
| 232 |
+
) -> "VariableTracker":
|
| 233 |
+
from . import ListVariable, TupleVariable
|
| 234 |
+
|
| 235 |
+
options = VariableTracker.propagate(self, args, kwargs.values())
|
| 236 |
+
|
| 237 |
+
if name == "__getitem__":
|
| 238 |
+
k = ConstDictVariable.get_key(args[0])
|
| 239 |
+
|
| 240 |
+
if k in self.items:
|
| 241 |
+
return self.getitem_const(args[0])
|
| 242 |
+
else:
|
| 243 |
+
if self.default_factory is None:
|
| 244 |
+
raise KeyError(f"{k}")
|
| 245 |
+
else:
|
| 246 |
+
if istensor(k):
|
| 247 |
+
tx.store_dict_key(global_key_name(k), k)
|
| 248 |
+
new_val = collections.OrderedDict(self.items)
|
| 249 |
+
if self.default_factory is list:
|
| 250 |
+
default_var = ListVariable([], mutable_local=MutableLocal())
|
| 251 |
+
elif self.default_factory is tuple:
|
| 252 |
+
default_var = TupleVariable([], mutable_local=MutableLocal())
|
| 253 |
+
elif self.default_factory is dict:
|
| 254 |
+
default_var = ConstDictVariable(
|
| 255 |
+
{}, dict, mutable_local=MutableLocal()
|
| 256 |
+
)
|
| 257 |
+
else:
|
| 258 |
+
unimplemented(
|
| 259 |
+
f"defaultdict with default_factory = {self.default_factory}"
|
| 260 |
+
)
|
| 261 |
+
new_val[k] = default_var
|
| 262 |
+
new_rec_contains = self.recursively_contains.union(
|
| 263 |
+
default_var.recursively_contains
|
| 264 |
+
)
|
| 265 |
+
if default_var.mutable_local is not None:
|
| 266 |
+
new_rec_contains.add(default_var.mutable_local)
|
| 267 |
+
tx.replace_all(
|
| 268 |
+
self, self.modifed(new_val, new_rec_contains, **options)
|
| 269 |
+
)
|
| 270 |
+
return default_var
|
| 271 |
+
else:
|
| 272 |
+
return super().call_method(tx, name, args, kwargs)
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
class DataClassVariable(ConstDictVariable):
|
| 276 |
+
"""
|
| 277 |
+
This is a bit of a hack to deal with
|
| 278 |
+
transformers.file_utils.ModelOutput() from huggingface.
|
| 279 |
+
|
| 280 |
+
ModelOutput causes trouble because it a a mix of a dataclass and a
|
| 281 |
+
OrderedDict and it calls super() methods implemented in C.
|
| 282 |
+
"""
|
| 283 |
+
|
| 284 |
+
# ModelOutput() excludes None, though generic datclasses don't
|
| 285 |
+
include_none = False
|
| 286 |
+
|
| 287 |
+
@staticmethod
|
| 288 |
+
@functools.lru_cache(None)
|
| 289 |
+
def _patch_once():
|
| 290 |
+
from transformers.file_utils import ModelOutput
|
| 291 |
+
|
| 292 |
+
for obj in ModelOutput.__dict__.values():
|
| 293 |
+
if callable(obj):
|
| 294 |
+
skip_code(obj.__code__)
|
| 295 |
+
|
| 296 |
+
@staticmethod
|
| 297 |
+
def is_matching_cls(cls):
|
| 298 |
+
try:
|
| 299 |
+
from transformers.file_utils import ModelOutput
|
| 300 |
+
|
| 301 |
+
return issubclass(cls, ModelOutput)
|
| 302 |
+
except ImportError:
|
| 303 |
+
return False
|
| 304 |
+
|
| 305 |
+
@classmethod
|
| 306 |
+
def is_matching_object(cls, obj):
|
| 307 |
+
return cls.is_matching_cls(type(obj))
|
| 308 |
+
|
| 309 |
+
@classmethod
|
| 310 |
+
def create(cls, user_cls, args, kwargs, options):
|
| 311 |
+
DataClassVariable._patch_once()
|
| 312 |
+
|
| 313 |
+
skip_code(user_cls.__init__.__code__)
|
| 314 |
+
keys = [f.name for f in dataclasses.fields(user_cls)]
|
| 315 |
+
bound = inspect.signature(user_cls).bind(*args, **kwargs)
|
| 316 |
+
bound.apply_defaults()
|
| 317 |
+
assert set(bound.arguments.keys()) == set(keys)
|
| 318 |
+
items = collections.OrderedDict()
|
| 319 |
+
for key in keys:
|
| 320 |
+
val = bound.arguments[key]
|
| 321 |
+
if isinstance(val, VariableTracker):
|
| 322 |
+
items[key] = val
|
| 323 |
+
else:
|
| 324 |
+
if cls.include_none:
|
| 325 |
+
assert variables.ConstantVariable.is_literal(val)
|
| 326 |
+
items[key] = variables.ConstantVariable(val)
|
| 327 |
+
else:
|
| 328 |
+
assert val is None, f"unexpected {val}"
|
| 329 |
+
|
| 330 |
+
if len(items) == 1 and not isinstance(items[keys[0]], variables.TensorVariable):
|
| 331 |
+
unimplemented("DataClassVariable iterator constructor")
|
| 332 |
+
# TODO(jansel): implement unpacking logic in ModelOutput.__post_init__
|
| 333 |
+
|
| 334 |
+
return cls(items, user_cls, **options)
|
| 335 |
+
|
| 336 |
+
@classmethod
|
| 337 |
+
def wrap(cls, builder, obj):
|
| 338 |
+
user_cls = type(obj)
|
| 339 |
+
keys = [f.name for f in dataclasses.fields(user_cls)]
|
| 340 |
+
|
| 341 |
+
excluded = []
|
| 342 |
+
items = collections.OrderedDict()
|
| 343 |
+
for key in keys:
|
| 344 |
+
# __init__ function of a dataclass might not have yet defined the key
|
| 345 |
+
if hasattr(obj, key):
|
| 346 |
+
val = getattr(obj, key)
|
| 347 |
+
var = builder.__class__(
|
| 348 |
+
tx=builder.tx, source=AttrSource(builder.source, key)
|
| 349 |
+
)(val)
|
| 350 |
+
if val is not None or cls.include_none:
|
| 351 |
+
items[key] = var
|
| 352 |
+
else:
|
| 353 |
+
excluded.append(var)
|
| 354 |
+
return cls(
|
| 355 |
+
items, user_cls, **VariableTracker.propagate(excluded, items.values())
|
| 356 |
+
)
|
| 357 |
+
|
| 358 |
+
def __init__(self, items, user_cls, **options):
|
| 359 |
+
super().__init__(items, user_cls, **options)
|
| 360 |
+
assert self.is_matching_cls(user_cls)
|
| 361 |
+
|
| 362 |
+
def as_proxy(self):
|
| 363 |
+
raise NotImplementedError()
|
| 364 |
+
|
| 365 |
+
def reconstruct(self, codegen):
|
| 366 |
+
codegen.extend_output([codegen._create_load_const(self.user_cls)])
|
| 367 |
+
keys = tuple(self.items.keys())
|
| 368 |
+
for key in keys:
|
| 369 |
+
codegen(self.items[key])
|
| 370 |
+
return [
|
| 371 |
+
codegen.create_load_const(keys),
|
| 372 |
+
create_instruction("CALL_FUNCTION_KW", len(keys)),
|
| 373 |
+
]
|
| 374 |
+
|
| 375 |
+
def call_method(
|
| 376 |
+
self,
|
| 377 |
+
tx,
|
| 378 |
+
name,
|
| 379 |
+
args: "List[VariableTracker]",
|
| 380 |
+
kwargs: "Dict[str, VariableTracker]",
|
| 381 |
+
) -> "VariableTracker":
|
| 382 |
+
options = VariableTracker.propagate(self, args, kwargs.values())
|
| 383 |
+
if name == "__getitem__":
|
| 384 |
+
assert not kwargs and len(args) == 1
|
| 385 |
+
index = args[0].as_python_constant()
|
| 386 |
+
if isinstance(index, str):
|
| 387 |
+
return self.items[index].add_options(options)
|
| 388 |
+
else:
|
| 389 |
+
return (
|
| 390 |
+
self.call_method(tx, "to_tuple", [], {})
|
| 391 |
+
.call_method(tx, "__getitem__", args, kwargs)
|
| 392 |
+
.add_options(options)
|
| 393 |
+
)
|
| 394 |
+
elif name == "to_tuple":
|
| 395 |
+
assert not (args or kwargs)
|
| 396 |
+
return variables.TupleVariable(list(self.items.values()), **options)
|
| 397 |
+
elif name == "__setattr__":
|
| 398 |
+
name = "__setitem__"
|
| 399 |
+
return super().call_method(tx, name, args, kwargs)
|
| 400 |
+
|
| 401 |
+
def var_getattr(self, tx, name: str) -> "VariableTracker":
|
| 402 |
+
if name in self.items:
|
| 403 |
+
return self.call_method(
|
| 404 |
+
tx, "__getitem__", [variables.ConstantVariable(name)], {}
|
| 405 |
+
)
|
| 406 |
+
elif not self.include_none:
|
| 407 |
+
defaults = {f.name: f.default for f in dataclasses.fields(self.user_cls)}
|
| 408 |
+
if name in defaults:
|
| 409 |
+
assert variables.ConstantVariable.is_literal(defaults[name])
|
| 410 |
+
return variables.ConstantVariable(defaults[name]).add_options(self)
|
| 411 |
+
super().var_getattr(tx, name)
|
| 412 |
+
|
| 413 |
+
|
| 414 |
+
class HFPretrainedConfigVariable(VariableTracker):
|
| 415 |
+
"""
|
| 416 |
+
Hack for HuggingFace PretrainedConfig
|
| 417 |
+
"""
|
| 418 |
+
|
| 419 |
+
@staticmethod
|
| 420 |
+
def is_matching_cls(cls):
|
| 421 |
+
try:
|
| 422 |
+
from transformers.configuration_utils import PretrainedConfig
|
| 423 |
+
|
| 424 |
+
return issubclass(cls, PretrainedConfig)
|
| 425 |
+
except ImportError:
|
| 426 |
+
return False
|
| 427 |
+
|
| 428 |
+
@classmethod
|
| 429 |
+
def is_matching_object(cls, obj):
|
| 430 |
+
return cls.is_matching_cls(type(obj))
|
| 431 |
+
|
| 432 |
+
def __init__(self, obj, **kwargs):
|
| 433 |
+
super().__init__(**kwargs)
|
| 434 |
+
self.obj = obj
|
| 435 |
+
assert self.is_matching_cls(type(obj))
|
| 436 |
+
|
| 437 |
+
def var_getattr(self, tx, name: str) -> "VariableTracker":
|
| 438 |
+
from . import ConstantVariable
|
| 439 |
+
|
| 440 |
+
return ConstantVariable(getattr(self.obj, name))
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/variables/functions.py
ADDED
|
@@ -0,0 +1,476 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
import enum
|
| 3 |
+
import functools
|
| 4 |
+
import inspect
|
| 5 |
+
import itertools
|
| 6 |
+
import types
|
| 7 |
+
from typing import Dict, List
|
| 8 |
+
|
| 9 |
+
import torch
|
| 10 |
+
|
| 11 |
+
from .. import variables
|
| 12 |
+
from ..bytecode_transformation import create_instruction
|
| 13 |
+
from ..exc import unimplemented
|
| 14 |
+
from ..source import AttrSource, ConstantSource, DefaultsSource, GetItemSource
|
| 15 |
+
from ..utils import istensor, istype, make_cell
|
| 16 |
+
from .base import typestr, VariableTracker
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def wrap_bound_arg(tx, val, options, source=None):
|
| 20 |
+
# Source propagation is best effort since not every object we encounter has a source to begin with.
|
| 21 |
+
assert (
|
| 22 |
+
"source" not in options
|
| 23 |
+
), "Source needs to be separate from options due to recursive calls for lists/dicts"
|
| 24 |
+
|
| 25 |
+
if isinstance(val, dict):
|
| 26 |
+
return variables.ConstDictVariable(
|
| 27 |
+
{
|
| 28 |
+
k: wrap_bound_arg(tx, v, options, source=getattr(v, "source", None))
|
| 29 |
+
for k, v in val.items()
|
| 30 |
+
},
|
| 31 |
+
dict,
|
| 32 |
+
**options,
|
| 33 |
+
)
|
| 34 |
+
elif isinstance(val, (tuple, list)):
|
| 35 |
+
cls = variables.BaseListVariable.cls_for(type(val))
|
| 36 |
+
return cls(
|
| 37 |
+
[
|
| 38 |
+
wrap_bound_arg(tx, x, options, source=getattr(x, "source", None))
|
| 39 |
+
for x in val
|
| 40 |
+
],
|
| 41 |
+
**options,
|
| 42 |
+
)
|
| 43 |
+
|
| 44 |
+
if variables.ConstantVariable.is_literal(val) or istype(
|
| 45 |
+
val, (torch.Size, torch.device, torch.dtype)
|
| 46 |
+
):
|
| 47 |
+
return variables.ConstantVariable(val, **options)
|
| 48 |
+
elif isinstance(val, types.FunctionType):
|
| 49 |
+
return variables.UserFunctionVariable(val, source=source, **options)
|
| 50 |
+
elif isinstance(val, enum.Enum):
|
| 51 |
+
return variables.EnumVariable(val, source=source, **options)
|
| 52 |
+
elif isinstance(val, (type, abc.ABCMeta)):
|
| 53 |
+
return variables.UserDefinedClassVariable(val, source=source, **options)
|
| 54 |
+
elif istensor(val):
|
| 55 |
+
from torch._dynamo.variables.builder import VariableBuilder
|
| 56 |
+
|
| 57 |
+
return VariableBuilder(tx, source=source, **options)(val)
|
| 58 |
+
else:
|
| 59 |
+
assert isinstance(val, VariableTracker), typestr(val)
|
| 60 |
+
return val
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def wrap_args_kwargs(tx, result, options):
|
| 64 |
+
for k, v in list(result.items()):
|
| 65 |
+
if isinstance(v, (tuple, dict)):
|
| 66 |
+
# args/kwargs
|
| 67 |
+
result[k] = wrap_bound_arg(tx, v, options)
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def init_cellvars(parent, result, code):
|
| 71 |
+
closure_cells = dict()
|
| 72 |
+
side_effects = parent.output.side_effects
|
| 73 |
+
|
| 74 |
+
for name in code.co_cellvars:
|
| 75 |
+
closure_cells[name] = side_effects.track_cell_new()
|
| 76 |
+
if name in result:
|
| 77 |
+
side_effects.store_cell(closure_cells[name], result.pop(name))
|
| 78 |
+
|
| 79 |
+
return closure_cells
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
class BaseUserFunctionVariable(VariableTracker):
|
| 83 |
+
def get_filename(self):
|
| 84 |
+
return self.get_code().co_filename
|
| 85 |
+
|
| 86 |
+
def get_name(self):
|
| 87 |
+
return self.get_code().co_name
|
| 88 |
+
|
| 89 |
+
def call_function(
|
| 90 |
+
self, tx, args: "List[VariableTracker]", kwargs: "Dict[str, VariableTracker]"
|
| 91 |
+
) -> "VariableTracker":
|
| 92 |
+
return tx.inline_user_function_return(
|
| 93 |
+
self, list(self.self_args()) + list(args), kwargs
|
| 94 |
+
)
|
| 95 |
+
|
| 96 |
+
def num_parameters(self):
|
| 97 |
+
return len(inspect.signature(self.get_function()).parameters)
|
| 98 |
+
|
| 99 |
+
def closure_vars(self, tx):
|
| 100 |
+
return {}
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
class UserFunctionVariable(BaseUserFunctionVariable):
|
| 104 |
+
"""Some unsupported user-defined global function"""
|
| 105 |
+
|
| 106 |
+
def __init__(self, fn, is_constant=False, **kwargs):
|
| 107 |
+
super().__init__(**kwargs)
|
| 108 |
+
if getattr(fn, "_dynamo_marked_constant", False):
|
| 109 |
+
# This method should be treated as a constant for the purposes of compilation
|
| 110 |
+
self.is_constant = True
|
| 111 |
+
else:
|
| 112 |
+
self.is_constant = False
|
| 113 |
+
|
| 114 |
+
assert isinstance(
|
| 115 |
+
fn, (types.FunctionType, torch.jit.ScriptFunction)
|
| 116 |
+
), f"expected FunctionType found {typestr(fn)} {fn}"
|
| 117 |
+
# unpack @torch._dynamo.optimize()(fn) wrapped function
|
| 118 |
+
fn = inspect.getattr_static(fn, "_torchdynamo_inline", fn)
|
| 119 |
+
# unpack torch.jit.script_if_tracing
|
| 120 |
+
if inspect.getattr_static(fn, "__script_if_tracing_wrapper", False):
|
| 121 |
+
fn = inspect.getattr_static(fn, "__original_fn", fn)
|
| 122 |
+
self.fn: types.FunctionType = fn
|
| 123 |
+
|
| 124 |
+
def self_args(self):
|
| 125 |
+
return []
|
| 126 |
+
|
| 127 |
+
def get_function(self):
|
| 128 |
+
return self.fn
|
| 129 |
+
|
| 130 |
+
def get_code(self):
|
| 131 |
+
return self.fn.__code__
|
| 132 |
+
|
| 133 |
+
def python_type(self):
|
| 134 |
+
return types.FunctionType
|
| 135 |
+
|
| 136 |
+
def has_self(self):
|
| 137 |
+
return getattr(self.fn, "__self__", None) is not None
|
| 138 |
+
|
| 139 |
+
def get_globals(self):
|
| 140 |
+
return self.fn.__globals__
|
| 141 |
+
|
| 142 |
+
def bind_args(self, parent, args, kwargs):
|
| 143 |
+
assert not self.is_constant
|
| 144 |
+
options = VariableTracker.propagate([self])
|
| 145 |
+
tx = parent.output.root_tx
|
| 146 |
+
wrap = functools.partial(wrap_bound_arg, tx=tx, options=options)
|
| 147 |
+
|
| 148 |
+
fn: types.FunctionType = self.fn
|
| 149 |
+
defaults = fn.__defaults__ or []
|
| 150 |
+
defaults_sources = [
|
| 151 |
+
None if self.source is None else DefaultsSource(self.source, idx)
|
| 152 |
+
for idx, _ in enumerate(defaults)
|
| 153 |
+
]
|
| 154 |
+
fake_func = types.FunctionType(
|
| 155 |
+
fn.__code__,
|
| 156 |
+
fn.__globals__,
|
| 157 |
+
fn.__name__,
|
| 158 |
+
tuple(
|
| 159 |
+
[
|
| 160 |
+
wrap(val=arg, source=source)
|
| 161 |
+
for arg, source in zip(defaults, defaults_sources)
|
| 162 |
+
]
|
| 163 |
+
),
|
| 164 |
+
fn.__closure__,
|
| 165 |
+
)
|
| 166 |
+
if fn.__kwdefaults__:
|
| 167 |
+
kwdefaults_sources = {
|
| 168 |
+
k: None
|
| 169 |
+
if self.source is None
|
| 170 |
+
else DefaultsSource(self.source, k, is_kw=True)
|
| 171 |
+
for k in fn.__kwdefaults__
|
| 172 |
+
}
|
| 173 |
+
fake_func.__kwdefaults__ = {
|
| 174 |
+
k: wrap(val=v, source=kwdefaults_sources[k])
|
| 175 |
+
for k, v in fn.__kwdefaults__.items()
|
| 176 |
+
}
|
| 177 |
+
|
| 178 |
+
bound = inspect.signature(fake_func).bind(*args, **kwargs)
|
| 179 |
+
bound.apply_defaults()
|
| 180 |
+
result = dict(bound.arguments.items())
|
| 181 |
+
|
| 182 |
+
wrap_args_kwargs(tx, result, options)
|
| 183 |
+
closure_cells = init_cellvars(parent, result, fn.__code__)
|
| 184 |
+
closure = self.fn.__closure__ or ()
|
| 185 |
+
assert len(closure) == len(self.fn.__code__.co_freevars)
|
| 186 |
+
for idx, name, cell in zip(
|
| 187 |
+
itertools.count(), self.fn.__code__.co_freevars, closure
|
| 188 |
+
):
|
| 189 |
+
if name == "__class__":
|
| 190 |
+
source = AttrSource(self.source, "__class__") if self.source else None
|
| 191 |
+
result[name] = variables.UserDefinedClassVariable(
|
| 192 |
+
cell.cell_contents,
|
| 193 |
+
source=source,
|
| 194 |
+
)
|
| 195 |
+
else:
|
| 196 |
+
var = tx.match_nested_cell(name, cell)
|
| 197 |
+
if var is not None:
|
| 198 |
+
# optimization for cleaner codegen
|
| 199 |
+
result[name] = var
|
| 200 |
+
elif self.source:
|
| 201 |
+
from .builder import VariableBuilder
|
| 202 |
+
|
| 203 |
+
side_effects = parent.output.side_effects
|
| 204 |
+
if cell in side_effects:
|
| 205 |
+
out = side_effects[cell]
|
| 206 |
+
else:
|
| 207 |
+
closure_cell = GetItemSource(
|
| 208 |
+
AttrSource(self.source, "__closure__"), idx
|
| 209 |
+
)
|
| 210 |
+
closure_cell_contents = AttrSource(
|
| 211 |
+
closure_cell, "cell_contents"
|
| 212 |
+
)
|
| 213 |
+
contents_var = VariableBuilder(parent, closure_cell_contents)(
|
| 214 |
+
cell.cell_contents
|
| 215 |
+
)
|
| 216 |
+
|
| 217 |
+
if (
|
| 218 |
+
closure_cell_contents.name()
|
| 219 |
+
not in tx.mutated_closure_cell_contents
|
| 220 |
+
):
|
| 221 |
+
# Optimistically don't allocate the cell, to
|
| 222 |
+
# reduce the number of side effects. This is
|
| 223 |
+
# important for cond, as without it, any accesses
|
| 224 |
+
# to closures create side effects and cond doesn't
|
| 225 |
+
# support side effects. If we're wrong and this
|
| 226 |
+
# closure cell gets written to, we will restart
|
| 227 |
+
# the analysis with this cell's name in the
|
| 228 |
+
# mutated list here
|
| 229 |
+
result[name] = contents_var
|
| 230 |
+
continue
|
| 231 |
+
|
| 232 |
+
# cells are written to with "cell_contents",
|
| 233 |
+
# so the source should just be the closure_cell, not its contents
|
| 234 |
+
out = side_effects.track_cell_existing(closure_cell, cell)
|
| 235 |
+
side_effects.store_cell(
|
| 236 |
+
out,
|
| 237 |
+
contents_var,
|
| 238 |
+
)
|
| 239 |
+
|
| 240 |
+
result[name] = out
|
| 241 |
+
|
| 242 |
+
else:
|
| 243 |
+
unimplemented("inline with __closure__")
|
| 244 |
+
|
| 245 |
+
return result, closure_cells
|
| 246 |
+
|
| 247 |
+
def export_freevars(self, parent, child):
|
| 248 |
+
pass
|
| 249 |
+
|
| 250 |
+
def call_function(
|
| 251 |
+
self, tx, args: "List[VariableTracker]", kwargs: "Dict[str, VariableTracker]"
|
| 252 |
+
) -> "VariableTracker":
|
| 253 |
+
if self.is_constant:
|
| 254 |
+
options = VariableTracker.propagate(self, args, kwargs.values())
|
| 255 |
+
return invoke_and_store_as_constant(
|
| 256 |
+
tx, self.fn, self.get_name(), options, args, kwargs
|
| 257 |
+
)
|
| 258 |
+
|
| 259 |
+
return super().call_function(tx, args, kwargs)
|
| 260 |
+
|
| 261 |
+
|
| 262 |
+
class UserMethodVariable(UserFunctionVariable):
|
| 263 |
+
"""Some unsupported user-defined method"""
|
| 264 |
+
|
| 265 |
+
def __init__(self, fn, obj, **kwargs):
|
| 266 |
+
super().__init__(fn=fn, **kwargs)
|
| 267 |
+
self.obj = obj
|
| 268 |
+
|
| 269 |
+
def __str__(self):
|
| 270 |
+
return f"{self.__class__.__name__}({self.fn}, {self.obj})"
|
| 271 |
+
|
| 272 |
+
def self_args(self):
|
| 273 |
+
return [self.obj]
|
| 274 |
+
|
| 275 |
+
def python_type(self):
|
| 276 |
+
return types.MethodType
|
| 277 |
+
|
| 278 |
+
def call_function(
|
| 279 |
+
self, tx, args: "List[VariableTracker]", kwargs: "Dict[str, VariableTracker]"
|
| 280 |
+
) -> "VariableTracker":
|
| 281 |
+
if isinstance(self.obj, variables.NNModuleVariable):
|
| 282 |
+
module_attr = getattr(self.fn, "__module__", "")
|
| 283 |
+
if (
|
| 284 |
+
module_attr is not None
|
| 285 |
+
and module_attr.startswith("torch.nn.")
|
| 286 |
+
or self.is_constant
|
| 287 |
+
):
|
| 288 |
+
return self.obj.call_method(
|
| 289 |
+
tx, self.fn.__name__, args, kwargs, constant=self.is_constant
|
| 290 |
+
).add_options(self)
|
| 291 |
+
return super().call_function(tx, args, kwargs)
|
| 292 |
+
|
| 293 |
+
def num_parameters(self):
|
| 294 |
+
return super().num_parameters() - 1
|
| 295 |
+
|
| 296 |
+
|
| 297 |
+
class WrappedUserMethodVariable(UserMethodVariable):
|
| 298 |
+
def __init__(self, wrapped, context, **kwargs):
|
| 299 |
+
kwargs.pop("fn", None)
|
| 300 |
+
kwargs.pop("obj", None)
|
| 301 |
+
super().__init__(wrapped.fn, wrapped.obj, **kwargs)
|
| 302 |
+
self.wrapped = wrapped
|
| 303 |
+
self.context = context
|
| 304 |
+
|
| 305 |
+
def call_function(
|
| 306 |
+
self, tx, args: "List[VariableTracker]", kwargs: "Dict[str, VariableTracker]"
|
| 307 |
+
) -> "VariableTracker":
|
| 308 |
+
self.context.enter(tx)
|
| 309 |
+
result = super().call_function(tx, args, kwargs)
|
| 310 |
+
self.context.exit(tx)
|
| 311 |
+
return result
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
class WrappedUserFunctionVariable(UserFunctionVariable):
|
| 315 |
+
def __init__(self, wrapped, context, **kwargs):
|
| 316 |
+
kwargs.pop("fn", None)
|
| 317 |
+
kwargs.pop("obj", None)
|
| 318 |
+
super().__init__(wrapped.fn, **kwargs)
|
| 319 |
+
self.wrapped = wrapped
|
| 320 |
+
self.context = context
|
| 321 |
+
|
| 322 |
+
def call_function(
|
| 323 |
+
self, tx, args: "List[VariableTracker]", kwargs: "Dict[str, VariableTracker]"
|
| 324 |
+
) -> "VariableTracker":
|
| 325 |
+
self.context.enter(tx)
|
| 326 |
+
result = super().call_function(tx, args, kwargs)
|
| 327 |
+
self.context.exit(tx)
|
| 328 |
+
return result
|
| 329 |
+
|
| 330 |
+
|
| 331 |
+
def invoke_and_store_as_constant(tx, fn, name, options, args, kwargs):
|
| 332 |
+
def convert(x):
|
| 333 |
+
if isinstance(x, variables.TensorVariable):
|
| 334 |
+
return x.get_real_value()
|
| 335 |
+
return x.as_python_constant()
|
| 336 |
+
|
| 337 |
+
args = [convert(x) for x in args]
|
| 338 |
+
kwargs = {k: convert(v) for k, v in kwargs.items()}
|
| 339 |
+
res = fn(*args, **kwargs)
|
| 340 |
+
return tx.output.register_attr_or_module(
|
| 341 |
+
res,
|
| 342 |
+
name,
|
| 343 |
+
source=ConstantSource(name),
|
| 344 |
+
**options,
|
| 345 |
+
)
|
| 346 |
+
|
| 347 |
+
|
| 348 |
+
class NestedUserFunctionVariable(BaseUserFunctionVariable):
|
| 349 |
+
def __init__(
|
| 350 |
+
self,
|
| 351 |
+
fn_name,
|
| 352 |
+
code,
|
| 353 |
+
f_globals,
|
| 354 |
+
defaults,
|
| 355 |
+
kwdefaults,
|
| 356 |
+
annotations,
|
| 357 |
+
closure,
|
| 358 |
+
closure_scope,
|
| 359 |
+
**kwargs,
|
| 360 |
+
):
|
| 361 |
+
super().__init__(**kwargs)
|
| 362 |
+
assert isinstance(fn_name.as_python_constant(), str)
|
| 363 |
+
assert isinstance(code.as_python_constant(), types.CodeType)
|
| 364 |
+
assert isinstance(f_globals, dict)
|
| 365 |
+
self.fn_name = fn_name
|
| 366 |
+
self.code = code
|
| 367 |
+
self.f_globals = f_globals
|
| 368 |
+
self.defaults = defaults
|
| 369 |
+
self.kwdefaults = kwdefaults
|
| 370 |
+
self.annotations = annotations
|
| 371 |
+
self.closure = closure
|
| 372 |
+
if closure is None:
|
| 373 |
+
closure_scope = None
|
| 374 |
+
self.closure_scope = closure_scope
|
| 375 |
+
|
| 376 |
+
def self_args(self):
|
| 377 |
+
return []
|
| 378 |
+
|
| 379 |
+
def get_code(self):
|
| 380 |
+
return self.code.as_python_constant()
|
| 381 |
+
|
| 382 |
+
def get_function(self):
|
| 383 |
+
if self.closure:
|
| 384 |
+
raise NotImplementedError()
|
| 385 |
+
func = types.FunctionType(
|
| 386 |
+
self.code.as_python_constant(),
|
| 387 |
+
self.f_globals,
|
| 388 |
+
self.fn_name.as_python_constant(),
|
| 389 |
+
)
|
| 390 |
+
if self.defaults:
|
| 391 |
+
func.__defaults__ = self.defaults.as_python_constant()
|
| 392 |
+
if self.kwdefaults:
|
| 393 |
+
func.__kwdefaults__ = self.kwdefaults.as_python_constant()
|
| 394 |
+
if self.annotations:
|
| 395 |
+
annotations = self.annotations.as_python_constant()
|
| 396 |
+
if isinstance(annotations, tuple):
|
| 397 |
+
from itertools import pairwise
|
| 398 |
+
|
| 399 |
+
annotations = dict(pairwise(annotations))
|
| 400 |
+
|
| 401 |
+
# TypeError: __annotations__ must be set to a dict object
|
| 402 |
+
assert isinstance(annotations, dict)
|
| 403 |
+
func.__annotations__ = annotations
|
| 404 |
+
return func
|
| 405 |
+
|
| 406 |
+
def has_closure(self):
|
| 407 |
+
return self.closure is not None
|
| 408 |
+
|
| 409 |
+
def has_self(self):
|
| 410 |
+
return False
|
| 411 |
+
|
| 412 |
+
def get_globals(self):
|
| 413 |
+
return self.f_globals
|
| 414 |
+
|
| 415 |
+
def bind_args(self, parent, args, kwargs):
|
| 416 |
+
code = self.get_code()
|
| 417 |
+
func = types.FunctionType(
|
| 418 |
+
code,
|
| 419 |
+
self.f_globals,
|
| 420 |
+
self.fn_name.as_python_constant(),
|
| 421 |
+
tuple(self.defaults.items) if self.defaults else None,
|
| 422 |
+
tuple(make_cell(None) for _ in range(len(self.get_code().co_freevars))),
|
| 423 |
+
)
|
| 424 |
+
if self.kwdefaults:
|
| 425 |
+
func.__kwdefaults__ = self.kwdefaults.items
|
| 426 |
+
|
| 427 |
+
bound = inspect.signature(func).bind(*args, **kwargs)
|
| 428 |
+
bound.apply_defaults()
|
| 429 |
+
result = dict(bound.arguments.items())
|
| 430 |
+
wrap_args_kwargs(parent.output.root_tx, result, VariableTracker.propagate(self))
|
| 431 |
+
closure_cells = init_cellvars(parent, result, code)
|
| 432 |
+
|
| 433 |
+
for idx, name in enumerate(code.co_freevars):
|
| 434 |
+
assert getattr(self.closure.items[idx], name, name) == name
|
| 435 |
+
assert name not in result
|
| 436 |
+
closure_cells[name] = self.closure.items[idx]
|
| 437 |
+
|
| 438 |
+
return result, closure_cells
|
| 439 |
+
|
| 440 |
+
def export_freevars(self, parent, child):
|
| 441 |
+
code = self.get_code()
|
| 442 |
+
for var in code.co_freevars:
|
| 443 |
+
if var in child.symbolic_locals:
|
| 444 |
+
parent.symbolic_locals[var] = child.symbolic_locals[var]
|
| 445 |
+
|
| 446 |
+
def reconstruct(self, codegen):
|
| 447 |
+
flags = 0x00
|
| 448 |
+
if self.defaults:
|
| 449 |
+
flags |= 0x01
|
| 450 |
+
codegen(self.defaults)
|
| 451 |
+
if self.kwdefaults:
|
| 452 |
+
flags |= 0x02
|
| 453 |
+
codegen(self.kwdefaults)
|
| 454 |
+
if isinstance(self.annotations, variables.ConstDictVariable) or isinstance(
|
| 455 |
+
self.annotations, variables.TupleVariable
|
| 456 |
+
):
|
| 457 |
+
flags |= 0x04
|
| 458 |
+
try:
|
| 459 |
+
if isinstance(self.annotations, variables.ConstDictVariable):
|
| 460 |
+
annotations = {
|
| 461 |
+
k: v.as_python_constant()
|
| 462 |
+
for k, v in self.annotations.items.items()
|
| 463 |
+
}
|
| 464 |
+
else:
|
| 465 |
+
annotations = tuple(
|
| 466 |
+
[v.as_python_constant() for v in self.annotations.items]
|
| 467 |
+
)
|
| 468 |
+
codegen.extend_output([codegen._create_load_const(annotations)])
|
| 469 |
+
except NotImplementedError:
|
| 470 |
+
codegen(self.annotations)
|
| 471 |
+
if self.closure:
|
| 472 |
+
flags |= 0x08
|
| 473 |
+
codegen(self.closure)
|
| 474 |
+
codegen(self.code)
|
| 475 |
+
codegen(self.fn_name)
|
| 476 |
+
return [create_instruction("MAKE_FUNCTION", flags)]
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/variables/nn_module.py
ADDED
|
@@ -0,0 +1,636 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import inspect
|
| 3 |
+
import itertools
|
| 4 |
+
import types
|
| 5 |
+
from contextlib import contextmanager
|
| 6 |
+
from typing import Dict, List
|
| 7 |
+
|
| 8 |
+
import torch.nn
|
| 9 |
+
|
| 10 |
+
from .. import skipfiles, variables
|
| 11 |
+
from ..allowed_functions import is_allowed
|
| 12 |
+
from ..exc import RestartAnalysis, unimplemented
|
| 13 |
+
from ..guards import GuardBuilder
|
| 14 |
+
from ..mutation_guard import GenerationTracker
|
| 15 |
+
from ..source import AttrSource, GetItemSource, NNModuleSource, NotNNModuleSource
|
| 16 |
+
from ..utils import (
|
| 17 |
+
is_lazy_module,
|
| 18 |
+
is_safe_constant,
|
| 19 |
+
istensor,
|
| 20 |
+
istype,
|
| 21 |
+
proxy_args_kwargs,
|
| 22 |
+
)
|
| 23 |
+
from .base import MutableLocal, typestr, VariableTracker
|
| 24 |
+
from .functions import invoke_and_store_as_constant
|
| 25 |
+
from .lists import SliceVariable
|
| 26 |
+
from .user_defined import UserDefinedObjectVariable
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class NNModuleVariable(VariableTracker):
    """Tracks a specific ``torch.nn.Module`` instance (specialized by key).

    The module itself is not stored on the variable; it is looked up from
    the output graph via ``tx.output.get_submodule(self.module_key)``.
    """

    # Fields that should not be treated as contained VariableTrackers.
    _nonvar_fields = ["module_type", "module_key"]

    def __init__(self, module_type: type, module_key: str, **kwargs):
        super().__init__(**kwargs)
        self.module_type = module_type
        self.module_key = module_key
        # A source is mandatory: all attribute/guard handling below derives
        # sources from self.source.
        assert self.source

    def python_type(self):
        return self.module_type

    def _wrap_submodule(self, tx, source, submod, *key_extra, **options):
        # Intentionally a no-op hook; returns None.
        return
|
| 43 |
+
|
| 44 |
+
def unpack_var_sequence(self, tx):
    """Unpack a ModuleList/ParameterList/Sequential into per-item variables.

    Each submodule is registered on the output graph and tracked with a
    ``NNModuleSource(GetItemSource(...))`` so guards/reconstruction can
    index back into the container.
    """
    # implement list/iter/tuple/etc calls
    base = tx.output.get_submodule(self.module_key)
    options = VariableTracker.propagate([self])
    assert isinstance(
        base, (torch.nn.ModuleList, torch.nn.ParameterList, torch.nn.Sequential)
    ), typestr(base)
    assert self.source
    result = []
    for idx, submod in enumerate(base):
        result.append(
            tx.output.register_attr_or_module(
                submod,
                self.module_key,
                idx,
                source=NNModuleSource(GetItemSource(self.source, idx)),
                **options,
            )
        )
    return result
|
| 64 |
+
|
| 65 |
+
def call_hasattr(self, tx, name: str) -> "VariableTracker":
    """Constant-fold ``hasattr(module, name)``, installing a HASATTR guard
    so the result stays valid across future calls."""
    options = VariableTracker.propagate(self)
    mod = tx.output.get_submodule(self.module_key)
    result = hasattr(mod, name)
    return variables.ConstantVariable(result, **options).add_guard(
        NNModuleSource(AttrSource(self.source, name)).make_guard(
            GuardBuilder.HASATTR
        )
    )
|
| 74 |
+
|
| 75 |
+
def is_training(self, tx):
    """Return the underlying module's ``training`` flag (False if absent)."""
    submodule = tx.output.get_submodule(self.module_key)
    return getattr(submodule, "training", False)
|
| 78 |
+
|
| 79 |
+
def convert_to_unspecialized(self, tx):
    """Restart analysis treating this module as an UnspecializedNNModuleVariable"""
    mod = tx.output.get_submodule(self.module_key)
    # Tagging the instance makes future wraps pick the unspecialized path.
    GenerationTracker.tag(mod)

    # Mark the class dynamic unless it's module initialization
    if tx.f_code.co_name != "__init__":
        GenerationTracker.mark_class_dynamic(type(mod))
    # Abort the current trace; the next attempt sees the tags above.
    raise RestartAnalysis()
|
| 88 |
+
|
| 89 |
+
def var_getattr(self, tx, name):
    """Resolve ``module.<name>`` symbolically.

    Looks through the instance ``__dict__`` (including the nn.Module
    ``_modules`` / ``_parameters`` / ``_buffers`` registries) before
    falling back to class-level lookup via ``inspect.getattr_static``.
    """
    from .builder import VariableBuilder

    options = VariableTracker.propagate(self)
    guards = options.get("guards", set())

    if self.source:
        source = AttrSource(self.source, name)
        options["source"] = source
    else:
        source = None

    base = tx.output.get_submodule(self.module_key)
    # Bypass nn.Module.__getattr__ to read the raw instance dict.
    base_dict = object.__getattribute__(base, "__dict__")
    object_member = True
    all_class_attribute_names = set()
    for x in inspect.getmro(base.__class__):
        all_class_attribute_names.update(x.__dict__.keys())

    if not self.source:
        unimplemented("GETATTR with no source")

    if name in base_dict:
        subobj = base_dict[name]
    elif (
        "_modules" in base_dict
        and name in base_dict["_modules"]
        # Class attributes shadow same-named submodules here.
        and name not in all_class_attribute_names
    ):
        subobj = base_dict["_modules"][name]
    elif "_parameters" in base_dict and name in base_dict["_parameters"]:
        subobj = base_dict["_parameters"][name]
    elif "_buffers" in base_dict and name in base_dict["_buffers"]:
        subobj = base_dict["_buffers"][name]
    else:
        subobj = inspect.getattr_static(base, name)
        object_member = False

    if name == "__class__" and not object_member:
        return variables.UserDefinedClassVariable(base.__class__, **options)

    if object_member:
        # Instance-level value: wrap generically.
        return VariableBuilder(tx, NNModuleSource(source))(subobj)
    else:
        # Class-level attribute: dispatch on descriptor kind.
        if istype(subobj, property):
            return variables.UserFunctionVariable(
                subobj.fget,
                guards=guards,
                source=source,
            ).call_function(tx, [(self)], {})
        elif istype(subobj, classmethod):
            return variables.UserMethodVariable(
                subobj.__func__,
                variables.UserDefinedObjectVariable(type(base), guards=guards),
                **options,
            )
        elif istype(subobj, staticmethod):
            return variables.UserFunctionVariable(subobj.__get__(base), **options)
        elif istype(subobj, types.FunctionType):
            return variables.UserMethodVariable(subobj, self, **options)
        elif is_safe_constant(subobj) or istensor(subobj):
            # Support possibly common cases of class members
            return VariableBuilder(tx, NNModuleSource(source))(subobj)
        else:
            unimplemented(f"class property {typestr(base)} {typestr(subobj)}")

    # NOTE(review): unreachable — every branch above returns or raises.
    return variables.GetAttrVariable(self, name, **options)
|
| 156 |
+
|
| 157 |
+
def call_function(
    self,
    tx,
    args: "List[VariableTracker]",
    kwargs: "Dict[str, VariableTracker]",
) -> "VariableTracker":
    """Trace ``module(*args, **kwargs)``.

    Three strategies, in order: unroll plain ``nn.Sequential``; emit a
    single ``call_module`` FX node for allowed (builtin) module classes;
    otherwise inline the module's ``forward`` (or ``__call__`` for lazy
    modules) as a user function.
    """
    options = VariableTracker.propagate(self, args, kwargs.values())
    mod = tx.output.get_submodule(self.module_key)

    @contextmanager
    def record_nn_module_stack():
        # Track the module on tx.nn_module_stack for the duration of the call.
        try:
            tx.nn_module_stack[self.module_key] = type(mod)
            yield
        finally:
            del tx.nn_module_stack[self.module_key]

    with record_nn_module_stack():
        is_lazy = is_lazy_module(mod)
        if (
            isinstance(mod, torch.nn.Sequential)
            and mod.__class__.forward is torch.nn.Sequential.forward
        ):
            # unroll Sequential()
            assert not kwargs
            (arg,) = args
            for idx, submod in enumerate(mod):
                tx.call_function(
                    tx.output.register_attr_or_module(
                        submod,
                        self.module_key,
                        idx,
                        source=NNModuleSource(GetItemSource(self.source, idx)),
                        **options,
                    ),
                    [arg],
                    {},
                )
                # Output of each submodule feeds the next one.
                arg = tx.pop()
            return arg
        elif is_allowed(mod.__class__):
            # The module type will change after it is called
            if is_lazy:
                self.module_type = mod.cls_to_become
            from .builder import wrap_fx_proxy

            return wrap_fx_proxy(
                tx=tx,
                proxy=tx.output.create_proxy(
                    "call_module",
                    self.module_key,
                    *proxy_args_kwargs(args, kwargs),
                ),
                **options,
            )

        else:
            # for lazy modules, run the pre-hooks which will update the type
            # TODO mlazos: we don't fully support all of the hooks that exist,
            # so restrict using __call__ only to lazy modules for now
            assert self.source, (
                "Must provide a valid source in order to inline, "
                "since inlined function may have default args which must be guarded."
            )
            if is_lazy:
                if istype(mod.__call__, types.FunctionType):
                    fn = mod.__call__
                    fn_source = AttrSource(self.source, "__call__")
                else:
                    assert istype(mod.__call__, types.MethodType)
                    fn = mod.__call__.__func__
                    fn_source = AttrSource(
                        AttrSource(self.source, "__call__"), "__func__"
                    )
                args = [self] + args
            else:
                if istype(mod.forward, types.FunctionType):
                    fn = mod.forward
                    fn_source = AttrSource(self.source, "forward")
                else:
                    assert istype(mod.forward, types.MethodType)
                    fn = mod.forward.__func__
                    fn_source = AttrSource(
                        AttrSource(self.source, "forward"), "__func__"
                    )
                args = [self] + args
            options["source"] = fn_source
            return tx.inline_user_function_return(
                variables.UserFunctionVariable(fn, **options),
                args,
                kwargs,
            )
|
| 249 |
+
|
| 250 |
+
def call_method(
    self,
    tx,
    name,
    args: "List[VariableTracker]",
    kwargs: "Dict[str, VariableTracker]",
    constant=False,
) -> "VariableTracker":
    """Trace ``module.<name>(*args, **kwargs)``.

    Handles the common nn.Module container protocol (children/parameters/
    buffers/modules iteration, keys/values/items, __len__/__contains__/
    __getitem__) by constant-evaluating against the real module, inlines a
    few known helpers, proxies tensor-only custom methods into the graph,
    and otherwise defers to the generic handler.
    """
    from . import ConstantVariable, ListIteratorVariable, TupleVariable

    options = VariableTracker.propagate(self, args, kwargs.values())
    key = self.module_key
    module = tx.output.get_submodule(key)

    if name == "forward":
        return self.call_function(tx, args, kwargs)

    if name == "_check_input_dim" and skipfiles.is_torch_inline_allowed(
        inspect.getfile(module.__class__._check_input_dim)
    ):
        # Skip norm-layer input-dim validation when its file is inlineable.
        return ConstantVariable(True, **options)

    if name == "_get_item_by_idx":
        assert args[1].is_python_constant()
        assert isinstance(args[0], TupleVariable)
        mod_var = args[0].items[args[1].value]
        key = mod_var.module_key
        submod = tx.output.get_submodule(key)
        return tx.output.register_attr_or_module(
            submod,
            key,
            key,
            source=NNModuleSource(GetItemSource(self.source, key)),
            **options,
        )

    if constant:
        # Caller requested eager evaluation, stored as a constant result.
        fn = getattr(module, name)
        name = f"{module.__class__.__name__}_{name}_result"
        return invoke_and_store_as_constant(tx, fn, name, options, args, kwargs)

    def assert_all_args_kwargs_const():
        # Constant evaluation below requires fully-constant arguments.
        if not all(
            x.is_python_constant() for x in itertools.chain(args, kwargs.values())
        ):
            raise unimplemented(f"non-const NNModule method {name}")

    def get_kwargs(*names):
        # Bind the constant args against the real method's signature and
        # pull out the requested parameter values (defaults applied).
        assert_all_args_kwargs_const()
        fn = getattr(module, name)
        bound_args = inspect.signature(fn).bind(
            *([x.as_python_constant() for x in args]),
            **{k: v.as_python_constant() for k, v in kwargs.items()},
        )
        bound_args.apply_defaults()
        bound_args = bound_args.arguments
        return {k: bound_args[k] for k in names}

    def wrap_values(items):
        # Register each (name, submodule/param) pair and return an iterator
        # variable over the wrapped values.
        result = []
        for name, submod in items:
            result.append(
                tx.output.register_attr_or_module(
                    submod,
                    key,
                    name,
                    source=NNModuleSource(gen_source(self.source, name)),
                    **options,
                )
            )
        return ListIteratorVariable(result, mutable_local=MutableLocal(), **options)

    def named_embed(name, obj):
        # (name, wrapped-value) tuple, as produced by named_* iterators.
        return TupleVariable(
            [
                ConstantVariable(name, **options),
                tx.output.register_attr_or_module(
                    obj,
                    key,
                    name,
                    source=NNModuleSource(gen_source(self.source, name)),
                    **options,
                ),
            ]
        )

    def gen_source(source, name):
        # Turn a dotted module path into nested AttrSources.
        name_split = name.split(".")
        if name_split[0] == "":
            return source
        while len(name_split) > 0:
            x = name_split.pop(0)
            source = AttrSource(source, x)
        return source

    if name == "children":
        assert not (args or kwargs)
        return wrap_values(module.named_children())
    elif name == "named_parameters":
        result = []
        for name, param in module.named_parameters(
            **get_kwargs("prefix", "recurse")
        ):
            result.append(named_embed(name, param))
        return ListIteratorVariable(result, mutable_local=MutableLocal(), **options)
    elif name == "named_buffers":
        result = []
        for name, buffer in module.named_buffers(
            **get_kwargs("prefix", "recurse", "remove_duplicate")
        ):
            result.append(named_embed(name, buffer))
        return ListIteratorVariable(result, mutable_local=MutableLocal(), **options)
    elif name == "named_modules":
        result = []
        for name, submod in module.named_modules(
            **get_kwargs("memo", "prefix", "remove_duplicate")
        ):
            result.append(named_embed(name, submod))
        return ListIteratorVariable(result, mutable_local=MutableLocal(), **options)
    elif name == "modules":
        return wrap_values(module.named_modules())
    elif name == "parameters":
        return wrap_values(module.named_parameters(**get_kwargs("recurse")))
    elif name == "keys":
        assert not (args or kwargs)
        result = []
        for name in module.keys():
            result.append(ConstantVariable(name, **options))
        return ListIteratorVariable(result, mutable_local=MutableLocal(), **options)
    elif name == "values":
        assert not (args or kwargs)
        return wrap_values(module.items())
    elif name == "items":
        assert not (args or kwargs)
        result = []
        for name, submod in module.items():
            result.append(named_embed(name, submod))
        return ListIteratorVariable(result, mutable_local=MutableLocal(), **options)
    elif name == "__len__":
        assert not (args or kwargs)
        return ConstantVariable(len(module), **options)
    elif (
        name == "__contains__"
        and isinstance(module, (torch.nn.ModuleDict, torch.nn.ParameterDict))
        and args
        and args[0].is_python_constant()
    ):
        return ConstantVariable(
            args[0].as_python_constant() in module._modules, **options
        )
    elif name == "__getitem__":
        assert not kwargs and len(args) == 1
        builtin_supported = (
            torch.nn.ModuleDict.__getitem__,
            torch.nn.ModuleList.__getitem__,
            torch.nn.ParameterList.__getitem__,
            torch.nn.Sequential.__getitem__,
        )

        if type(module).__getitem__ not in builtin_supported:
            # User-defined __getitem__: inline it instead of evaluating.
            assert isinstance(args[0], variables.ConstantVariable), typestr(args[0])
            key = args[0].as_python_constant()
            assert isinstance(key, (str, int))
            fn = getattr(module, name).__func__

            assert isinstance(fn, types.FunctionType)

            src = AttrSource(AttrSource(self.source, name), "__func__")
            return tx.inline_user_function_return(
                variables.UserFunctionVariable(fn, source=src, **options),
                [self] + list(args),
                kwargs,
            )

        assert self.source

        if isinstance(args[0], SliceVariable):
            # Build a TupleVariable of NNModules
            result = []
            submods = []

            # Turn the slice into the list of integers
            keys = list(range(len(module)))[args[0].as_python_constant()]
            for idx, submod in enumerate(module[args[0].as_python_constant()]):
                key = keys[idx]
                src = NNModuleSource(GetItemSource(self.source, key))
                result.append(
                    tx.output.register_attr_or_module(
                        submod,
                        key,
                        source=src,
                        **options,
                    )
                )
                submods.append(submod)

            # The sliced container is materialized as a fresh Sequential.
            new_module = torch.nn.Sequential(*submods)
            new_module_variable = tx.output.register_attr_or_module(
                new_module,
                f"{self}.__getitem__(slice)",
                source=NNModuleSource(
                    GetItemSource(self.source, args[0].as_python_constant())
                ),
                **options,
            )
            return new_module_variable

        key = args[0].as_python_constant()
        submod = module[key]
        return tx.output.register_attr_or_module(
            submod,
            key,
            args[0].as_python_constant(),
            source=NNModuleSource(GetItemSource(self.source, key)),
            **options,
        )
    elif name == "_get_abs_string_index":
        # Inline the function
        fn = getattr(module, name).__func__
        src = AttrSource(AttrSource(self.source, name), "__func__")
        return tx.inline_user_function_return(
            variables.UserFunctionVariable(fn, source=src, **options),
            [self] + args,
            kwargs,
        )
    # A loose heuristic, but seems to be generally good before we drop into the
    # manual handling of inputs
    elif (
        name in module.__class__.__dict__
        and callable(module.__class__.__dict__[name])
        and all(
            isinstance(x, variables.TensorVariable)
            for x in itertools.chain(args, kwargs.values())
        )
    ):
        # TODO(voz): Refactor this into a generic as_proxy() for nn module
        # We use variations of this pattern in a few places now.
        def make_attr(name):
            node = tx.output.create_proxy(
                "get_attr",
                name,
                tuple(),
                {},
            )
            return node

        # Bind in self
        tx.output.register_attr_or_module(
            module,
            self.module_key,
            self.module_key,
            source=NNModuleSource(GetItemSource(self.source, self.module_key)),
            **options,
        )
        proxy_for_mod = make_attr(self.module_key)
        proxy_for_mod.node.meta["example_value"] = module

        proxy_args, proxy_kwargs = proxy_args_kwargs(args, kwargs)

        from .builder import wrap_fx_proxy

        return wrap_fx_proxy(
            tx=tx,
            proxy=tx.output.create_proxy(
                "call_method",
                name,
                args=(proxy_for_mod, *proxy_args),
                kwargs=proxy_kwargs,
            ),
            **options,
        )
    else:
        return super().call_method(tx, name, args, kwargs)
|
| 523 |
+
|
| 524 |
+
|
| 525 |
+
class UnspecializedNNModuleVariable(UserDefinedObjectVariable):
    """
    The above class will specialize on the id() of a module and place
    parameters on the torch.fx.GraphModule. Giving one graph per
    module instance. This version treats nn.Modules() like other user
    defined objects and will pass parameters into the FX graph as inputs.
    Giving one graph per module class.
    """

    def __init__(self, value, **kwargs):
        super().__init__(value=value, **kwargs)
        if self.source and self.source.is_nn_module():
            # force guard checks even when `not config.guard_nn_modules`
            self.source = NotNNModuleSource(self.source)
|
| 539 |
+
|
| 540 |
+
@staticmethod
@functools.lru_cache(None)
def _nn_module_method_ids():
    """Cached set of ``id(__code__)`` for every method defined directly on
    ``torch.nn.Module`` — used to detect un-overridden base methods."""
    return {
        id(x.__code__)
        for x in torch.nn.Module.__dict__.values()
        if hasattr(x, "__code__")
    }
|
| 548 |
+
|
| 549 |
+
def unpack_var_sequence(self, tx):
    """Unpack iteration over the module when it uses the stock container
    ``__iter__`` (ModuleList/ParameterList/Sequential); otherwise defer."""
    from .builder import VariableBuilder

    try:
        fn = inspect.getattr_static(self.value_type, "__iter__")
    except AttributeError as e:
        raise NotImplementedError from e

    if fn in (
        torch.nn.ModuleList.__iter__,
        torch.nn.ParameterList.__iter__,
        torch.nn.Sequential.__iter__,
    ):
        assert self.source
        return [
            VariableBuilder(tx, source=GetItemSource(self.source, idx))(
                item
            ).add_options(self)
            for idx, item in enumerate(self.value)
        ]

    return super().unpack_var_sequence(tx)
|
| 571 |
+
|
| 572 |
+
def call_function(
    self, tx, args: "List[VariableTracker]", kwargs: "Dict[str, VariableTracker]"
) -> "VariableTracker":
    """Trace calling the module: inline its class-level ``forward`` (or
    ``__call__`` for lazy modules) with ``self`` prepended to the args."""
    options = VariableTracker.propagate(self, args, kwargs.values())

    # TODO mlazos: only support __call__ for lazy modules
    # until we can support a larger swath of python
    if is_lazy_module(self.value):
        fn = self.value_type.__call__
        source = AttrSource(AttrSource(self.source, "__class__"), "__call__")
    else:
        fn = self.value_type.forward
        source = AttrSource(AttrSource(self.source, "__class__"), "forward")

    return variables.UserFunctionVariable(
        fn, source=source, **options
    ).call_function(tx, [self] + list(args), kwargs)
|
| 589 |
+
|
| 590 |
+
def call_method(
    self,
    tx,
    name,
    args: "List[VariableTracker]",
    kwargs: "Dict[str, VariableTracker]",
) -> "VariableTracker":
    """Dispatch a method call on an unspecialized nn.Module instance.

    Special-cases ``Module.parameters()`` (guarded and unrolled into a
    list iterator) and static methods (inlined as user functions); any
    other method inherited unchanged from ``torch.nn.Module`` is
    unsupported and aborts tracing.  Everything else falls through to the
    generic user-defined-object handling.
    """
    from .builder import VariableBuilder

    options = VariableTracker.propagate(self, args, kwargs.values())

    if name not in getattr(self.value, "__dict__", {}):
        try:
            method = inspect.getattr_static(type(self.value), name)
        except AttributeError:
            method = None

        if method is torch.nn.Module.parameters:
            # parameters() is reimplemented below by unrolling
            # named_parameters(), so it must be called with no arguments.
            # BUGFIX: was `assert not args or kwargs`, which parses as
            # `(not args) or kwargs` and passed whenever kwargs was
            # non-empty (e.g. parameters(recurse=False) would then be
            # silently ignored).
            assert not args and not kwargs
            options["guards"].add(
                self.source.make_guard(GuardBuilder.NN_MODULE_PARAM_NAMES)
            )
            items = []
            # Renamed loop variable: previously shadowed the `name` param.
            for param_name, param in self.value.named_parameters():
                items.append(
                    VariableBuilder(tx, AttrSource(self.source, param_name))(
                        param
                    ).add_options(options)
                )
            return variables.ListIteratorVariable(
                items, mutable_local=MutableLocal(), **options
            )
        elif isinstance(method, staticmethod):
            source = AttrSource(
                AttrSource(AttrSource(self.source, "__class__"), name), "__func__"
            )
            return tx.inline_user_function_return(
                variables.UserFunctionVariable(
                    method.__func__, source=source, **options
                ),
                args,
                kwargs,
            )

        # BUGFIX: `method` may be None (getattr_static failed) or lack a
        # __code__ attribute (e.g. a property or slot descriptor); the old
        # unconditional `id(method.__code__)` raised AttributeError here
        # instead of falling through to the generic handler.
        if (
            method is not None
            and hasattr(method, "__code__")
            and id(method.__code__) in self._nn_module_method_ids()
        ):
            unimplemented(f"UnspecializedNNModuleVariable missing {name}")

    return super().call_method(tx, name, args, kwargs)
|
openflamingo/lib/python3.10/site-packages/torch/_dynamo/variables/user_defined.py
ADDED
|
@@ -0,0 +1,410 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import contextlib
|
| 3 |
+
import functools
|
| 4 |
+
import importlib
|
| 5 |
+
import inspect
|
| 6 |
+
import random
|
| 7 |
+
import types
|
| 8 |
+
from typing import Dict, List
|
| 9 |
+
|
| 10 |
+
import torch.nn
|
| 11 |
+
|
| 12 |
+
from .. import variables
|
| 13 |
+
from ..exc import unimplemented
|
| 14 |
+
from ..guards import GuardBuilder
|
| 15 |
+
from ..source import AttrSource, ODictGetItemSource, RandomValueSource
|
| 16 |
+
from ..utils import is_namedtuple_cls, namedtuple_fields
|
| 17 |
+
from .base import MutableLocal, VariableTracker
|
| 18 |
+
from .misc import NullContextVariable
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class UserDefinedVariable(VariableTracker):
    """Marker base class for trackers of user-defined classes/objects."""

    pass
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class UserDefinedClassVariable(UserDefinedVariable):
|
| 26 |
+
def __init__(self, value, **kwargs):
    # `value` is the user-defined class object being tracked.
    super().__init__(**kwargs)
    self.value = value
|
| 29 |
+
|
| 30 |
+
def as_python_constant(self):
    # The tracked class object itself is the constant value.
    return self.value

def python_type(self):
    # Type of a class object, i.e. its metaclass (usually `type`).
    return type(self.value)
|
| 35 |
+
|
| 36 |
+
def var_getattr(self, tx, name: str) -> "VariableTracker":
|
| 37 |
+
from . import ConstantVariable
|
| 38 |
+
from .builder import VariableBuilder
|
| 39 |
+
|
| 40 |
+
options = VariableTracker.propagate(self)
|
| 41 |
+
source = AttrSource(self.source, name) if self.source is not None else None
|
| 42 |
+
try:
|
| 43 |
+
obj = inspect.getattr_static(self.value, name)
|
| 44 |
+
except AttributeError:
|
| 45 |
+
obj = None
|
| 46 |
+
if isinstance(obj, staticmethod):
|
| 47 |
+
return variables.UserFunctionVariable(
|
| 48 |
+
obj.__get__(self.value), source=source, **options
|
| 49 |
+
)
|
| 50 |
+
elif isinstance(obj, classmethod):
|
| 51 |
+
return variables.UserMethodVariable(
|
| 52 |
+
obj.__func__, self, source=source, **options
|
| 53 |
+
)
|
| 54 |
+
|
| 55 |
+
if name in getattr(self.value, "__dict__", {}) or ConstantVariable.is_literal(
|
| 56 |
+
obj
|
| 57 |
+
):
|
| 58 |
+
if source:
|
| 59 |
+
return VariableBuilder(tx, source)(obj).add_options(options)
|
| 60 |
+
elif ConstantVariable.is_literal(obj):
|
| 61 |
+
return ConstantVariable(obj, **options)
|
| 62 |
+
|
| 63 |
+
return super().var_getattr(tx, name)
|
| 64 |
+
|
| 65 |
+
def call_method(
|
| 66 |
+
self,
|
| 67 |
+
tx,
|
| 68 |
+
name,
|
| 69 |
+
args: "List[VariableTracker]",
|
| 70 |
+
kwargs: "Dict[str, VariableTracker]",
|
| 71 |
+
) -> "VariableTracker":
|
| 72 |
+
if (
|
| 73 |
+
name == "__subclasses__"
|
| 74 |
+
and len(args) == 0
|
| 75 |
+
and not kwargs
|
| 76 |
+
and "__subclasses__" not in self.value.__dict__
|
| 77 |
+
):
|
| 78 |
+
options = VariableTracker.propagate(self, args, kwargs.values())
|
| 79 |
+
options["mutable_local"] = MutableLocal()
|
| 80 |
+
subs_as_vars: List[VariableTracker] = list()
|
| 81 |
+
for sub in self.value.__subclasses__():
|
| 82 |
+
source = AttrSource(tx.import_source(sub.__module__), sub.__name__)
|
| 83 |
+
subs_as_vars.append(
|
| 84 |
+
variables.UserDefinedClassVariable(sub, source=source)
|
| 85 |
+
)
|
| 86 |
+
|
| 87 |
+
return variables.ListVariable(subs_as_vars, **options)
|
| 88 |
+
|
| 89 |
+
return super().call_method(tx, name, args, kwargs)
|
| 90 |
+
|
| 91 |
+
def call_function(
|
| 92 |
+
self, tx, args: "List[VariableTracker]", kwargs: "Dict[str, VariableTracker]"
|
| 93 |
+
) -> "VariableTracker":
|
| 94 |
+
from ..side_effects import SideEffects
|
| 95 |
+
|
| 96 |
+
options = VariableTracker.propagate(self, args, kwargs.values())
|
| 97 |
+
|
| 98 |
+
if self.value in (
|
| 99 |
+
contextlib.nullcontext,
|
| 100 |
+
torch.autograd.profiler.profile,
|
| 101 |
+
):
|
| 102 |
+
return NullContextVariable(**options)
|
| 103 |
+
elif is_namedtuple_cls(self.value):
|
| 104 |
+
fields = namedtuple_fields(self.value)
|
| 105 |
+
items = list(args)
|
| 106 |
+
items.extend([None] * (len(fields) - len(items)))
|
| 107 |
+
for name, value in kwargs.items():
|
| 108 |
+
assert name in fields
|
| 109 |
+
items[fields.index(name)] = value
|
| 110 |
+
assert all(x is not None for x in items)
|
| 111 |
+
return variables.NamedTupleVariable(
|
| 112 |
+
items, self.value, **VariableTracker.propagate(self, items)
|
| 113 |
+
)
|
| 114 |
+
elif (
|
| 115 |
+
inspect.getattr_static(self.value, "__new__", None) in (object.__new__,)
|
| 116 |
+
and SideEffects.cls_supports_mutation_side_effects(self.value)
|
| 117 |
+
and self.source
|
| 118 |
+
):
|
| 119 |
+
var = tx.output.side_effects.track_object_new(
|
| 120 |
+
self.source, self.value, UserDefinedObjectVariable, options
|
| 121 |
+
)
|
| 122 |
+
return var.add_options(var.call_method(tx, "__init__", args, kwargs))
|
| 123 |
+
elif variables.DataClassVariable.is_matching_cls(self.value):
|
| 124 |
+
options["mutable_local"] = MutableLocal()
|
| 125 |
+
return variables.DataClassVariable.create(self.value, args, kwargs, options)
|
| 126 |
+
|
| 127 |
+
return super().call_function(tx, args, kwargs)
|
| 128 |
+
|
| 129 |
+
def const_getattr(self, tx, name):
|
| 130 |
+
if name == "__name__":
|
| 131 |
+
return self.value.__name__
|
| 132 |
+
return super().const_getattr(tx, name)
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
class UserDefinedObjectVariable(UserDefinedVariable):
|
| 136 |
+
"""
|
| 137 |
+
Mostly objects of defined type. Catch-all for something where we only know the type.
|
| 138 |
+
"""
|
| 139 |
+
|
| 140 |
+
def __init__(self, value, value_type=None, **kwargs):
|
| 141 |
+
super().__init__(**kwargs)
|
| 142 |
+
self.value = value
|
| 143 |
+
self.value_type = value_type or type(value)
|
| 144 |
+
assert type(value) is self.value_type
|
| 145 |
+
|
| 146 |
+
def __str__(self):
|
| 147 |
+
inner = self.value_type.__name__
|
| 148 |
+
if inner in [
|
| 149 |
+
"builtin_function_or_method",
|
| 150 |
+
"getset_descriptor",
|
| 151 |
+
"method_descriptor",
|
| 152 |
+
"method",
|
| 153 |
+
]:
|
| 154 |
+
inner = str(getattr(self.value, "__name__", None))
|
| 155 |
+
return f"{self.__class__.__name__}({inner})"
|
| 156 |
+
|
| 157 |
+
def python_type(self):
|
| 158 |
+
return self.value_type
|
| 159 |
+
|
| 160 |
+
@staticmethod
|
| 161 |
+
@functools.lru_cache(None)
|
| 162 |
+
def _supported_random_functions():
|
| 163 |
+
fns = {
|
| 164 |
+
random.random,
|
| 165 |
+
random.randint,
|
| 166 |
+
random.randrange,
|
| 167 |
+
random.uniform,
|
| 168 |
+
}
|
| 169 |
+
return fns
|
| 170 |
+
|
| 171 |
+
def call_method(
|
| 172 |
+
self,
|
| 173 |
+
tx,
|
| 174 |
+
name,
|
| 175 |
+
args: "List[VariableTracker]",
|
| 176 |
+
kwargs: "Dict[str, VariableTracker]",
|
| 177 |
+
) -> "VariableTracker":
|
| 178 |
+
from . import ConstantVariable, TupleVariable, UserMethodVariable
|
| 179 |
+
|
| 180 |
+
options = VariableTracker.propagate(self, args, kwargs.values())
|
| 181 |
+
|
| 182 |
+
if name not in getattr(self.value, "__dict__", {}):
|
| 183 |
+
try:
|
| 184 |
+
method = inspect.getattr_static(type(self.value), name)
|
| 185 |
+
except AttributeError:
|
| 186 |
+
method = None
|
| 187 |
+
if method is object.__init__:
|
| 188 |
+
return ConstantVariable(None, **options)
|
| 189 |
+
|
| 190 |
+
if method is collections.OrderedDict.keys and self.source:
|
| 191 |
+
# subclass of OrderedDict
|
| 192 |
+
assert not (args or kwargs)
|
| 193 |
+
keys = list(self.value.keys())
|
| 194 |
+
assert all(map(ConstantVariable.is_literal, keys))
|
| 195 |
+
return TupleVariable(
|
| 196 |
+
[ConstantVariable(k, **options) for k in keys], **options
|
| 197 |
+
).add_guard(self.source.make_guard(GuardBuilder.ODICT_KEYS))
|
| 198 |
+
|
| 199 |
+
if (
|
| 200 |
+
method is collections.OrderedDict.items
|
| 201 |
+
and isinstance(self.value, collections.OrderedDict)
|
| 202 |
+
and self.source
|
| 203 |
+
):
|
| 204 |
+
assert not (args or kwargs)
|
| 205 |
+
items = []
|
| 206 |
+
keys = self.call_method(tx, "keys", [], {})
|
| 207 |
+
options = VariableTracker.propagate(self, args, kwargs.values(), keys)
|
| 208 |
+
for key in keys.unpack_var_sequence(tx):
|
| 209 |
+
items.append(
|
| 210 |
+
TupleVariable(
|
| 211 |
+
[key, self.odict_getitem(tx, key)],
|
| 212 |
+
**options,
|
| 213 |
+
)
|
| 214 |
+
)
|
| 215 |
+
return TupleVariable(items, **options)
|
| 216 |
+
|
| 217 |
+
if method is collections.OrderedDict.__getitem__ and len(args) == 1:
|
| 218 |
+
assert not kwargs
|
| 219 |
+
return self.odict_getitem(tx, args[0])
|
| 220 |
+
|
| 221 |
+
# check for methods implemented in C++
|
| 222 |
+
if isinstance(method, types.FunctionType):
|
| 223 |
+
source = (
|
| 224 |
+
None
|
| 225 |
+
if self.source is None
|
| 226 |
+
else AttrSource(AttrSource(self.source, "__class__"), name)
|
| 227 |
+
)
|
| 228 |
+
# TODO(jansel): add a guard to check for monkey patching?
|
| 229 |
+
return UserMethodVariable(
|
| 230 |
+
method, self, source=source, **options
|
| 231 |
+
).call_function(tx, args, kwargs)
|
| 232 |
+
|
| 233 |
+
return super().call_method(tx, name, args, kwargs)
|
| 234 |
+
|
| 235 |
+
def is_supported_random(self):
|
| 236 |
+
try:
|
| 237 |
+
return self.value in self._supported_random_functions()
|
| 238 |
+
except TypeError:
|
| 239 |
+
# TypeError: unhashable type
|
| 240 |
+
return False
|
| 241 |
+
|
| 242 |
+
def call_function(
|
| 243 |
+
self, tx, args: "List[VariableTracker]", kwargs: "Dict[str, VariableTracker]"
|
| 244 |
+
) -> "VariableTracker":
|
| 245 |
+
from .builder import VariableBuilder
|
| 246 |
+
|
| 247 |
+
if (
|
| 248 |
+
self.is_supported_random()
|
| 249 |
+
and all(k.is_python_constant() for k in args)
|
| 250 |
+
and all(v.is_python_constant() for v in kwargs.values())
|
| 251 |
+
):
|
| 252 |
+
args = [x.as_python_constant() for x in args]
|
| 253 |
+
kwargs = {k: v.as_python_constant() for k, v in kwargs.items()}
|
| 254 |
+
random_call_index = len(tx.random_calls)
|
| 255 |
+
if random_call_index == 0:
|
| 256 |
+
tx.output.initial_random_state = random.getstate()
|
| 257 |
+
example_value = self.value(*args, **kwargs)
|
| 258 |
+
source = RandomValueSource(random_call_index)
|
| 259 |
+
tx.random_calls.append((self.value, args, kwargs))
|
| 260 |
+
return VariableBuilder(tx, source).wrap_unspecialized_primitive(
|
| 261 |
+
example_value
|
| 262 |
+
)
|
| 263 |
+
|
| 264 |
+
return super().call_function(tx, args, kwargs)
|
| 265 |
+
|
| 266 |
+
def _check_for_getattribute(self):
|
| 267 |
+
try:
|
| 268 |
+
if isinstance(
|
| 269 |
+
inspect.getattr_static(type(self.value), "__getattribute__"),
|
| 270 |
+
types.FunctionType,
|
| 271 |
+
):
|
| 272 |
+
unimplemented("UserDefinedObjectVariable with custom __getattribute__")
|
| 273 |
+
except AttributeError:
|
| 274 |
+
pass
|
| 275 |
+
|
| 276 |
+
def _check_for_getattr(self):
|
| 277 |
+
try:
|
| 278 |
+
getattr_fn = inspect.getattr_static(type(self.value), "__getattr__")
|
| 279 |
+
except AttributeError:
|
| 280 |
+
getattr_fn = None
|
| 281 |
+
if getattr_fn is torch.nn.Module.__getattr__:
|
| 282 |
+
# ignore this case of getattr
|
| 283 |
+
getattr_fn = None
|
| 284 |
+
return getattr_fn
|
| 285 |
+
|
| 286 |
+
def _getattr_static(self, name):
|
| 287 |
+
if (
|
| 288 |
+
isinstance(self.value, torch.nn.Module)
|
| 289 |
+
or "__slots__" in self.value.__class__.__dict__
|
| 290 |
+
):
|
| 291 |
+
# getattr_static doesn't work on these
|
| 292 |
+
subobj = getattr(self.value, name)
|
| 293 |
+
else:
|
| 294 |
+
subobj = inspect.getattr_static(self.value, name)
|
| 295 |
+
return subobj
|
| 296 |
+
|
| 297 |
+
def var_getattr(self, tx, name):
|
| 298 |
+
from . import ConstantVariable
|
| 299 |
+
from .builder import VariableBuilder
|
| 300 |
+
|
| 301 |
+
options = VariableTracker.propagate(self)
|
| 302 |
+
value = self.value
|
| 303 |
+
source = AttrSource(self.source, name) if self.source else None
|
| 304 |
+
self._check_for_getattribute()
|
| 305 |
+
getattr_fn = self._check_for_getattr()
|
| 306 |
+
|
| 307 |
+
try:
|
| 308 |
+
subobj = self._getattr_static(name)
|
| 309 |
+
except AttributeError:
|
| 310 |
+
subobj = None
|
| 311 |
+
if isinstance(getattr_fn, types.FunctionType):
|
| 312 |
+
return variables.UserMethodVariable(
|
| 313 |
+
getattr_fn, self, source=source, **options
|
| 314 |
+
).call_function(tx, [ConstantVariable(name)], {})
|
| 315 |
+
elif getattr_fn is not None:
|
| 316 |
+
unimplemented("UserDefined with non-function __getattr__")
|
| 317 |
+
|
| 318 |
+
if isinstance(subobj, property):
|
| 319 |
+
return variables.UserMethodVariable(
|
| 320 |
+
subobj.fget, self, source=source, **options
|
| 321 |
+
).call_function(tx, [], {})
|
| 322 |
+
elif isinstance(subobj, staticmethod):
|
| 323 |
+
return variables.UserFunctionVariable(
|
| 324 |
+
subobj.__get__(self.value), source=source, **options
|
| 325 |
+
)
|
| 326 |
+
elif isinstance(subobj, classmethod):
|
| 327 |
+
return variables.UserMethodVariable(
|
| 328 |
+
subobj.__func__, self, source=source, **options
|
| 329 |
+
)
|
| 330 |
+
elif isinstance(subobj, types.FunctionType):
|
| 331 |
+
return variables.UserMethodVariable(subobj, self, source=source, **options)
|
| 332 |
+
|
| 333 |
+
if (
|
| 334 |
+
name in getattr(value, "__dict__", {})
|
| 335 |
+
or ConstantVariable.is_literal(subobj)
|
| 336 |
+
or isinstance(
|
| 337 |
+
subobj,
|
| 338 |
+
(
|
| 339 |
+
torch.Tensor,
|
| 340 |
+
torch.nn.Module,
|
| 341 |
+
),
|
| 342 |
+
)
|
| 343 |
+
):
|
| 344 |
+
if source:
|
| 345 |
+
return VariableBuilder(tx, source)(subobj).add_options(options)
|
| 346 |
+
elif ConstantVariable.is_literal(subobj):
|
| 347 |
+
return ConstantVariable(subobj, **options)
|
| 348 |
+
|
| 349 |
+
if (
|
| 350 |
+
name not in getattr(value, "__dict__", {})
|
| 351 |
+
and type(value).__module__.startswith("torch.")
|
| 352 |
+
and "torch.optim" not in type(value).__module__
|
| 353 |
+
and not callable(value)
|
| 354 |
+
):
|
| 355 |
+
if not source:
|
| 356 |
+
assert getattr(
|
| 357 |
+
importlib.import_module(type(value).__module__),
|
| 358 |
+
type(value).__name__,
|
| 359 |
+
) is type(value)
|
| 360 |
+
source = AttrSource(
|
| 361 |
+
AttrSource(
|
| 362 |
+
tx.import_source(type(value).__module__), type(value).__name__
|
| 363 |
+
),
|
| 364 |
+
name,
|
| 365 |
+
)
|
| 366 |
+
|
| 367 |
+
return VariableBuilder(tx, source)(subobj).add_options(options)
|
| 368 |
+
options["source"] = source
|
| 369 |
+
if isinstance(
|
| 370 |
+
subobj,
|
| 371 |
+
(
|
| 372 |
+
torch.distributions.constraints._Interval,
|
| 373 |
+
torch.distributions.constraints._Real,
|
| 374 |
+
torch.distributions.constraints.Constraint,
|
| 375 |
+
),
|
| 376 |
+
):
|
| 377 |
+
return UserDefinedObjectVariable(subobj, **options)
|
| 378 |
+
|
| 379 |
+
if name == "__class__":
|
| 380 |
+
return UserDefinedClassVariable(type(self.value), **options)
|
| 381 |
+
|
| 382 |
+
return variables.GetAttrVariable(self, name, **options)
|
| 383 |
+
|
| 384 |
+
def call_hasattr(self, tx, name: str) -> "VariableTracker":
|
| 385 |
+
if not self.source:
|
| 386 |
+
unimplemented("hasattr no source")
|
| 387 |
+
options = VariableTracker.propagate(self)
|
| 388 |
+
options["guards"].add(
|
| 389 |
+
AttrSource(self.source, name).make_guard(GuardBuilder.HASATTR)
|
| 390 |
+
)
|
| 391 |
+
if self._check_for_getattribute() or self._check_for_getattr():
|
| 392 |
+
unimplemented("hasattr with custom __getattr__")
|
| 393 |
+
|
| 394 |
+
try:
|
| 395 |
+
self._getattr_static(name)
|
| 396 |
+
return variables.ConstantVariable(True, **options)
|
| 397 |
+
except AttributeError:
|
| 398 |
+
return variables.ConstantVariable(False, **options)
|
| 399 |
+
|
| 400 |
+
def odict_getitem(self, tx, key):
|
| 401 |
+
from .builder import VariableBuilder
|
| 402 |
+
|
| 403 |
+
return VariableBuilder(
|
| 404 |
+
tx,
|
| 405 |
+
ODictGetItemSource(self.source, key.as_python_constant()),
|
| 406 |
+
)(
|
| 407 |
+
collections.OrderedDict.__getitem__(self.value, key.as_python_constant())
|
| 408 |
+
).add_options(
|
| 409 |
+
key, self
|
| 410 |
+
)
|
openflamingo/lib/python3.10/site-packages/torch/_subclasses/meta_utils.py
ADDED
|
@@ -0,0 +1,522 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import warnings
|
| 3 |
+
import weakref
|
| 4 |
+
from typing import ContextManager, Optional
|
| 5 |
+
|
| 6 |
+
import torch
|
| 7 |
+
from torch._guards import Source
|
| 8 |
+
from torch.multiprocessing.reductions import StorageWeakRef
|
| 9 |
+
from torch.utils.weak import WeakIdRef
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def safe_is_leaf(t):
|
| 13 |
+
try:
|
| 14 |
+
return t.is_leaf
|
| 15 |
+
except RuntimeError:
|
| 16 |
+
# inference mode can trigger this
|
| 17 |
+
return False
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def safe_grad(t):
|
| 21 |
+
with warnings.catch_warnings():
|
| 22 |
+
warnings.filterwarnings("ignore", "The .grad attribute of a Tensor")
|
| 23 |
+
return t.grad
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def assert_eq(a, b):
|
| 27 |
+
assert a == b, f"{a} != {b}"
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def assert_metadata_eq(assert_eq, m1, m2, *, skip_symbolic=False):
|
| 31 |
+
def go(m1, m2):
|
| 32 |
+
assert_eq(m1.dtype, m2.dtype)
|
| 33 |
+
if not skip_symbolic:
|
| 34 |
+
assert_eq(m1.shape, m2.shape)
|
| 35 |
+
assert_eq(m1.requires_grad, m2.requires_grad)
|
| 36 |
+
assert_eq(m1.is_leaf, m2.is_leaf)
|
| 37 |
+
assert_eq(m1.grad_fn is None, m2.grad_fn is None)
|
| 38 |
+
assert_eq(m1.is_sparse, m2.is_sparse)
|
| 39 |
+
assert_eq(m1.is_inference(), m2.is_inference())
|
| 40 |
+
assert_eq(m1.is_conj(), m2.is_conj())
|
| 41 |
+
assert_eq(m1.is_neg(), m2.is_neg())
|
| 42 |
+
assert_eq(safe_grad(m1) is not None, safe_grad(m2) is not None)
|
| 43 |
+
if safe_grad(m1) is not None:
|
| 44 |
+
go(safe_grad(m1), safe_grad(m2))
|
| 45 |
+
if m1.is_sparse:
|
| 46 |
+
assert_eq(m1.dense_dim(), m2.dense_dim())
|
| 47 |
+
assert_eq(m1.sparse_dim(), m2.sparse_dim())
|
| 48 |
+
assert_eq(m1.is_coalesced(), m2.is_coalesced())
|
| 49 |
+
else:
|
| 50 |
+
if not skip_symbolic:
|
| 51 |
+
assert_eq(m1.stride(), m2.stride())
|
| 52 |
+
assert_eq(m1.storage_offset(), m2.storage_offset())
|
| 53 |
+
assert_eq(m1._is_view(), m2._is_view())
|
| 54 |
+
if m1._is_view():
|
| 55 |
+
go(m1._base, m2._base)
|
| 56 |
+
# TODO: test if is resizable (no direct query for this atm)
|
| 57 |
+
# TODO: audit AutogradMeta to see if it matches
|
| 58 |
+
# TODO: test forward AD
|
| 59 |
+
|
| 60 |
+
return go(m1, m2)
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
# This is a class for converting multiple tensors into meta tensors which
|
| 64 |
+
# share the same view/storage structure. The operation model is you allocate
|
| 65 |
+
# one of these, and then call it repeatedly on all the tensors you want to
|
| 66 |
+
# convert. It's important to use the same object for tensors you want to
|
| 67 |
+
# share storage because this is how we correlate shared storages to the same
|
| 68 |
+
# meta storages. This class will hold weak references to cached tenosrs
|
| 69 |
+
# and tensor storages.
|
| 70 |
+
class MetaConverter:
|
| 71 |
+
def __init__(self):
|
| 72 |
+
self.storage_memo = {}
|
| 73 |
+
self.tensor_memo: weakref.WeakValueDictionary = weakref.WeakValueDictionary()
|
| 74 |
+
self.maybe_storages_to_delete = []
|
| 75 |
+
self.check_expired_frequency = 128
|
| 76 |
+
self.check_expired_count = 0
|
| 77 |
+
self.hit = 0
|
| 78 |
+
self.miss = 0
|
| 79 |
+
self.del_hook = None
|
| 80 |
+
self.arg_cnt = 0
|
| 81 |
+
|
| 82 |
+
def successful(self):
|
| 83 |
+
return self.hit > 0 and self.miss == 0
|
| 84 |
+
|
| 85 |
+
def check_for_expired_weak_storages(self):
|
| 86 |
+
new_li = []
|
| 87 |
+
stor_to_delete = []
|
| 88 |
+
for obj in self.maybe_storages_to_delete:
|
| 89 |
+
if not obj.expired():
|
| 90 |
+
new_li.append(obj)
|
| 91 |
+
else:
|
| 92 |
+
stor_to_delete.append(obj)
|
| 93 |
+
for obj in stor_to_delete:
|
| 94 |
+
self.storage_memo.pop(obj, None)
|
| 95 |
+
self.maybe_storages_to_delete = new_li
|
| 96 |
+
|
| 97 |
+
# if for some reason we have aquired many storages which have not expired
|
| 98 |
+
# even though a tensor with their storage has expired (aliasing or otherwise)
|
| 99 |
+
# check for expired storages less often so as to bound the amount of work we
|
| 100 |
+
# do checking for expired storages
|
| 101 |
+
self.check_expired_frequency = max(
|
| 102 |
+
self.check_expired_frequency, len(self.maybe_storages_to_delete)
|
| 103 |
+
)
|
| 104 |
+
|
| 105 |
+
def get_tensor_memo(self, t):
|
| 106 |
+
return self.tensor_memo.get(WeakIdRef(t), None)
|
| 107 |
+
|
| 108 |
+
def set_tensor_memo(self, t, v):
|
| 109 |
+
# hold a weak ref to self, otherwise it will be kept alive
|
| 110 |
+
# by the del_ten closure
|
| 111 |
+
self_weak_ref = weakref.ref(self)
|
| 112 |
+
if t.is_sparse or t.is_mkldnn:
|
| 113 |
+
weak_st = None
|
| 114 |
+
else:
|
| 115 |
+
weak_st = StorageWeakRef(t._typed_storage())
|
| 116 |
+
tensor_ref_key = WeakIdRef(t)
|
| 117 |
+
|
| 118 |
+
def del_ten():
|
| 119 |
+
# tensor outlives the converter
|
| 120 |
+
self_ref = self_weak_ref()
|
| 121 |
+
if self_ref is None:
|
| 122 |
+
return
|
| 123 |
+
# on shutdown, tensor_ref_key may not be in memo
|
| 124 |
+
self_ref.tensor_memo.pop(tensor_ref_key, None)
|
| 125 |
+
if weak_st and weak_st.expired():
|
| 126 |
+
self_ref.storage_memo.pop(weak_st, None)
|
| 127 |
+
elif weak_st is not None:
|
| 128 |
+
# [expired-storages]
|
| 129 |
+
# NB: even though the tensor has died,
|
| 130 |
+
# the deallocation of its storage can take longer,
|
| 131 |
+
# even when the storage has no other uses/views.
|
| 132 |
+
# In this case, the StorageWeakRef object will be kept alive
|
| 133 |
+
# longer than it needs to be, however the storage itself
|
| 134 |
+
# will be deallocated. We retain the possibly dead storages
|
| 135 |
+
# and periodically check if any of them are expired and
|
| 136 |
+
# can be freed.
|
| 137 |
+
self_ref.maybe_storages_to_delete.append(weak_st)
|
| 138 |
+
|
| 139 |
+
weakref.finalize(t, del_ten)
|
| 140 |
+
self.tensor_memo[tensor_ref_key] = v
|
| 141 |
+
|
| 142 |
+
# NB: doesn't actually return a storage, because meta storage is
|
| 143 |
+
# not supported
|
| 144 |
+
def meta_storage(self, s, callback):
|
| 145 |
+
# NB: TypedStorage is freshly allocated and cannot be used as hash
|
| 146 |
+
# key index.
|
| 147 |
+
|
| 148 |
+
# Use a Weak Ref to s in order to not leak memory
|
| 149 |
+
swr = StorageWeakRef(s)
|
| 150 |
+
if swr not in self.storage_memo:
|
| 151 |
+
self.storage_memo[swr] = callback(
|
| 152 |
+
lambda: torch.empty(s.size(), dtype=torch.uint8, device="meta")
|
| 153 |
+
).untyped_storage()
|
| 154 |
+
return self.storage_memo[swr]
|
| 155 |
+
|
| 156 |
+
# This function assumes that it's possible to do the conversion
|
| 157 |
+
# NB: name here is used in a conventional way by Dynamo; it corresponds
|
| 158 |
+
# precisely to the Source.name() of the tensor we're fakeifying and
|
| 159 |
+
# corresponds to a valid Python expression. When we construct sub-names
|
| 160 |
+
# as part of this process, we will maintain this invariant! (Even though
|
| 161 |
+
# other users of this may not need it this property to be upheld.)
|
| 162 |
+
def meta_tensor(
|
| 163 |
+
self, t, shape_env=None, callback=lambda t: t(), source: Optional[Source] = None
|
| 164 |
+
):
|
| 165 |
+
if source is None:
|
| 166 |
+
from torch._dynamo.source import ConstantSource
|
| 167 |
+
|
| 168 |
+
# TODO: make a dedicated UnknownSource for this?
|
| 169 |
+
source = ConstantSource(f"__unknown_tensor{len(self.tensor_memo)}")
|
| 170 |
+
|
| 171 |
+
# This indicates you set no_dispatch() before calling into this
|
| 172 |
+
# function. This is an error: we may be creating fake tensors and
|
| 173 |
+
# will perform operations on them which need fake tensor mode to
|
| 174 |
+
# be active. You will segfault if you are in a no_dispatch() block.
|
| 175 |
+
assert not torch._C._dispatch_tls_local_exclude_set().has(
|
| 176 |
+
torch._C.DispatchKey.Python
|
| 177 |
+
)
|
| 178 |
+
arg_cnt = self.arg_cnt
|
| 179 |
+
self.arg_cnt += 1
|
| 180 |
+
|
| 181 |
+
# When we make as_strided calls, we end up generating a guard
|
| 182 |
+
# that the new as_strided tensor is in bounds for the old storage
|
| 183 |
+
# for the base (since as_strided calls can "bust" out of their
|
| 184 |
+
# bounding box.) This guard is unnecessary: if a user is able
|
| 185 |
+
# to provide us a tensor with the view base setup this way, we
|
| 186 |
+
# don't need to produce a guard, because the fact that they
|
| 187 |
+
# were able to produce the view base means its in bounds.
|
| 188 |
+
#
|
| 189 |
+
# Now, ordinarily, this guard would be harmless. However, the
|
| 190 |
+
# generated guard refers to variables bound on the base variable.
|
| 191 |
+
# At the moment, Dynamo doesn't actually guard on x._base, because
|
| 192 |
+
# according to Voz this results in a lot of spurious invalidations,
|
| 193 |
+
# and also if the user doesn't directly make use of _base, its
|
| 194 |
+
# pointless anyway (because programs should be parametric over
|
| 195 |
+
# whether or not the input tensor is a view or not--unless you're
|
| 196 |
+
# mutating the input, but that's a whole 'nother ballgame). So
|
| 197 |
+
# for expediency, we suppress these guards so we don't have to
|
| 198 |
+
# deal with this (yet, anyway.)
|
| 199 |
+
#
|
| 200 |
+
# NB: An old version of this code suppressed guards for ALL operations
|
| 201 |
+
# happening during meta conversion, not just as_strided calls.
|
| 202 |
+
# This is too aggressive: we do duck sizing and 0/1 simplification
|
| 203 |
+
# as we allocate variables, and we do need to register guards for
|
| 204 |
+
# these cases.
|
| 205 |
+
maybe_suppress = contextlib.nullcontext
|
| 206 |
+
if shape_env is not None:
|
| 207 |
+
maybe_suppress = shape_env.suppress_guards
|
| 208 |
+
|
| 209 |
+
make_symbolic = shape_env is not None
|
| 210 |
+
|
| 211 |
+
def sym_sizes_strides_storage_offset(t):
|
| 212 |
+
if make_symbolic:
|
| 213 |
+
return shape_env.create_symbolic_sizes_strides_storage_offset(t, source)
|
| 214 |
+
return (t.size(), t.stride(), t.storage_offset())
|
| 215 |
+
|
| 216 |
+
# see expired-storages
|
| 217 |
+
self.check_expired_count += 1
|
| 218 |
+
if self.check_expired_count >= self.check_expired_frequency:
|
| 219 |
+
self.check_for_expired_weak_storages()
|
| 220 |
+
self.check_expired_count = 0
|
| 221 |
+
|
| 222 |
+
if self.get_tensor_memo(t) is None:
|
| 223 |
+
with torch.inference_mode(t.is_inference()):
|
| 224 |
+
if t.is_sparse:
|
| 225 |
+
assert shape_env is None, "symbolic on sparse NYI"
|
| 226 |
+
is_leaf = safe_is_leaf(t)
|
| 227 |
+
r = callback(
|
| 228 |
+
lambda: torch.ops.aten._sparse_coo_tensor_with_dims(
|
| 229 |
+
t.sparse_dim(),
|
| 230 |
+
t.dense_dim(),
|
| 231 |
+
t.shape,
|
| 232 |
+
dtype=t.dtype,
|
| 233 |
+
layout=torch.sparse_coo,
|
| 234 |
+
device="meta",
|
| 235 |
+
)
|
| 236 |
+
)
|
| 237 |
+
assert safe_is_leaf(r), "the callback you passed in doesn't detach"
|
| 238 |
+
# Note [is_coalesced is dispatched]
|
| 239 |
+
# Strangely enough, is_coalesced() is a dispatched operator,
|
| 240 |
+
# which means that it will get caught by fake tensor mode.
|
| 241 |
+
# Ordinarily this would error, but there's some logic in
|
| 242 |
+
# fake tensor ensure this doesn't happen.
|
| 243 |
+
r._coalesced_(t.is_coalesced())
|
| 244 |
+
if t.requires_grad:
|
| 245 |
+
r.requires_grad = True
|
| 246 |
+
if t.requires_grad and not is_leaf:
|
| 247 |
+
with torch.enable_grad():
|
| 248 |
+
r = r.clone()
|
| 249 |
+
r._coalesced_(t.is_coalesced())
|
| 250 |
+
elif t.is_mkldnn:
|
| 251 |
+
is_leaf = safe_is_leaf(t)
|
| 252 |
+
sizes, strides, _storage_offset = sym_sizes_strides_storage_offset(
|
| 253 |
+
t
|
| 254 |
+
)
|
| 255 |
+
r = callback(
|
| 256 |
+
lambda: torch.empty_strided(
|
| 257 |
+
sizes, strides, dtype=t.dtype, device="meta"
|
| 258 |
+
)
|
| 259 |
+
)
|
| 260 |
+
assert safe_is_leaf(r), "the callback you passed in doesn't detach"
|
| 261 |
+
if t.requires_grad:
|
| 262 |
+
r.requires_grad = True
|
| 263 |
+
if t.requires_grad and not is_leaf:
|
| 264 |
+
with torch.enable_grad():
|
| 265 |
+
r = r.clone()
|
| 266 |
+
elif t._is_view():
|
| 267 |
+
# Construct views in two steps: recursively meta-fy their
|
| 268 |
+
# base, and then create view(s) off that. NB: doing it
|
| 269 |
+
# directly from storage is WRONG because this won't cause
|
| 270 |
+
# version counters to get shared.
|
| 271 |
+
assert t._is_view()
|
| 272 |
+
|
| 273 |
+
from torch._dynamo.source import AttrSource
|
| 274 |
+
|
| 275 |
+
base = self.meta_tensor(
|
| 276 |
+
t._base, shape_env, callback, source=AttrSource(source, "_base")
|
| 277 |
+
)
|
| 278 |
+
|
| 279 |
+
def is_c_of_r(complex_dtype, real_dtype):
|
| 280 |
+
return (
|
| 281 |
+
utils.is_complex_dtype(complex_dtype)
|
| 282 |
+
and utils.corresponding_real_dtype(complex_dtype)
|
| 283 |
+
== real_dtype
|
| 284 |
+
)
|
| 285 |
+
|
| 286 |
+
# In some situations, MetaConverter may be called in a
|
| 287 |
+
# context where autograd is disabled. For the _is_view
|
| 288 |
+
# assert to pass, we have to setup the autograd view
|
| 289 |
+
# metadata anyway. Do this by reenabling the
|
| 290 |
+
# ADInplaceOrView key. This is kind of a hack.
|
| 291 |
+
old_exclude = torch._C._dispatch_tls_is_dispatch_key_excluded(
|
| 292 |
+
torch._C.DispatchKey.ADInplaceOrView
|
| 293 |
+
)
|
| 294 |
+
torch._C._dispatch_tls_set_dispatch_key_excluded(
|
| 295 |
+
torch._C.DispatchKey.ADInplaceOrView, False
|
| 296 |
+
)
|
| 297 |
+
try:
|
| 298 |
+
|
| 299 |
+
if base.dtype == t.dtype:
|
| 300 |
+
pass
|
| 301 |
+
elif is_c_of_r(base.dtype, t.dtype):
|
| 302 |
+
base = torch.view_as_real(base)
|
| 303 |
+
elif is_c_of_r(t.dtype, base.dtype):
|
| 304 |
+
base = torch.view_as_complex(base)
|
| 305 |
+
else:
|
| 306 |
+
# This is not guaranteed to succeed. If it fails, it
|
| 307 |
+
# means there is another dtype-converting view function
|
| 308 |
+
# that hasn't been handled here
|
| 309 |
+
base = base.view(t.dtype)
|
| 310 |
+
|
| 311 |
+
# This is very tricky. Naively, you might expect this
|
| 312 |
+
# to hold:
|
| 313 |
+
#
|
| 314 |
+
# if t.requires_grad and not safe_is_leaf(t)
|
| 315 |
+
# assert t._base.requires_grad
|
| 316 |
+
#
|
| 317 |
+
# But it's not true! As you can see in the following
|
| 318 |
+
# program:
|
| 319 |
+
#
|
| 320 |
+
# x = torch.zeros(4)
|
| 321 |
+
# y = x.view(1, 4)
|
| 322 |
+
# y.requires_grad = True
|
| 323 |
+
# z = y.view(1, 1, 4)
|
| 324 |
+
# assert z._base is x
|
| 325 |
+
#
|
| 326 |
+
# So we may have to do *two* views out of the base to
|
| 327 |
+
# recreate this situation.
|
| 328 |
+
|
| 329 |
+
(
|
| 330 |
+
sizes,
|
| 331 |
+
strides,
|
| 332 |
+
storage_offset,
|
| 333 |
+
) = sym_sizes_strides_storage_offset(t)
|
| 334 |
+
|
| 335 |
+
if safe_is_leaf(t):
|
| 336 |
+
# Leaf views that track view metadata are created by
|
| 337 |
+
# creating a view inside a no_grad block
|
| 338 |
+
with torch.no_grad(), maybe_suppress():
|
| 339 |
+
r = base.as_strided(sizes, strides, storage_offset)
|
| 340 |
+
# As it's a leaf, we can directly assign requires_grad
|
| 341 |
+
r.requires_grad = t.requires_grad
|
| 342 |
+
else:
|
| 343 |
+
if t._base.requires_grad == t.requires_grad:
|
| 344 |
+
# Easy case, just run the view op
|
| 345 |
+
with torch.enable_grad(), maybe_suppress():
|
| 346 |
+
r = base.as_strided(sizes, strides, storage_offset)
|
| 347 |
+
else:
|
| 348 |
+
# Obscure case. Create a leaf view and give it the
|
| 349 |
+
# correct requires_grad, then do the final view.
|
| 350 |
+
# NB: Can't have a non-leaf without requiring grad!
|
| 351 |
+
assert t.requires_grad
|
| 352 |
+
with torch.no_grad():
|
| 353 |
+
mid = base.view(base.shape)
|
| 354 |
+
mid.requires_grad = t.requires_grad
|
| 355 |
+
with torch.enable_grad(), maybe_suppress():
|
| 356 |
+
r = mid.as_strided(sizes, strides, storage_offset)
|
| 357 |
+
finally:
|
| 358 |
+
torch._C._dispatch_tls_set_dispatch_key_excluded(
|
| 359 |
+
torch._C.DispatchKey.ADInplaceOrView, old_exclude
|
| 360 |
+
)
|
| 361 |
+
|
| 362 |
+
else:
|
| 363 |
+
is_leaf = safe_is_leaf(t)
|
| 364 |
+
sizes, strides, storage_offset = sym_sizes_strides_storage_offset(t)
|
| 365 |
+
r = callback(
|
| 366 |
+
lambda: torch.empty_strided(
|
| 367 |
+
sizes, strides, dtype=t.dtype, device="meta"
|
| 368 |
+
)
|
| 369 |
+
)
|
| 370 |
+
assert safe_is_leaf(r), "the callback you passed in doesn't detach"
|
| 371 |
+
if t.requires_grad:
|
| 372 |
+
r.requires_grad = t.requires_grad
|
| 373 |
+
if not is_leaf:
|
| 374 |
+
# Fake up some autograd history.
|
| 375 |
+
with torch.enable_grad():
|
| 376 |
+
# preserve_format is the default, but we want to
|
| 377 |
+
# emphasize how important it is to preserve
|
| 378 |
+
# format here
|
| 379 |
+
r = r.clone(memory_format=torch.preserve_format)
|
| 380 |
+
|
| 381 |
+
s = t.untyped_storage()
|
| 382 |
+
swr = StorageWeakRef(s)
|
| 383 |
+
if (
|
| 384 |
+
swr not in self.storage_memo
|
| 385 |
+
and r.stride() == strides
|
| 386 |
+
and r.storage_offset() == storage_offset
|
| 387 |
+
):
|
| 388 |
+
# You're normal and happy, install the fresh storage into the memo
|
| 389 |
+
self.storage_memo[swr] = r.untyped_storage()
|
| 390 |
+
else:
|
| 391 |
+
# You're in crazy town; somehow you gave us a tensor
|
| 392 |
+
# that wasn't a view, but had nonzero storage offset,
|
| 393 |
+
# nontrivial strides (such that clone() couldn't
|
| 394 |
+
# preserve them), or already aliases with another
|
| 395 |
+
# tensor's storage. The most typical way to end
|
| 396 |
+
# up here is with set_. So use set_ to bludgeon this
|
| 397 |
+
# in.
|
| 398 |
+
r_s = self.meta_storage(s, callback=callback)
|
| 399 |
+
# NB: In principle, this should always work, but there
|
| 400 |
+
# is some subtle difference in the autograd metadata
|
| 401 |
+
# that means we will backprop the set_ call, even if
|
| 402 |
+
# r is declared as an input to grad.
|
| 403 |
+
# See https://github.com/pytorch/pytorch/issues/87956
|
| 404 |
+
# for the reproducer.
|
| 405 |
+
# NB: The in_kernel_invocation_manager here is necessary
|
| 406 |
+
# for fake tensor. If we run the set_ call with fake
|
| 407 |
+
# tensor on, r will improperly report that it is NOT a
|
| 408 |
+
# meta tensor but a cpu tensor, and then the set_ call
|
| 409 |
+
# will fail due to device mismatch. no_dispatch() is
|
| 410 |
+
# not enough, because the fake tensor will still claim
|
| 411 |
+
# to be a CPU tensor and you'll end up in the CPU
|
| 412 |
+
# kernel. Arguably this is a hack; a cleaner way to
|
| 413 |
+
# solve this is to have a FakeStorage concept which
|
| 414 |
+
# would report it's CPU device--no problem now! But
|
| 415 |
+
# this is difficult to do because we don't have storage
|
| 416 |
+
# subclasses. Relevant test is
|
| 417 |
+
# DynamicShapesFunctionTests::test_add_dynamic_shapes in
|
| 418 |
+
# test/dynamo/test_dynamic_shapes.py
|
| 419 |
+
maybe_fake_mgr: ContextManager[None] = contextlib.nullcontext()
|
| 420 |
+
from torch._subclasses.fake_tensor import (
|
| 421 |
+
FakeTensor,
|
| 422 |
+
in_kernel_invocation_manager,
|
| 423 |
+
)
|
| 424 |
+
|
| 425 |
+
if isinstance(r, FakeTensor):
|
| 426 |
+
maybe_fake_mgr = in_kernel_invocation_manager(r.fake_mode)
|
| 427 |
+
with maybe_fake_mgr, torch.no_grad():
|
| 428 |
+
r.set_(r_s, storage_offset, sizes, strides)
|
| 429 |
+
|
| 430 |
+
if safe_grad(t) is not None:
|
| 431 |
+
from torch._dynamo.source import AttrSource
|
| 432 |
+
|
| 433 |
+
r.grad = self.meta_tensor(
|
| 434 |
+
safe_grad(t),
|
| 435 |
+
shape_env,
|
| 436 |
+
callback,
|
| 437 |
+
source=AttrSource(source, "grad"),
|
| 438 |
+
)
|
| 439 |
+
torch._C._set_conj(r, t.is_conj())
|
| 440 |
+
torch._C._set_neg(r, t.is_neg())
|
| 441 |
+
# This can be skipped if necessary for performance reasons
|
| 442 |
+
assert_metadata_eq(assert_eq, t, r, skip_symbolic=True)
|
| 443 |
+
self.set_tensor_memo(t, r)
|
| 444 |
+
|
| 445 |
+
return self.get_tensor_memo(t)
|
| 446 |
+
|
| 447 |
+
def __call__(
|
| 448 |
+
self,
|
| 449 |
+
t,
|
| 450 |
+
shape_env=None,
|
| 451 |
+
*,
|
| 452 |
+
callback=lambda t: t(),
|
| 453 |
+
ignore_subclass=False,
|
| 454 |
+
source=None,
|
| 455 |
+
):
|
| 456 |
+
# TODO: zero tensors? We appear to have eliminated them by
|
| 457 |
+
# excluding complex for now
|
| 458 |
+
from torch._subclasses.fake_tensor import FakeTensor
|
| 459 |
+
|
| 460 |
+
if (
|
| 461 |
+
type(t) is torch.Tensor
|
| 462 |
+
or type(t) is torch.nn.Parameter
|
| 463 |
+
or (ignore_subclass and isinstance(t, torch.Tensor))
|
| 464 |
+
or isinstance(t, FakeTensor)
|
| 465 |
+
):
|
| 466 |
+
if any(
|
| 467 |
+
[
|
| 468 |
+
t.is_sparse_csr,
|
| 469 |
+
t.layout in [torch.sparse_csc, torch.sparse_bsr, torch.sparse_bsc],
|
| 470 |
+
t.is_quantized,
|
| 471 |
+
t.is_nested,
|
| 472 |
+
t._is_view() and t._base is not None and t._base.is_sparse,
|
| 473 |
+
torch._is_functional_tensor(t),
|
| 474 |
+
# these are supported in meta conversion but the fallbacks
|
| 475 |
+
# don't work
|
| 476 |
+
t.is_neg(),
|
| 477 |
+
t.is_conj(),
|
| 478 |
+
t.device.type in ("lazy"),
|
| 479 |
+
# We need a way to test if a tensor is batched but there
|
| 480 |
+
# is no official APi to do it
|
| 481 |
+
# torch._C._is_batched(t),
|
| 482 |
+
]
|
| 483 |
+
):
|
| 484 |
+
# TODO: sparse should support meta
|
| 485 |
+
# NB technically to('meta') does work but our logging
|
| 486 |
+
# instrumentation will see the meta conversions and the
|
| 487 |
+
# tests all break so we just exclude this. In any case
|
| 488 |
+
# the to conversion isn't really right anyhow.
|
| 489 |
+
self.miss += 1
|
| 490 |
+
return NotImplemented
|
| 491 |
+
else:
|
| 492 |
+
self.hit += 1
|
| 493 |
+
# When ignoring subclasses, we treat the input tensor "as if" it
|
| 494 |
+
# were a normal tensor and create a non-subclassed fake tensor
|
| 495 |
+
# that, modulo type and attributes, resembles the original tensor.
|
| 496 |
+
# This can be helpful if you're planning to simulate the subclassness
|
| 497 |
+
# by hand, e.g., as is done in Dynamo
|
| 498 |
+
ctx = contextlib.nullcontext()
|
| 499 |
+
if ignore_subclass:
|
| 500 |
+
ctx = torch._C.DisableTorchFunctionSubclass()
|
| 501 |
+
with ctx:
|
| 502 |
+
r = self.meta_tensor(
|
| 503 |
+
t, shape_env=shape_env, callback=callback, source=source
|
| 504 |
+
)
|
| 505 |
+
# TODO: this is suspicious, now that we have callback argument
|
| 506 |
+
if type(t) is torch.nn.Parameter:
|
| 507 |
+
r = torch.nn.Parameter(r, requires_grad=r.requires_grad)
|
| 508 |
+
return r
|
| 509 |
+
elif torch.overrides.is_tensor_like(t):
|
| 510 |
+
# Blindly converting tensor subclasses to meta can cause
|
| 511 |
+
# unpredictable problems; e.g., FX tests will trace meta
|
| 512 |
+
# tensors into their trace / some subclasses don't correctly
|
| 513 |
+
# support meta. Trying to YOLO this is more trouble than it's
|
| 514 |
+
# worth.
|
| 515 |
+
self.miss += 1
|
| 516 |
+
return NotImplemented
|
| 517 |
+
else:
|
| 518 |
+
# non-Tensor types don't count as hit or miss
|
| 519 |
+
return t
|
| 520 |
+
|
| 521 |
+
|
| 522 |
+
import torch._prims_common as utils
|
phi4/lib/python3.10/site-packages/networkx/__init__.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
NetworkX
|
| 3 |
+
========
|
| 4 |
+
|
| 5 |
+
NetworkX is a Python package for the creation, manipulation, and study of the
|
| 6 |
+
structure, dynamics, and functions of complex networks.
|
| 7 |
+
|
| 8 |
+
See https://networkx.org for complete documentation.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
__version__ = "3.4.2"
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
# These are imported in order as listed
|
| 15 |
+
from networkx.lazy_imports import _lazy_import
|
| 16 |
+
|
| 17 |
+
from networkx.exception import *
|
| 18 |
+
|
| 19 |
+
from networkx import utils
|
| 20 |
+
from networkx.utils import _clear_cache, _dispatchable
|
| 21 |
+
|
| 22 |
+
# load_and_call entry_points, set configs
|
| 23 |
+
config = utils.backends._set_configs_from_environment()
|
| 24 |
+
utils.config = utils.configs.config = config # type: ignore[attr-defined]
|
| 25 |
+
|
| 26 |
+
from networkx import classes
|
| 27 |
+
from networkx.classes import filters
|
| 28 |
+
from networkx.classes import *
|
| 29 |
+
|
| 30 |
+
from networkx import convert
|
| 31 |
+
from networkx.convert import *
|
| 32 |
+
|
| 33 |
+
from networkx import convert_matrix
|
| 34 |
+
from networkx.convert_matrix import *
|
| 35 |
+
|
| 36 |
+
from networkx import relabel
|
| 37 |
+
from networkx.relabel import *
|
| 38 |
+
|
| 39 |
+
from networkx import generators
|
| 40 |
+
from networkx.generators import *
|
| 41 |
+
|
| 42 |
+
from networkx import readwrite
|
| 43 |
+
from networkx.readwrite import *
|
| 44 |
+
|
| 45 |
+
# Need to test with SciPy, when available
|
| 46 |
+
from networkx import algorithms
|
| 47 |
+
from networkx.algorithms import *
|
| 48 |
+
|
| 49 |
+
from networkx import linalg
|
| 50 |
+
from networkx.linalg import *
|
| 51 |
+
|
| 52 |
+
from networkx import drawing
|
| 53 |
+
from networkx.drawing import *
|
phi4/lib/python3.10/site-packages/networkx/algorithms/__init__.py
ADDED
|
@@ -0,0 +1,133 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from networkx.algorithms.assortativity import *
|
| 2 |
+
from networkx.algorithms.asteroidal import *
|
| 3 |
+
from networkx.algorithms.boundary import *
|
| 4 |
+
from networkx.algorithms.broadcasting import *
|
| 5 |
+
from networkx.algorithms.bridges import *
|
| 6 |
+
from networkx.algorithms.chains import *
|
| 7 |
+
from networkx.algorithms.centrality import *
|
| 8 |
+
from networkx.algorithms.chordal import *
|
| 9 |
+
from networkx.algorithms.cluster import *
|
| 10 |
+
from networkx.algorithms.clique import *
|
| 11 |
+
from networkx.algorithms.communicability_alg import *
|
| 12 |
+
from networkx.algorithms.components import *
|
| 13 |
+
from networkx.algorithms.coloring import *
|
| 14 |
+
from networkx.algorithms.core import *
|
| 15 |
+
from networkx.algorithms.covering import *
|
| 16 |
+
from networkx.algorithms.cycles import *
|
| 17 |
+
from networkx.algorithms.cuts import *
|
| 18 |
+
from networkx.algorithms.d_separation import *
|
| 19 |
+
from networkx.algorithms.dag import *
|
| 20 |
+
from networkx.algorithms.distance_measures import *
|
| 21 |
+
from networkx.algorithms.distance_regular import *
|
| 22 |
+
from networkx.algorithms.dominance import *
|
| 23 |
+
from networkx.algorithms.dominating import *
|
| 24 |
+
from networkx.algorithms.efficiency_measures import *
|
| 25 |
+
from networkx.algorithms.euler import *
|
| 26 |
+
from networkx.algorithms.graphical import *
|
| 27 |
+
from networkx.algorithms.hierarchy import *
|
| 28 |
+
from networkx.algorithms.hybrid import *
|
| 29 |
+
from networkx.algorithms.link_analysis import *
|
| 30 |
+
from networkx.algorithms.link_prediction import *
|
| 31 |
+
from networkx.algorithms.lowest_common_ancestors import *
|
| 32 |
+
from networkx.algorithms.isolate import *
|
| 33 |
+
from networkx.algorithms.matching import *
|
| 34 |
+
from networkx.algorithms.minors import *
|
| 35 |
+
from networkx.algorithms.mis import *
|
| 36 |
+
from networkx.algorithms.moral import *
|
| 37 |
+
from networkx.algorithms.non_randomness import *
|
| 38 |
+
from networkx.algorithms.operators import *
|
| 39 |
+
from networkx.algorithms.planarity import *
|
| 40 |
+
from networkx.algorithms.planar_drawing import *
|
| 41 |
+
from networkx.algorithms.polynomials import *
|
| 42 |
+
from networkx.algorithms.reciprocity import *
|
| 43 |
+
from networkx.algorithms.regular import *
|
| 44 |
+
from networkx.algorithms.richclub import *
|
| 45 |
+
from networkx.algorithms.shortest_paths import *
|
| 46 |
+
from networkx.algorithms.similarity import *
|
| 47 |
+
from networkx.algorithms.graph_hashing import *
|
| 48 |
+
from networkx.algorithms.simple_paths import *
|
| 49 |
+
from networkx.algorithms.smallworld import *
|
| 50 |
+
from networkx.algorithms.smetric import *
|
| 51 |
+
from networkx.algorithms.structuralholes import *
|
| 52 |
+
from networkx.algorithms.sparsifiers import *
|
| 53 |
+
from networkx.algorithms.summarization import *
|
| 54 |
+
from networkx.algorithms.swap import *
|
| 55 |
+
from networkx.algorithms.time_dependent import *
|
| 56 |
+
from networkx.algorithms.traversal import *
|
| 57 |
+
from networkx.algorithms.triads import *
|
| 58 |
+
from networkx.algorithms.vitality import *
|
| 59 |
+
from networkx.algorithms.voronoi import *
|
| 60 |
+
from networkx.algorithms.walks import *
|
| 61 |
+
from networkx.algorithms.wiener import *
|
| 62 |
+
|
| 63 |
+
# Make certain subpackages available to the user as direct imports from
|
| 64 |
+
# the `networkx` namespace.
|
| 65 |
+
from networkx.algorithms import approximation
|
| 66 |
+
from networkx.algorithms import assortativity
|
| 67 |
+
from networkx.algorithms import bipartite
|
| 68 |
+
from networkx.algorithms import node_classification
|
| 69 |
+
from networkx.algorithms import centrality
|
| 70 |
+
from networkx.algorithms import chordal
|
| 71 |
+
from networkx.algorithms import cluster
|
| 72 |
+
from networkx.algorithms import clique
|
| 73 |
+
from networkx.algorithms import components
|
| 74 |
+
from networkx.algorithms import connectivity
|
| 75 |
+
from networkx.algorithms import community
|
| 76 |
+
from networkx.algorithms import coloring
|
| 77 |
+
from networkx.algorithms import flow
|
| 78 |
+
from networkx.algorithms import isomorphism
|
| 79 |
+
from networkx.algorithms import link_analysis
|
| 80 |
+
from networkx.algorithms import lowest_common_ancestors
|
| 81 |
+
from networkx.algorithms import operators
|
| 82 |
+
from networkx.algorithms import shortest_paths
|
| 83 |
+
from networkx.algorithms import tournament
|
| 84 |
+
from networkx.algorithms import traversal
|
| 85 |
+
from networkx.algorithms import tree
|
| 86 |
+
|
| 87 |
+
# Make certain functions from some of the previous subpackages available
|
| 88 |
+
# to the user as direct imports from the `networkx` namespace.
|
| 89 |
+
from networkx.algorithms.bipartite import complete_bipartite_graph
|
| 90 |
+
from networkx.algorithms.bipartite import is_bipartite
|
| 91 |
+
from networkx.algorithms.bipartite import projected_graph
|
| 92 |
+
from networkx.algorithms.connectivity import all_pairs_node_connectivity
|
| 93 |
+
from networkx.algorithms.connectivity import all_node_cuts
|
| 94 |
+
from networkx.algorithms.connectivity import average_node_connectivity
|
| 95 |
+
from networkx.algorithms.connectivity import edge_connectivity
|
| 96 |
+
from networkx.algorithms.connectivity import edge_disjoint_paths
|
| 97 |
+
from networkx.algorithms.connectivity import k_components
|
| 98 |
+
from networkx.algorithms.connectivity import k_edge_components
|
| 99 |
+
from networkx.algorithms.connectivity import k_edge_subgraphs
|
| 100 |
+
from networkx.algorithms.connectivity import k_edge_augmentation
|
| 101 |
+
from networkx.algorithms.connectivity import is_k_edge_connected
|
| 102 |
+
from networkx.algorithms.connectivity import minimum_edge_cut
|
| 103 |
+
from networkx.algorithms.connectivity import minimum_node_cut
|
| 104 |
+
from networkx.algorithms.connectivity import node_connectivity
|
| 105 |
+
from networkx.algorithms.connectivity import node_disjoint_paths
|
| 106 |
+
from networkx.algorithms.connectivity import stoer_wagner
|
| 107 |
+
from networkx.algorithms.flow import capacity_scaling
|
| 108 |
+
from networkx.algorithms.flow import cost_of_flow
|
| 109 |
+
from networkx.algorithms.flow import gomory_hu_tree
|
| 110 |
+
from networkx.algorithms.flow import max_flow_min_cost
|
| 111 |
+
from networkx.algorithms.flow import maximum_flow
|
| 112 |
+
from networkx.algorithms.flow import maximum_flow_value
|
| 113 |
+
from networkx.algorithms.flow import min_cost_flow
|
| 114 |
+
from networkx.algorithms.flow import min_cost_flow_cost
|
| 115 |
+
from networkx.algorithms.flow import minimum_cut
|
| 116 |
+
from networkx.algorithms.flow import minimum_cut_value
|
| 117 |
+
from networkx.algorithms.flow import network_simplex
|
| 118 |
+
from networkx.algorithms.isomorphism import could_be_isomorphic
|
| 119 |
+
from networkx.algorithms.isomorphism import fast_could_be_isomorphic
|
| 120 |
+
from networkx.algorithms.isomorphism import faster_could_be_isomorphic
|
| 121 |
+
from networkx.algorithms.isomorphism import is_isomorphic
|
| 122 |
+
from networkx.algorithms.isomorphism.vf2pp import *
|
| 123 |
+
from networkx.algorithms.tree.branchings import maximum_branching
|
| 124 |
+
from networkx.algorithms.tree.branchings import maximum_spanning_arborescence
|
| 125 |
+
from networkx.algorithms.tree.branchings import minimum_branching
|
| 126 |
+
from networkx.algorithms.tree.branchings import minimum_spanning_arborescence
|
| 127 |
+
from networkx.algorithms.tree.branchings import ArborescenceIterator
|
| 128 |
+
from networkx.algorithms.tree.coding import *
|
| 129 |
+
from networkx.algorithms.tree.decomposition import *
|
| 130 |
+
from networkx.algorithms.tree.mst import *
|
| 131 |
+
from networkx.algorithms.tree.operations import *
|
| 132 |
+
from networkx.algorithms.tree.recognition import *
|
| 133 |
+
from networkx.algorithms.tournament import is_tournament
|
phi4/lib/python3.10/site-packages/networkx/algorithms/bridges.py
ADDED
|
@@ -0,0 +1,205 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Bridge-finding algorithms."""
|
| 2 |
+
|
| 3 |
+
from itertools import chain
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.utils import not_implemented_for
|
| 7 |
+
|
| 8 |
+
__all__ = ["bridges", "has_bridges", "local_bridges"]
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@not_implemented_for("directed")
|
| 12 |
+
@nx._dispatchable
|
| 13 |
+
def bridges(G, root=None):
|
| 14 |
+
"""Generate all bridges in a graph.
|
| 15 |
+
|
| 16 |
+
A *bridge* in a graph is an edge whose removal causes the number of
|
| 17 |
+
connected components of the graph to increase. Equivalently, a bridge is an
|
| 18 |
+
edge that does not belong to any cycle. Bridges are also known as cut-edges,
|
| 19 |
+
isthmuses, or cut arcs.
|
| 20 |
+
|
| 21 |
+
Parameters
|
| 22 |
+
----------
|
| 23 |
+
G : undirected graph
|
| 24 |
+
|
| 25 |
+
root : node (optional)
|
| 26 |
+
A node in the graph `G`. If specified, only the bridges in the
|
| 27 |
+
connected component containing this node will be returned.
|
| 28 |
+
|
| 29 |
+
Yields
|
| 30 |
+
------
|
| 31 |
+
e : edge
|
| 32 |
+
An edge in the graph whose removal disconnects the graph (or
|
| 33 |
+
causes the number of connected components to increase).
|
| 34 |
+
|
| 35 |
+
Raises
|
| 36 |
+
------
|
| 37 |
+
NodeNotFound
|
| 38 |
+
If `root` is not in the graph `G`.
|
| 39 |
+
|
| 40 |
+
NetworkXNotImplemented
|
| 41 |
+
If `G` is a directed graph.
|
| 42 |
+
|
| 43 |
+
Examples
|
| 44 |
+
--------
|
| 45 |
+
The barbell graph with parameter zero has a single bridge:
|
| 46 |
+
|
| 47 |
+
>>> G = nx.barbell_graph(10, 0)
|
| 48 |
+
>>> list(nx.bridges(G))
|
| 49 |
+
[(9, 10)]
|
| 50 |
+
|
| 51 |
+
Notes
|
| 52 |
+
-----
|
| 53 |
+
This is an implementation of the algorithm described in [1]_. An edge is a
|
| 54 |
+
bridge if and only if it is not contained in any chain. Chains are found
|
| 55 |
+
using the :func:`networkx.chain_decomposition` function.
|
| 56 |
+
|
| 57 |
+
The algorithm described in [1]_ requires a simple graph. If the provided
|
| 58 |
+
graph is a multigraph, we convert it to a simple graph and verify that any
|
| 59 |
+
bridges discovered by the chain decomposition algorithm are not multi-edges.
|
| 60 |
+
|
| 61 |
+
Ignoring polylogarithmic factors, the worst-case time complexity is the
|
| 62 |
+
same as the :func:`networkx.chain_decomposition` function,
|
| 63 |
+
$O(m + n)$, where $n$ is the number of nodes in the graph and $m$ is
|
| 64 |
+
the number of edges.
|
| 65 |
+
|
| 66 |
+
References
|
| 67 |
+
----------
|
| 68 |
+
.. [1] https://en.wikipedia.org/wiki/Bridge_%28graph_theory%29#Bridge-Finding_with_Chain_Decompositions
|
| 69 |
+
"""
|
| 70 |
+
multigraph = G.is_multigraph()
|
| 71 |
+
H = nx.Graph(G) if multigraph else G
|
| 72 |
+
chains = nx.chain_decomposition(H, root=root)
|
| 73 |
+
chain_edges = set(chain.from_iterable(chains))
|
| 74 |
+
if root is not None:
|
| 75 |
+
H = H.subgraph(nx.node_connected_component(H, root)).copy()
|
| 76 |
+
for u, v in H.edges():
|
| 77 |
+
if (u, v) not in chain_edges and (v, u) not in chain_edges:
|
| 78 |
+
if multigraph and len(G[u][v]) > 1:
|
| 79 |
+
continue
|
| 80 |
+
yield u, v
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
@not_implemented_for("directed")
|
| 84 |
+
@nx._dispatchable
|
| 85 |
+
def has_bridges(G, root=None):
|
| 86 |
+
"""Decide whether a graph has any bridges.
|
| 87 |
+
|
| 88 |
+
A *bridge* in a graph is an edge whose removal causes the number of
|
| 89 |
+
connected components of the graph to increase.
|
| 90 |
+
|
| 91 |
+
Parameters
|
| 92 |
+
----------
|
| 93 |
+
G : undirected graph
|
| 94 |
+
|
| 95 |
+
root : node (optional)
|
| 96 |
+
A node in the graph `G`. If specified, only the bridges in the
|
| 97 |
+
connected component containing this node will be considered.
|
| 98 |
+
|
| 99 |
+
Returns
|
| 100 |
+
-------
|
| 101 |
+
bool
|
| 102 |
+
Whether the graph (or the connected component containing `root`)
|
| 103 |
+
has any bridges.
|
| 104 |
+
|
| 105 |
+
Raises
|
| 106 |
+
------
|
| 107 |
+
NodeNotFound
|
| 108 |
+
If `root` is not in the graph `G`.
|
| 109 |
+
|
| 110 |
+
NetworkXNotImplemented
|
| 111 |
+
If `G` is a directed graph.
|
| 112 |
+
|
| 113 |
+
Examples
|
| 114 |
+
--------
|
| 115 |
+
The barbell graph with parameter zero has a single bridge::
|
| 116 |
+
|
| 117 |
+
>>> G = nx.barbell_graph(10, 0)
|
| 118 |
+
>>> nx.has_bridges(G)
|
| 119 |
+
True
|
| 120 |
+
|
| 121 |
+
On the other hand, the cycle graph has no bridges::
|
| 122 |
+
|
| 123 |
+
>>> G = nx.cycle_graph(5)
|
| 124 |
+
>>> nx.has_bridges(G)
|
| 125 |
+
False
|
| 126 |
+
|
| 127 |
+
Notes
|
| 128 |
+
-----
|
| 129 |
+
This implementation uses the :func:`networkx.bridges` function, so
|
| 130 |
+
it shares its worst-case time complexity, $O(m + n)$, ignoring
|
| 131 |
+
polylogarithmic factors, where $n$ is the number of nodes in the
|
| 132 |
+
graph and $m$ is the number of edges.
|
| 133 |
+
|
| 134 |
+
"""
|
| 135 |
+
try:
|
| 136 |
+
next(bridges(G, root=root))
|
| 137 |
+
except StopIteration:
|
| 138 |
+
return False
|
| 139 |
+
else:
|
| 140 |
+
return True
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
@not_implemented_for("multigraph")
|
| 144 |
+
@not_implemented_for("directed")
|
| 145 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 146 |
+
def local_bridges(G, with_span=True, weight=None):
|
| 147 |
+
"""Iterate over local bridges of `G` optionally computing the span
|
| 148 |
+
|
| 149 |
+
A *local bridge* is an edge whose endpoints have no common neighbors.
|
| 150 |
+
That is, the edge is not part of a triangle in the graph.
|
| 151 |
+
|
| 152 |
+
The *span* of a *local bridge* is the shortest path length between
|
| 153 |
+
the endpoints if the local bridge is removed.
|
| 154 |
+
|
| 155 |
+
Parameters
|
| 156 |
+
----------
|
| 157 |
+
G : undirected graph
|
| 158 |
+
|
| 159 |
+
with_span : bool
|
| 160 |
+
If True, yield a 3-tuple `(u, v, span)`
|
| 161 |
+
|
| 162 |
+
weight : function, string or None (default: None)
|
| 163 |
+
If function, used to compute edge weights for the span.
|
| 164 |
+
If string, the edge data attribute used in calculating span.
|
| 165 |
+
If None, all edges have weight 1.
|
| 166 |
+
|
| 167 |
+
Yields
|
| 168 |
+
------
|
| 169 |
+
e : edge
|
| 170 |
+
The local bridges as an edge 2-tuple of nodes `(u, v)` or
|
| 171 |
+
as a 3-tuple `(u, v, span)` when `with_span is True`.
|
| 172 |
+
|
| 173 |
+
Raises
|
| 174 |
+
------
|
| 175 |
+
NetworkXNotImplemented
|
| 176 |
+
If `G` is a directed graph or multigraph.
|
| 177 |
+
|
| 178 |
+
Examples
|
| 179 |
+
--------
|
| 180 |
+
A cycle graph has every edge a local bridge with span N-1.
|
| 181 |
+
|
| 182 |
+
>>> G = nx.cycle_graph(9)
|
| 183 |
+
>>> (0, 8, 8) in set(nx.local_bridges(G))
|
| 184 |
+
True
|
| 185 |
+
"""
|
| 186 |
+
if with_span is not True:
|
| 187 |
+
for u, v in G.edges:
|
| 188 |
+
if not (set(G[u]) & set(G[v])):
|
| 189 |
+
yield u, v
|
| 190 |
+
else:
|
| 191 |
+
wt = nx.weighted._weight_function(G, weight)
|
| 192 |
+
for u, v in G.edges:
|
| 193 |
+
if not (set(G[u]) & set(G[v])):
|
| 194 |
+
enodes = {u, v}
|
| 195 |
+
|
| 196 |
+
def hide_edge(n, nbr, d):
|
| 197 |
+
if n not in enodes or nbr not in enodes:
|
| 198 |
+
return wt(n, nbr, d)
|
| 199 |
+
return None
|
| 200 |
+
|
| 201 |
+
try:
|
| 202 |
+
span = nx.shortest_path_length(G, u, v, weight=hide_edge)
|
| 203 |
+
yield u, v, span
|
| 204 |
+
except nx.NetworkXNoPath:
|
| 205 |
+
yield u, v, float("inf")
|
phi4/lib/python3.10/site-packages/networkx/algorithms/broadcasting.py
ADDED
|
@@ -0,0 +1,155 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Routines to calculate the broadcast time of certain graphs.
|
| 2 |
+
|
| 3 |
+
Broadcasting is an information dissemination problem in which a node in a graph,
|
| 4 |
+
called the originator, must distribute a message to all other nodes by placing
|
| 5 |
+
a series of calls along the edges of the graph. Once informed, other nodes aid
|
| 6 |
+
the originator in distributing the message.
|
| 7 |
+
|
| 8 |
+
The broadcasting must be completed as quickly as possible subject to the
|
| 9 |
+
following constraints:
|
| 10 |
+
- Each call requires one unit of time.
|
| 11 |
+
- A node can only participate in one call per unit of time.
|
| 12 |
+
- Each call only involves two adjacent nodes: a sender and a receiver.
|
| 13 |
+
"""
|
| 14 |
+
|
| 15 |
+
import networkx as nx
|
| 16 |
+
from networkx import NetworkXError
|
| 17 |
+
from networkx.utils import not_implemented_for
|
| 18 |
+
|
| 19 |
+
__all__ = [
|
| 20 |
+
"tree_broadcast_center",
|
| 21 |
+
"tree_broadcast_time",
|
| 22 |
+
]
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def _get_max_broadcast_value(G, U, v, values):
|
| 26 |
+
adj = sorted(set(G.neighbors(v)) & U, key=values.get, reverse=True)
|
| 27 |
+
return max(values[u] + i for i, u in enumerate(adj, start=1))
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def _get_broadcast_centers(G, v, values, target):
|
| 31 |
+
adj = sorted(G.neighbors(v), key=values.get, reverse=True)
|
| 32 |
+
j = next(i for i, u in enumerate(adj, start=1) if values[u] + i == target)
|
| 33 |
+
return set([v] + adj[:j])
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def tree_broadcast_center(G):
    """Return the Broadcast Center of the tree `G`.

    The broadcast center of a graph G denotes the set of nodes having
    minimum broadcast time [1]_. This is a linear algorithm for determining
    the broadcast center of a tree with ``N`` nodes, as a by-product it also
    determines the broadcast time from the broadcast center.

    Parameters
    ----------
    G : undirected graph
        The graph should be an undirected tree

    Returns
    -------
    BC : (int, set) tuple
        minimum broadcast number of the tree, set of broadcast centers

    Raises
    ------
    NetworkXError
        If the input graph is not a tree.
    NetworkXNotImplemented
        If the graph is directed or is a multigraph.

    References
    ----------
    .. [1] Slater, P.J., Cockayne, E.J., Hedetniemi, S.T,
       Information dissemination in trees. SIAM J.Comput. 10(4), 692–701 (1981)
    """
    # Assert that the graph G is a tree.
    if not nx.is_tree(G):
        # BUG FIX: the exception was previously constructed but never
        # raised, so non-tree inputs silently fell through to the
        # algorithm and produced meaningless results.
        raise NetworkXError("Input graph is not a tree")
    # step 0: trivial trees are handled directly
    if G.number_of_nodes() == 2:
        return 1, set(G.nodes())
    if G.number_of_nodes() == 1:
        return 0, set(G.nodes())

    # step 1: leaves broadcast in time 0 once informed; strip them off
    U = {node for node, deg in G.degree if deg == 1}
    values = {n: 0 for n in U}
    T = G.copy()
    T.remove_nodes_from(U)

    # step 2: nodes that became leaves of T must serve their removed leaves
    W = {node for node, deg in T.degree if deg == 1}
    values.update((w, G.degree[w] - 1) for w in W)

    # step 3: peel the tree inward, tracking each node's broadcast cost
    while T.number_of_nodes() >= 2:
        # step 4: take the cheapest candidate leaf of the shrinking tree
        w = min(W, key=lambda n: values[n])
        v = next(T.neighbors(w))

        # step 5: w is now fully processed
        U.add(w)
        W.remove(w)
        T.remove_node(w)

        # step 6: if v just became a leaf of T, compute t(v)
        if T.degree(v) == 1:
            # update t(v)
            values.update({v: _get_max_broadcast_value(G, U, v, values)})
            W.add(v)

    # step 7: a single node remains; it attains the minimum broadcast time
    v = nx.utils.arbitrary_element(T)
    b_T = _get_max_broadcast_value(G, U, v, values)
    return b_T, _get_broadcast_centers(G, v, values, b_T)
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def tree_broadcast_time(G, node=None):
    """Return the Broadcast Time of the tree `G`.

    The minimum broadcast time of a node is defined as the minimum amount
    of time required to complete broadcasting starting from the
    originator. The broadcast time of a graph is the maximum over
    all nodes of the minimum broadcast time from that node [1]_.
    This function returns the minimum broadcast time of `node`.
    If `node` is None the broadcast time for the graph is returned.

    Parameters
    ----------
    G : undirected graph
        The graph should be an undirected tree
    node: int, optional
        index of starting node. If `None`, the algorithm returns the broadcast
        time of the tree.

    Returns
    -------
    BT : int
        Broadcast Time of a node in a tree

    Raises
    ------
    NetworkXNotImplemented
        If the graph is directed or is a multigraph.

    References
    ----------
    .. [1] Harutyunyan, H. A. and Li, Z.
       "A Simple Construction of Broadcast Graphs."
       In Computing and Combinatorics. COCOON 2019
       (Ed. D. Z. Du and C. Tian.) Springer, pp. 240-253, 2019.
    """
    b_time, centers = tree_broadcast_center(G)
    if node is not None:
        # broadcast time of a node = center time + distance to nearest center
        return b_time + min(nx.shortest_path_length(G, node, c) for c in centers)
    # Broadcast time of the tree: center time plus the eccentricity of the
    # center set, i.e. the largest distance from any node to its nearest center.
    nearest = dict.fromkeys(G, len(G))
    for center in centers:
        for target, d in nx.shortest_path_length(G, center).items():
            if d < nearest[target]:
                nearest[target] = d
    return b_time + max(nearest.values())
|
phi4/lib/python3.10/site-packages/networkx/algorithms/chains.py
ADDED
|
@@ -0,0 +1,172 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for finding chains in a graph."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.utils import not_implemented_for
|
| 5 |
+
|
| 6 |
+
__all__ = ["chain_decomposition"]
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def chain_decomposition(G, root=None):
    """Returns the chain decomposition of a graph.

    The *chain decomposition* of a graph with respect a depth-first
    search tree is a set of cycles or paths derived from the set of
    fundamental cycles of the tree in the following manner. Consider
    each fundamental cycle with respect to the given tree, represented
    as a list of edges beginning with the nontree edge oriented away
    from the root of the tree. For each fundamental cycle, if it
    overlaps with any previous fundamental cycle, just take the initial
    non-overlapping segment, which is a path instead of a cycle. Each
    cycle or path is called a *chain*. For more information, see [1]_.

    Parameters
    ----------
    G : undirected graph

    root : node (optional)
        A node in the graph `G`. If specified, only the chain
        decomposition for the connected component containing this node
        will be returned. This node indicates the root of the depth-first
        search tree.

    Yields
    ------
    chain : list
        A list of edges representing a chain. There is no guarantee on
        the orientation of the edges in each chain (for example, if a
        chain includes the edge joining nodes 1 and 2, the chain may
        include either (1, 2) or (2, 1)).

    Raises
    ------
    NodeNotFound
        If `root` is not in the graph `G`.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> list(nx.chain_decomposition(G))
    [[(4, 5), (5, 3), (3, 4)]]

    Notes
    -----
    The worst-case running time of this implementation is linear in the
    number of nodes and number of edges [1]_.

    References
    ----------
    .. [1] Jens M. Schmidt (2013). "A simple test on 2-vertex-
       and 2-edge-connectivity." *Information Processing Letters*,
       113, 241–244. Elsevier. <https://doi.org/10.1016/j.ipl.2013.01.016>

    """

    def _dfs_cycle_forest(G, root=None):
        """Builds a directed graph composed of cycles from the given graph.

        `G` is an undirected simple graph. `root` is a node in the graph
        from which the depth-first search is started.

        This function returns both the depth-first search cycle graph
        (as a :class:`~networkx.DiGraph`) and the list of nodes in
        depth-first preorder. The depth-first search cycle graph is a
        directed graph whose edges are the edges of `G` oriented toward
        the root if the edge is a tree edge and away from the root if
        the edge is a non-tree edge. If `root` is not specified, this
        performs a depth-first search on each connected component of `G`
        and returns a directed forest instead.

        If `root` is not in the graph, this raises :exc:`KeyError`.

        """
        # Create a directed graph from the depth-first search tree with
        # root node `root` in which tree edges are directed toward the
        # root and nontree edges are directed away from the root. For
        # each node with an incident nontree edge, this creates a
        # directed cycle starting with the nontree edge and returning to
        # that node.
        #
        # The `parent` node attribute stores the parent of each node in
        # the DFS tree. The `nontree` edge attribute indicates whether
        # the edge is a tree edge or a nontree edge.
        #
        # We also store the order of the nodes found in the depth-first
        # search in the `nodes` list.
        H = nx.DiGraph()
        nodes = []
        for u, v, d in nx.dfs_labeled_edges(G, source=root):
            if d == "forward":
                # `dfs_labeled_edges()` yields (root, root, 'forward')
                # if it is beginning the search on a new connected
                # component.
                if u == v:
                    H.add_node(v, parent=None)
                    nodes.append(v)
                else:
                    H.add_node(v, parent=u)
                    # Tree edge: point it toward the root (child -> parent).
                    H.add_edge(v, u, nontree=False)
                    nodes.append(v)
            # `dfs_labeled_edges` considers nontree edges in both
            # orientations, so we need to not add the edge if it its
            # other orientation has been added.
            elif d == "nontree" and v not in H[u]:
                H.add_edge(v, u, nontree=True)
            else:
                # Do nothing on 'reverse' edges; we only care about
                # forward and nontree edges.
                pass
        return H, nodes

    def _build_chain(G, u, v, visited):
        """Generate the chain starting from the given nontree edge.

        `G` is a DFS cycle graph as constructed by
        :func:`_dfs_cycle_graph`. The edge (`u`, `v`) is a nontree edge
        that begins a chain. `visited` is a set representing the nodes
        in `G` that have already been visited.

        This function yields the edges in an initial segment of the
        fundamental cycle of `G` starting with the nontree edge (`u`,
        `v`) that includes all the edges up until the first node that
        appears in `visited`. The tree edges are given by the 'parent'
        node attribute. The `visited` set is updated to add each node in
        an edge yielded by this function.

        """
        # Walk up tree edges (via the `parent` attribute) until hitting a
        # node already claimed by an earlier chain.
        while v not in visited:
            yield u, v
            visited.add(v)
            u, v = v, G.nodes[v]["parent"]
        # Emit the closing edge that reconnects to the visited region.
        yield u, v

    # Check if the root is in the graph G. If not, raise NodeNotFound
    if root is not None and root not in G:
        raise nx.NodeNotFound(f"Root node {root} is not in graph")

    # Create a directed version of H that has the DFS edges directed
    # toward the root and the nontree edges directed away from the root
    # (in each connected component).
    H, nodes = _dfs_cycle_forest(G, root)

    # Visit the nodes again in DFS order. For each node, and for each
    # nontree edge leaving that node, compute the fundamental cycle for
    # that nontree edge starting with that edge. If the fundamental
    # cycle overlaps with any visited nodes, just take the prefix of the
    # cycle up to the point of visited nodes.
    #
    # We repeat this process for each connected component (implicitly,
    # since `nodes` already has a list of the nodes grouped by connected
    # component).
    visited = set()
    for u in nodes:
        visited.add(u)
        # For each nontree edge going out of node u...
        edges = ((u, v) for u, v, d in H.out_edges(u, data="nontree") if d)
        for u, v in edges:
            # Create the cycle or cycle prefix starting with the
            # nontree edge.
            chain = list(_build_chain(H, u, v, visited))
            yield chain
|
phi4/lib/python3.10/site-packages/networkx/algorithms/chordal.py
ADDED
|
@@ -0,0 +1,443 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Algorithms for chordal graphs.
|
| 3 |
+
|
| 4 |
+
A graph is chordal if every cycle of length at least 4 has a chord
|
| 5 |
+
(an edge joining two nodes not adjacent in the cycle).
|
| 6 |
+
https://en.wikipedia.org/wiki/Chordal_graph
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import sys
|
| 10 |
+
|
| 11 |
+
import networkx as nx
|
| 12 |
+
from networkx.algorithms.components import connected_components
|
| 13 |
+
from networkx.utils import arbitrary_element, not_implemented_for
|
| 14 |
+
|
| 15 |
+
__all__ = [
|
| 16 |
+
"is_chordal",
|
| 17 |
+
"find_induced_nodes",
|
| 18 |
+
"chordal_graph_cliques",
|
| 19 |
+
"chordal_graph_treewidth",
|
| 20 |
+
"NetworkXTreewidthBoundExceeded",
|
| 21 |
+
"complete_to_chordal_graph",
|
| 22 |
+
]
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class NetworkXTreewidthBoundExceeded(nx.NetworkXException):
    """Raised when a user-supplied treewidth bound has been exceeded."""
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def is_chordal(G):
    """Checks whether G is a chordal graph.

    A graph is chordal if every cycle of length at least 4 has a chord
    (an edge joining two nodes not adjacent in the cycle).

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    Returns
    -------
    chordal : bool
        True if G is a chordal graph and False otherwise.

    Raises
    ------
    NetworkXNotImplemented
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.

    Examples
    --------
    >>> e = [
    ...     (1, 2),
    ...     (1, 3),
    ...     (2, 3),
    ...     (2, 4),
    ...     (3, 4),
    ...     (3, 5),
    ...     (3, 6),
    ...     (4, 5),
    ...     (4, 6),
    ...     (5, 6),
    ... ]
    >>> G = nx.Graph(e)
    >>> nx.is_chordal(G)
    True

    Notes
    -----
    The routine tries to go through every node following maximum cardinality
    search. It returns False when it finds that the separator for any node
    is not a clique. Based on the algorithms in [1]_.

    Self loops are ignored.

    References
    ----------
    .. [1] R. E. Tarjan and M. Yannakakis, Simple linear-time algorithms
       to test chordality of graphs, test acyclicity of hypergraphs, and
       selectively reduce acyclic hypergraphs, SIAM J. Comput., 13 (1984),
       pp. 566–579.
    """
    # Graphs on at most three nodes cannot contain a chordless cycle of
    # length four or more.
    if len(G.nodes) <= 3:
        return True
    # An empty breaker tuple means the maximum cardinality search found
    # no chordless cycle.
    return not _find_chordality_breaker(G)
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
@nx._dispatchable
def find_induced_nodes(G, s, t, treewidth_bound=sys.maxsize):
    """Returns the set of induced nodes in the path from s to t.

    Parameters
    ----------
    G : graph
        A chordal NetworkX graph
    s : node
        Source node to look for induced nodes
    t : node
        Destination node to look for induced nodes
    treewidth_bound: float
        Maximum treewidth acceptable for the graph H. The search
        for induced nodes will end as soon as the treewidth_bound is exceeded.

    Returns
    -------
    induced_nodes : Set of nodes
        The set of induced nodes in the path from s to t in G

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        If the input graph is an instance of one of these classes, a
        :exc:`NetworkXError` is raised.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> G = nx.Graph()
    >>> G = nx.generators.classic.path_graph(10)
    >>> induced_nodes = nx.find_induced_nodes(G, 1, 9, 2)
    >>> sorted(induced_nodes)
    [1, 2, 3, 4, 5, 6, 7, 8, 9]

    Notes
    -----
    G must be a chordal graph and (s,t) an edge that is not in G.

    If a treewidth_bound is provided, the search for induced nodes will end
    as soon as the treewidth_bound is exceeded.

    The algorithm is inspired by Algorithm 4 in [1]_.
    A formal definition of induced node can also be found on that reference.

    Self Loops are ignored

    References
    ----------
    .. [1] Learning Bounded Treewidth Bayesian Networks.
       Gal Elidan, Stephen Gould; JMLR, 9(Dec):2699--2731, 2008.
       http://jmlr.csail.mit.edu/papers/volume9/elidan08a/elidan08a.pdf
    """
    if not is_chordal(G):
        raise nx.NetworkXError("Input graph is not chordal.")

    # Work on a copy: adding the edge (s, t) deliberately breaks
    # chordality so the breaker search below exposes the induced path.
    H = nx.Graph(G)
    H.add_edge(s, t)
    induced_nodes = set()
    triplet = _find_chordality_breaker(H, s, treewidth_bound)
    # Repeatedly collect chordality-breaking triplets and short-circuit
    # them to s until H becomes chordal again; the collected nodes form
    # the induced set.
    while triplet:
        (u, v, w) = triplet
        induced_nodes.update(triplet)
        for n in triplet:
            if n != s:
                H.add_edge(s, n)
        triplet = _find_chordality_breaker(H, s, treewidth_bound)
    if induced_nodes:
        # Add t and the second node in the induced path from s to t.
        induced_nodes.add(t)
        for u in G[s]:
            # The second path node is the neighbor of s adjacent to
            # exactly two nodes already collected.
            if len(induced_nodes & set(G[u])) == 2:
                induced_nodes.add(u)
                break
    return induced_nodes
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
@nx._dispatchable
def chordal_graph_cliques(G):
    """Returns all maximal cliques of a chordal graph.

    The algorithm breaks the graph in connected components and performs a
    maximum cardinality search in each component to get the cliques.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    Yields
    ------
    frozenset of nodes
        Maximal cliques, each of which is a frozenset of
        nodes in `G`. The order of cliques is arbitrary.

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> e = [
    ...     (1, 2),
    ...     (1, 3),
    ...     (2, 3),
    ...     (2, 4),
    ...     (3, 4),
    ...     (3, 5),
    ...     (3, 6),
    ...     (4, 5),
    ...     (4, 6),
    ...     (5, 6),
    ...     (7, 8),
    ... ]
    >>> G = nx.Graph(e)
    >>> G.add_node(9)
    >>> cliques = [c for c in chordal_graph_cliques(G)]
    >>> cliques[0]
    frozenset({1, 2, 3})
    """
    # Process each connected component independently.
    for C in (G.subgraph(c).copy() for c in connected_components(G)):
        if C.number_of_nodes() == 1:
            # An isolated node is a maximal clique by itself, but a self
            # loop would make the component non-chordal by convention.
            if nx.number_of_selfloops(C) > 0:
                raise nx.NetworkXError("Input graph is not chordal.")
            yield frozenset(C.nodes())
        else:
            # Maximum cardinality search: grow a candidate clique and emit
            # it whenever the next visited node does not extend it.
            unnumbered = set(C.nodes())
            v = arbitrary_element(C)
            unnumbered.remove(v)
            numbered = {v}
            clique_wanna_be = {v}
            while unnumbered:
                v = _max_cardinality_node(C, unnumbered, numbered)
                unnumbered.remove(v)
                numbered.add(v)
                new_clique_wanna_be = set(C.neighbors(v)) & numbered
                sg = C.subgraph(clique_wanna_be)
                if _is_complete_graph(sg):
                    new_clique_wanna_be.add(v)
                    # Emit the old candidate only if it is not contained
                    # in the new one (i.e. it is maximal).
                    if not new_clique_wanna_be >= clique_wanna_be:
                        yield frozenset(clique_wanna_be)
                    clique_wanna_be = new_clique_wanna_be
                else:
                    # In a chordal graph the numbered neighbors of v must
                    # always form a clique.
                    raise nx.NetworkXError("Input graph is not chordal.")
            yield frozenset(clique_wanna_be)
|
| 243 |
+
|
| 244 |
+
|
| 245 |
+
@nx._dispatchable
def chordal_graph_treewidth(G):
    """Returns the treewidth of the chordal graph G.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    Returns
    -------
    treewidth : int
        The size of the largest clique in the graph minus one.

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> e = [
    ...     (1, 2),
    ...     (1, 3),
    ...     (2, 3),
    ...     (2, 4),
    ...     (3, 4),
    ...     (3, 5),
    ...     (3, 6),
    ...     (4, 5),
    ...     (4, 6),
    ...     (5, 6),
    ...     (7, 8),
    ... ]
    >>> G = nx.Graph(e)
    >>> G.add_node(9)
    >>> nx.chordal_graph_treewidth(G)
    3

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Tree_decomposition#Treewidth
    """
    if not is_chordal(G):
        raise nx.NetworkXError("Input graph is not chordal.")

    # For a chordal graph the treewidth equals the size of its largest
    # maximal clique minus one.
    largest = max((len(clique) for clique in nx.chordal_graph_cliques(G)), default=-1)
    return largest - 1
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
def _is_complete_graph(G):
    """Return True if every pair of distinct nodes in G is adjacent."""
    if nx.number_of_selfloops(G) > 0:
        raise nx.NetworkXError("Self loop found in _is_complete_graph()")
    node_count = G.number_of_nodes()
    if node_count < 2:
        return True
    # A simple graph on n nodes is complete iff it has n*(n-1)/2 edges.
    return G.number_of_edges() == node_count * (node_count - 1) / 2
|
| 309 |
+
|
| 310 |
+
|
| 311 |
+
def _find_missing_edge(G):
|
| 312 |
+
"""Given a non-complete graph G, returns a missing edge."""
|
| 313 |
+
nodes = set(G)
|
| 314 |
+
for u in G:
|
| 315 |
+
missing = nodes - set(list(G[u].keys()) + [u])
|
| 316 |
+
if missing:
|
| 317 |
+
return (u, missing.pop())
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
def _max_cardinality_node(G, choices, wanna_connect):
|
| 321 |
+
"""Returns a the node in choices that has more connections in G
|
| 322 |
+
to nodes in wanna_connect.
|
| 323 |
+
"""
|
| 324 |
+
max_number = -1
|
| 325 |
+
for x in choices:
|
| 326 |
+
number = len([y for y in G[x] if y in wanna_connect])
|
| 327 |
+
if number > max_number:
|
| 328 |
+
max_number = number
|
| 329 |
+
max_cardinality_node = x
|
| 330 |
+
return max_cardinality_node
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize):
    """Given a graph G, starts a max cardinality search
    (starting from s if s is given and from an arbitrary node otherwise)
    trying to find a non-chordal cycle.

    If it does find one, it returns (u,v,w) where u,v,w are the three
    nodes that together with s are involved in the cycle.

    Returns an empty tuple when no chordless cycle is found. Raises
    ``nx.NetworkXTreewidthBoundExceeded`` if `treewidth_bound` is passed
    during the search, and ``nx.NetworkXPointlessConcept`` for an empty
    graph.

    It ignores any self loops.
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("Graph has no nodes.")
    unnumbered = set(G)
    if s is None:
        s = arbitrary_element(G)
    unnumbered.remove(s)
    numbered = {s}
    # Tracks the largest clique-candidate seen so far (treewidth witness).
    current_treewidth = -1
    while unnumbered:  # and current_treewidth <= treewidth_bound:
        # Maximum cardinality search order: next node is the one with the
        # most already-numbered neighbors.
        v = _max_cardinality_node(G, unnumbered, numbered)
        unnumbered.remove(v)
        numbered.add(v)
        clique_wanna_be = set(G[v]) & numbered
        sg = G.subgraph(clique_wanna_be)
        if _is_complete_graph(sg):
            # The graph seems to be chordal by now. We update the treewidth
            current_treewidth = max(current_treewidth, len(clique_wanna_be))
            if current_treewidth > treewidth_bound:
                raise nx.NetworkXTreewidthBoundExceeded(
                    f"treewidth_bound exceeded: {current_treewidth}"
                )
        else:
            # sg is not a clique,
            # look for an edge that is not included in sg
            (u, w) = _find_missing_edge(sg)
            return (u, v, w)
    return ()
|
| 370 |
+
|
| 371 |
+
|
| 372 |
+
@not_implemented_for("directed")
@nx._dispatchable(returns_graph=True)
def complete_to_chordal_graph(G):
    """Return a copy of G completed to a chordal graph

    Adds edges to a copy of G to create a chordal graph. A graph G=(V,E) is
    called chordal if for each cycle with length bigger than 3, there exist
    two non-adjacent nodes connected by an edge (called a chord).

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    Returns
    -------
    H : NetworkX graph
        The chordal enhancement of G
    alpha : Dictionary
        The elimination ordering of nodes of G

    Notes
    -----
    There are different approaches to calculate the chordal
    enhancement of a graph. The algorithm used here is called
    MCS-M and gives at least minimal (local) triangulation of graph. Note
    that this triangulation is not necessarily a global minimum.

    https://en.wikipedia.org/wiki/Chordal_graph

    References
    ----------
    .. [1] Berry, Anne & Blair, Jean & Heggernes, Pinar & Peyton, Barry. (2004)
       Maximum Cardinality Search for Computing Minimal Triangulations of
       Graphs. Algorithmica. 39. 287-298. 10.1007/s00453-004-1084-3.

    Examples
    --------
    >>> from networkx.algorithms.chordal import complete_to_chordal_graph
    >>> G = nx.wheel_graph(10)
    >>> H, alpha = complete_to_chordal_graph(G)
    """
    H = G.copy()
    alpha = {node: 0 for node in H}
    # Already chordal: nothing to add; alpha stays all-zero by convention.
    if nx.is_chordal(H):
        return H, alpha
    chords = set()
    weight = {node: 0 for node in H.nodes()}
    unnumbered_nodes = list(H.nodes())
    # MCS-M: number nodes from n down to 1, always taking the currently
    # heaviest unnumbered node.
    for i in range(len(H.nodes()), 0, -1):
        # get the node in unnumbered_nodes with the maximum weight
        z = max(unnumbered_nodes, key=lambda node: weight[node])
        unnumbered_nodes.remove(z)
        alpha[z] = i
        update_nodes = []
        for y in unnumbered_nodes:
            if G.has_edge(y, z):
                update_nodes.append(y)
            else:
                # y_weight will be bigger than node weights between y and z
                y_weight = weight[y]
                lower_nodes = [
                    node for node in unnumbered_nodes if weight[node] < y_weight
                ]
                # A path through strictly lighter nodes means (z, y) must
                # become a chord of the triangulation.
                if nx.has_path(H.subgraph(lower_nodes + [z, y]), y, z):
                    update_nodes.append(y)
                    chords.add((z, y))
        # during calculation of paths the weights should not be updated
        for node in update_nodes:
            weight[node] += 1
    H.add_edges_from(chords)
    return H, alpha
|
phi4/lib/python3.10/site-packages/networkx/algorithms/clique.py
ADDED
|
@@ -0,0 +1,755 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for finding and manipulating cliques.
|
| 2 |
+
|
| 3 |
+
Finding the largest clique in a graph is NP-complete problem, so most of
|
| 4 |
+
these algorithms have an exponential running time; for more information,
|
| 5 |
+
see the Wikipedia article on the clique problem [1]_.
|
| 6 |
+
|
| 7 |
+
.. [1] clique problem:: https://en.wikipedia.org/wiki/Clique_problem
|
| 8 |
+
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from collections import defaultdict, deque
|
| 12 |
+
from itertools import chain, combinations, islice
|
| 13 |
+
|
| 14 |
+
import networkx as nx
|
| 15 |
+
from networkx.utils import not_implemented_for
|
| 16 |
+
|
| 17 |
+
__all__ = [
|
| 18 |
+
"find_cliques",
|
| 19 |
+
"find_cliques_recursive",
|
| 20 |
+
"make_max_clique_graph",
|
| 21 |
+
"make_clique_bipartite",
|
| 22 |
+
"node_clique_number",
|
| 23 |
+
"number_of_cliques",
|
| 24 |
+
"enumerate_all_cliques",
|
| 25 |
+
"max_weight_clique",
|
| 26 |
+
]
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
@not_implemented_for("directed")
|
| 30 |
+
@nx._dispatchable
|
| 31 |
+
def enumerate_all_cliques(G):
    """Yield every clique of an undirected graph, smallest first.

    Cliques are emitted in order of cardinality: all size-1 cliques,
    then all size-2 cliques, and so on. Each clique is a list of nodes.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    Returns
    -------
    iterator
        Iterator over cliques (lists of nodes), ordered by size.

    Notes
    -----
    The number of cliques can be exponential in the number of nodes, so
    avoid materializing the whole iterator on large dense graphs. The
    breadth-first expansion below keeps only candidate lists in memory.
    Self-loops and parallel edges are ignored. Adapted from Zhang et
    al. (2005), <https://doi.org/10.1109/SC.2005.29>.
    """
    order = {}
    later_nbrs = {}
    for node in G:
        order[node] = len(order)
        # Keep only neighbors that come *after* `node` in iteration order,
        # so each clique is generated exactly once.
        later_nbrs[node] = {nbr for nbr in G[node] if nbr not in order}

    # Each queue entry is (clique, common-later-neighbors); both kept sorted
    # by the iteration order of G.
    queue = deque(
        ([node], sorted(later_nbrs[node], key=order.__getitem__)) for node in G
    )
    while queue:
        clique, common = map(list, queue.popleft())
        yield clique
        for pos, node in enumerate(common):
            # Lazy generators keep memory bounded: extend the clique by
            # `node` and restrict candidates to node's later neighbors.
            queue.append(
                (
                    chain(clique, [node]),
                    filter(later_nbrs[node].__contains__, islice(common, pos + 1, None)),
                )
            )
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
@not_implemented_for("directed")
|
| 102 |
+
@nx._dispatchable
|
| 103 |
+
def find_cliques(G, nodes=None):
    """Yield all maximal cliques of an undirected graph.

    A *maximal clique* is a complete subgraph that cannot be enlarged by
    adding another node. Each yielded clique is a list of nodes; the
    order of cliques is arbitrary.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    nodes : list, optional (default=None)
        If provided, only maximal cliques containing all of these nodes
        are yielded, which can considerably reduce the search space.

    Returns
    -------
    iterator
        Iterator over maximal cliques (lists of nodes).

    Raises
    ------
    ValueError
        If `nodes` does not itself form a clique.

    See Also
    --------
    find_cliques_recursive
        A recursive version of the same algorithm.

    Notes
    -----
    The number of maximal cliques can be exponential in the number of
    nodes; this generator keeps only the current search state in memory.
    The algorithm is Bron--Kerbosch (1973) with the pivoting strategy of
    Tomita, Tanaka and Takahashi (2006), see also Cazals and Karande
    (2008). The recursion of the references is unrolled onto an explicit
    stack to avoid recursion-depth limits. Self-loops and parallel edges
    are ignored.

    References
    ----------
    .. [1] Bron, C. and Kerbosch, J. *CACM* 16, 9 (1973), 575--577.
    .. [2] Tomita, Tanaka, Takahashi. *TCS* 363 (2006), 28--42.
       <https://doi.org/10.1016/j.tcs.2006.06.015>
    .. [3] Cazals, Karande. *TCS* 407 (2008), 564--568.
       <https://doi.org/10.1016/j.tcs.2008.05.010>
    """
    if len(G) == 0:
        return

    # Adjacency sets with self-loops stripped; cliques ignore self-loops.
    neighbors = {u: {v for v in G[u] if v != u} for u in G}

    # Seed the clique with the caller-supplied nodes and shrink the
    # candidate set to their common neighborhood.
    clique = nodes[:] if nodes is not None else []
    candidates = set(G)
    for member in clique:
        if member not in candidates:
            raise ValueError(f"The given `nodes` {nodes} do not form a clique")
        candidates &= neighbors[member]

    if not candidates:
        # The seed is already maximal.
        yield clique[:]
        return

    subgraph = candidates.copy()
    saved = []
    clique.append(None)  # placeholder slot for the node currently tried

    # Pivot: the node covering the most candidates, so the fewest
    # branches (candidates outside its neighborhood) remain.
    pivot = max(subgraph, key=lambda n: len(candidates & neighbors[n]))
    to_try = candidates - neighbors[pivot]

    while True:
        if not to_try:
            # This level is exhausted: backtrack, or finish at the root.
            clique.pop()
            if not saved:
                return
            subgraph, candidates, to_try = saved.pop()
            continue
        node = to_try.pop()
        candidates.remove(node)
        clique[-1] = node
        common = subgraph & neighbors[node]
        if not common:
            # No node extends the clique further: it is maximal.
            yield clique[:]
        else:
            new_candidates = candidates & neighbors[node]
            if new_candidates:
                # Descend: remember this level and recurse into the
                # subproblem restricted to node's neighborhood.
                saved.append((subgraph, candidates, to_try))
                clique.append(None)
                subgraph = common
                candidates = new_candidates
                pivot = max(subgraph, key=lambda n: len(candidates & neighbors[n]))
                to_try = candidates - neighbors[pivot]
|
| 295 |
+
|
| 296 |
+
|
| 297 |
+
# TODO Should this also be not implemented for directed graphs?
|
| 298 |
+
@nx._dispatchable
|
| 299 |
+
def find_cliques_recursive(G, nodes=None):
    """Return an iterator over all maximal cliques of a graph.

    Recursive counterpart of :func:`find_cliques`; it may hit the
    recursion limit on graphs with very large cliques and is kept mainly
    for pedagogical reasons. Each clique is a list of nodes; the order
    of cliques is arbitrary.

    Parameters
    ----------
    G : NetworkX graph

    nodes : list, optional (default=None)
        If provided, only maximal cliques containing all of these nodes
        are yielded.

    Returns
    -------
    iterator
        Iterator over maximal cliques (lists of nodes).

    Raises
    ------
    ValueError
        If `nodes` does not itself form a clique.

    See Also
    --------
    find_cliques
        Iterative version of the same algorithm; see its docstring for
        examples and full references.

    Notes
    -----
    Bron--Kerbosch (1973) with the pivoting of Tomita, Tanaka and
    Takahashi (2006); see Cazals and Karande (2008). Self-loops and
    parallel edges are ignored.
    """
    if len(G) == 0:
        return iter([])

    # Adjacency sets with self-loops stripped.
    neighbors = {u: {v for v in G[u] if v != u} for u in G}

    # Seed the clique and intersect candidates down to the seed's
    # common neighborhood.
    clique = nodes[:] if nodes is not None else []
    candidates = set(G)
    for member in clique:
        if member not in candidates:
            raise ValueError(f"The given `nodes` {nodes} do not form a clique")
        candidates &= neighbors[member]

    if not candidates:
        # The seed is already maximal.
        return iter([clique])

    def expand(subgraph, cand):
        # Pivot on the node covering the most candidates so the fewest
        # branches remain. `clique` is shared and mutated in place.
        pivot = max(subgraph, key=lambda n: len(cand & neighbors[n]))
        for node in cand - neighbors[pivot]:
            cand.remove(node)
            clique.append(node)
            nbrs = neighbors[node]
            new_subgraph = subgraph & nbrs
            if not new_subgraph:
                yield clique[:]
            else:
                new_cand = cand & nbrs
                if new_cand:
                    yield from expand(new_subgraph, new_cand)
            clique.pop()

    return expand(candidates.copy(), candidates)
|
| 414 |
+
|
| 415 |
+
|
| 416 |
+
@nx._dispatchable(returns_graph=True)
|
| 417 |
+
def make_max_clique_graph(G, create_using=None):
    """Returns the maximal clique graph of the given graph.

    Nodes of the result are the maximal cliques of `G` (numbered in the
    order :func:`find_cliques` produces them); two clique-nodes are
    joined by an edge exactly when the cliques share at least one node.

    Parameters
    ----------
    G : NetworkX graph

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    NetworkX graph
        A graph whose nodes are the cliques of `G` and whose edges join
        two cliques iff they intersect.

    Notes
    -----
    Equivalent to projecting ``make_clique_bipartite(G)`` onto the
    clique side and relabeling, but faster since the intermediate
    bipartite graph is never built.
    """
    B = G.__class__() if create_using is None else nx.empty_graph(0, create_using)
    # Number each maximal clique; sets make the intersection test cheap.
    indexed = list(enumerate(set(c) for c in find_cliques(G)))
    B.add_nodes_from(idx for idx, _ in indexed)
    # Connect every intersecting pair of cliques.
    B.add_edges_from(
        (i, j)
        for (i, c1), (j, c2) in combinations(indexed, 2)
        if not c1.isdisjoint(c2)
    )
    return B
|
| 462 |
+
|
| 463 |
+
|
| 464 |
+
@nx._dispatchable(returns_graph=True)
|
| 465 |
+
def make_clique_bipartite(G, fpos=None, create_using=None, name=None):
    """Returns the bipartite clique graph corresponding to `G`.

    The "bottom" nodes of the result are the nodes of `G` (attribute
    ``bipartite=1``); the "top" nodes are the maximal cliques of `G`
    (attribute ``bipartite=0``), labeled with negative integers. Node
    *v* is joined to clique *C* iff *v* is a member of *C*.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    fpos : bool
        Unused in this implementation; presumably retained for backward
        compatibility -- TODO confirm against callers.

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    name : unused
        Overwritten internally; retained for backward compatibility.

    Returns
    -------
    NetworkX graph
        The node-clique incidence (bipartite) graph of `G`.
    """
    B = nx.empty_graph(0, create_using)
    B.clear()
    # "Bottom" side: the original nodes.
    B.add_nodes_from(G, bipartite=1)
    # "Top" side: one negatively-numbered node per maximal clique.
    for i, clique in enumerate(find_cliques(G)):
        top = -i - 1
        B.add_node(top, bipartite=0)
        B.add_edges_from((member, top) for member in clique)
    return B
|
| 511 |
+
|
| 512 |
+
|
| 513 |
+
@nx._dispatchable
|
| 514 |
+
def node_clique_number(G, nodes=None, cliques=None, separate_nodes=False):
    """Returns the size of the largest maximal clique containing each given node.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    nodes : node or list of nodes, optional (default=None)
        A single node, a list of nodes, or None for all nodes of `G`.

    cliques : list, optional (default=None)
        A list of cliques, each of which is itself a list of nodes.
        If not specified, cliques are computed with :func:`find_cliques`.

    separate_nodes : unused
        Accepted for backward compatibility; has no effect -- TODO confirm.

    Returns
    -------
    int or dict
        The size of the largest maximal clique containing `nodes` if it
        is a single node, otherwise a dict keyed by node.

    See Also
    --------
    find_cliques
    """
    if cliques is None:
        if nodes is not None:
            # Restrict the clique search to each node's ego graph -- much
            # cheaper than enumerating all cliques of G.
            if nodes in G:  # single node
                ego = nx.ego_graph(G, nodes)
                return max(len(c) for c in find_cliques(ego))
            result = {}
            for n in nodes:
                result[n] = max(len(c) for c in find_cliques(nx.ego_graph(G, n)))
            return result
        # nodes is None -- need every clique anyway.
        cliques = list(find_cliques(G))

    if nodes in G:  # single node requested
        return max(len(c) for c in cliques if nodes in c)

    # Multiple nodes: one pass over all cliques beats per-node scans.
    best = defaultdict(int)
    for c in cliques:
        c_size = len(c)
        for n in c:
            if best[n] < c_size:
                best[n] = c_size
    return best if nodes is None else {n: best[n] for n in nodes}
|
| 575 |
+
|
| 576 |
+
|
| 577 |
+
def number_of_cliques(G, nodes=None, cliques=None):
    """Returns the number of maximal cliques containing each given node.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    nodes : node or list of nodes, optional (default=None)
        A single node, a list of nodes, or None for all nodes of `G`.

    cliques : list, optional (default=None)
        A list of cliques, each of which is itself a list of nodes.
        If not specified, cliques are computed with :func:`find_cliques`.

    Returns
    -------
    int or dict
        The clique count for `nodes` if it is a single node, otherwise a
        dict mapping each requested node to its clique count.
    """
    if cliques is None:
        cliques = list(find_cliques(G))

    if nodes is None:
        nodes = list(G.nodes())  # None means the entire graph

    if not isinstance(nodes, list):
        # A single node was given.
        # sum(...) counts matches without building a throwaway list.
        return sum(1 for c in cliques if nodes in c)
    return {v: sum(1 for c in cliques if v in c) for v in nodes}
|
| 598 |
+
|
| 599 |
+
|
| 600 |
+
class MaxWeightClique:
|
| 601 |
+
"""A class for the maximum weight clique algorithm.
|
| 602 |
+
|
| 603 |
+
This class is a helper for the `max_weight_clique` function. The class
|
| 604 |
+
should not normally be used directly.
|
| 605 |
+
|
| 606 |
+
Parameters
|
| 607 |
+
----------
|
| 608 |
+
G : NetworkX graph
|
| 609 |
+
The undirected graph for which a maximum weight clique is sought
|
| 610 |
+
weight : string or None, optional (default='weight')
|
| 611 |
+
The node attribute that holds the integer value used as a weight.
|
| 612 |
+
If None, then each node has weight 1.
|
| 613 |
+
|
| 614 |
+
Attributes
|
| 615 |
+
----------
|
| 616 |
+
G : NetworkX graph
|
| 617 |
+
The undirected graph for which a maximum weight clique is sought
|
| 618 |
+
node_weights: dict
|
| 619 |
+
The weight of each node
|
| 620 |
+
incumbent_nodes : list
|
| 621 |
+
The nodes of the incumbent clique (the best clique found so far)
|
| 622 |
+
incumbent_weight: int
|
| 623 |
+
The weight of the incumbent clique
|
| 624 |
+
"""
|
| 625 |
+
|
| 626 |
+
def __init__(self, G, weight):
|
| 627 |
+
self.G = G
|
| 628 |
+
self.incumbent_nodes = []
|
| 629 |
+
self.incumbent_weight = 0
|
| 630 |
+
|
| 631 |
+
if weight is None:
|
| 632 |
+
self.node_weights = {v: 1 for v in G.nodes()}
|
| 633 |
+
else:
|
| 634 |
+
for v in G.nodes():
|
| 635 |
+
if weight not in G.nodes[v]:
|
| 636 |
+
errmsg = f"Node {v!r} does not have the requested weight field."
|
| 637 |
+
raise KeyError(errmsg)
|
| 638 |
+
if not isinstance(G.nodes[v][weight], int):
|
| 639 |
+
errmsg = f"The {weight!r} field of node {v!r} is not an integer."
|
| 640 |
+
raise ValueError(errmsg)
|
| 641 |
+
self.node_weights = {v: G.nodes[v][weight] for v in G.nodes()}
|
| 642 |
+
|
| 643 |
+
def update_incumbent_if_improved(self, C, C_weight):
|
| 644 |
+
"""Update the incumbent if the node set C has greater weight.
|
| 645 |
+
|
| 646 |
+
C is assumed to be a clique.
|
| 647 |
+
"""
|
| 648 |
+
if C_weight > self.incumbent_weight:
|
| 649 |
+
self.incumbent_nodes = C[:]
|
| 650 |
+
self.incumbent_weight = C_weight
|
| 651 |
+
|
| 652 |
+
def greedily_find_independent_set(self, P):
|
| 653 |
+
"""Greedily find an independent set of nodes from a set of
|
| 654 |
+
nodes P."""
|
| 655 |
+
independent_set = []
|
| 656 |
+
P = P[:]
|
| 657 |
+
while P:
|
| 658 |
+
v = P[0]
|
| 659 |
+
independent_set.append(v)
|
| 660 |
+
P = [w for w in P if v != w and not self.G.has_edge(v, w)]
|
| 661 |
+
return independent_set
|
| 662 |
+
|
| 663 |
+
def find_branching_nodes(self, P, target):
|
| 664 |
+
"""Find a set of nodes to branch on."""
|
| 665 |
+
residual_wt = {v: self.node_weights[v] for v in P}
|
| 666 |
+
total_wt = 0
|
| 667 |
+
P = P[:]
|
| 668 |
+
while P:
|
| 669 |
+
independent_set = self.greedily_find_independent_set(P)
|
| 670 |
+
min_wt_in_class = min(residual_wt[v] for v in independent_set)
|
| 671 |
+
total_wt += min_wt_in_class
|
| 672 |
+
if total_wt > target:
|
| 673 |
+
break
|
| 674 |
+
for v in independent_set:
|
| 675 |
+
residual_wt[v] -= min_wt_in_class
|
| 676 |
+
P = [v for v in P if residual_wt[v] != 0]
|
| 677 |
+
return P
|
| 678 |
+
|
| 679 |
+
def expand(self, C, C_weight, P):
|
| 680 |
+
"""Look for the best clique that contains all the nodes in C and zero or
|
| 681 |
+
more of the nodes in P, backtracking if it can be shown that no such
|
| 682 |
+
clique has greater weight than the incumbent.
|
| 683 |
+
"""
|
| 684 |
+
self.update_incumbent_if_improved(C, C_weight)
|
| 685 |
+
branching_nodes = self.find_branching_nodes(P, self.incumbent_weight - C_weight)
|
| 686 |
+
while branching_nodes:
|
| 687 |
+
v = branching_nodes.pop()
|
| 688 |
+
P.remove(v)
|
| 689 |
+
new_C = C + [v]
|
| 690 |
+
new_C_weight = C_weight + self.node_weights[v]
|
| 691 |
+
new_P = [w for w in P if self.G.has_edge(v, w)]
|
| 692 |
+
self.expand(new_C, new_C_weight, new_P)
|
| 693 |
+
|
| 694 |
+
def find_max_weight_clique(self):
    """Run the branch-and-bound search for a maximum weight clique."""
    # Visiting high-degree nodes first tends to shrink the search tree.
    candidates = sorted(self.G.nodes(), key=self.G.degree, reverse=True)
    # Nodes of non-positive weight can never improve a clique's weight.
    candidates = [v for v in candidates if self.node_weights[v] > 0]
    self.expand([], 0, candidates)
|
| 700 |
+
|
| 701 |
+
|
| 702 |
+
@not_implemented_for("directed")
@nx._dispatchable(node_attrs="weight")
def max_weight_clique(G, weight="weight"):
    """Find a maximum weight clique in G.

    A *clique* is a set of nodes in which every two distinct nodes are
    adjacent, and its *weight* is the sum of the weights of its nodes.
    A *maximum weight clique* of graph G is a clique whose weight is not
    exceeded by that of any other clique in G.

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph
    weight : string or None, optional (default='weight')
        The node attribute that holds the integer value used as a weight.
        If None, then each node has weight 1.

    Returns
    -------
    clique : list
        the nodes of a maximum weight clique
    weight : int
        the weight of a maximum weight clique

    Notes
    -----
    The implementation is recursive, and therefore it may run into
    recursion depth issues if G contains a clique whose number of nodes
    is close to the recursion depth limit.

    At each search node the algorithm greedily constructs a weighted
    independent set cover of part of the graph in order to find a small
    set of nodes on which to branch. It closely follows the algorithm of
    Tavares et al. [1]_, except that this version does not use bitsets.
    This style of branch-and-bound for maximum weight clique (and for
    maximum weight independent set, the same problem on the complement
    graph) has a decades-long history; see Algorithm B of Warren and
    Hicks [2]_ and the references therein.

    References
    ----------
    .. [1] Tavares, W.A., Neto, M.B.C., Rodrigues, C.D., Michelon, P.: Um
           algoritmo de branch and bound para o problema da clique máxima
           ponderada. Proceedings of XLVII SBPO 1 (2015).

    .. [2] Warren, Jeffrey S, Hicks, Illya V.: Combinatorial Branch-and-Bound
           for the Maximum Weight Independent Set Problem. Technical Report,
           Texas A&M University (2016).
    """
    searcher = MaxWeightClique(G, weight)
    searcher.find_max_weight_clique()
    return searcher.incumbent_nodes, searcher.incumbent_weight
|
phi4/lib/python3.10/site-packages/networkx/algorithms/core.py
ADDED
|
@@ -0,0 +1,649 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Find the k-cores of a graph.
|
| 3 |
+
|
| 4 |
+
The k-core is found by recursively pruning nodes with degrees less than k.
|
| 5 |
+
|
| 6 |
+
See the following references for details:
|
| 7 |
+
|
| 8 |
+
An O(m) Algorithm for Cores Decomposition of Networks
|
| 9 |
+
Vladimir Batagelj and Matjaz Zaversnik, 2003.
|
| 10 |
+
https://arxiv.org/abs/cs.DS/0310049
|
| 11 |
+
|
| 12 |
+
Generalized Cores
|
| 13 |
+
Vladimir Batagelj and Matjaz Zaversnik, 2002.
|
| 14 |
+
https://arxiv.org/pdf/cs/0202039
|
| 15 |
+
|
| 16 |
+
For directed graphs a more general notion is that of D-cores which
|
| 17 |
+
looks at (k, l) restrictions on (in, out) degree. The (k, k) D-core
|
| 18 |
+
is the k-core.
|
| 19 |
+
|
| 20 |
+
D-cores: Measuring Collaboration of Directed Graphs Based on Degeneracy
|
| 21 |
+
Christos Giatsidis, Dimitrios M. Thilikos, Michalis Vazirgiannis, ICDM 2011.
|
| 22 |
+
http://www.graphdegeneracy.org/dcores_ICDM_2011.pdf
|
| 23 |
+
|
| 24 |
+
Multi-scale structure and topological anomaly detection via a new network \
|
| 25 |
+
statistic: The onion decomposition
|
| 26 |
+
L. Hébert-Dufresne, J. A. Grochow, and A. Allard
|
| 27 |
+
Scientific Reports 6, 31708 (2016)
|
| 28 |
+
http://doi.org/10.1038/srep31708
|
| 29 |
+
|
| 30 |
+
"""
|
| 31 |
+
|
| 32 |
+
import networkx as nx
|
| 33 |
+
|
| 34 |
+
# Public API of this module; anything not listed here is private.
__all__ = [
    "core_number",
    "k_core",
    "k_shell",
    "k_crust",
    "k_corona",
    "k_truss",
    "onion_layers",
]
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable
def core_number(G):
    """Returns the core number for each node.

    A k-core is a maximal subgraph that contains nodes of degree k or more.

    The core number of a node is the largest value k of a k-core containing
    that node.

    Parameters
    ----------
    G : NetworkX graph
        An undirected or directed graph

    Returns
    -------
    core_number : dictionary
        A dictionary keyed by node to the core number.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a multigraph or contains self loops.

    Notes
    -----
    For directed graphs the node degree is defined to be the
    in-degree + out-degree.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> nx.core_number(H)
    {0: 1, 1: 2, 2: 2, 3: 2, 4: 1, 5: 2, 6: 0}
    >>> G = nx.DiGraph()
    >>> G.add_edges_from([(1, 2), (2, 1), (2, 3), (2, 4), (3, 4), (4, 3)])
    >>> nx.core_number(G)
    {1: 2, 2: 2, 3: 2, 4: 2}

    References
    ----------
    .. [1] An O(m) Algorithm for Cores Decomposition of Networks
       Vladimir Batagelj and Matjaz Zaversnik, 2003.
       https://arxiv.org/abs/cs.DS/0310049
    """
    if nx.number_of_selfloops(G) > 0:
        msg = (
            "Input graph has self loops which is not permitted; "
            "Consider using G.remove_edges_from(nx.selfloop_edges(G))."
        )
        raise nx.NetworkXNotImplemented(msg)
    degrees = dict(G.degree())
    # Sort nodes by degree — the bucket order required by the
    # Batagelj-Zaversnik peeling algorithm [1].
    nodes = sorted(degrees, key=degrees.get)
    # bin_boundaries[d] is the index in `nodes` of the first node whose
    # current (effective) degree is >= d.
    bin_boundaries = [0]
    curr_degree = 0
    for i, v in enumerate(nodes):
        if degrees[v] > curr_degree:
            bin_boundaries.extend([i] * (degrees[v] - curr_degree))
            curr_degree = degrees[v]
    # node_pos[v] is the current index of node v within `nodes`.
    node_pos = {v: pos for pos, v in enumerate(nodes)}
    # The initial guess for the core number of a node is its degree.
    # NOTE: `core` aliases `degrees` deliberately — they are updated together.
    core = degrees
    nbrs = {v: list(nx.all_neighbors(G, v)) for v in G}
    for v in nodes:
        for u in nbrs[v]:
            if core[u] > core[v]:
                # Peeling v costs u one effective neighbor.
                nbrs[u].remove(v)
                # Swap u with the first node of its degree bin so that
                # shifting u into the next-lower bin is O(1).
                pos = node_pos[u]
                bin_start = bin_boundaries[core[u]]
                node_pos[u] = bin_start
                node_pos[nodes[bin_start]] = pos
                nodes[bin_start], nodes[pos] = nodes[pos], nodes[bin_start]
                bin_boundaries[core[u]] += 1
                core[u] -= 1
    return core
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
def _core_subgraph(G, k_filter, k=None, core=None):
|
| 126 |
+
"""Returns the subgraph induced by nodes passing filter `k_filter`.
|
| 127 |
+
|
| 128 |
+
Parameters
|
| 129 |
+
----------
|
| 130 |
+
G : NetworkX graph
|
| 131 |
+
The graph or directed graph to process
|
| 132 |
+
k_filter : filter function
|
| 133 |
+
This function filters the nodes chosen. It takes three inputs:
|
| 134 |
+
A node of G, the filter's cutoff, and the core dict of the graph.
|
| 135 |
+
The function should return a Boolean value.
|
| 136 |
+
k : int, optional
|
| 137 |
+
The order of the core. If not specified use the max core number.
|
| 138 |
+
This value is used as the cutoff for the filter.
|
| 139 |
+
core : dict, optional
|
| 140 |
+
Precomputed core numbers keyed by node for the graph `G`.
|
| 141 |
+
If not specified, the core numbers will be computed from `G`.
|
| 142 |
+
|
| 143 |
+
"""
|
| 144 |
+
if core is None:
|
| 145 |
+
core = core_number(G)
|
| 146 |
+
if k is None:
|
| 147 |
+
k = max(core.values())
|
| 148 |
+
nodes = (v for v in core if k_filter(v, k, core))
|
| 149 |
+
return G.subgraph(nodes).copy()
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def k_core(G, k=None, core_number=None):
    """Return the k-core of `G`.

    The k-core is the maximal subgraph whose nodes all have degree `k`
    or more within the subgraph.

    .. deprecated:: 3.3
       `k_core` will not accept `MultiGraph` objects in version 3.5.

    Parameters
    ----------
    G : NetworkX graph
        A graph or directed graph.
    k : int, optional
        The order of the core. Defaults to the main (largest) core.
    core_number : dictionary, optional
        Precomputed core numbers keyed by node for the graph `G`.

    Returns
    -------
    G : NetworkX graph
        The k-core subgraph.

    Raises
    ------
    NetworkXNotImplemented
        The k-core is not defined for multigraphs or graphs with self loops.

    Notes
    -----
    The main core is the core whose order `k` equals the largest core
    number present in `G`.

    For directed graphs the node degree is defined to be the
    in-degree + out-degree.

    Graph, node, and edge attributes are copied to the subgraph.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.k_core(H).nodes
    NodeView((1, 2, 3, 5))

    See Also
    --------
    core_number

    References
    ----------
    .. [1] An O(m) Algorithm for Cores Decomposition of Networks
       Vladimir Batagelj and Matjaz Zaversnik, 2003.
       https://arxiv.org/abs/cs.DS/0310049
    """
    import warnings

    if G.is_multigraph():
        warnings.warn(
            (
                "\n\n`k_core` will not accept `MultiGraph` objects in version 3.5.\n"
                "Convert it to an undirected graph instead, using::\n\n"
                "\tG = nx.Graph(G)\n"
            ),
            category=DeprecationWarning,
            stacklevel=5,
        )

    def in_core(v, cutoff, cores):
        # A node belongs to the k-core iff its core number is at least k.
        return cores[v] >= cutoff

    return _core_subgraph(G, in_core, k, core_number)
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def k_shell(G, k=None, core_number=None):
    """Return the k-shell of `G`.

    The k-shell is the subgraph induced by the nodes whose core number
    is exactly `k` — the nodes that belong to the k-core but not to the
    (k+1)-core.

    .. deprecated:: 3.3
       `k_shell` will not accept `MultiGraph` objects in version 3.5.

    Parameters
    ----------
    G : NetworkX graph
        A graph or directed graph.
    k : int, optional
        The order of the shell. Defaults to the outermost shell.
    core_number : dictionary, optional
        Precomputed core numbers keyed by node for the graph `G`.

    Returns
    -------
    G : NetworkX graph
        The k-shell subgraph.

    Raises
    ------
    NetworkXNotImplemented
        The k-shell is not implemented for multigraphs or graphs with self loops.

    Notes
    -----
    This is similar to k_corona, but in that case only neighbors inside
    the k-core are counted.

    For directed graphs the node degree is defined to be the
    in-degree + out-degree.

    Graph, node, and edge attributes are copied to the subgraph.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.k_shell(H, k=1).nodes
    NodeView((0, 4))

    See Also
    --------
    core_number
    k_corona

    References
    ----------
    .. [1] A model of Internet topology using k-shell decomposition
       Shai Carmi, Shlomo Havlin, Scott Kirkpatrick, Yuval Shavitt,
       and Eran Shir, PNAS July 3, 2007 vol. 104 no. 27 11150-11154
       http://www.pnas.org/content/104/27/11150.full
    """
    import warnings

    if G.is_multigraph():
        warnings.warn(
            (
                "\n\n`k_shell` will not accept `MultiGraph` objects in version 3.5.\n"
                "Convert it to an undirected graph instead, using::\n\n"
                "\tG = nx.Graph(G)\n"
            ),
            category=DeprecationWarning,
            stacklevel=5,
        )

    def on_shell(v, cutoff, cores):
        # The shell keeps only nodes whose core number equals k exactly.
        return cores[v] == cutoff

    return _core_subgraph(G, on_shell, k, core_number)
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def k_crust(G, k=None, core_number=None):
    """Return the k-crust of `G`.

    The k-crust is the graph `G` with the edges of the k-core removed,
    and with any nodes left isolated by that removal also removed.

    .. deprecated:: 3.3
       `k_crust` will not accept `MultiGraph` objects in version 3.5.

    Parameters
    ----------
    G : NetworkX graph
        A graph or directed graph.
    k : int, optional
        The order of the shell. Defaults to the main crust.
    core_number : dictionary, optional
        Precomputed core numbers keyed by node for the graph `G`.

    Returns
    -------
    G : NetworkX graph
        The k-crust subgraph.

    Raises
    ------
    NetworkXNotImplemented
        The k-crust is not implemented for multigraphs or graphs with self loops.

    Notes
    -----
    This definition of k-crust differs from the one in [1]_: the k-crust
    of [1]_ is equivalent to the (k+1)-crust of this algorithm.

    For directed graphs the node degree is defined to be the
    in-degree + out-degree.

    Graph, node, and edge attributes are copied to the subgraph.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.k_crust(H, k=1).nodes
    NodeView((0, 4, 6))

    See Also
    --------
    core_number

    References
    ----------
    .. [1] A model of Internet topology using k-shell decomposition
       Shai Carmi, Shlomo Havlin, Scott Kirkpatrick, Yuval Shavitt,
       and Eran Shir, PNAS July 3, 2007 vol. 104 no. 27 11150-11154
       http://www.pnas.org/content/104/27/11150.full
    """
    import warnings

    if G.is_multigraph():
        warnings.warn(
            (
                "\n\n`k_crust` will not accept `MultiGraph` objects in version 3.5.\n"
                "Convert it to an undirected graph instead, using::\n\n"
                "\tG = nx.Graph(G)\n"
            ),
            category=DeprecationWarning,
            stacklevel=5,
        )

    # The crust's default cutoff is one less than _core_subgraph's, so the
    # selection (c[v] <= k) is inlined here instead of delegated.
    cores = nx.core_number(G) if core_number is None else core_number
    cutoff = (max(cores.values()) - 1) if k is None else k
    crust_nodes = (v for v in cores if cores[v] <= cutoff)
    return G.subgraph(crust_nodes).copy()
|
| 391 |
+
|
| 392 |
+
|
| 393 |
+
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def k_corona(G, k, core_number=None):
    """Return the k-corona of `G`.

    The k-corona is the subgraph of nodes in the k-core that have
    exactly `k` neighbors inside the k-core.

    .. deprecated:: 3.3
       `k_corona` will not accept `MultiGraph` objects in version 3.5.

    Parameters
    ----------
    G : NetworkX graph
        A graph or directed graph.
    k : int
        The order of the corona.
    core_number : dictionary, optional
        Precomputed core numbers keyed by node for the graph `G`.

    Returns
    -------
    G : NetworkX graph
        The k-corona subgraph.

    Raises
    ------
    NetworkXNotImplemented
        The k-corona is not defined for multigraphs or graphs with self loops.

    Notes
    -----
    For directed graphs the node degree is defined to be the
    in-degree + out-degree.

    Graph, node, and edge attributes are copied to the subgraph.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.k_corona(H, k=2).nodes
    NodeView((1, 2, 3, 5))

    See Also
    --------
    core_number

    References
    ----------
    .. [1] k -core (bootstrap) percolation on complex networks:
       Critical phenomena and nonlocal effects,
       A. V. Goltsev, S. N. Dorogovtsev, and J. F. F. Mendes,
       Phys. Rev. E 73, 056101 (2006)
       http://link.aps.org/doi/10.1103/PhysRevE.73.056101
    """
    import warnings

    if G.is_multigraph():
        warnings.warn(
            (
                "\n\n`k_corona` will not accept `MultiGraph` objects in version 3.5.\n"
                "Convert it to an undirected graph instead, using::\n\n"
                "\tG = nx.Graph(G)\n"
            ),
            category=DeprecationWarning,
            stacklevel=5,
        )

    def on_corona(v, cutoff, cores):
        # Count v's neighbors that are themselves in the k-core.
        in_core_nbrs = sum(1 for u in G[v] if cores[u] >= cutoff)
        return cores[v] == cutoff and in_core_nbrs == cutoff

    return _core_subgraph(G, on_corona, k, core_number)
|
| 468 |
+
|
| 469 |
+
|
| 470 |
+
@nx.utils.not_implemented_for("directed")
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def k_truss(G, k):
    """Return the k-truss of `G`.

    The k-truss is the maximal induced subgraph of `G` containing at
    least three vertices in which every edge is incident to at least
    `k - 2` triangles.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.
    k : int
        The order of the truss.

    Returns
    -------
    H : NetworkX graph
        The k-truss subgraph.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a multigraph or directed graph or if it contains self loops.

    Notes
    -----
    A k-clique is a (k-2)-truss and a k-truss is a (k+1)-core.

    Graph, node, and edge attributes are copied to the subgraph.

    K-trusses were originally defined in [2] which states that the k-truss
    is the maximal induced subgraph where each edge belongs to at least
    `k-2` triangles. A more recent paper, [1], uses a slightly different
    definition requiring that each edge belong to at least `k` triangles.
    This implementation uses the original definition of `k-2` triangles.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.k_truss(H, k=2).nodes
    NodeView((0, 1, 2, 3, 4, 5))

    References
    ----------
    .. [1] Bounds and Algorithms for k-truss. Paul Burkhardt, Vance Faber,
       David G. Harris, 2018. https://arxiv.org/abs/1806.05523v2
    .. [2] Trusses: Cohesive Subgraphs for Social Network Analysis. Jonathan
       Cohen, 2005.
    """
    if nx.number_of_selfloops(G) > 0:
        msg = (
            "Input graph has self loops which is not permitted; "
            "Consider using G.remove_edges_from(nx.selfloop_edges(G))."
        )
        raise nx.NetworkXNotImplemented(msg)

    H = G.copy()

    # Repeatedly remove edges supported by fewer than k-2 triangles until
    # a whole pass removes nothing; drop isolated nodes as they appear.
    while True:
        doomed = []
        visited = set()
        for u in H:
            u_nbrs = set(H[u])
            visited.add(u)
            # Each undirected edge is examined exactly once, from the
            # endpoint visited first.
            for v in (w for w in u_nbrs if w not in visited):
                # Common neighbors of u and v == triangles through (u, v).
                if len(u_nbrs & set(H[v])) < (k - 2):
                    doomed.append((u, v))
        H.remove_edges_from(doomed)
        H.remove_nodes_from(list(nx.isolates(H)))
        if not doomed:
            break

    return H
|
| 550 |
+
|
| 551 |
+
|
| 552 |
+
@nx.utils.not_implemented_for("multigraph")
@nx.utils.not_implemented_for("directed")
@nx._dispatchable
def onion_layers(G):
    """Returns the layer of each vertex in an onion decomposition of the graph.

    The onion decomposition refines the k-core decomposition by providing
    information on the internal organization of each k-shell. It is usually
    used alongside the `core numbers`.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph without self loops.

    Returns
    -------
    od_layers : dictionary
        A dictionary keyed by node to the onion layer. The layers are
        contiguous integers starting at 1.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a multigraph or directed graph or if it contains self loops.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.onion_layers(H)
    {6: 1, 0: 2, 4: 3, 1: 4, 2: 4, 3: 4, 5: 4}

    See Also
    --------
    core_number

    References
    ----------
    .. [1] Multi-scale structure and topological anomaly detection via a new
       network statistic: The onion decomposition
       L. Hébert-Dufresne, J. A. Grochow, and A. Allard
       Scientific Reports 6, 31708 (2016)
       http://doi.org/10.1038/srep31708
    .. [2] Percolation and the effective structure of complex networks
       A. Allard and L. Hébert-Dufresne
       Physical Review X 9, 011023 (2019)
       http://doi.org/10.1103/PhysRevX.9.011023
    """
    if nx.number_of_selfloops(G) > 0:
        msg = (
            "Input graph contains self loops which is not permitted; "
            "Consider using G.remove_edges_from(nx.selfloop_edges(G))."
        )
        raise nx.NetworkXNotImplemented(msg)
    # Dictionary mapping each vertex to its onion layer.
    od_layers = {}
    # Mutable adjacency lists; entries are removed as vertices are peeled.
    neighbors = {v: list(nx.all_neighbors(G, v)) for v in G}
    # Effective degree of nodes (decreases as neighbors are peeled away).
    degrees = dict(G.degree())
    # Performs the onion decomposition.
    current_core = 1
    current_layer = 1
    # Sets vertices of degree 0 to layer 1, if any.
    isolated_nodes = list(nx.isolates(G))
    if len(isolated_nodes) > 0:
        for v in isolated_nodes:
            od_layers[v] = current_layer
            degrees.pop(v)
        current_layer = 2
    # Finds the layer for the remaining nodes.
    while len(degrees) > 0:
        # Sets the order for looking at nodes: lowest effective degree first.
        nodes = sorted(degrees, key=degrees.get)
        # Sets properly the current core: it never decreases during peeling.
        min_degree = degrees[nodes[0]]
        if min_degree > current_core:
            current_core = min_degree
        # Identifies vertices in the current layer: every remaining vertex
        # whose effective degree does not exceed the current core.
        this_layer = []
        for n in nodes:
            if degrees[n] > current_core:
                break
            this_layer.append(n)
        # Identifies the core/layer of the vertices in the current layer.
        for v in this_layer:
            od_layers[v] = current_layer
            for n in neighbors[v]:
                # Peeling v lowers the effective degree of its neighbors.
                neighbors[n].remove(v)
                degrees[n] = degrees[n] - 1
            degrees.pop(v)
        # Updates the layer count.
        current_layer = current_layer + 1
    # Return the dictionary mapping each vertex to its onion layer.
    return od_layers
|
phi4/lib/python3.10/site-packages/networkx/algorithms/covering.py
ADDED
|
@@ -0,0 +1,142 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions related to graph covers."""
|
| 2 |
+
|
| 3 |
+
from functools import partial
|
| 4 |
+
from itertools import chain
|
| 5 |
+
|
| 6 |
+
import networkx as nx
|
| 7 |
+
from networkx.utils import arbitrary_element, not_implemented_for
|
| 8 |
+
|
| 9 |
+
__all__ = ["min_edge_cover", "is_edge_cover"]
|
| 10 |
+
|
| 11 |
+
|
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def min_edge_cover(G, matching_algorithm=None):
    """Returns the min cardinality edge cover of the graph as a set of edges.

    A smallest edge cover can be found in polynomial time by finding
    a maximum matching and extending it greedily so that all nodes
    are covered. This function follows that process. A maximum matching
    algorithm can be specified for the first step of the algorithm.
    The resulting set may return a set with one 2-tuple for each edge,
    (the usual case) or with both 2-tuples `(u, v)` and `(v, u)` for
    each edge. The latter is only done when a bipartite matching algorithm
    is specified as `matching_algorithm`.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    matching_algorithm : function
        A function that returns a maximum cardinality matching for `G`.
        The function must take one input, the graph `G`, and return
        either a set of edges (with only one direction for the pair of nodes)
        or a dictionary mapping each node to its mate. If not specified,
        :func:`~networkx.algorithms.matching.max_weight_matching` is used.
        Common bipartite matching functions include
        :func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching`
        or
        :func:`~networkx.algorithms.bipartite.matching.eppstein_matching`.

    Returns
    -------
    min_cover : set

        A set of the edges in a minimum edge cover in the form of tuples.
        It contains only one of the equivalent 2-tuples `(u, v)` and `(v, u)`
        for each edge. If a bipartite method is used to compute the matching,
        the returned set contains both the 2-tuples `(u, v)` and `(v, u)`
        for each edge of a minimum edge cover.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> sorted(nx.min_edge_cover(G))
    [(2, 1), (3, 0)]

    Notes
    -----
    An edge cover of a graph is a set of edges such that every node of
    the graph is incident to at least one edge of the set.
    The minimum edge cover is an edge covering of smallest cardinality.

    Due to its implementation, the worst-case running time of this algorithm
    is bounded by the worst-case running time of the function
    ``matching_algorithm``.

    Minimum edge cover for `G` can also be found using the `min_edge_covering`
    function in :mod:`networkx.algorithms.bipartite.covering` which is
    simply this function with a default matching algorithm of
    :func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching`
    """
    # An empty graph is trivially covered by the empty edge set.
    if len(G) == 0:
        return set()
    if nx.number_of_isolates(G) > 0:
        # ``min_cover`` does not exist as there is an isolated node:
        # no edge can be incident to it.
        raise nx.NetworkXException(
            "Graph has a node with no edge incident on it, so no edge cover exists."
        )
    if matching_algorithm is None:
        # maxcardinality=True guarantees a *maximum* matching, which the
        # greedy extension below relies on.
        matching_algorithm = partial(nx.max_weight_matching, maxcardinality=True)
    maximum_matching = matching_algorithm(G)
    # ``min_cover`` is superset of ``maximum_matching``
    try:
        # bipartite matching algs return dict so convert if needed;
        # a dict mate-mapping yields both (u, v) and (v, u) entries.
        min_cover = set(maximum_matching.items())
        bipartite_cover = True
    except AttributeError:
        # A set of edges was returned (non-bipartite matcher).
        min_cover = maximum_matching
        bipartite_cover = False
    # iterate for uncovered nodes
    uncovered_nodes = set(G) - {v for u, v in min_cover} - {u for u, v in min_cover}
    for v in uncovered_nodes:
        # Since `v` is uncovered, each edge incident to `v` will join it
        # with a covered node (otherwise, if there were an edge joining
        # uncovered nodes `u` and `v`, the maximum matching algorithm
        # would have found it), so we can choose an arbitrary edge
        # incident to `v`. (This applies only in a simple graph, not a
        # multigraph.)
        u = arbitrary_element(G[v])
        min_cover.add((u, v))
        if bipartite_cover:
            # Keep the both-directions convention of dict-based matchings.
            min_cover.add((v, u))
    return min_cover
| 106 |
+
|
| 107 |
+
|
@not_implemented_for("directed")
@nx._dispatchable
def is_edge_cover(G, cover):
    """Decides whether a set of edges is a valid edge cover of the graph.

    Given a set of edges, whether it is an edge covering can
    be decided if we just check whether all nodes of the graph
    has an edge from the set, incident on it.

    Parameters
    ----------
    G : NetworkX graph
        An undirected bipartite graph.

    cover : set
        Set of edges to be checked.

    Returns
    -------
    bool
        Whether the set of edges is a valid edge cover of the graph.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> cover = {(2, 1), (3, 0)}
    >>> nx.is_edge_cover(G, cover)
    True

    Notes
    -----
    An edge cover of a graph is a set of edges such that every node of
    the graph is incident to at least one edge of the set.
    """
    # Collect every node that appears as an endpoint of some cover edge,
    # then check that no node of G is missing from that collection.
    covered_nodes = set().union(*cover)
    return set(G) <= covered_nodes
phi4/lib/python3.10/site-packages/networkx/algorithms/cycles.py
ADDED
|
@@ -0,0 +1,1230 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
========================
|
| 3 |
+
Cycle finding algorithms
|
| 4 |
+
========================
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
from collections import Counter, defaultdict
|
| 8 |
+
from itertools import combinations, product
|
| 9 |
+
from math import inf
|
| 10 |
+
|
| 11 |
+
import networkx as nx
|
| 12 |
+
from networkx.utils import not_implemented_for, pairwise
|
| 13 |
+
|
| 14 |
+
__all__ = [
|
| 15 |
+
"cycle_basis",
|
| 16 |
+
"simple_cycles",
|
| 17 |
+
"recursive_simple_cycles",
|
| 18 |
+
"find_cycle",
|
| 19 |
+
"minimum_cycle_basis",
|
| 20 |
+
"chordless_cycles",
|
| 21 |
+
"girth",
|
| 22 |
+
]
|
| 23 |
+
|
| 24 |
+
|
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def cycle_basis(G, root=None):
    """Returns a list of cycles which form a basis for cycles of G.

    A basis for cycles of a network is a minimal collection of
    cycles such that any cycle in the network can be written
    as a sum of cycles in the basis. Here summation of cycles
    is defined as "exclusive or" of the edges. Cycle bases are
    useful, e.g. when deriving equations for electric circuits
    using Kirchhoff's Laws.

    Parameters
    ----------
    G : NetworkX Graph
    root : node, optional
        Specify starting node for basis.

    Returns
    -------
    A list of cycle lists.  Each cycle list is a list of nodes
    which forms a cycle (loop) in G.

    Examples
    --------
    >>> G = nx.Graph()
    >>> nx.add_cycle(G, [0, 1, 2, 3])
    >>> nx.add_cycle(G, [0, 3, 4, 5])
    >>> nx.cycle_basis(G, 0)
    [[3, 4, 5, 0], [1, 2, 3, 0]]

    Notes
    -----
    This is adapted from algorithm CACM 491 [1]_.

    References
    ----------
    .. [1] Paton, K. An algorithm for finding a fundamental set of
       cycles of a graph. Comm. ACM 12, 9 (Sept 1969), 514-518.

    See Also
    --------
    simple_cycles
    minimum_cycle_basis
    """
    gnodes = dict.fromkeys(G)  # set-like object that maintains node order
    cycles = []
    while gnodes:  # loop over connected components
        if root is None:
            root = gnodes.popitem()[0]
        # DFS state: ``pred`` maps each visited node to its spanning-tree
        # parent; ``used[n]`` is the set of tree neighbors already explored
        # from ``n``, used to recognize non-tree (cycle-closing) edges.
        stack = [root]
        pred = {root: root}
        used = {root: set()}
        while stack:  # walk the spanning tree finding cycles
            z = stack.pop()  # use last-in so cycles easier to find
            zused = used[z]
            for nbr in G[z]:
                if nbr not in used:  # new node
                    pred[nbr] = z
                    stack.append(nbr)
                    used[nbr] = {z}
                elif nbr == z:  # self loops
                    cycles.append([z])
                elif nbr not in zused:  # found a cycle
                    # Close the cycle: walk predecessors of ``z`` back up the
                    # tree until we reach a node already explored from ``nbr``.
                    pn = used[nbr]
                    cycle = [nbr, z]
                    p = pred[z]
                    while p not in pn:
                        cycle.append(p)
                        p = pred[p]
                    cycle.append(p)
                    cycles.append(cycle)
                    used[nbr].add(z)
        # Remove this component's nodes so the outer loop moves on.
        for node in pred:
            gnodes.pop(node, None)
        root = None
    return cycles
| 103 |
+
|
| 104 |
+
|
@nx._dispatchable
def simple_cycles(G, length_bound=None):
    """Find simple cycles (elementary circuits) of a graph.

    A "simple cycle", or "elementary circuit", is a closed path where
    no node appears twice.  In a directed graph, two simple cycles are distinct
    if they are not cyclic permutations of each other.  In an undirected graph,
    two simple cycles are distinct if they are not cyclic permutations of each
    other nor of the other's reversal.

    Optionally, the cycles are bounded in length.  In the unbounded case, we use
    a nonrecursive, iterator/generator version of Johnson's algorithm [1]_.  In
    the bounded case, we use a version of the algorithm of Gupta and
    Suzumura [2]_. There may be better algorithms for some cases [3]_ [4]_ [5]_.

    The algorithms of Johnson, and Gupta and Suzumura, are enhanced by some
    well-known preprocessing techniques.  When `G` is directed, we restrict our
    attention to strongly connected components of `G`, generate all simple cycles
    containing a certain node, remove that node, and further decompose the
    remainder into strongly connected components.  When `G` is undirected, we
    restrict our attention to biconnected components, generate all simple cycles
    containing a particular edge, remove that edge, and further decompose the
    remainder into biconnected components.

    Note that multigraphs are supported by this function -- and in undirected
    multigraphs, a pair of parallel edges is considered a cycle of length 2.
    Likewise, self-loops are considered to be cycles of length 1.  We define
    cycles as sequences of nodes; so the presence of loops and parallel edges
    does not change the number of simple cycles in a graph.

    Parameters
    ----------
    G : NetworkX Graph
       A networkx graph. Undirected, directed, and multigraphs are all supported.

    length_bound : int or None, optional (default=None)
       If `length_bound` is an int, generate all simple cycles of `G` with length at
       most `length_bound`.  Otherwise, generate all simple cycles of `G`.

    Yields
    ------
    list of nodes
       Each cycle is represented by a list of nodes along the cycle.

    Examples
    --------
    >>> G = nx.DiGraph([(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)])
    >>> sorted(nx.simple_cycles(G))
    [[0], [0, 1, 2], [0, 2], [1, 2], [2]]

    To filter the cycles so that they don't include certain nodes or edges,
    copy your graph and eliminate those nodes or edges before calling.
    For example, to exclude self-loops from the above example:

    >>> H = G.copy()
    >>> H.remove_edges_from(nx.selfloop_edges(G))
    >>> sorted(nx.simple_cycles(H))
    [[0, 1, 2], [0, 2], [1, 2]]

    Notes
    -----
    When `length_bound` is None, the time complexity is $O((n+e)(c+1))$ for $n$
    nodes, $e$ edges and $c$ simple circuits.  Otherwise, when ``length_bound > 1``,
    the time complexity is $O((c+n)(k-1)d^k)$ where $d$ is the average degree of
    the nodes of `G` and $k$ = `length_bound`.

    Raises
    ------
    ValueError
        when ``length_bound < 0``.

    References
    ----------
    .. [1] Finding all the elementary circuits of a directed graph.
       D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975.
       https://doi.org/10.1137/0204007
    .. [2] Finding All Bounded-Length Simple Cycles in a Directed Graph
       A. Gupta and T. Suzumura https://arxiv.org/abs/2105.10094
    .. [3] Enumerating the cycles of a digraph: a new preprocessing strategy.
       G. Loizou and P. Thanish, Information Sciences, v. 27, 163-182, 1982.
    .. [4] A search strategy for the elementary cycles of a directed graph.
       J.L. Szwarcfiter and P.E. Lauer, BIT NUMERICAL MATHEMATICS,
       v. 16, no. 2, 192-204, 1976.
    .. [5] Optimal Listing of Cycles and st-Paths in Undirected Graphs
       R. Ferreira and R. Grossi and A. Marino and N. Pisanti and R. Rizzi and
       G. Sacomoto https://arxiv.org/abs/1205.2766

    See Also
    --------
    cycle_basis
    chordless_cycles
    """

    if length_bound is not None:
        if length_bound == 0:
            # No cycle has length 0; nothing to generate.
            return
        elif length_bound < 0:
            raise ValueError("length bound must be non-negative")

    directed = G.is_directed()
    # Self-loops are length-1 cycles; emit them first.
    yield from ([v] for v, Gv in G.adj.items() if v in Gv)

    if length_bound is not None and length_bound == 1:
        return

    # In an undirected multigraph, a pair of parallel edges forms a 2-cycle.
    if G.is_multigraph() and not directed:
        visited = set()
        for u, Gu in G.adj.items():
            multiplicity = ((v, len(Guv)) for v, Guv in Gu.items() if v in visited)
            yield from ([u, v] for v, m in multiplicity if m > 1)
            visited.add(u)

    # explicitly filter out loops; implicitly filter out parallel edges
    if directed:
        G = nx.DiGraph((u, v) for u, Gu in G.adj.items() for v in Gu if v != u)
    else:
        G = nx.Graph((u, v) for u, Gu in G.adj.items() for v in Gu if v != u)

    # this case is not strictly necessary but improves performance
    if length_bound is not None and length_bound == 2:
        if directed:
            # A directed 2-cycle is a mutual pair of arcs (u, v), (v, u).
            visited = set()
            for u, Gu in G.adj.items():
                yield from (
                    [v, u] for v in visited.intersection(Gu) if G.has_edge(v, u)
                )
                visited.add(u)
        return

    if directed:
        yield from _directed_cycle_search(G, length_bound)
    else:
        yield from _undirected_cycle_search(G, length_bound)
| 238 |
+
|
| 239 |
+
|
def _directed_cycle_search(G, length_bound):
    """A dispatch function for `simple_cycles` for directed graphs.

    We generate all cycles of G through binary partition.

    1. Pick a node v in G which belongs to at least one cycle
        a. Generate all cycles of G which contain the node v.
        b. Recursively generate all cycles of G \\ v.

    This is accomplished by decomposing G into strongly connected components,
    searching each component for cycles through a chosen node, deleting that
    node, and re-decomposing the remainder.

    If the parameter length_bound is not None, then step a will be limited to
    simple cycles of length at most length_bound.

    Parameters
    ----------
    G : NetworkX DiGraph
       A directed graph

    length_bound : int or None
       If length_bound is an int, generate all simple cycles of G with length at most length_bound.
       Otherwise, generate all simple cycles of G.

    Yields
    ------
    list of nodes
       Each cycle is represented by a list of nodes along the cycle.
    """

    # Work queue of strongly connected components still to be processed;
    # a component with fewer than two nodes cannot contain a loop-free cycle.
    pending = [comp for comp in nx.strongly_connected_components(G) if len(comp) >= 2]
    while pending:
        comp = pending.pop()
        subgraph = G.subgraph(comp)
        anchor = next(iter(comp))
        if length_bound is None:
            yield from _johnson_cycle_search(subgraph, [anchor])
        else:
            yield from _bounded_cycle_search(subgraph, [anchor], length_bound)
        # Remove the anchor only after the search, so that cycles through it
        # still exist while being enumerated; then re-decompose the remainder.
        G.remove_node(anchor)
        pending.extend(
            comp
            for comp in nx.strongly_connected_components(subgraph)
            if len(comp) >= 2
        )
| 288 |
+
|
| 289 |
+
|
def _undirected_cycle_search(G, length_bound):
    """A dispatch function for `simple_cycles` for undirected graphs.

    We generate all cycles of G through binary partition.

    1. Pick an edge (u, v) in G which belongs to at least one cycle
        a. Generate all cycles of G which contain the edge (u, v)
        b. Recursively generate all cycles of G \\ (u, v)

    This is accomplished by decomposing G into biconnected components,
    searching each component for cycles through a chosen edge, deleting that
    edge, and re-decomposing the remainder.

    If the parameter length_bound is not None, then step a will be limited to
    simple cycles of length at most length_bound.

    Parameters
    ----------
    G : NetworkX Graph
       An undirected graph

    length_bound : int or None
       If length_bound is an int, generate all simple cycles of G with length at most length_bound.
       Otherwise, generate all simple cycles of G.

    Yields
    ------
    list of nodes
       Each cycle is represented by a list of nodes along the cycle.
    """

    # Work queue of biconnected components still to be processed; a component
    # needs at least three nodes to contain a cycle in a simple graph.
    pending = [comp for comp in nx.biconnected_components(G) if len(comp) >= 3]
    while pending:
        comp = pending.pop()
        subgraph = G.subgraph(comp)
        edge = list(next(iter(subgraph.edges)))
        # Remove the chosen edge (u, v) *before* searching, so the search
        # cannot report the fake 3-cycle [u, v, u] built from that edge alone.
        G.remove_edge(*edge)
        if length_bound is None:
            yield from _johnson_cycle_search(subgraph, edge)
        else:
            yield from _bounded_cycle_search(subgraph, edge, length_bound)
        pending.extend(
            comp for comp in nx.biconnected_components(subgraph) if len(comp) >= 3
        )
| 338 |
+
|
| 339 |
+
|
| 340 |
+
class _NeighborhoodCache(dict):
|
| 341 |
+
"""Very lightweight graph wrapper which caches neighborhoods as list.
|
| 342 |
+
|
| 343 |
+
This dict subclass uses the __missing__ functionality to query graphs for
|
| 344 |
+
their neighborhoods, and store the result as a list. This is used to avoid
|
| 345 |
+
the performance penalty incurred by subgraph views.
|
| 346 |
+
"""
|
| 347 |
+
|
| 348 |
+
def __init__(self, G):
|
| 349 |
+
self.G = G
|
| 350 |
+
|
| 351 |
+
def __missing__(self, v):
|
| 352 |
+
Gv = self[v] = list(self.G[v])
|
| 353 |
+
return Gv
|
| 354 |
+
|
| 355 |
+
|
def _johnson_cycle_search(G, path):
    """The main loop of the cycle-enumeration algorithm of Johnson.

    Parameters
    ----------
    G : NetworkX Graph or DiGraph
       A graph

    path : list
       A cycle prefix.  All cycles generated will begin with this prefix.

    Yields
    ------
    list of nodes
       Each cycle is represented by a list of nodes along the cycle.

    References
    ----------
    .. [1] Finding all the elementary circuits of a directed graph.
       D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975.
       https://doi.org/10.1137/0204007

    """

    # Iterative DFS.  ``stack`` holds one neighbor iterator per node on
    # ``path``; ``blocked`` prevents revisiting nodes on the current path and
    # nodes proven fruitless; ``B`` records which blocked nodes must be
    # unblocked when a given node is unblocked (Johnson's B-lists).
    G = _NeighborhoodCache(G)
    blocked = set(path)
    B = defaultdict(set)  # graph portions that yield no elementary circuit
    start = path[0]
    stack = [iter(G[path[-1]])]
    # closed[i] is True when a cycle has been reported at or below depth i;
    # it decides whether to unblock a node on backtrack or defer via B.
    closed = [False]
    while stack:
        nbrs = stack[-1]
        for w in nbrs:
            if w == start:
                # Reached the prefix head: the current path is a cycle.
                yield path[:]
                closed[-1] = True
            elif w not in blocked:
                # Descend one level into an unblocked neighbor.
                path.append(w)
                closed.append(False)
                stack.append(iter(G[w]))
                blocked.add(w)
                break
        else:  # no more nbrs
            stack.pop()
            v = path.pop()
            if closed.pop():
                # A cycle was found below v: propagate the flag upward and
                # unblock v (and, transitively, everything waiting on it).
                if closed:
                    closed[-1] = True
                unblock_stack = {v}
                while unblock_stack:
                    u = unblock_stack.pop()
                    if u in blocked:
                        blocked.remove(u)
                        unblock_stack.update(B[u])
                        B[u].clear()
            else:
                # No cycle through v: keep it blocked until some neighbor of
                # v is unblocked (register v in each neighbor's B-list).
                for w in G[v]:
                    B[w].add(v)
| 414 |
+
|
| 415 |
+
|
def _bounded_cycle_search(G, path, length_bound):
    """The main loop of the cycle-enumeration algorithm of Gupta and Suzumura.

    Parameters
    ----------
    G : NetworkX Graph or DiGraph
       A graph

    path : list
       A cycle prefix.  All cycles generated will begin with this prefix.

    length_bound: int
       A length bound.  All cycles generated will have length at most length_bound.

    Yields
    ------
    list of nodes
       Each cycle is represented by a list of nodes along the cycle.

    References
    ----------
    .. [1] Finding All Bounded-Length Simple Cycles in a Directed Graph
       A. Gupta and T. Suzumura https://arxiv.org/abs/2105.10094

    """
    G = _NeighborhoodCache(G)
    # lock[v] is the smallest path length at which v may still be appended;
    # 0 for the prefix nodes means they can never be re-entered.
    lock = {v: 0 for v in path}
    # B[w] holds nodes whose locks may be relaxed once w becomes reachable
    # within the bound (the bounded analogue of Johnson's B-lists).
    B = defaultdict(set)
    start = path[0]
    stack = [iter(G[path[-1]])]
    # blen[i] tracks, for depth i, the shortest remaining distance to a cycle
    # found in the subtree; initialized to the (unreachable) full bound.
    blen = [length_bound]
    while stack:
        nbrs = stack[-1]
        for w in nbrs:
            if w == start:
                # Closed a cycle: one step remained from the current node.
                yield path[:]
                blen[-1] = 1
            elif len(path) < lock.get(w, length_bound):
                # w may be visited at this depth without exceeding the bound.
                path.append(w)
                blen.append(length_bound)
                lock[w] = len(path)
                stack.append(iter(G[w]))
                break
        else:
            stack.pop()
            v = path.pop()
            bl = blen.pop()
            if blen:
                # Parent's best distance-to-cycle includes paths through v.
                blen[-1] = min(blen[-1], bl)
            if bl < length_bound:
                # A cycle was reachable from v: relax the locks of v and of
                # nodes recorded in B that can now reach a cycle in budget.
                relax_stack = [(bl, v)]
                while relax_stack:
                    bl, u = relax_stack.pop()
                    if lock.get(u, length_bound) < length_bound - bl + 1:
                        lock[u] = length_bound - bl + 1
                        relax_stack.extend((bl + 1, w) for w in B[u].difference(path))
            else:
                # No cycle found within the bound: defer v's relaxation until
                # one of its successors is relaxed.
                for w in G[v]:
                    B[w].add(v)
| 475 |
+
|
| 476 |
+
|
| 477 |
+
@nx._dispatchable
def chordless_cycles(G, length_bound=None):
    """Find simple chordless cycles of a graph.

    A `simple cycle` is a closed path where no node appears twice. In a simple
    cycle, a `chord` is an additional edge between two nodes in the cycle. A
    `chordless cycle` is a simple cycle without chords. Said differently, a
    chordless cycle is a cycle C in a graph G where the number of edges in the
    induced graph G[C] is equal to the length of `C`.

    Note that some care must be taken in the case that G is not a simple graph
    nor a simple digraph. Some authors limit the definition of chordless cycles
    to have a prescribed minimum length; we do not.

    1. We interpret self-loops to be chordless cycles, except in multigraphs
       with multiple loops in parallel. Likewise, in a chordless cycle of
       length greater than 1, there can be no nodes with self-loops.

    2. We interpret directed two-cycles to be chordless cycles, except in
       multi-digraphs when any edge in a two-cycle has a parallel copy.

    3. We interpret parallel pairs of undirected edges as two-cycles, except
       when a third (or more) parallel edge exists between the two nodes.

    4. Generalizing the above, edges with parallel clones may not occur in
       chordless cycles.

    In a directed graph, two chordless cycles are distinct if they are not
    cyclic permutations of each other. In an undirected graph, two chordless
    cycles are distinct if they are not cyclic permutations of each other nor of
    the other's reversal.

    Optionally, the cycles are bounded in length.

    We use an algorithm strongly inspired by that of Dias et al [1]_. It has
    been modified in the following ways:

    1. Recursion is avoided, per Python's limitations

    2. The labeling function is not necessary, because the starting paths
        are chosen (and deleted from the host graph) to prevent multiple
        occurrences of the same path

    3. The search is optionally bounded at a specified length

    4. Support for directed graphs is provided by extending cycles along
        forward edges, and blocking nodes along forward and reverse edges

    5. Support for multigraphs is provided by omitting digons from the set
        of forward edges

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed graph

    length_bound : int or None, optional (default=None)
        If length_bound is an int, generate all simple cycles of G with length at
        most length_bound. Otherwise, generate all simple cycles of G.

    Yields
    ------
    list of nodes
        Each cycle is represented by a list of nodes along the cycle.

    Examples
    --------
    >>> sorted(list(nx.chordless_cycles(nx.complete_graph(4))))
    [[1, 0, 2], [1, 0, 3], [2, 0, 3], [2, 1, 3]]

    Notes
    -----
    When length_bound is None, and the graph is simple, the time complexity is
    $O((n+e)(c+1))$ for $n$ nodes, $e$ edges and $c$ chordless cycles.

    Raises
    ------
    ValueError
        when length_bound < 0.

    References
    ----------
    .. [1] Efficient enumeration of chordless cycles
       E. Dias and D. Castonguay and H. Longo and W.A.R. Jradi
       https://arxiv.org/abs/1309.1051

    See Also
    --------
    simple_cycles
    """

    if length_bound is not None:
        if length_bound == 0:
            return
        elif length_bound < 0:
            raise ValueError("length bound must be non-negative")

    directed = G.is_directed()
    multigraph = G.is_multigraph()

    # Length-1 chordless cycles: self-loops.  In a multigraph a loop only
    # counts when it has no parallel copy (rule 1 of the docstring).
    if multigraph:
        yield from ([v] for v, Gv in G.adj.items() if len(Gv.get(v, ())) == 1)
    else:
        yield from ([v] for v, Gv in G.adj.items() if v in Gv)

    if length_bound is not None and length_bound == 1:
        return

    # Nodes with loops cannot belong to longer cycles.  Let's delete them here.
    # also, we implicitly reduce the multiplicity of edges down to 1 in the case
    # of multiedges.
    if directed:
        F = nx.DiGraph((u, v) for u, Gu in G.adj.items() if u not in Gu for v in Gu)
        B = F.to_undirected(as_view=False)
    else:
        F = nx.Graph((u, v) for u, Gu in G.adj.items() if u not in Gu for v in Gu)
        B = None

    # If we're given a multigraph, we have a few cases to consider with parallel
    # edges.
    #
    # 1. If we have 2 or more edges in parallel between the nodes (u, v), we
    #    must not construct longer cycles along (u, v).
    # 2. If G is not directed, then a pair of parallel edges between (u, v) is a
    #    chordless cycle unless there exists a third (or more) parallel edge.
    # 3. If G is directed, then parallel edges do not form cycles, but do
    #    preclude back-edges from forming cycles (handled in the next section),
    #    Thus, if an edge (u, v) is duplicated and the reverse (v, u) is also
    #    present, then we remove both from F.
    #
    # In directed graphs, we need to consider both directions that edges can
    # take, so iterate over all edges (u, v) and possibly (v, u). In undirected
    # graphs, we need to be a little careful to only consider every edge once,
    # so we use a "visited" set to emulate node-order comparisons.

    if multigraph:
        if not directed:
            B = F.copy()
        visited = set()
        for u, Gu in G.adj.items():
            if directed:
                multiplicity = ((v, len(Guv)) for v, Guv in Gu.items())
                for v, m in multiplicity:
                    if m > 1:
                        F.remove_edges_from(((u, v), (v, u)))
            else:
                multiplicity = ((v, len(Guv)) for v, Guv in Gu.items() if v in visited)
                for v, m in multiplicity:
                    if m == 2:
                        yield [u, v]
                    if m > 1:
                        F.remove_edge(u, v)
            visited.add(u)

    # If we're given a directed graphs, we need to think about digons. If we
    # have two edges (u, v) and (v, u), then that's a two-cycle. If either edge
    # was duplicated above, then we removed both from F. So, any digons we find
    # here are chordless. After finding digons, we remove their edges from F
    # to avoid traversing them in the search for chordless cycles.
    if directed:
        for u, Fu in F.adj.items():
            digons = [[u, v] for v in Fu if F.has_edge(v, u)]
            yield from digons
            F.remove_edges_from(digons)
            F.remove_edges_from(e[::-1] for e in digons)

    if length_bound is not None and length_bound == 2:
        return

    # Now, we prepare to search for cycles. We have removed all cycles of
    # lengths 1 and 2, so F is a simple graph or simple digraph. We repeatedly
    # separate digraphs into their strongly connected components, and undirected
    # graphs into their biconnected components. For each component, we pick a
    # node v, search for chordless cycles based at each "stem" (u, v, w), and
    # then remove v from that component before separating the graph again.
    if directed:
        separate = nx.strongly_connected_components

        # Directed stems look like (u -> v -> w), so we use the product of
        # predecessors of v with successors of v.
        def stems(C, v):
            for u, w in product(C.pred[v], C.succ[v]):
                if not G.has_edge(u, w):  # omit stems with acyclic chords
                    yield [u, v, w], F.has_edge(w, u)

    else:
        separate = nx.biconnected_components

        # Undirected stems look like (u ~ v ~ w), but we must not also search
        # (w ~ v ~ u), so we use combinations of v's neighbors of length 2.
        def stems(C, v):
            yield from (([u, v, w], F.has_edge(w, u)) for u, w in combinations(C[v], 2))

    components = [c for c in separate(F) if len(c) > 2]
    while components:
        c = components.pop()
        v = next(iter(c))
        Fc = F.subgraph(c)
        Fcc = Bcc = None
        for S, is_triangle in stems(Fc, v):
            if is_triangle:
                yield S
            else:
                # Lazily build the cached neighborhoods only when at least one
                # non-triangle stem needs the full search.
                if Fcc is None:
                    Fcc = _NeighborhoodCache(Fc)
                    Bcc = Fcc if B is None else _NeighborhoodCache(B.subgraph(c))
                yield from _chordless_cycle_search(Fcc, Bcc, S, length_bound)

        components.extend(c for c in separate(F.subgraph(c - {v})) if len(c) > 2)
|
| 686 |
+
|
| 687 |
+
|
| 688 |
+
def _chordless_cycle_search(F, B, path, length_bound):
    """The main loop for chordless cycle enumeration.

    This algorithm is strongly inspired by that of Dias et al [1]_. It has been
    modified in the following ways:

    1. Recursion is avoided, per Python's limitations

    2. The labeling function is not necessary, because the starting paths
        are chosen (and deleted from the host graph) to prevent multiple
        occurrences of the same path

    3. The search is optionally bounded at a specified length

    4. Support for directed graphs is provided by extending cycles along
        forward edges, and blocking nodes along forward and reverse edges

    5. Support for multigraphs is provided by omitting digons from the set
        of forward edges

    Parameters
    ----------
    F : _NeighborhoodCache
       A graph of forward edges to follow in constructing cycles

    B : _NeighborhoodCache
       A graph of blocking edges to prevent the production of chordless cycles

    path : list
       A cycle prefix.  All cycles generated will begin with this prefix.

    length_bound : int
       A length bound.  All cycles generated will have length at most length_bound.


    Yields
    ------
    list of nodes
       Each cycle is represented by a list of nodes along the cycle.

    References
    ----------
    .. [1] Efficient enumeration of chordless cycles
       E. Dias and D. Castonguay and H. Longo and W.A.R. Jradi
       https://arxiv.org/abs/1309.1051

    """
    # blocked[v] counts how many path nodes are adjacent (in B) to v.  A
    # candidate w may extend the path only when blocked[w] == 1, i.e. its only
    # blocking neighbor is the path tip itself — any more would create a chord.
    blocked = defaultdict(int)
    target = path[0]
    blocked[path[1]] = 1
    for w in path[1:]:
        for v in B[w]:
            blocked[v] += 1

    # Iterative DFS: each stack entry is an iterator over the forward
    # neighbors of the corresponding path node; the prefix path[:2] is fixed.
    stack = [iter(F[path[2]])]
    while stack:
        nbrs = stack[-1]
        for w in nbrs:
            if blocked[w] == 1 and (length_bound is None or len(path) < length_bound):
                Fw = F[w]
                if target in Fw:
                    # Forward edge back to the start closes a chordless cycle.
                    yield path + [w]
                else:
                    Bw = B[w]
                    if target in Bw:
                        # w touches the start via a blocking edge: extending
                        # through w could only ever close with a chord.
                        continue
                    for v in Bw:
                        blocked[v] += 1
                    path.append(w)
                    stack.append(iter(Fw))
                    break
        else:
            # Neighbors exhausted: backtrack and release w's blocking counts.
            stack.pop()
            for v in B[path.pop()]:
                blocked[v] -= 1
|
| 763 |
+
|
| 764 |
+
|
| 765 |
+
@not_implemented_for("undirected")
@nx._dispatchable(mutates_input=True)
def recursive_simple_cycles(G):
    """Find simple cycles (elementary circuits) of a directed graph.

    A `simple cycle`, or `elementary circuit`, is a closed path where
    no node appears twice. Two elementary circuits are distinct if they
    are not cyclic permutations of each other.

    This version uses a recursive algorithm to build a list of cycles.
    You should probably use the iterator version called simple_cycles().
    Warning: This recursive version uses lots of RAM!
    It appears in NetworkX for pedagogical value.

    Parameters
    ----------
    G : NetworkX DiGraph
       A directed graph

    Returns
    -------
    A list of cycles, where each cycle is represented by a list of nodes
    along the cycle.

    Example:

    >>> edges = [(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)]
    >>> G = nx.DiGraph(edges)
    >>> nx.recursive_simple_cycles(G)
    [[0], [2], [0, 1, 2], [0, 2], [1, 2]]

    Notes
    -----
    The implementation follows pp. 79-80 in [1]_.

    The time complexity is $O((n+e)(c+1))$ for $n$ nodes, $e$ edges and $c$
    elementary circuits.

    References
    ----------
    .. [1] Finding all the elementary circuits of a directed graph.
       D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975.
       https://doi.org/10.1137/0204007

    See Also
    --------
    simple_cycles, cycle_basis
    """

    # Jon Olav Vik, 2010-08-09
    def _unblock(thisnode):
        """Recursively unblock and remove nodes from B[thisnode]."""
        if blocked[thisnode]:
            blocked[thisnode] = False
            while B[thisnode]:
                _unblock(B[thisnode].pop())

    def circuit(thisnode, startnode, component):
        # Extend the current elementary path by thisnode; returns whether any
        # circuit through thisnode back to startnode was found.
        closed = False  # set to True if elementary path is closed
        path.append(thisnode)
        blocked[thisnode] = True
        for nextnode in component[thisnode]:  # direct successors of thisnode
            if nextnode == startnode:
                result.append(path[:])
                closed = True
            elif not blocked[nextnode]:
                if circuit(nextnode, startnode, component):
                    closed = True
        if closed:
            _unblock(thisnode)
        else:
            # No circuit through thisnode: defer unblocking until one of its
            # successors is unblocked (Johnson's B-list bookkeeping).
            for nextnode in component[thisnode]:
                if thisnode not in B[nextnode]:  # TODO: use set for speedup?
                    B[nextnode].append(thisnode)
        path.pop()  # remove thisnode from path
        return closed

    path = []  # stack of nodes in current path
    blocked = defaultdict(bool)  # vertex: blocked from search?
    B = defaultdict(list)  # graph portions that yield no elementary circuit
    result = []  # list to accumulate the circuits found

    # Johnson's algorithm exclude self cycle edges like (v, v)
    # To be backward compatible, we record those cycles in advance
    # and then remove from subG
    for v in G:
        if G.has_edge(v, v):
            result.append([v])
            G.remove_edge(v, v)

    # Johnson's algorithm requires some ordering of the nodes.
    # They might not be sortable so we assign an arbitrary ordering.
    ordering = dict(zip(G, range(len(G))))
    for s in ordering:
        # Build the subgraph induced by s and following nodes in the ordering
        subgraph = G.subgraph(node for node in G if ordering[node] >= ordering[s])
        # Find the strongly connected component in the subgraph
        # that contains the least node according to the ordering
        strongcomp = nx.strongly_connected_components(subgraph)
        mincomp = min(strongcomp, key=lambda ns: min(ordering[n] for n in ns))
        component = G.subgraph(mincomp)
        if len(component) > 1:
            # smallest node in the component according to the ordering
            startnode = min(component, key=ordering.__getitem__)
            for node in component:
                blocked[node] = False
                B[node][:] = []
            dummy = circuit(startnode, startnode, component)
    return result
|
| 874 |
+
|
| 875 |
+
|
| 876 |
+
@nx._dispatchable
def find_cycle(G, source=None, orientation=None):
    """Returns a cycle found via depth-first traversal.

    The cycle is a list of edges indicating the cyclic path.
    Orientation of directed edges is controlled by `orientation`.

    Parameters
    ----------
    G : graph
        A directed/undirected graph/multigraph.

    source : node, list of nodes
        The node from which the traversal begins. If None, then a source
        is chosen arbitrarily and repeatedly until all edges from each node in
        the graph are searched.

    orientation : None | 'original' | 'reverse' | 'ignore' (default: None)
        For directed graphs and directed multigraphs, edge traversals need not
        respect the original orientation of the edges.
        When set to 'reverse' every edge is traversed in the reverse direction.
        When set to 'ignore', every edge is treated as undirected.
        When set to 'original', every edge is treated as directed.
        In all three cases, the yielded edge tuples add a last entry to
        indicate the direction in which that edge was traversed.
        If orientation is None, the yielded edge has no direction indicated.
        The direction is respected, but not reported.

    Returns
    -------
    edges : directed edges
        A list of directed edges indicating the path taken for the loop.
        If no cycle is found, then an exception is raised.
        For graphs, an edge is of the form `(u, v)` where `u` and `v`
        are the tail and head of the edge as determined by the traversal.
        For multigraphs, an edge is of the form `(u, v, key)`, where `key` is
        the key of the edge. When the graph is directed, then `u` and `v`
        are always in the order of the actual directed edge.
        If orientation is not None then the edge tuple is extended to include
        the direction of traversal ('forward' or 'reverse') on that edge.

    Raises
    ------
    NetworkXNoCycle
        If no cycle was found.

    Examples
    --------
    In this example, we construct a DAG and find, in the first call, that there
    are no directed cycles, and so an exception is raised. In the second call,
    we ignore edge orientations and find that there is an undirected cycle.
    Note that the second call finds a directed cycle while effectively
    traversing an undirected graph, and so, we found an "undirected cycle".
    This means that this DAG structure does not form a directed tree (which
    is also known as a polytree).

    >>> G = nx.DiGraph([(0, 1), (0, 2), (1, 2)])
    >>> nx.find_cycle(G, orientation="original")
    Traceback (most recent call last):
        ...
    networkx.exception.NetworkXNoCycle: No cycle found.
    >>> list(nx.find_cycle(G, orientation="ignore"))
    [(0, 1, 'forward'), (1, 2, 'forward'), (0, 2, 'reverse')]

    See Also
    --------
    simple_cycles
    """
    # tailhead normalizes an edge tuple from edge_dfs into (tail, head) in
    # the direction the traversal actually moved.
    if not G.is_directed() or orientation in (None, "original"):

        def tailhead(edge):
            return edge[:2]

    elif orientation == "reverse":

        def tailhead(edge):
            return edge[1], edge[0]

    elif orientation == "ignore":

        def tailhead(edge):
            # edge_dfs appends 'forward'/'reverse' as the last entry here.
            if edge[-1] == "reverse":
                return edge[1], edge[0]
            return edge[:2]

    explored = set()
    cycle = []
    final_node = None
    for start_node in G.nbunch_iter(source):
        if start_node in explored:
            # No loop is possible.
            continue

        edges = []
        # All nodes seen in this iteration of edge_dfs
        seen = {start_node}
        # Nodes in active path.
        active_nodes = {start_node}
        previous_head = None

        for edge in nx.edge_dfs(G, start_node, orientation):
            # Determine if this edge is a continuation of the active path.
            tail, head = tailhead(edge)
            if head in explored:
                # Then we've already explored it. No loop is possible.
                continue
            if previous_head is not None and tail != previous_head:
                # This edge results from backtracking.
                # Pop until we get a node whose head equals the current tail.
                # So for example, we might have:
                #  (0, 1), (1, 2), (2, 3), (1, 4)
                # which must become:
                #  (0, 1), (1, 4)
                while True:
                    try:
                        popped_edge = edges.pop()
                    except IndexError:
                        edges = []
                        active_nodes = {tail}
                        break
                    else:
                        popped_head = tailhead(popped_edge)[1]
                        active_nodes.remove(popped_head)

                    if edges:
                        last_head = tailhead(edges[-1])[1]
                        if tail == last_head:
                            break
            edges.append(edge)

            if head in active_nodes:
                # We have a loop!
                cycle.extend(edges)
                final_node = head
                break
            else:
                seen.add(head)
                active_nodes.add(head)
                previous_head = head

        if cycle:
            break
        else:
            explored.update(seen)

    else:
        assert len(cycle) == 0
        raise nx.exception.NetworkXNoCycle("No cycle found.")

    # We now have a list of edges which ends on a cycle.
    # So we need to remove from the beginning edges that are not relevant.

    for i, edge in enumerate(cycle):
        tail, head = tailhead(edge)
        if tail == final_node:
            break

    return cycle[i:]
|
| 1034 |
+
|
| 1035 |
+
|
| 1036 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight")
def minimum_cycle_basis(G, weight=None):
    """Returns a minimum weight cycle basis for G

    Minimum weight means a cycle basis for which the total weight
    (length for unweighted graphs) of all the cycles is minimum.

    Parameters
    ----------
    G : NetworkX Graph
    weight: string
        name of the edge attribute to use for edge weights

    Returns
    -------
    A list of cycle lists.  Each cycle list is a list of nodes
    which forms a cycle (loop) in G.  Note that the nodes are not
    necessarily returned in an order by which they appear in the cycle

    Examples
    --------
    >>> G = nx.Graph()
    >>> nx.add_cycle(G, [0, 1, 2, 3])
    >>> nx.add_cycle(G, [0, 3, 4, 5])
    >>> nx.minimum_cycle_basis(G)
    [[5, 4, 3, 0], [3, 2, 1, 0]]

    References:
        [1] Kavitha, Telikepalli, et al. "An O(m^2n) Algorithm for
        Minimum Cycle Basis of Graphs."
        http://link.springer.com/article/10.1007/s00453-007-9064-z
        [2] de Pina, J. 1995. Applications of shortest path methods.
        Ph.D. thesis, University of Amsterdam, Netherlands

    See Also
    --------
    simple_cycles, cycle_basis
    """
    # A cycle basis of a disconnected graph is the union of the bases of its
    # connected components, so compute each component's basis independently
    # and concatenate them in component order.
    basis = []
    for component_nodes in nx.connected_components(G):
        basis.extend(_min_cycle_basis(G.subgraph(component_nodes), weight))
    return basis
|
| 1081 |
+
|
| 1082 |
+
|
| 1083 |
+
def _min_cycle_basis(G, weight):
    # De Pina's minimum cycle basis algorithm for one connected graph
    # (see [1, 2] in minimum_cycle_basis): maintain a basis of "witness"
    # edge-vectors over GF(2); repeatedly pop one and find the minimum-weight
    # cycle with odd overlap against it.
    cb = []
    # We extract the edges not in a spanning tree. We do not really need a
    # *minimum* spanning tree. That is why we call the next function with
    # weight=None. Depending on implementation, it may be faster as well
    tree_edges = list(nx.minimum_spanning_edges(G, weight=None, data=False))
    # Chords: edges outside the tree, with both orientations excluded since
    # G is undirected.  One basis cycle exists per chord.
    chords = G.edges - tree_edges - {(v, u) for u, v in tree_edges}

    # We maintain a set of vectors orthogonal to sofar found cycles
    set_orth = [{edge} for edge in chords]
    while set_orth:
        base = set_orth.pop()
        # kth cycle is "parallel" to kth vector in set_orth
        cycle_edges = _min_cycle(G, base, weight)
        cb.append([v for u, v in cycle_edges])

        # now update set_orth so that k+1,k+2... th elements are
        # orthogonal to the newly found cycle, as per [p. 336, 1]
        # (the symmetric difference orth ^ base over GF(2), applied only when
        # orth has odd overlap with the new cycle; edge orientation is
        # normalized by also checking each edge's reversal)
        set_orth = [
            (
                {e for e in orth if e not in base if e[::-1] not in base}
                | {e for e in base if e not in orth if e[::-1] not in orth}
            )
            if sum((e in orth or e[::-1] in orth) for e in cycle_edges) % 2
            else orth
            for orth in set_orth
        ]
    return cb
|
| 1111 |
+
|
| 1112 |
+
|
| 1113 |
+
def _min_cycle(G, orth, weight):
    """
    Computes the minimum weight cycle in G,
    orthogonal to the vector orth as per [p. 338, 1]
    Use (u, 1) to indicate the lifted copy of u (denoted u' in paper).
    """
    Gi = nx.Graph()

    # Add 2 copies of each edge in G to Gi.
    # If edge is in orth, add cross edge; otherwise in-plane edge
    # A walk in Gi switches planes exactly when it crosses an orth edge, so a
    # path from n to its lifted copy (n, 1) crosses orth an odd number of
    # times — i.e. it projects to a cycle with odd overlap with orth.
    for u, v, wt in G.edges(data=weight, default=1):
        if (u, v) in orth or (v, u) in orth:
            Gi.add_edges_from([(u, (v, 1)), ((u, 1), v)], Gi_weight=wt)
        else:
            Gi.add_edges_from([(u, v), ((u, 1), (v, 1))], Gi_weight=wt)

    # find the shortest length in Gi between n and (n, 1) for each n
    # Note: Use "Gi_weight" for name of weight attribute
    spl = nx.shortest_path_length
    lift = {n: spl(Gi, source=n, target=(n, 1), weight="Gi_weight") for n in G}

    # Now compute that short path in Gi, which translates to a cycle in G
    start = min(lift, key=lift.get)
    end = (start, 1)
    min_path_i = nx.shortest_path(Gi, source=start, target=end, weight="Gi_weight")

    # Now we obtain the actual path, re-map nodes in Gi to those in G
    min_path = [n if n in G else n[0] for n in min_path_i]

    # Now remove the edges that occur two times
    # two passes: flag which edges get kept, then build it
    # First pass: toggle each edge in/out of edgeset so that an edge survives
    # only if it appears an odd number of times (in either orientation).
    edgelist = list(pairwise(min_path))
    edgeset = set()
    for e in edgelist:
        if e in edgeset:
            edgeset.remove(e)
        elif e[::-1] in edgeset:
            edgeset.remove(e[::-1])
        else:
            edgeset.add(e)

    # Second pass: emit surviving edges in path order, each at most once.
    min_edgelist = []
    for e in edgelist:
        if e in edgeset:
            min_edgelist.append(e)
            edgeset.remove(e)
        elif e[::-1] in edgeset:
            min_edgelist.append(e[::-1])
            edgeset.remove(e[::-1])

    return min_edgelist
|
| 1164 |
+
|
| 1165 |
+
|
| 1166 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def girth(G):
    """Returns the girth of the graph.

    The girth of a graph is the length of its shortest cycle, or infinity if
    the graph is acyclic. The algorithm follows the description given on the
    Wikipedia page [1]_, and runs in time O(mn) on a graph with m edges and n
    nodes.

    Parameters
    ----------
    G : NetworkX Graph

    Returns
    -------
    int or math.inf

    Examples
    --------
    All examples below (except P_5) can easily be checked using Wikipedia,
    which has a page for each of these famous graphs.

    >>> nx.girth(nx.chvatal_graph())
    4
    >>> nx.girth(nx.tutte_graph())
    4
    >>> nx.girth(nx.petersen_graph())
    5
    >>> nx.girth(nx.heawood_graph())
    6
    >>> nx.girth(nx.pappus_graph())
    6
    >>> nx.girth(nx.path_graph(5))
    inf

    References
    ----------
    .. [1] `Wikipedia: Girth <https://en.wikipedia.org/wiki/Girth_(graph_theory)>`_

    """
    best = limit = inf
    TREE = nx.algorithms.traversal.breadth_first_search.TREE_EDGE
    LEVEL = nx.algorithms.traversal.breadth_first_search.LEVEL_EDGE
    for source in G:
        # BFS from `source`, tracking node depths; any non-tree edge closes a
        # cycle through the BFS tree.  Since only the shortest cycle matters,
        # abandon the search once the frontier is too deep to improve on it.
        depth_of = {source: 0}
        for u, v, kind in nx.bfs_labeled_edges(G, source):
            d_u = depth_of[u]
            if d_u > limit:
                break
            if kind is TREE:
                depth_of[v] = d_u + 1
            else:
                # A level edge joins two nodes at equal depth d_u, giving an
                # odd cycle of length 2*d_u + 1; a forward edge joins depths
                # d_u and d_u + 1, giving an even cycle of length 2*d_u + 2.
                same_level = kind is LEVEL
                cycle_len = 2 * d_u + 2 - same_level
                if cycle_len < best:
                    best = cycle_len
                    limit = d_u - same_level
    return best
|
phi4/lib/python3.10/site-packages/networkx/algorithms/dag.py
ADDED
|
@@ -0,0 +1,1418 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Algorithms for directed acyclic graphs (DAGs).
|
| 2 |
+
|
| 3 |
+
Note that most of these functions are only guaranteed to work for DAGs.
|
| 4 |
+
In general, these functions do not check for acyclic-ness, so it is up
|
| 5 |
+
to the user to check for that.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import heapq
|
| 9 |
+
from collections import deque
|
| 10 |
+
from functools import partial
|
| 11 |
+
from itertools import chain, combinations, product, starmap
|
| 12 |
+
from math import gcd
|
| 13 |
+
|
| 14 |
+
import networkx as nx
|
| 15 |
+
from networkx.utils import arbitrary_element, not_implemented_for, pairwise
|
| 16 |
+
|
| 17 |
+
__all__ = [
|
| 18 |
+
"descendants",
|
| 19 |
+
"ancestors",
|
| 20 |
+
"topological_sort",
|
| 21 |
+
"lexicographical_topological_sort",
|
| 22 |
+
"all_topological_sorts",
|
| 23 |
+
"topological_generations",
|
| 24 |
+
"is_directed_acyclic_graph",
|
| 25 |
+
"is_aperiodic",
|
| 26 |
+
"transitive_closure",
|
| 27 |
+
"transitive_closure_dag",
|
| 28 |
+
"transitive_reduction",
|
| 29 |
+
"antichains",
|
| 30 |
+
"dag_longest_path",
|
| 31 |
+
"dag_longest_path_length",
|
| 32 |
+
"dag_to_branching",
|
| 33 |
+
"compute_v_structures",
|
| 34 |
+
]
|
| 35 |
+
|
| 36 |
+
chaini = chain.from_iterable
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
@nx._dispatchable
def descendants(G, source):
    """Return the set of all nodes reachable from `source` in `G`.

    Parameters
    ----------
    G : NetworkX Graph
    source : node in `G`

    Returns
    -------
    set
        Every node reachable from `source` (not including `source` itself).

    Raises
    ------
    NetworkXError
        If node `source` is not in `G`.

    Examples
    --------
    >>> DG = nx.path_graph(5, create_using=nx.DiGraph)
    >>> sorted(nx.descendants(DG, 2))
    [3, 4]

    The `source` node is not a descendant of itself, but can be included manually:

    >>> sorted(nx.descendants(DG, 2) | {2})
    [2, 3, 4]

    See also
    --------
    ancestors
    """
    # A node is a descendant exactly when it appears as the far endpoint of
    # some BFS edge out of `source`; `source` itself is never a BFS child,
    # so it is naturally excluded.
    reached = set()
    for _parent, node in nx.bfs_edges(G, source):
        reached.add(node)
    return reached
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
@nx._dispatchable
def ancestors(G, source):
    """Return the set of all nodes having a path to `source` in `G`.

    Parameters
    ----------
    G : NetworkX Graph
    source : node in `G`

    Returns
    -------
    set
        Every node with a path to `source` (not including `source` itself).

    Raises
    ------
    NetworkXError
        If node `source` is not in `G`.

    Examples
    --------
    >>> DG = nx.path_graph(5, create_using=nx.DiGraph)
    >>> sorted(nx.ancestors(DG, 2))
    [0, 1]

    The `source` node is not an ancestor of itself, but can be included manually:

    >>> sorted(nx.ancestors(DG, 2) | {2})
    [0, 1, 2]

    See also
    --------
    descendants
    """
    # Searching the reversed graph from `source` visits exactly the nodes
    # that can reach `source` in the original orientation.
    reached = set()
    for _parent, node in nx.bfs_edges(G, source, reverse=True):
        reached.add(node)
    return reached
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
@nx._dispatchable
def has_cycle(G):
    """Decide whether the directed graph `G` contains a cycle.

    A topological sort exists if and only if the graph is acyclic, so we
    simply try to exhaust one and watch for the failure exception.
    """
    try:
        # Drain the topological-sort generator; only completion matters.
        for _ in topological_sort(G):
            pass
    except nx.NetworkXUnfeasible:
        return True
    return False
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
@nx._dispatchable
def is_directed_acyclic_graph(G):
    """Return True if the graph `G` is a directed acyclic graph (DAG).

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    bool
        True if `G` is a DAG, False otherwise

    Examples
    --------
    Undirected graph::

        >>> G = nx.Graph([(1, 2), (2, 3)])
        >>> nx.is_directed_acyclic_graph(G)
        False

    Directed graph with cycle::

        >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
        >>> nx.is_directed_acyclic_graph(G)
        False

    Directed acyclic graph::

        >>> G = nx.DiGraph([(1, 2), (2, 3)])
        >>> nx.is_directed_acyclic_graph(G)
        True

    See also
    --------
    topological_sort
    """
    # Undirected graphs are never DAGs; only for directed graphs do we pay
    # for the cycle check.
    if not G.is_directed():
        return False
    return not has_cycle(G)
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
@nx._dispatchable
def topological_generations(G):
    """Stratify a DAG into generations.

    A topological generation is a collection of nodes whose ancestors are all
    contained in earlier generations and whose descendants all fall in later
    ones.  Each node is placed in the earliest generation it can belong to.

    Parameters
    ----------
    G : NetworkX digraph
        A directed acyclic graph (DAG)

    Yields
    ------
    sets of nodes
        Yields sets of nodes representing each generation.

    Raises
    ------
    NetworkXError
        Generations are defined for directed graphs only. If the graph
        `G` is undirected, a :exc:`NetworkXError` is raised.

    NetworkXUnfeasible
        If `G` is not a directed acyclic graph (DAG) no topological generations
        exist and a :exc:`NetworkXUnfeasible` exception is raised. This can also
        be raised if `G` is changed while the returned iterator is being processed

    RuntimeError
        If `G` is changed while the returned iterator is being processed.

    Examples
    --------
    >>> DG = nx.DiGraph([(2, 1), (3, 1)])
    >>> [sorted(generation) for generation in nx.topological_generations(DG)]
    [[2, 3], [1]]

    Notes
    -----
    The generation in which a node resides can also be determined by taking the
    max-path-distance from the node to the farthest leaf node. That value can
    be obtained with this function using `enumerate(topological_generations(G))`.

    See also
    --------
    topological_sort
    """
    if not G.is_directed():
        raise nx.NetworkXError("Topological sort not defined on undirected graphs.")

    is_multi = G.is_multigraph()
    # Snapshot the in-degrees once; `remaining` tracks not-yet-satisfied
    # predecessors, `ready` holds nodes whose predecessors are all emitted.
    degree_snapshot = dict(G.in_degree())
    remaining = {v: d for v, d in degree_snapshot.items() if d > 0}
    ready = [v for v, d in degree_snapshot.items() if d == 0]

    while ready:
        generation, ready = ready, []
        for node in generation:
            if node not in G:
                raise RuntimeError("Graph changed during iteration")
            for child in G.neighbors(node):
                try:
                    # In a multigraph every parallel edge counts toward the
                    # child's in-degree.
                    remaining[child] -= len(G[node][child]) if is_multi else 1
                except KeyError as err:
                    raise RuntimeError("Graph changed during iteration") from err
                if remaining[child] == 0:
                    ready.append(child)
                    del remaining[child]
        yield generation

    # Anything left over never reached in-degree zero: a cycle (or mutation).
    if remaining:
        raise nx.NetworkXUnfeasible(
            "Graph contains a cycle or graph changed during iteration"
        )
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
@nx._dispatchable
def topological_sort(G):
    """Return a generator of nodes in topologically sorted order.

    A topological sort is a nonunique permutation of the nodes of a
    directed graph such that an edge from u to v implies that u
    appears before v in the topological sort order. This ordering is
    valid only if the graph has no directed cycles.

    Parameters
    ----------
    G : NetworkX digraph
        A directed acyclic graph (DAG)

    Yields
    ------
    nodes
        Yields the nodes in topological sorted order.

    Raises
    ------
    NetworkXError
        Topological sort is defined for directed graphs only. If the graph `G`
        is undirected, a :exc:`NetworkXError` is raised.

    NetworkXUnfeasible
        If `G` is not a directed acyclic graph (DAG) no topological sort exists
        and a :exc:`NetworkXUnfeasible` exception is raised. This can also be
        raised if `G` is changed while the returned iterator is being processed

    RuntimeError
        If `G` is changed while the returned iterator is being processed.

    Examples
    --------
    To get the reverse order of the topological sort:

    >>> DG = nx.DiGraph([(1, 2), (2, 3)])
    >>> list(reversed(list(nx.topological_sort(DG))))
    [3, 2, 1]

    If your DiGraph naturally has the edges representing tasks/inputs
    and nodes representing people/processes that initiate tasks, then
    topological_sort is not quite what you need. You will have to change
    the tasks to nodes with dependence reflected by edges. The result is
    a kind of topological sort of the edges. This can be done
    with :func:`networkx.line_graph` as follows:

    >>> list(nx.topological_sort(nx.line_graph(DG)))
    [(1, 2), (2, 3)]

    Notes
    -----
    This algorithm is based on a description and proof in
    "Introduction to Algorithms: A Creative Approach" [1]_ .

    See also
    --------
    is_directed_acyclic_graph, lexicographical_topological_sort

    References
    ----------
    .. [1] Manber, U. (1989).
       *Introduction to Algorithms - A Creative Approach.* Addison-Wesley.
    """
    # Flattening the topological generations yields a valid topological
    # order: every node's ancestors live in strictly earlier generations.
    for generation in nx.topological_generations(G):
        for node in generation:
            yield node
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
@nx._dispatchable
def lexicographical_topological_sort(G, key=None):
    """Generate the nodes in the unique lexicographical topological sort order.

    A topological sort alone is usually not unique; this function breaks the
    ties lexicographically (or by a caller-supplied `key` function) so that a
    single, reproducible ordering results.

    The `key` function follows the convention of Python's built-in `sort()`:
    it receives a node and returns the value to compare.  Supplying a `key`
    is also the remedy when the node names themselves are un-sortable (for
    example a mix of `int` and `str` nodes).

    Parameters
    ----------
    G : NetworkX digraph
        A directed acyclic graph (DAG)

    key : function, optional
        A function of one argument that converts a node name to a comparison
        key.  Defaults to the identity function.

    Yields
    ------
    nodes
        Yields the nodes of G in lexicographical topological sort order.

    Raises
    ------
    NetworkXError
        Topological sort is defined for directed graphs only. If the graph `G`
        is undirected, a :exc:`NetworkXError` is raised.

    NetworkXUnfeasible
        If `G` is not a directed acyclic graph (DAG) no topological sort exists
        and a :exc:`NetworkXUnfeasible` exception is raised. This can also be
        raised if `G` is changed while the returned iterator is being processed

    RuntimeError
        If `G` is changed while the returned iterator is being processed.

    TypeError
        Results from un-sortable node names.
        Consider using `key=` parameter to resolve ambiguities in the sort order.

    Examples
    --------
    >>> DG = nx.DiGraph([(2, 1), (2, 5), (1, 3), (1, 4), (5, 4)])
    >>> list(nx.lexicographical_topological_sort(DG))
    [2, 1, 3, 5, 4]
    >>> list(nx.lexicographical_topological_sort(DG, key=lambda x: -x))
    [2, 5, 1, 4, 3]

    The sort will fail for any graph with integer and string nodes, since
    comparing an integer to a string is not defined in Python:

    >>> DG = nx.DiGraph([(1, "red"), (3, "red"), (1, "green"), (2, "blue")])
    >>> list(nx.lexicographical_topological_sort(DG))
    Traceback (most recent call last):
    ...
    TypeError: '<' not supported between instances of 'str' and 'int'
    ...

    Incomparable nodes can be resolved using a `key` function.  This example
    key returns a tuple whose first element separates strings from integers,
    so each group is only ever compared with itself:

    >>> key = lambda node: (isinstance(node, str), node)
    >>> list(nx.lexicographical_topological_sort(DG, key=key))
    [1, 2, 3, 'blue', 'green', 'red']

    Notes
    -----
    This algorithm is based on a description and proof in
    "Introduction to Algorithms: A Creative Approach" [1]_ .

    See also
    --------
    topological_sort

    References
    ----------
    .. [1] Manber, U. (1989).
       *Introduction to Algorithms - A Creative Approach.* Addison-Wesley.
    """
    if not G.is_directed():
        raise nx.NetworkXError("Topological sort not defined on undirected graphs.")

    if key is None:

        def key(node):
            return node

    # Break ties between equal sort keys by insertion order, so the heap
    # never falls back to comparing the node objects themselves.
    insertion_order = {node: i for i, node in enumerate(G)}

    def heap_entry(node):
        return key(node), insertion_order[node], node

    pending_indegree = {v: d for v, d in G.in_degree() if d > 0}
    # Min-heap of nodes whose predecessors have all been emitted already.
    frontier = [heap_entry(v) for v, d in G.in_degree() if d == 0]
    heapq.heapify(frontier)

    while frontier:
        _, _, node = heapq.heappop(frontier)

        if node not in G:
            raise RuntimeError("Graph changed during iteration")
        for _, successor in G.edges(node):
            try:
                pending_indegree[successor] -= 1
            except KeyError as err:
                raise RuntimeError("Graph changed during iteration") from err
            if pending_indegree[successor] == 0:
                try:
                    heapq.heappush(frontier, heap_entry(successor))
                except TypeError as err:
                    raise TypeError(
                        f"{err}\nConsider using `key=` parameter to resolve ambiguities in the sort order."
                    )
                del pending_indegree[successor]

        yield node

    # Nodes whose in-degree never reached zero sit on a cycle (or the graph
    # was mutated mid-iteration).
    if pending_indegree:
        raise nx.NetworkXUnfeasible(
            "Graph contains a cycle or graph changed during iteration"
        )
|
| 453 |
+
|
| 454 |
+
|
| 455 |
+
@not_implemented_for("undirected")
@nx._dispatchable
def all_topological_sorts(G):
    """Return a generator of _all_ topological sorts of the directed graph G.

    A topological sort is a nonunique permutation of the nodes such that an
    edge from u to v implies that u appears before v in the topological sort
    order.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed graph

    Yields
    ------
    topological_sort_order : list
        a list of nodes in `G`, representing one of the topological sort orders

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed
    NetworkXUnfeasible
        If `G` is not acyclic

    Examples
    --------
    To enumerate all topological sorts of directed graph:

    >>> DG = nx.DiGraph([(1, 2), (2, 3), (2, 4)])
    >>> list(nx.all_topological_sorts(DG))
    [[1, 2, 4, 3], [1, 2, 3, 4]]

    Notes
    -----
    Implements an iterative version of the algorithm given in [1].

    References
    ----------
    .. [1] Knuth, Donald E., Szwarcfiter, Jayme L. (1974).
       "A Structured Program to Generate All Topological Sorting Arrangements"
       Information Processing Letters, Volume 2, Issue 6, 1974, Pages 153-157,
       ISSN 0020-0190,
       https://doi.org/10.1016/0020-0190(74)90001-5.
       Elsevier (North-Holland), Amsterdam
    """
    if not G.is_directed():
        raise nx.NetworkXError("Topological sort not defined on undirected graphs.")

    # `count` and `D` are named after the globals of the same name in [1]:
    # count[v] is the number of not-yet-erased edges into v, and D holds the
    # vertices whose count is currently zero (the candidates).
    count = dict(G.in_degree())
    D = deque(v for v, d in G.in_degree() if d == 0)
    # bases[k] is the first vertex ever placed at position k of the sort, so
    # we can detect when all candidates for that position have been tried.
    bases = []
    current_sort = []

    # do-while construct
    while True:
        assert all(count[v] == 0 for v in D)

        if len(current_sort) == len(G):
            yield list(current_sort)

            # Backtrack: unwind the sort until a position with untried
            # candidates is reached.
            while current_sort:
                assert len(bases) == len(current_sort)
                q = current_sort.pop()

                # "Restore" all edges (q, x).  Iterating over edges rather
                # than successors keeps count correct for multigraphs.
                for _, j in G.out_edges(q):
                    count[j] += 1
                    assert count[j] >= 0
                # Vertices whose count became positive are candidates no more.
                while D and count[D[-1]] > 0:
                    D.pop()

                # This rotation corresponds to a circular shift of D: when the
                # base vertex reappears at the back, every candidate for this
                # position has been chosen once.
                D.appendleft(q)
                if D[-1] == bases[-1]:
                    # all candidates exhausted here; drop the marker
                    bases.pop()
                else:
                    # an untried candidate remains at this position;
                    # stop unwinding
                    break

        else:
            if not D:
                raise nx.NetworkXUnfeasible("Graph contains a cycle.")

            # Fix the next vertex of the sort and "erase" all edges (q, x).
            # Again iterate edges, not successors, for multigraph correctness.
            q = D.pop()
            for _, j in G.out_edges(q):
                count[j] -= 1
                assert count[j] >= 0
                if count[j] == 0:
                    D.append(j)
            current_sort.append(q)

            # The base for the current position may not be fixed yet.
            if len(bases) < len(current_sort):
                bases.append(q)

        if not bases:
            break
|
| 573 |
+
|
| 574 |
+
|
| 575 |
+
@nx._dispatchable
def is_aperiodic(G):
    """Return True if the directed graph `G` is aperiodic.

    A directed graph is aperiodic if no integer k > 1 divides the length
    of every cycle in the graph.  An acyclic graph is therefore *not*
    aperiodic, since every integer trivially divides cycles of length 0.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed graph.

    Returns
    -------
    bool
        True if `G` is aperiodic, False otherwise.

    Raises
    ------
    NetworkXError
        If `G` is not directed.
    NetworkXPointlessConcept
        If `G` has no nodes.

    Notes
    -----
    Uses the breadth-first method of Jarvis and Shier [1]_, which runs in
    $O(m)$ time for $m$ edges.

    References
    ----------
    .. [1] Jarvis, J. P.; Shier, D. R. (1996),
       "Graph-theoretic analysis of finite Markov chains,"
       in Shier, D. R.; Wallenius, K. T., Applied Mathematical Modeling:
       A Multidisciplinary Approach, CRC Press.
    """
    if not G.is_directed():
        raise nx.NetworkXError("is_aperiodic not defined for undirected graphs")
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("Graph has no nodes.")

    start = arbitrary_element(G)
    # BFS from `start`; level[x] records the depth at which x was discovered.
    level = {start: 0}
    frontier = [start]
    # gcd of (level[u] - level[v] + 1) over every non-tree edge u -> v;
    # the graph is aperiodic iff this gcd ends up equal to 1.
    period = 0
    depth = 1
    while frontier:
        successors = []
        for node in frontier:
            for nbr in G[node]:
                if nbr in level:
                    # Non-tree edge: it closes a walk whose length
                    # difference feeds the period gcd.
                    period = gcd(period, level[node] - level[nbr] + 1)
                else:
                    # Tree edge: nbr is first reached at the next depth.
                    level[nbr] = depth
                    successors.append(nbr)
        frontier = successors
        depth += 1

    if len(level) == len(G):
        # Every node was reachable from `start`.
        return period == 1
    # Check the part of the graph not reachable from `start` separately.
    return period == 1 and nx.is_aperiodic(G.subgraph(set(G) - set(level)))
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def transitive_closure(G, reflexive=False):
    """Return the transitive closure of a graph.

    The transitive closure of G = (V, E) is the graph G+ = (V, E+) such
    that (v, w) is in E+ if and only if there is a path from v to w in G.

    Self-loop handling is controlled by `reflexive`: a reflexive closure
    treats the length-0 path from v to v as a path, the usual closure
    adds a self-loop only when v lies on a nontrivial cycle, and passing
    ``None`` suppresses self-loops entirely.

    Parameters
    ----------
    G : NetworkX Graph
        A directed/undirected graph/multigraph.
    reflexive : bool or None, optional (default: False)
        If True, trivial (length-0) cycles create self-loops.
        If False, only nontrivial cycles create self-loops.
        If None, no self-loops are created.

    Returns
    -------
    NetworkX graph
        The transitive closure of `G`.

    Raises
    ------
    NetworkXError
        If `reflexive` is not in `{None, True, False}`.

    References
    ----------
    .. [1] https://www.ics.uci.edu/~eppstein/PADS/PartialOrder.py
    """
    if reflexive not in {None, True, False}:
        raise nx.NetworkXError("Incorrect value for the parameter `reflexive`")

    TC = G.copy()
    for node in G:
        if reflexive is False:
            # edge_bfs visits nodes on nontrivial cycles through `node`
            # again, which is what produces the self-loops in this mode.
            # e[1] works for both 2-tuples and multigraph 3-tuples.
            reachable = (e[1] for e in nx.edge_bfs(G, node))
        elif reflexive is True:
            reachable = nx.descendants(G, node) | {node}
        else:  # reflexive is None: never add self-loops
            reachable = nx.descendants(G, node)
        TC.add_edges_from((node, w) for w in reachable if w not in TC[node])
    return TC
@not_implemented_for("undirected")
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def transitive_closure_dag(G, topo_order=None):
    """Return the transitive closure of a directed acyclic graph.

    Faster than :func:`transitive_closure`, but fails if `G` has a cycle.
    The transitive closure of G = (V, E) is the graph G+ = (V, E+) such
    that (v, w) is in E+ if and only if there is a non-null path from
    v to w in G.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed acyclic graph (DAG).
    topo_order : list or tuple, optional
        A topological order for `G` (computed if None).

    Returns
    -------
    NetworkX DiGraph
        The transitive closure of `G`.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed.
    NetworkXUnfeasible
        If `G` has a cycle.
    """
    order = list(topological_sort(G)) if topo_order is None else topo_order

    TC = G.copy()

    # Walk the vertices in reverse topological order; connecting each vertex
    # to everything exactly two hops away in the growing closure is enough,
    # because farther descendants have already been pulled to distance 2.
    for node in reversed(order):
        two_hop = nx.descendants_at_distance(TC, node, 2)
        TC.add_edges_from((node, target) for target in two_hop)

    return TC
@not_implemented_for("undirected")
@nx._dispatchable(returns_graph=True)
def transitive_reduction(G):
    """Return the transitive reduction of a directed acyclic graph.

    The transitive reduction of G = (V, E) is the graph G- = (V, E-) such
    that (v, w) is in E- if and only if (v, w) is in E and there is no
    path from v to w in G of length greater than 1.

    To avoid unnecessary copies, the returned graph carries no node or
    edge data; transfer attributes from `G` afterwards if needed.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed acyclic graph (DAG).

    Returns
    -------
    NetworkX DiGraph
        The transitive reduction of `G`.

    Raises
    ------
    NetworkXError
        If `G` is not a DAG, where the transitive reduction is not
        uniquely defined.

    References
    ----------
    https://en.wikipedia.org/wiki/Transitive_reduction
    """
    if not is_directed_acyclic_graph(G):
        msg = "Directed Acyclic Graph required for transitive_reduction"
        raise nx.NetworkXError(msg)
    TR = nx.DiGraph()
    TR.add_nodes_from(G.nodes())
    # desc_cache[v]: set of nodes reachable from v (strict descendants).
    desc_cache = {}
    # pending[v]: in-edges of v not yet processed; when it reaches zero the
    # cached descendant set of v can never be needed again and is evicted.
    pending = dict(G.in_degree)
    for src in G:
        direct = set(G[src])
        for nbr in G[src]:
            if nbr in direct:
                if nbr not in desc_cache:
                    desc_cache[nbr] = {y for _, y in nx.dfs_edges(G, nbr)}
                # Anything reachable through nbr is redundant as a
                # direct successor of src.
                direct -= desc_cache[nbr]
            pending[nbr] -= 1
            if pending[nbr] == 0:
                del desc_cache[nbr]
        TR.add_edges_from((src, nbr) for nbr in direct)
    return TR
@not_implemented_for("undirected")
@nx._dispatchable
def antichains(G, topo_order=None):
    """Generate antichains from a directed acyclic graph (DAG).

    An antichain is a subset of a partially ordered set in which any two
    elements are incomparable.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed acyclic graph (DAG).
    topo_order : list or tuple, optional
        A topological order for `G` (computed if None).

    Yields
    ------
    antichain : list
        A list of nodes in `G` forming an antichain.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed.
    NetworkXUnfeasible
        If `G` contains a cycle.

    Notes
    -----
    Originally developed by Peter Jipsen and Franco Saliola for the SAGE
    project and included in NetworkX with permission from the authors:
    https://github.com/sagemath/sage/blob/master/src/sage/combinat/posets/hasse_diagram.py

    References
    ----------
    .. [1] Free Lattices, by R. Freese, J. Jezek and J. B. Nation,
       AMS, Vol 42, 1995, p. 226.
    """
    if topo_order is None:
        topo_order = list(nx.topological_sort(G))

    TC = nx.transitive_closure_dag(G, topo_order)
    # Each work item pairs an antichain with the nodes that may still
    # extend it.
    pending = [([], list(reversed(topo_order)))]

    while pending:
        chain_free, candidates = pending.pop()
        # Invariant: `chain_free` is an antichain and every candidate is
        # incomparable with each of its members.
        yield chain_free
        while candidates:
            node = candidates.pop()
            survivors = [
                c for c in candidates if node not in TC[c] and c not in TC[node]
            ]
            pending.append((chain_free + [node], survivors))
@not_implemented_for("undirected")
@nx._dispatchable(edge_attrs={"weight": "default_weight"})
def dag_longest_path(G, weight="weight", default_weight=1, topo_order=None):
    """Return the longest path in a directed acyclic graph (DAG).

    If `G` has edges with the `weight` attribute, those values are used as
    edge weights; edges without the attribute weigh `default_weight`.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed acyclic graph (DAG).
    weight : str, optional
        Edge data key to use for weight.
    default_weight : int, optional
        The weight of edges that do not have a weight attribute.
    topo_order : list or tuple, optional
        A topological order for `G` (computed if None).  When multiple
        valid orders exist, this selects which longest path is returned.

    Returns
    -------
    list
        The nodes of a longest path.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed.

    See also
    --------
    dag_longest_path_length
    """
    if not G:
        return []

    if topo_order is None:
        topo_order = nx.topological_sort(G)

    multigraph = G.is_multigraph()

    def _edge_weight(data):
        # For a multigraph `data` maps edge keys to attribute dicts; take
        # the heaviest parallel edge.  Otherwise `data` is the attribute
        # dict itself.
        if multigraph:
            data = max(data.values(), key=lambda d: d.get(weight, default_weight))
        return data.get(weight, default_weight)

    # dist[v] = (length of the heaviest path ending at v, its predecessor).
    dist = {}
    for node in topo_order:
        candidates = [
            (dist[pred][0] + _edge_weight(data), pred)
            for pred, data in G.pred[node].items()
        ]
        best = max(candidates, key=lambda t: t[0]) if candidates else (0, node)
        # A negative best length means starting fresh at `node` is better;
        # a node starting its own path is recorded as its own predecessor.
        dist[node] = best if best[0] >= 0 else (0, node)

    # Trace predecessors back from the endpoint of the heaviest path; the
    # walk stops at a node that is its own predecessor.
    node = max(dist, key=lambda v: dist[v][0])
    path = []
    prev = None
    while prev != node:
        path.append(node)
        prev, node = node, dist[node][1]

    path.reverse()
    return path
@not_implemented_for("undirected")
@nx._dispatchable(edge_attrs={"weight": "default_weight"})
def dag_longest_path_length(G, weight="weight", default_weight=1):
    """Return the longest path length in a DAG.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed acyclic graph (DAG).
    weight : string, optional
        Edge data key to use for weight.
    default_weight : int, optional
        The weight of edges that do not have a weight attribute.

    Returns
    -------
    int
        Longest path length.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed.

    See also
    --------
    dag_longest_path
    """
    path = nx.dag_longest_path(G, weight, default_weight)
    if G.is_multigraph():
        # Between consecutive path nodes, count the heaviest parallel edge.
        return sum(
            max(d.get(weight, default_weight) for d in G[u][v].values())
            for u, v in pairwise(path)
        )
    return sum(G[u][v].get(weight, default_weight) for u, v in pairwise(path))
@nx._dispatchable
def root_to_leaf_paths(G):
    """Yield root-to-leaf paths in a directed acyclic graph.

    `G` must be a directed acyclic graph; otherwise the behavior of this
    function is undefined.  A "root" is a node of in-degree zero and a
    "leaf" a node of out-degree zero.  Each yielded path is a list of
    nodes from some root to some leaf.
    """
    roots = [v for v, d in G.in_degree() if d == 0]
    leaves = [v for v, d in G.out_degree() if d == 0]
    # One simple-path enumeration per (root, leaf) pair, chained together.
    return chaini(
        nx.all_simple_paths(G, root, leaf) for root in roots for leaf in leaves
    )
@not_implemented_for("multigraph")
@not_implemented_for("undirected")
@nx._dispatchable(returns_graph=True)
def dag_to_branching(G):
    """Return a branching representing all (overlapping) paths from
    root nodes to leaf nodes in the given directed acyclic graph.

    A *branching* is a directed forest in which each node has at most one
    parent (see :mod:`networkx.algorithms.tree.recognition`).  Each node
    of in-degree zero in `G` becomes the root of one arborescence, and
    there is one leaf per distinct root-to-leaf path in `G`.  A node with
    *k* parents in `G` is duplicated *k* times, once per parent, together
    with the sub-DAG below it.

    Each node of the result has a ``'source'`` attribute naming the node
    of `G` it corresponds to; no other attributes are copied.

    Parameters
    ----------
    G : NetworkX graph
        A directed acyclic graph.

    Returns
    -------
    DiGraph
        A branching in which root-to-leaf paths correspond one-to-one
        with root-to-leaf paths in `G`.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed, or if `G` is a multigraph.
    HasACycle
        If `G` is not acyclic.

    Notes
    -----
    Node labels in the result are generated fresh on each call and need
    not be integers; use
    :func:`networkx.convert_node_labels_to_integers` to relabel.  The
    implementation relies on :func:`networkx.prefix_tree` and inherits
    its limitations.
    """
    if has_cycle(G):
        raise nx.HasACycle("dag_to_branching is only defined for acyclic graphs")
    branching = nx.prefix_tree(root_to_leaf_paths(G))
    # prefix_tree adds a synthetic root (0) and sink (-1); drop both so
    # only nodes corresponding to paths in `G` remain.
    branching.remove_node(0)
    branching.remove_node(-1)
    return branching
@not_implemented_for("undirected")
@nx._dispatchable
def compute_v_structures(G):
    """Yield 3-node tuples representing the v-structures in `G`.

    .. deprecated:: 3.4

       `compute_v_structures` actually yields colliders. It will be
       removed in version 3.6. Use `nx.dag.v_structures` or
       `nx.dag.colliders` instead.

    Colliders are triples where two parent nodes point to the same child;
    v-structures are colliders whose parents are not adjacent.

    Parameters
    ----------
    G : graph
        A networkx `~networkx.DiGraph`.

    Yields
    ------
    A 3-tuple representation of a v-structure
        Each v-structure is a 3-tuple with the parent, collider, and
        other parent.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is an undirected graph.

    See Also
    --------
    v_structures
    colliders
    """
    import warnings

    # Deprecation message kept byte-identical to the released wording.
    msg = (
        "\n\n`compute_v_structures` actually yields colliders. It will be\n"
        "removed in version 3.6. Use `nx.dag.v_structures` or `nx.dag.colliders`\n"
        "instead.\n"
    )
    warnings.warn(msg, category=DeprecationWarning, stacklevel=5)

    return colliders(G)
@not_implemented_for("undirected")
@nx._dispatchable
def v_structures(G):
    """Yield 3-node tuples representing the v-structures in `G`.

    Colliders are triples in a directed graph where two parent nodes
    point to the same child node.  V-structures are colliders whose two
    parents are not adjacent.  In a causal-graph setting, the parents do
    not depend directly on each other, yet conditioning on the child
    induces an association between them.

    Parameters
    ----------
    G : graph
        A networkx `~networkx.DiGraph`.

    Yields
    ------
    A 3-tuple representation of a v-structure
        Each v-structure is a 3-tuple with the parent, collider, and
        other parent.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is an undirected graph.

    See Also
    --------
    colliders

    Notes
    -----
    Although written for DAGs, this also works on cyclic graphs, since
    colliders appear in the cyclic causal graph literature; test
    acyclicity yourself if you require it.

    References
    ----------
    .. [1] `Pearl's PRIMER <https://bayes.cs.ucla.edu/PRIMER/primer-ch2.pdf>`_
       Ch-2 page 50: v-structures def.
    """
    for parent_a, child, parent_b in colliders(G):
        # A collider qualifies as a v-structure only when its two parents
        # are non-adjacent (in either direction).
        adjacent = G.has_edge(parent_a, parent_b) or G.has_edge(parent_b, parent_a)
        if not adjacent:
            yield (parent_a, child, parent_b)
@not_implemented_for("undirected")
|
| 1363 |
+
@nx._dispatchable
|
| 1364 |
+
def colliders(G):
|
| 1365 |
+
"""Yields 3-node tuples that represent the colliders in `G`.
|
| 1366 |
+
|
| 1367 |
+
In a Directed Acyclic Graph (DAG), if you have three nodes A, B, and C, and
|
| 1368 |
+
there are edges from A to C and from B to C, then C is a collider [1]_ . In
|
| 1369 |
+
a causal graph setting, this means that both events A and B are "causing" C,
|
| 1370 |
+
and conditioning on C provide an association between A and B even if
|
| 1371 |
+
no direct causal relationship exists between A and B.
|
| 1372 |
+
|
| 1373 |
+
Parameters
|
| 1374 |
+
----------
|
| 1375 |
+
G : graph
|
| 1376 |
+
A networkx `~networkx.DiGraph`.
|
| 1377 |
+
|
| 1378 |
+
Yields
|
| 1379 |
+
------
|
| 1380 |
+
A 3-tuple representation of a collider
|
| 1381 |
+
Each collider is a 3-tuple with the parent, collider, and other parent.
|
| 1382 |
+
|
| 1383 |
+
Raises
|
| 1384 |
+
------
|
| 1385 |
+
NetworkXNotImplemented
|
| 1386 |
+
If `G` is an undirected graph.
|
| 1387 |
+
|
| 1388 |
+
Examples
|
| 1389 |
+
--------
|
| 1390 |
+
>>> G = nx.DiGraph([(1, 2), (0, 4), (3, 1), (2, 4), (0, 5), (4, 5), (1, 5)])
|
| 1391 |
+
>>> nx.is_directed_acyclic_graph(G)
|
| 1392 |
+
True
|
| 1393 |
+
>>> list(nx.dag.colliders(G))
|
| 1394 |
+
[(0, 4, 2), (0, 5, 4), (0, 5, 1), (4, 5, 1)]
|
| 1395 |
+
|
| 1396 |
+
See Also
|
| 1397 |
+
--------
|
| 1398 |
+
v_structures
|
| 1399 |
+
|
| 1400 |
+
Notes
|
| 1401 |
+
-----
|
| 1402 |
+
This function was written to be used on DAGs, however it works on cyclic graphs
|
| 1403 |
+
too. Since colliders are referred to in the cyclic causal graph literature
|
| 1404 |
+
[2]_ we allow cyclic graphs in this function. It is suggested that you test if
|
| 1405 |
+
your input graph is acyclic as in the example if you want that property.
|
| 1406 |
+
|
| 1407 |
+
References
|
| 1408 |
+
----------
|
| 1409 |
+
.. [1] `Wikipedia: Collider in causal graphs <https://en.wikipedia.org/wiki/Collider_(statistics)>`_
|
| 1410 |
+
.. [2] A Hyttinen, P.O. Hoyer, F. Eberhardt, M J ̈arvisalo, (2013)
|
| 1411 |
+
"Discovering cyclic causal models with latent variables:
|
| 1412 |
+
a general SAT-based procedure", UAI'13: Proceedings of the Twenty-Ninth
|
| 1413 |
+
Conference on Uncertainty in Artificial Intelligence, pg 301–310,
|
| 1414 |
+
`doi:10.5555/3023638.3023669 <https://dl.acm.org/doi/10.5555/3023638.3023669>`_
|
| 1415 |
+
"""
|
| 1416 |
+
for node in G.nodes:
|
| 1417 |
+
for p1, p2 in combinations(G.predecessors(node), 2):
|
| 1418 |
+
yield (p1, node, p2)
|
phi4/lib/python3.10/site-packages/networkx/algorithms/distance_measures.py
ADDED
|
@@ -0,0 +1,1022 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Graph diameter, radius, eccentricity and other properties."""
|
| 2 |
+
|
| 3 |
+
import math
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.utils import not_implemented_for
|
| 7 |
+
|
| 8 |
+
__all__ = [
|
| 9 |
+
"eccentricity",
|
| 10 |
+
"diameter",
|
| 11 |
+
"harmonic_diameter",
|
| 12 |
+
"radius",
|
| 13 |
+
"periphery",
|
| 14 |
+
"center",
|
| 15 |
+
"barycenter",
|
| 16 |
+
"resistance_distance",
|
| 17 |
+
"kemeny_constant",
|
| 18 |
+
"effective_graph_resistance",
|
| 19 |
+
]
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def _extrema_bounding(G, compute="diameter", weight=None):
|
| 23 |
+
"""Compute requested extreme distance metric of undirected graph G
|
| 24 |
+
|
| 25 |
+
Computation is based on smart lower and upper bounds, and in practice
|
| 26 |
+
linear in the number of nodes, rather than quadratic (except for some
|
| 27 |
+
border cases such as complete graphs or circle shaped graphs).
|
| 28 |
+
|
| 29 |
+
Parameters
|
| 30 |
+
----------
|
| 31 |
+
G : NetworkX graph
|
| 32 |
+
An undirected graph
|
| 33 |
+
|
| 34 |
+
compute : string denoting the requesting metric
|
| 35 |
+
"diameter" for the maximal eccentricity value,
|
| 36 |
+
"radius" for the minimal eccentricity value,
|
| 37 |
+
"periphery" for the set of nodes with eccentricity equal to the diameter,
|
| 38 |
+
"center" for the set of nodes with eccentricity equal to the radius,
|
| 39 |
+
"eccentricities" for the maximum distance from each node to all other nodes in G
|
| 40 |
+
|
| 41 |
+
weight : string, function, or None
|
| 42 |
+
If this is a string, then edge weights will be accessed via the
|
| 43 |
+
edge attribute with this key (that is, the weight of the edge
|
| 44 |
+
joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
|
| 45 |
+
such edge attribute exists, the weight of the edge is assumed to
|
| 46 |
+
be one.
|
| 47 |
+
|
| 48 |
+
If this is a function, the weight of an edge is the value
|
| 49 |
+
returned by the function. The function must accept exactly three
|
| 50 |
+
positional arguments: the two endpoints of an edge and the
|
| 51 |
+
dictionary of edge attributes for that edge. The function must
|
| 52 |
+
return a number.
|
| 53 |
+
|
| 54 |
+
If this is None, every edge has weight/distance/cost 1.
|
| 55 |
+
|
| 56 |
+
Weights stored as floating point values can lead to small round-off
|
| 57 |
+
errors in distances. Use integer weights to avoid this.
|
| 58 |
+
|
| 59 |
+
Weights should be positive, since they are distances.
|
| 60 |
+
|
| 61 |
+
Returns
|
| 62 |
+
-------
|
| 63 |
+
value : value of the requested metric
|
| 64 |
+
int for "diameter" and "radius" or
|
| 65 |
+
list of nodes for "center" and "periphery" or
|
| 66 |
+
dictionary of eccentricity values keyed by node for "eccentricities"
|
| 67 |
+
|
| 68 |
+
Raises
|
| 69 |
+
------
|
| 70 |
+
NetworkXError
|
| 71 |
+
If the graph consists of multiple components
|
| 72 |
+
ValueError
|
| 73 |
+
If `compute` is not one of "diameter", "radius", "periphery", "center", or "eccentricities".
|
| 74 |
+
|
| 75 |
+
Notes
|
| 76 |
+
-----
|
| 77 |
+
This algorithm was proposed in [1]_ and discussed further in [2]_ and [3]_.
|
| 78 |
+
|
| 79 |
+
References
|
| 80 |
+
----------
|
| 81 |
+
.. [1] F. W. Takes, W. A. Kosters,
|
| 82 |
+
"Determining the diameter of small world networks."
|
| 83 |
+
Proceedings of the 20th ACM international conference on Information and knowledge management, 2011
|
| 84 |
+
https://dl.acm.org/doi/abs/10.1145/2063576.2063748
|
| 85 |
+
.. [2] F. W. Takes, W. A. Kosters,
|
| 86 |
+
"Computing the Eccentricity Distribution of Large Graphs."
|
| 87 |
+
Algorithms, 2013
|
| 88 |
+
https://www.mdpi.com/1999-4893/6/1/100
|
| 89 |
+
.. [3] M. Borassi, P. Crescenzi, M. Habib, W. A. Kosters, A. Marino, F. W. Takes,
|
| 90 |
+
"Fast diameter and radius BFS-based computation in (weakly connected) real-world graphs: With an application to the six degrees of separation games. "
|
| 91 |
+
Theoretical Computer Science, 2015
|
| 92 |
+
https://www.sciencedirect.com/science/article/pii/S0304397515001644
|
| 93 |
+
"""
|
| 94 |
+
# init variables
|
| 95 |
+
degrees = dict(G.degree()) # start with the highest degree node
|
| 96 |
+
minlowernode = max(degrees, key=degrees.get)
|
| 97 |
+
N = len(degrees) # number of nodes
|
| 98 |
+
# alternate between smallest lower and largest upper bound
|
| 99 |
+
high = False
|
| 100 |
+
# status variables
|
| 101 |
+
ecc_lower = dict.fromkeys(G, 0)
|
| 102 |
+
ecc_upper = dict.fromkeys(G, N)
|
| 103 |
+
candidates = set(G)
|
| 104 |
+
|
| 105 |
+
# (re)set bound extremes
|
| 106 |
+
minlower = N
|
| 107 |
+
maxlower = 0
|
| 108 |
+
minupper = N
|
| 109 |
+
maxupper = 0
|
| 110 |
+
|
| 111 |
+
# repeat the following until there are no more candidates
|
| 112 |
+
while candidates:
|
| 113 |
+
if high:
|
| 114 |
+
current = maxuppernode # select node with largest upper bound
|
| 115 |
+
else:
|
| 116 |
+
current = minlowernode # select node with smallest lower bound
|
| 117 |
+
high = not high
|
| 118 |
+
|
| 119 |
+
# get distances from/to current node and derive eccentricity
|
| 120 |
+
dist = nx.shortest_path_length(G, source=current, weight=weight)
|
| 121 |
+
|
| 122 |
+
if len(dist) != N:
|
| 123 |
+
msg = "Cannot compute metric because graph is not connected."
|
| 124 |
+
raise nx.NetworkXError(msg)
|
| 125 |
+
current_ecc = max(dist.values())
|
| 126 |
+
|
| 127 |
+
# print status update
|
| 128 |
+
# print ("ecc of " + str(current) + " (" + str(ecc_lower[current]) + "/"
|
| 129 |
+
# + str(ecc_upper[current]) + ", deg: " + str(dist[current]) + ") is "
|
| 130 |
+
# + str(current_ecc))
|
| 131 |
+
# print(ecc_upper)
|
| 132 |
+
|
| 133 |
+
# (re)set bound extremes
|
| 134 |
+
maxuppernode = None
|
| 135 |
+
minlowernode = None
|
| 136 |
+
|
| 137 |
+
# update node bounds
|
| 138 |
+
for i in candidates:
|
| 139 |
+
# update eccentricity bounds
|
| 140 |
+
d = dist[i]
|
| 141 |
+
ecc_lower[i] = low = max(ecc_lower[i], max(d, (current_ecc - d)))
|
| 142 |
+
ecc_upper[i] = upp = min(ecc_upper[i], current_ecc + d)
|
| 143 |
+
|
| 144 |
+
# update min/max values of lower and upper bounds
|
| 145 |
+
minlower = min(ecc_lower[i], minlower)
|
| 146 |
+
maxlower = max(ecc_lower[i], maxlower)
|
| 147 |
+
minupper = min(ecc_upper[i], minupper)
|
| 148 |
+
maxupper = max(ecc_upper[i], maxupper)
|
| 149 |
+
|
| 150 |
+
# update candidate set
|
| 151 |
+
if compute == "diameter":
|
| 152 |
+
ruled_out = {
|
| 153 |
+
i
|
| 154 |
+
for i in candidates
|
| 155 |
+
if ecc_upper[i] <= maxlower and 2 * ecc_lower[i] >= maxupper
|
| 156 |
+
}
|
| 157 |
+
elif compute == "radius":
|
| 158 |
+
ruled_out = {
|
| 159 |
+
i
|
| 160 |
+
for i in candidates
|
| 161 |
+
if ecc_lower[i] >= minupper and ecc_upper[i] + 1 <= 2 * minlower
|
| 162 |
+
}
|
| 163 |
+
elif compute == "periphery":
|
| 164 |
+
ruled_out = {
|
| 165 |
+
i
|
| 166 |
+
for i in candidates
|
| 167 |
+
if ecc_upper[i] < maxlower
|
| 168 |
+
and (maxlower == maxupper or ecc_lower[i] > maxupper)
|
| 169 |
+
}
|
| 170 |
+
elif compute == "center":
|
| 171 |
+
ruled_out = {
|
| 172 |
+
i
|
| 173 |
+
for i in candidates
|
| 174 |
+
if ecc_lower[i] > minupper
|
| 175 |
+
and (minlower == minupper or ecc_upper[i] + 1 < 2 * minlower)
|
| 176 |
+
}
|
| 177 |
+
elif compute == "eccentricities":
|
| 178 |
+
ruled_out = set()
|
| 179 |
+
else:
|
| 180 |
+
msg = "compute must be one of 'diameter', 'radius', 'periphery', 'center', 'eccentricities'"
|
| 181 |
+
raise ValueError(msg)
|
| 182 |
+
|
| 183 |
+
ruled_out.update(i for i in candidates if ecc_lower[i] == ecc_upper[i])
|
| 184 |
+
candidates -= ruled_out
|
| 185 |
+
|
| 186 |
+
# for i in ruled_out:
|
| 187 |
+
# print("removing %g: ecc_u: %g maxl: %g ecc_l: %g maxu: %g"%
|
| 188 |
+
# (i,ecc_upper[i],maxlower,ecc_lower[i],maxupper))
|
| 189 |
+
# print("node %g: ecc_u: %g maxl: %g ecc_l: %g maxu: %g"%
|
| 190 |
+
# (4,ecc_upper[4],maxlower,ecc_lower[4],maxupper))
|
| 191 |
+
# print("NODE 4: %g"%(ecc_upper[4] <= maxlower))
|
| 192 |
+
# print("NODE 4: %g"%(2 * ecc_lower[4] >= maxupper))
|
| 193 |
+
# print("NODE 4: %g"%(ecc_upper[4] <= maxlower
|
| 194 |
+
# and 2 * ecc_lower[4] >= maxupper))
|
| 195 |
+
|
| 196 |
+
# updating maxuppernode and minlowernode for selection in next round
|
| 197 |
+
for i in candidates:
|
| 198 |
+
if (
|
| 199 |
+
minlowernode is None
|
| 200 |
+
or (
|
| 201 |
+
ecc_lower[i] == ecc_lower[minlowernode]
|
| 202 |
+
and degrees[i] > degrees[minlowernode]
|
| 203 |
+
)
|
| 204 |
+
or (ecc_lower[i] < ecc_lower[minlowernode])
|
| 205 |
+
):
|
| 206 |
+
minlowernode = i
|
| 207 |
+
|
| 208 |
+
if (
|
| 209 |
+
maxuppernode is None
|
| 210 |
+
or (
|
| 211 |
+
ecc_upper[i] == ecc_upper[maxuppernode]
|
| 212 |
+
and degrees[i] > degrees[maxuppernode]
|
| 213 |
+
)
|
| 214 |
+
or (ecc_upper[i] > ecc_upper[maxuppernode])
|
| 215 |
+
):
|
| 216 |
+
maxuppernode = i
|
| 217 |
+
|
| 218 |
+
# print status update
|
| 219 |
+
# print (" min=" + str(minlower) + "/" + str(minupper) +
|
| 220 |
+
# " max=" + str(maxlower) + "/" + str(maxupper) +
|
| 221 |
+
# " candidates: " + str(len(candidates)))
|
| 222 |
+
# print("cand:",candidates)
|
| 223 |
+
# print("ecc_l",ecc_lower)
|
| 224 |
+
# print("ecc_u",ecc_upper)
|
| 225 |
+
# wait = input("press Enter to continue")
|
| 226 |
+
|
| 227 |
+
# return the correct value of the requested metric
|
| 228 |
+
if compute == "diameter":
|
| 229 |
+
return maxlower
|
| 230 |
+
if compute == "radius":
|
| 231 |
+
return minupper
|
| 232 |
+
if compute == "periphery":
|
| 233 |
+
p = [v for v in G if ecc_lower[v] == maxlower]
|
| 234 |
+
return p
|
| 235 |
+
if compute == "center":
|
| 236 |
+
c = [v for v in G if ecc_upper[v] == minupper]
|
| 237 |
+
return c
|
| 238 |
+
if compute == "eccentricities":
|
| 239 |
+
return ecc_lower
|
| 240 |
+
return None
|
| 241 |
+
|
| 242 |
+
|
| 243 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 244 |
+
def eccentricity(G, v=None, sp=None, weight=None):
|
| 245 |
+
"""Returns the eccentricity of nodes in G.
|
| 246 |
+
|
| 247 |
+
The eccentricity of a node v is the maximum distance from v to
|
| 248 |
+
all other nodes in G.
|
| 249 |
+
|
| 250 |
+
Parameters
|
| 251 |
+
----------
|
| 252 |
+
G : NetworkX graph
|
| 253 |
+
A graph
|
| 254 |
+
|
| 255 |
+
v : node, optional
|
| 256 |
+
Return value of specified node
|
| 257 |
+
|
| 258 |
+
sp : dict of dicts, optional
|
| 259 |
+
All pairs shortest path lengths as a dictionary of dictionaries
|
| 260 |
+
|
| 261 |
+
weight : string, function, or None (default=None)
|
| 262 |
+
If this is a string, then edge weights will be accessed via the
|
| 263 |
+
edge attribute with this key (that is, the weight of the edge
|
| 264 |
+
joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
|
| 265 |
+
such edge attribute exists, the weight of the edge is assumed to
|
| 266 |
+
be one.
|
| 267 |
+
|
| 268 |
+
If this is a function, the weight of an edge is the value
|
| 269 |
+
returned by the function. The function must accept exactly three
|
| 270 |
+
positional arguments: the two endpoints of an edge and the
|
| 271 |
+
dictionary of edge attributes for that edge. The function must
|
| 272 |
+
return a number.
|
| 273 |
+
|
| 274 |
+
If this is None, every edge has weight/distance/cost 1.
|
| 275 |
+
|
| 276 |
+
Weights stored as floating point values can lead to small round-off
|
| 277 |
+
errors in distances. Use integer weights to avoid this.
|
| 278 |
+
|
| 279 |
+
Weights should be positive, since they are distances.
|
| 280 |
+
|
| 281 |
+
Returns
|
| 282 |
+
-------
|
| 283 |
+
ecc : dictionary
|
| 284 |
+
A dictionary of eccentricity values keyed by node.
|
| 285 |
+
|
| 286 |
+
Examples
|
| 287 |
+
--------
|
| 288 |
+
>>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
|
| 289 |
+
>>> dict(nx.eccentricity(G))
|
| 290 |
+
{1: 2, 2: 3, 3: 2, 4: 2, 5: 3}
|
| 291 |
+
|
| 292 |
+
>>> dict(
|
| 293 |
+
... nx.eccentricity(G, v=[1, 5])
|
| 294 |
+
... ) # This returns the eccentricity of node 1 & 5
|
| 295 |
+
{1: 2, 5: 3}
|
| 296 |
+
|
| 297 |
+
"""
|
| 298 |
+
# if v is None: # none, use entire graph
|
| 299 |
+
# nodes=G.nodes()
|
| 300 |
+
# elif v in G: # is v a single node
|
| 301 |
+
# nodes=[v]
|
| 302 |
+
# else: # assume v is a container of nodes
|
| 303 |
+
# nodes=v
|
| 304 |
+
order = G.order()
|
| 305 |
+
e = {}
|
| 306 |
+
for n in G.nbunch_iter(v):
|
| 307 |
+
if sp is None:
|
| 308 |
+
length = nx.shortest_path_length(G, source=n, weight=weight)
|
| 309 |
+
|
| 310 |
+
L = len(length)
|
| 311 |
+
else:
|
| 312 |
+
try:
|
| 313 |
+
length = sp[n]
|
| 314 |
+
L = len(length)
|
| 315 |
+
except TypeError as err:
|
| 316 |
+
raise nx.NetworkXError('Format of "sp" is invalid.') from err
|
| 317 |
+
if L != order:
|
| 318 |
+
if G.is_directed():
|
| 319 |
+
msg = (
|
| 320 |
+
"Found infinite path length because the digraph is not"
|
| 321 |
+
" strongly connected"
|
| 322 |
+
)
|
| 323 |
+
else:
|
| 324 |
+
msg = "Found infinite path length because the graph is not" " connected"
|
| 325 |
+
raise nx.NetworkXError(msg)
|
| 326 |
+
|
| 327 |
+
e[n] = max(length.values())
|
| 328 |
+
|
| 329 |
+
if v in G:
|
| 330 |
+
return e[v] # return single value
|
| 331 |
+
return e
|
| 332 |
+
|
| 333 |
+
|
| 334 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 335 |
+
def diameter(G, e=None, usebounds=False, weight=None):
|
| 336 |
+
"""Returns the diameter of the graph G.
|
| 337 |
+
|
| 338 |
+
The diameter is the maximum eccentricity.
|
| 339 |
+
|
| 340 |
+
Parameters
|
| 341 |
+
----------
|
| 342 |
+
G : NetworkX graph
|
| 343 |
+
A graph
|
| 344 |
+
|
| 345 |
+
e : eccentricity dictionary, optional
|
| 346 |
+
A precomputed dictionary of eccentricities.
|
| 347 |
+
|
| 348 |
+
weight : string, function, or None
|
| 349 |
+
If this is a string, then edge weights will be accessed via the
|
| 350 |
+
edge attribute with this key (that is, the weight of the edge
|
| 351 |
+
joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
|
| 352 |
+
such edge attribute exists, the weight of the edge is assumed to
|
| 353 |
+
be one.
|
| 354 |
+
|
| 355 |
+
If this is a function, the weight of an edge is the value
|
| 356 |
+
returned by the function. The function must accept exactly three
|
| 357 |
+
positional arguments: the two endpoints of an edge and the
|
| 358 |
+
dictionary of edge attributes for that edge. The function must
|
| 359 |
+
return a number.
|
| 360 |
+
|
| 361 |
+
If this is None, every edge has weight/distance/cost 1.
|
| 362 |
+
|
| 363 |
+
Weights stored as floating point values can lead to small round-off
|
| 364 |
+
errors in distances. Use integer weights to avoid this.
|
| 365 |
+
|
| 366 |
+
Weights should be positive, since they are distances.
|
| 367 |
+
|
| 368 |
+
Returns
|
| 369 |
+
-------
|
| 370 |
+
d : integer
|
| 371 |
+
Diameter of graph
|
| 372 |
+
|
| 373 |
+
Examples
|
| 374 |
+
--------
|
| 375 |
+
>>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
|
| 376 |
+
>>> nx.diameter(G)
|
| 377 |
+
3
|
| 378 |
+
|
| 379 |
+
See Also
|
| 380 |
+
--------
|
| 381 |
+
eccentricity
|
| 382 |
+
"""
|
| 383 |
+
if usebounds is True and e is None and not G.is_directed():
|
| 384 |
+
return _extrema_bounding(G, compute="diameter", weight=weight)
|
| 385 |
+
if e is None:
|
| 386 |
+
e = eccentricity(G, weight=weight)
|
| 387 |
+
return max(e.values())
|
| 388 |
+
|
| 389 |
+
|
| 390 |
+
@nx._dispatchable
|
| 391 |
+
def harmonic_diameter(G, sp=None):
|
| 392 |
+
"""Returns the harmonic diameter of the graph G.
|
| 393 |
+
|
| 394 |
+
The harmonic diameter of a graph is the harmonic mean of the distances
|
| 395 |
+
between all pairs of distinct vertices. Graphs that are not strongly
|
| 396 |
+
connected have infinite diameter and mean distance, making such
|
| 397 |
+
measures not useful. Restricting the diameter or mean distance to
|
| 398 |
+
finite distances yields paradoxical values (e.g., a perfect match
|
| 399 |
+
would have diameter one). The harmonic mean handles gracefully
|
| 400 |
+
infinite distances (e.g., a perfect match has harmonic diameter equal
|
| 401 |
+
to the number of vertices minus one), making it possible to assign a
|
| 402 |
+
meaningful value to all graphs.
|
| 403 |
+
|
| 404 |
+
Note that in [1] the harmonic diameter is called "connectivity length":
|
| 405 |
+
however, "harmonic diameter" is a more standard name from the
|
| 406 |
+
theory of metric spaces. The name "harmonic mean distance" is perhaps
|
| 407 |
+
a more descriptive name, but is not used in the literature, so we use the
|
| 408 |
+
name "harmonic diameter" here.
|
| 409 |
+
|
| 410 |
+
Parameters
|
| 411 |
+
----------
|
| 412 |
+
G : NetworkX graph
|
| 413 |
+
A graph
|
| 414 |
+
|
| 415 |
+
sp : dict of dicts, optional
|
| 416 |
+
All-pairs shortest path lengths as a dictionary of dictionaries
|
| 417 |
+
|
| 418 |
+
Returns
|
| 419 |
+
-------
|
| 420 |
+
hd : float
|
| 421 |
+
Harmonic diameter of graph
|
| 422 |
+
|
| 423 |
+
References
|
| 424 |
+
----------
|
| 425 |
+
.. [1] Massimo Marchiori and Vito Latora, "Harmony in the small-world".
|
| 426 |
+
*Physica A: Statistical Mechanics and Its Applications*
|
| 427 |
+
285(3-4), pages 539-546, 2000.
|
| 428 |
+
<https://doi.org/10.1016/S0378-4371(00)00311-3>
|
| 429 |
+
"""
|
| 430 |
+
order = G.order()
|
| 431 |
+
|
| 432 |
+
sum_invd = 0
|
| 433 |
+
for n in G:
|
| 434 |
+
if sp is None:
|
| 435 |
+
length = nx.single_source_shortest_path_length(G, n)
|
| 436 |
+
else:
|
| 437 |
+
try:
|
| 438 |
+
length = sp[n]
|
| 439 |
+
L = len(length)
|
| 440 |
+
except TypeError as err:
|
| 441 |
+
raise nx.NetworkXError('Format of "sp" is invalid.') from err
|
| 442 |
+
|
| 443 |
+
for d in length.values():
|
| 444 |
+
# Note that this will skip the zero distance from n to itself,
|
| 445 |
+
# as it should be, but also zero-weight paths in weighted graphs.
|
| 446 |
+
if d != 0:
|
| 447 |
+
sum_invd += 1 / d
|
| 448 |
+
|
| 449 |
+
if sum_invd != 0:
|
| 450 |
+
return order * (order - 1) / sum_invd
|
| 451 |
+
if order > 1:
|
| 452 |
+
return math.inf
|
| 453 |
+
return math.nan
|
| 454 |
+
|
| 455 |
+
|
| 456 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 457 |
+
def periphery(G, e=None, usebounds=False, weight=None):
|
| 458 |
+
"""Returns the periphery of the graph G.
|
| 459 |
+
|
| 460 |
+
The periphery is the set of nodes with eccentricity equal to the diameter.
|
| 461 |
+
|
| 462 |
+
Parameters
|
| 463 |
+
----------
|
| 464 |
+
G : NetworkX graph
|
| 465 |
+
A graph
|
| 466 |
+
|
| 467 |
+
e : eccentricity dictionary, optional
|
| 468 |
+
A precomputed dictionary of eccentricities.
|
| 469 |
+
|
| 470 |
+
weight : string, function, or None
|
| 471 |
+
If this is a string, then edge weights will be accessed via the
|
| 472 |
+
edge attribute with this key (that is, the weight of the edge
|
| 473 |
+
joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
|
| 474 |
+
such edge attribute exists, the weight of the edge is assumed to
|
| 475 |
+
be one.
|
| 476 |
+
|
| 477 |
+
If this is a function, the weight of an edge is the value
|
| 478 |
+
returned by the function. The function must accept exactly three
|
| 479 |
+
positional arguments: the two endpoints of an edge and the
|
| 480 |
+
dictionary of edge attributes for that edge. The function must
|
| 481 |
+
return a number.
|
| 482 |
+
|
| 483 |
+
If this is None, every edge has weight/distance/cost 1.
|
| 484 |
+
|
| 485 |
+
Weights stored as floating point values can lead to small round-off
|
| 486 |
+
errors in distances. Use integer weights to avoid this.
|
| 487 |
+
|
| 488 |
+
Weights should be positive, since they are distances.
|
| 489 |
+
|
| 490 |
+
Returns
|
| 491 |
+
-------
|
| 492 |
+
p : list
|
| 493 |
+
List of nodes in periphery
|
| 494 |
+
|
| 495 |
+
Examples
|
| 496 |
+
--------
|
| 497 |
+
>>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
|
| 498 |
+
>>> nx.periphery(G)
|
| 499 |
+
[2, 5]
|
| 500 |
+
|
| 501 |
+
See Also
|
| 502 |
+
--------
|
| 503 |
+
barycenter
|
| 504 |
+
center
|
| 505 |
+
"""
|
| 506 |
+
if usebounds is True and e is None and not G.is_directed():
|
| 507 |
+
return _extrema_bounding(G, compute="periphery", weight=weight)
|
| 508 |
+
if e is None:
|
| 509 |
+
e = eccentricity(G, weight=weight)
|
| 510 |
+
diameter = max(e.values())
|
| 511 |
+
p = [v for v in e if e[v] == diameter]
|
| 512 |
+
return p
|
| 513 |
+
|
| 514 |
+
|
| 515 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 516 |
+
def radius(G, e=None, usebounds=False, weight=None):
|
| 517 |
+
"""Returns the radius of the graph G.
|
| 518 |
+
|
| 519 |
+
The radius is the minimum eccentricity.
|
| 520 |
+
|
| 521 |
+
Parameters
|
| 522 |
+
----------
|
| 523 |
+
G : NetworkX graph
|
| 524 |
+
A graph
|
| 525 |
+
|
| 526 |
+
e : eccentricity dictionary, optional
|
| 527 |
+
A precomputed dictionary of eccentricities.
|
| 528 |
+
|
| 529 |
+
weight : string, function, or None
|
| 530 |
+
If this is a string, then edge weights will be accessed via the
|
| 531 |
+
edge attribute with this key (that is, the weight of the edge
|
| 532 |
+
joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
|
| 533 |
+
such edge attribute exists, the weight of the edge is assumed to
|
| 534 |
+
be one.
|
| 535 |
+
|
| 536 |
+
If this is a function, the weight of an edge is the value
|
| 537 |
+
returned by the function. The function must accept exactly three
|
| 538 |
+
positional arguments: the two endpoints of an edge and the
|
| 539 |
+
dictionary of edge attributes for that edge. The function must
|
| 540 |
+
return a number.
|
| 541 |
+
|
| 542 |
+
If this is None, every edge has weight/distance/cost 1.
|
| 543 |
+
|
| 544 |
+
Weights stored as floating point values can lead to small round-off
|
| 545 |
+
errors in distances. Use integer weights to avoid this.
|
| 546 |
+
|
| 547 |
+
Weights should be positive, since they are distances.
|
| 548 |
+
|
| 549 |
+
Returns
|
| 550 |
+
-------
|
| 551 |
+
r : integer
|
| 552 |
+
Radius of graph
|
| 553 |
+
|
| 554 |
+
Examples
|
| 555 |
+
--------
|
| 556 |
+
>>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
|
| 557 |
+
>>> nx.radius(G)
|
| 558 |
+
2
|
| 559 |
+
|
| 560 |
+
"""
|
| 561 |
+
if usebounds is True and e is None and not G.is_directed():
|
| 562 |
+
return _extrema_bounding(G, compute="radius", weight=weight)
|
| 563 |
+
if e is None:
|
| 564 |
+
e = eccentricity(G, weight=weight)
|
| 565 |
+
return min(e.values())
|
| 566 |
+
|
| 567 |
+
|
| 568 |
+
@nx._dispatchable(edge_attrs="weight")
def center(G, e=None, usebounds=False, weight=None):
    """Returns the center of the graph G.

    The center is the set of nodes whose eccentricity equals the radius.

    Parameters
    ----------
    G : NetworkX graph
        A graph

    e : eccentricity dictionary, optional
        A precomputed dictionary of eccentricities.

    weight : string, function, or None
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number.

        If this is None, every edge has weight/distance/cost 1.

        Weights stored as floating point values can lead to small round-off
        errors in distances. Use integer weights to avoid this.

        Weights should be positive, since they are distances.

    Returns
    -------
    c : list
        List of nodes in center

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> list(nx.center(G))
    [1, 3, 4]

    See Also
    --------
    barycenter
    periphery
    """
    # Fast path: the bounding heuristic applies only to undirected graphs
    # and only when eccentricities were not supplied by the caller.
    if usebounds is True and e is None and not G.is_directed():
        return _extrema_bounding(G, compute="center", weight=weight)
    ecc = e if e is not None else eccentricity(G, weight=weight)
    smallest = min(ecc.values())
    return [node for node, value in ecc.items() if value == smallest]
|
| 625 |
+
|
| 626 |
+
|
| 627 |
+
@nx._dispatchable(edge_attrs="weight", mutates_input={"attr": 2})
def barycenter(G, weight=None, attr=None, sp=None):
    r"""Calculate barycenter of a connected graph, optionally with edge weights.

    The :dfn:`barycenter` a
    :func:`connected <networkx.algorithms.components.is_connected>` graph
    :math:`G` is the subgraph induced by the set of its nodes :math:`v`
    minimizing the objective function

    .. math::

        \sum_{u \in V(G)} d_G(u, v),

    where :math:`d_G` is the (possibly weighted) :func:`path length
    <networkx.algorithms.shortest_paths.generic.shortest_path_length>`.
    The barycenter is also called the :dfn:`median`. See [West01]_, p. 78.

    Parameters
    ----------
    G : :class:`networkx.Graph`
        The connected graph :math:`G`.
    weight : :class:`str`, optional
        Passed through to
        :func:`~networkx.algorithms.shortest_paths.generic.shortest_path_length`.
    attr : :class:`str`, optional
        If given, write the value of the objective function to each node's
        `attr` attribute. Otherwise do not store the value.
    sp : dict of dicts, optional
        All pairs shortest path lengths as a dictionary of dictionaries

    Returns
    -------
    list
        Nodes of `G` that induce the barycenter of `G`.

    Raises
    ------
    NetworkXNoPath
        If `G` is disconnected. `G` may appear disconnected to
        :func:`barycenter` if `sp` is given but is missing shortest path
        lengths for any pairs.
    ValueError
        If `sp` and `weight` are both given.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> nx.barycenter(G)
    [1, 3, 4]

    See Also
    --------
    center
    periphery
    """
    if sp is None:
        sp_items = nx.shortest_path_length(G, weight=weight)
    else:
        sp_items = sp.items()
        # A precomputed table and a weight key are mutually exclusive.
        if weight is not None:
            raise ValueError("Cannot use both sp, weight arguments together")
    best = float("inf")
    best_nodes = []
    n = len(G)
    for node, dists in sp_items:
        # A node with fewer than n reachable targets proves disconnection.
        if len(dists) < n:
            raise nx.NetworkXNoPath(
                f"Input graph {G} is disconnected, so every induced subgraph "
                "has infinite barycentricity."
            )
        total = sum(dists.values())
        if attr is not None:
            G.nodes[node][attr] = total
        if total < best:
            best = total
            best_nodes = [node]
        elif total == best:
            best_nodes.append(node)
    if attr is not None:
        nx._clear_cache(G)
    return best_nodes
|
| 706 |
+
|
| 707 |
+
|
| 708 |
+
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def resistance_distance(G, nodeA=None, nodeB=None, weight=None, invert_weight=True):
    """Returns the resistance distance between pairs of nodes in graph G.

    The resistance distance between two nodes of a graph is akin to treating
    the graph as a grid of resistors with a resistance equal to the provided
    weight [1]_, [2]_.

    If weight is not provided, then a weight of 1 is used for all edges.

    If two nodes are the same, the resistance distance is zero.

    Parameters
    ----------
    G : NetworkX graph
        A graph

    nodeA : node or None, optional (default=None)
        A node within graph G.
        If None, compute resistance distance using all nodes as source nodes.

    nodeB : node or None, optional (default=None)
        A node within graph G.
        If None, compute resistance distance using all nodes as target nodes.

    weight : string or None, optional (default=None)
        The edge data key used to compute the resistance distance.
        If None, then each edge has weight 1.

    invert_weight : boolean (default=True)
        Proper calculation of resistance distance requires building the
        Laplacian matrix with the reciprocal of the weight. Not required
        if the weight is already inverted. Weight cannot be zero.

    Returns
    -------
    rd : dict or float
        If `nodeA` and `nodeB` are given, resistance distance between `nodeA`
        and `nodeB`. If `nodeA` or `nodeB` is unspecified (the default), a
        dictionary of nodes with resistance distances as the value.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a directed graph.

    NetworkXError
        If `G` is not connected, or contains no nodes,
        or `nodeA` is not in `G` or `nodeB` is not in `G`.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> round(nx.resistance_distance(G, 1, 3), 10)
    0.625

    Notes
    -----
    The implementation is based on Theorem A in [2]_. Self-loops are ignored.
    Multi-edges are contracted in one edge with weight equal to the harmonic sum of the weights.

    References
    ----------
    .. [1] Wikipedia
       "Resistance distance."
       https://en.wikipedia.org/wiki/Resistance_distance
    .. [2] D. J. Klein and M. Randic.
       Resistance distance.
       J. of Math. Chem. 12:81-95, 1993.
    """
    import numpy as np

    if len(G) == 0:
        raise nx.NetworkXError("Graph G must contain at least one node.")
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph G must be strongly connected.")
    if nodeA is not None and nodeA not in G:
        raise nx.NetworkXError("Node A is not in graph G.")
    if nodeB is not None and nodeB not in G:
        raise nx.NetworkXError("Node B is not in graph G.")

    # Copy so weight inversion below does not mutate the caller's graph.
    G = G.copy()
    node_list = list(G)
    # Map each node to its Laplacian row/column once; repeated
    # list.index() lookups inside the loops below would be O(n) each
    # (O(n^3) total for the all-pairs case).
    index = {n: i for i, n in enumerate(node_list)}

    # Invert weights: the Laplacian must be built from conductances
    # (reciprocal resistances).
    if invert_weight and weight is not None:
        if G.is_multigraph():
            for u, v, k, d in G.edges(keys=True, data=True):
                d[weight] = 1 / d[weight]
        else:
            for u, v, d in G.edges(data=True):
                d[weight] = 1 / d[weight]

    # Compute resistance distance using the pseudo-inverse of the Laplacian.
    # Self-loops are ignored (they cancel in the Laplacian).
    L = nx.laplacian_matrix(G, weight=weight).todense()
    Linv = np.linalg.pinv(L, hermitian=True)

    def _rd(i, j):
        # Two-point resistance from the pseudo-inverse (Theorem A in [2]).
        return Linv.item(i, i) + Linv.item(j, j) - Linv.item(i, j) - Linv.item(j, i)

    # Return relevant distances
    if nodeA is not None and nodeB is not None:
        return _rd(index[nodeA], index[nodeB])
    if nodeA is not None:
        i = index[nodeA]
        return {n: _rd(i, index[n]) for n in G}
    if nodeB is not None:
        j = index[nodeB]
        return {n: _rd(index[n], j) for n in G}
    return {n: {n2: _rd(index[n], index[n2]) for n2 in G} for n in G}
|
| 843 |
+
|
| 844 |
+
|
| 845 |
+
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def effective_graph_resistance(G, weight=None, invert_weight=True):
    """Returns the Effective graph resistance of G.

    Also known as the Kirchhoff index.

    The effective graph resistance is defined as the sum
    of the resistance distance of every node pair in G [1]_.

    If weight is not provided, then a weight of 1 is used for all edges.

    The effective graph resistance of a disconnected graph is infinite.

    Parameters
    ----------
    G : NetworkX graph
        A graph

    weight : string or None, optional (default=None)
        The edge data key used to compute the effective graph resistance.
        If None, then each edge has weight 1.

    invert_weight : boolean (default=True)
        Proper calculation of resistance distance requires building the
        Laplacian matrix with the reciprocal of the weight. Not required
        if the weight is already inverted. Weight cannot be zero.

    Returns
    -------
    RG : float
        The effective graph resistance of `G`.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a directed graph.

    NetworkXError
        If `G` does not contain any nodes.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> round(nx.effective_graph_resistance(G), 10)
    10.25

    Notes
    -----
    The implementation is based on Theorem 2.2 in [2]_. Self-loops are ignored.
    Multi-edges are contracted in one edge with weight equal to the harmonic sum of the weights.

    References
    ----------
    .. [1] Wolfram
       "Kirchhoff Index."
       https://mathworld.wolfram.com/KirchhoffIndex.html
    .. [2] W. Ellens, F. M. Spieksma, P. Van Mieghem, A. Jamakovic, R. E. Kooij.
       Effective graph resistance.
       Lin. Alg. Appl. 435:2491-2506, 2011.
    """
    import numpy as np

    if len(G) == 0:
        raise nx.NetworkXError("Graph G must contain at least one node.")

    # A disconnected graph has infinite effective graph resistance.
    if not nx.is_connected(G):
        return float("inf")

    # Work on a copy: inverting weights must not mutate the caller's graph.
    G = G.copy()
    if invert_weight and weight is not None:
        if G.is_multigraph():
            for *_, data in G.edges(keys=True, data=True):
                data[weight] = 1 / data[weight]
        else:
            for *_, data in G.edges(data=True):
                data[weight] = 1 / data[weight]

    # Laplacian eigenvalues, ascending; the smallest (zero) is dropped below.
    mu = np.sort(nx.laplacian_spectrum(G, weight=weight))

    # Kirchhoff index from the nonzero eigenvalues (Theorem 2.2 in [2]).
    # Self-loops are ignored by the Laplacian construction.
    return float(np.sum(1 / mu[1:]) * G.number_of_nodes())
|
| 931 |
+
|
| 932 |
+
|
| 933 |
+
@nx.utils.not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def kemeny_constant(G, *, weight=None):
    """Returns the Kemeny constant of the given graph.

    The *Kemeny constant* (or Kemeny's constant) of a graph `G`
    can be computed by regarding the graph as a Markov chain.
    The Kemeny constant is then the expected number of time steps
    to transition from a starting state i to a random destination state
    sampled from the Markov chain's stationary distribution.
    The Kemeny constant is independent of the chosen initial state [1]_.

    The Kemeny constant measures the time needed for spreading
    across a graph. Low values indicate a closely connected graph
    whereas high values indicate a spread-out graph.

    If weight is not provided, then a weight of 1 is used for all edges.

    Since `G` represents a Markov chain, the weights must be positive.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or None, optional (default=None)
        The edge data key used to compute the Kemeny constant.
        If None, then each edge has weight 1.

    Returns
    -------
    float
        The Kemeny constant of the graph `G`.

    Raises
    ------
    NetworkXNotImplemented
        If the graph `G` is directed.

    NetworkXError
        If the graph `G` is not connected, or contains no nodes,
        or has edges with negative weights.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> round(nx.kemeny_constant(G), 10)
    3.2

    Notes
    -----
    The implementation is based on equation (3.3) in [2]_.
    Self-loops are allowed and indicate a Markov chain where
    the state can remain the same. Multi-edges are contracted
    in one edge with weight equal to the sum of the weights.

    References
    ----------
    .. [1] Wikipedia
       "Kemeny's constant."
       https://en.wikipedia.org/wiki/Kemeny%27s_constant
    .. [2] Lovász L.
       Random walks on graphs: A survey.
       Paul Erdös is Eighty, vol. 2, Bolyai Society,
       Mathematical Studies, Keszthely, Hungary (1993), pp. 1-46
    """
    import numpy as np
    import scipy as sp

    if len(G) == 0:
        raise nx.NetworkXError("Graph G must contain at least one node.")
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph G must be connected.")
    if nx.is_negatively_weighted(G, weight=weight):
        raise nx.NetworkXError("The weights of graph G must be nonnegative.")

    # Build the symmetrized matrix H = D^{-1/2} A D^{-1/2}; it has the same
    # eigenvalues as the Markov transition matrix D^{-1} A.
    A = nx.adjacency_matrix(G, weight=weight)
    n, m = A.shape
    degrees = A.sum(axis=1)
    with np.errstate(divide="ignore"):
        inv_sqrt_deg = 1.0 / np.sqrt(degrees)
    # Isolated nodes produce infinities; zero them out.
    inv_sqrt_deg[np.isinf(inv_sqrt_deg)] = 0
    DH = sp.sparse.csr_array(sp.sparse.spdiags(inv_sqrt_deg, 0, m, n, format="csr"))
    H = DH @ (A @ DH)

    # Eigenvalues of H, ascending; the largest (1) is excluded below.
    eigenvalues = np.sort(sp.linalg.eigvalsh(H.todense()))

    # Kemeny constant via equation (3.3) in [2].
    return float(np.sum(1 / (1 - eigenvalues[:-1])))
|
phi4/lib/python3.10/site-packages/networkx/algorithms/distance_regular.py
ADDED
|
@@ -0,0 +1,238 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
=======================
|
| 3 |
+
Distance-regular graphs
|
| 4 |
+
=======================
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
from networkx.utils import not_implemented_for
|
| 9 |
+
|
| 10 |
+
from .distance_measures import diameter
|
| 11 |
+
|
| 12 |
+
__all__ = [
|
| 13 |
+
"is_distance_regular",
|
| 14 |
+
"is_strongly_regular",
|
| 15 |
+
"intersection_array",
|
| 16 |
+
"global_parameters",
|
| 17 |
+
]
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
@nx._dispatchable
def is_distance_regular(G):
    """Returns True if the graph is distance regular, False otherwise.

    A connected graph G is distance-regular if for any nodes x,y
    and any integers i,j=0,1,...,d (where d is the graph
    diameter), the number of vertices at distance i from x and
    distance j from y depends only on i,j and the graph distance
    between x and y, independently of the choice of x and y.

    Parameters
    ----------
    G: Networkx graph (undirected)

    Returns
    -------
    bool
        True if the graph is Distance Regular, False otherwise

    Examples
    --------
    >>> G = nx.hypercube_graph(6)
    >>> nx.is_distance_regular(G)
    True

    See Also
    --------
    intersection_array, global_parameters

    Notes
    -----
    For undirected and simple graphs only

    References
    ----------
    .. [1] Brouwer, A. E.; Cohen, A. M.; and Neumaier, A.
        Distance-Regular Graphs. New York: Springer-Verlag, 1989.
    .. [2] Weisstein, Eric W. "Distance-Regular Graph."
        http://mathworld.wolfram.com/Distance-RegularGraph.html

    """
    # A graph is distance regular exactly when an intersection array exists;
    # intersection_array raises NetworkXError otherwise.
    try:
        intersection_array(G)
    except nx.NetworkXError:
        return False
    return True
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def global_parameters(b, c):
    """Returns global parameters for a given intersection array.

    Given a distance-regular graph G with integers b_i, c_i,i = 0,....,d
    such that for any 2 vertices x,y in G at a distance i=d(x,y), there
    are exactly c_i neighbors of y at a distance of i-1 from x and b_i
    neighbors of y at a distance of i+1 from x.

    Thus, a distance regular graph has the global parameters,
    [[c_0,a_0,b_0],[c_1,a_1,b_1],......,[c_d,a_d,b_d]] for the
    intersection array [b_0,b_1,.....b_{d-1};c_1,c_2,.....c_d]
    where a_i+b_i+c_i=k , k= degree of every vertex.

    Parameters
    ----------
    b : list

    c : list

    Returns
    -------
    iterable
        An iterable over three tuples.

    Examples
    --------
    >>> G = nx.dodecahedral_graph()
    >>> b, c = nx.intersection_array(G)
    >>> list(nx.global_parameters(b, c))
    [(0, 0, 3), (1, 0, 2), (1, 1, 1), (1, 1, 1), (2, 0, 1), (3, 0, 0)]

    References
    ----------
    .. [1] Weisstein, Eric W. "Global Parameters."
       From MathWorld--A Wolfram Web Resource.
       http://mathworld.wolfram.com/GlobalParameters.html

    See Also
    --------
    intersection_array
    """
    # Every vertex has degree k = b_0; a_i = k - b_i - c_i. Padding with
    # b_d = 0 and c_0 = 0 aligns the two arrays for the zip.
    degree = b[0]
    return ((c_i, degree - b_i - c_i, b_i) for b_i, c_i in zip(b + [0], [0] + c))
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def intersection_array(G):
    """Returns the intersection array of a distance-regular graph.

    Given a distance-regular graph G with integers b_i, c_i,i = 0,....,d
    such that for any 2 vertices x,y in G at a distance i=d(x,y), there
    are exactly c_i neighbors of y at a distance of i-1 from x and b_i
    neighbors of y at a distance of i+1 from x.

    A distance regular graph's intersection array is given by,
    [b_0,b_1,.....b_{d-1};c_1,c_2,.....c_d]

    Parameters
    ----------
    G: Networkx graph (undirected)

    Returns
    -------
    b,c: tuple of lists

    Examples
    --------
    >>> G = nx.icosahedral_graph()
    >>> nx.intersection_array(G)
    ([5, 2, 1], [1, 2, 5])

    References
    ----------
    .. [1] Weisstein, Eric W. "Intersection Array."
       From MathWorld--A Wolfram Web Resource.
       http://mathworld.wolfram.com/IntersectionArray.html

    See Also
    --------
    global_parameters
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("Graph has no nodes.")
    # A distance-regular graph must be regular: every degree equals the first.
    degrees = iter(G.degree())
    _, k = next(degrees)
    for _, deg in degrees:
        if deg != k:
            raise nx.NetworkXError("Graph is not distance regular.")
        k = deg
    path_length = dict(nx.all_pairs_shortest_path_length(G))
    diameter = max(max(dists.values()) for dists in path_length.values())
    bint = {}  # 'b' intersection array
    cint = {}  # 'c' intersection array
    for u in G:
        for v in G:
            try:
                i = path_length[u][v]
            except KeyError as err:  # a missing pair means G is disconnected
                raise nx.NetworkXError("Graph is not distance regular.") from err
            # Count neighbors of v one step closer to / farther from u.
            c = sum(1 for n in G[v] if path_length[n][u] == i - 1)
            b = sum(1 for n in G[v] if path_length[n][u] == i + 1)
            # b_i and c_i must not depend on the particular pair (u, v);
            # setdefault records the first value seen and flags any mismatch.
            if cint.setdefault(i, c) != c or bint.setdefault(i, b) != b:
                raise nx.NetworkXError("Graph is not distance regular")
    return (
        [bint.get(j, 0) for j in range(diameter)],
        [cint.get(j + 1, 0) for j in range(diameter)],
    )
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
# TODO There is a definition for directed strongly regular graphs.
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def is_strongly_regular(G):
    """Returns True if and only if the given graph is strongly
    regular.

    An undirected graph is *strongly regular* if

    * it is regular,
    * each pair of adjacent vertices has the same number of neighbors in
      common,
    * each pair of nonadjacent vertices has the same number of neighbors
      in common.

    Each strongly regular graph is a distance-regular graph.
    Conversely, if a distance-regular graph has diameter two, then it is
    a strongly regular graph. For more information on distance-regular
    graphs, see :func:`is_distance_regular`.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    Returns
    -------
    bool
        Whether `G` is strongly regular.

    Examples
    --------

    The cycle graph on five vertices is strongly regular. It is
    two-regular, each pair of adjacent vertices has no shared neighbors,
    and each pair of nonadjacent vertices has one shared neighbor::

        >>> G = nx.cycle_graph(5)
        >>> nx.is_strongly_regular(G)
        True

    """
    # A direct implementation would check regularity plus equal counts of
    # common neighbors over all edges and all non-edges. We instead rely on
    # the theorem that a graph is strongly regular if and only if it is
    # distance-regular with diameter two.
    if not is_distance_regular(G):
        return False
    return diameter(G) == 2
|
phi4/lib/python3.10/site-packages/networkx/algorithms/dominance.py
ADDED
|
@@ -0,0 +1,135 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Dominance algorithms.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from functools import reduce
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
from networkx.utils import not_implemented_for
|
| 9 |
+
|
| 10 |
+
__all__ = ["immediate_dominators", "dominance_frontiers"]
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
@not_implemented_for("undirected")
@nx._dispatchable
def immediate_dominators(G, start):
    """Returns the immediate dominators of all nodes of a directed graph.

    Parameters
    ----------
    G : a DiGraph or MultiDiGraph
        The graph where dominance is to be computed.

    start : node
        The start node of dominance computation.

    Returns
    -------
    idom : dict keyed by nodes
        A dict containing the immediate dominators of each node reachable from
        `start`.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is undirected.

    NetworkXError
        If `start` is not in `G`.

    Notes
    -----
    Except for `start`, the immediate dominators are the parents of their
    corresponding nodes in the dominator tree.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 5), (3, 4), (4, 5)])
    >>> sorted(nx.immediate_dominators(G, 1).items())
    [(1, 1), (2, 1), (3, 1), (4, 3), (5, 1)]

    References
    ----------
    .. [1] Cooper, Keith D., Harvey, Timothy J. and Kennedy, Ken.
           "A simple, fast dominance algorithm." (2006).
           https://hdl.handle.net/1911/96345
    """
    if start not in G:
        raise nx.NetworkXError("start is not in G")

    # The start node dominates itself.
    idom = {start: start}

    postorder = list(nx.dfs_postorder_nodes(G, start))
    dfn = {node: number for number, node in enumerate(postorder)}
    # Process nodes in reverse postorder, skipping `start` (last in postorder).
    work_order = postorder[-2::-1]

    def intersect(u, v):
        # Walk both candidates up the current dominator tree, always
        # advancing the one with the smaller postorder number, until they
        # meet at the nearest common dominator.
        while u != v:
            while dfn[u] < dfn[v]:
                u = idom[u]
            while dfn[u] > dfn[v]:
                v = idom[v]
        return u

    # Iterate to a fixed point (Cooper-Harvey-Kennedy algorithm).
    changed = True
    while changed:
        changed = False
        for node in work_order:
            # Fold the processed predecessors into one dominator candidate.
            new_idom = reduce(intersect, (p for p in G.pred[node] if p in idom))
            if node not in idom or idom[node] != new_idom:
                idom[node] = new_idom
                changed = True

    return idom
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
@nx._dispatchable
def dominance_frontiers(G, start):
    """Returns the dominance frontiers of all nodes of a directed graph.

    Parameters
    ----------
    G : a DiGraph or MultiDiGraph
        The graph where dominance is to be computed.

    start : node
        The start node of dominance computation.

    Returns
    -------
    df : dict keyed by nodes
        A dict containing the dominance frontiers of each node reachable from
        `start` as lists.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is undirected.

    NetworkXError
        If `start` is not in `G`.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 5), (3, 4), (4, 5)])
    >>> sorted((u, sorted(df)) for u, df in nx.dominance_frontiers(G, 1).items())
    [(1, []), (2, [5]), (3, [5]), (4, [5]), (5, [])]

    References
    ----------
    .. [1] Cooper, Keith D., Harvey, Timothy J. and Kennedy, Ken.
           "A simple, fast dominance algorithm." (2006).
           https://hdl.handle.net/1911/96345
    """
    idom = nx.immediate_dominators(G, start)

    frontiers = {node: set() for node in idom}
    for node in idom:
        preds = G.pred[node]
        # Only join points (two or more predecessors) contribute entries.
        if len(preds) < 2:
            continue
        for runner in preds:
            # Unreachable predecessors have no dominator information.
            if runner not in idom:
                continue
            # Walk up the dominator tree from each predecessor; every node
            # strictly below idom[node] has `node` in its frontier.
            while runner != idom[node]:
                frontiers[runner].add(node)
                runner = idom[runner]
    return frontiers
|
phi4/lib/python3.10/site-packages/networkx/algorithms/dominating.py
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for computing dominating sets in a graph."""
|
| 2 |
+
|
| 3 |
+
from itertools import chain
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.utils import arbitrary_element
|
| 7 |
+
|
| 8 |
+
__all__ = ["dominating_set", "is_dominating_set"]
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@nx._dispatchable
def dominating_set(G, start_with=None):
    r"""Finds a dominating set for the graph G.

    A *dominating set* for a graph with node set *V* is a subset *D* of
    *V* such that every node not in *D* is adjacent to at least one
    member of *D* [1]_.

    Parameters
    ----------
    G : NetworkX graph

    start_with : node (default=None)
        Node to use as a starting point for the algorithm.

    Returns
    -------
    D : set
        A dominating set for G.

    Notes
    -----
    This function is an implementation of algorithm 7 in [2]_ which
    finds some dominating set, not necessarily the smallest one.

    See also
    --------
    is_dominating_set

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Dominating_set

    .. [2] Abdol-Hossein Esfahanian. Connectivity Algorithms.
        http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf

    """
    all_nodes = set(G)
    if start_with is None:
        start_with = arbitrary_element(all_nodes)
    if start_with not in G:
        raise nx.NetworkXError(f"node {start_with} is not in G")
    chosen = {start_with}
    covered = set(G[start_with])
    remaining = all_nodes - covered - chosen
    while remaining:
        # Pick any not-yet-dominated node; its neighbors outside the
        # chosen set become newly dominated once it is selected.
        node = remaining.pop()
        fresh = set(G[node]) - chosen
        chosen.add(node)
        covered |= fresh
        remaining -= fresh
    return chosen
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
@nx._dispatchable
def is_dominating_set(G, nbunch):
    """Checks if `nbunch` is a dominating set for `G`.

    A *dominating set* for a graph with node set *V* is a subset *D* of
    *V* such that every node not in *D* is adjacent to at least one
    member of *D* [1]_.

    Parameters
    ----------
    G : NetworkX graph

    nbunch : iterable
        An iterable of nodes in the graph `G`.

    See also
    --------
    dominating_set

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Dominating_set

    """
    # Restrict the candidate set to nodes actually present in G.
    candidates = {node for node in nbunch if node in G}
    covered = set(chain.from_iterable(G[node] for node in candidates))
    # Dominating iff no node lies outside the set and its neighborhood.
    return not (set(G) - candidates - covered)
|
phi4/lib/python3.10/site-packages/networkx/algorithms/graph_hashing.py
ADDED
|
@@ -0,0 +1,328 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Functions for hashing graphs to strings.
|
| 3 |
+
Isomorphic graphs should be assigned identical hashes.
|
| 4 |
+
For now, only Weisfeiler-Lehman hashing is implemented.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
from collections import Counter, defaultdict
|
| 8 |
+
from hashlib import blake2b
|
| 9 |
+
|
| 10 |
+
import networkx as nx
|
| 11 |
+
|
| 12 |
+
__all__ = ["weisfeiler_lehman_graph_hash", "weisfeiler_lehman_subgraph_hashes"]
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def _hash_label(label, digest_size):
|
| 16 |
+
return blake2b(label.encode("ascii"), digest_size=digest_size).hexdigest()
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def _init_node_labels(G, edge_attr, node_attr):
|
| 20 |
+
if node_attr:
|
| 21 |
+
return {u: str(dd[node_attr]) for u, dd in G.nodes(data=True)}
|
| 22 |
+
elif edge_attr:
|
| 23 |
+
return {u: "" for u in G}
|
| 24 |
+
else:
|
| 25 |
+
return {u: str(deg) for u, deg in G.degree()}
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def _neighborhood_aggregate(G, node, node_labels, edge_attr=None):
|
| 29 |
+
"""
|
| 30 |
+
Compute new labels for given node by aggregating
|
| 31 |
+
the labels of each node's neighbors.
|
| 32 |
+
"""
|
| 33 |
+
label_list = []
|
| 34 |
+
for nbr in G.neighbors(node):
|
| 35 |
+
prefix = "" if edge_attr is None else str(G[node][nbr][edge_attr])
|
| 36 |
+
label_list.append(prefix + node_labels[nbr])
|
| 37 |
+
return node_labels[node] + "".join(sorted(label_list))
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr")
def weisfeiler_lehman_graph_hash(
    G, edge_attr=None, node_attr=None, iterations=3, digest_size=16
):
    """Return Weisfeiler Lehman (WL) graph hash.

    The function iteratively aggregates and hashes neighborhoods of each node.
    After each node's neighbors are hashed to obtain updated node labels,
    a hashed histogram of resulting labels is returned as the final hash.

    Hashes are identical for isomorphic graphs and strong guarantees that
    non-isomorphic graphs will get different hashes. See [1]_ for details.

    If no node or edge attributes are provided, the degree of each node
    is used as its initial label.
    Otherwise, node and/or edge labels are used to compute the hash.

    Parameters
    ----------
    G : graph
        The graph to be hashed.
        Can have node and/or edge attributes. Can also have no attributes.
    edge_attr : string, optional (default=None)
        The key in edge attribute dictionary to be used for hashing.
        If None, edge labels are ignored.
    node_attr : string, optional (default=None)
        The key in node attribute dictionary to be used for hashing.
        If None, and no edge_attr given, use the degrees of the nodes as labels.
    iterations : int, optional (default=3)
        Number of neighbor aggregations to perform.
        Should be larger for larger graphs.
    digest_size : int, optional (default=16)
        Size (in bytes) of the blake2b hash digest to use for hashing node
        labels.  The returned hex string has ``2 * digest_size`` characters.

    Returns
    -------
    h : string
        Hexadecimal string corresponding to hash of the input graph.

    Examples
    --------
    Two graphs with edge attributes that are isomorphic, except for
    differences in the edge labels.

    >>> G1 = nx.Graph()
    >>> G1.add_edges_from(
    ...     [
    ...         (1, 2, {"label": "A"}),
    ...         (2, 3, {"label": "A"}),
    ...         (3, 1, {"label": "A"}),
    ...         (1, 4, {"label": "B"}),
    ...     ]
    ... )
    >>> G2 = nx.Graph()
    >>> G2.add_edges_from(
    ...     [
    ...         (5, 6, {"label": "B"}),
    ...         (6, 7, {"label": "A"}),
    ...         (7, 5, {"label": "A"}),
    ...         (7, 8, {"label": "A"}),
    ...     ]
    ... )

    Omitting the `edge_attr` option, results in identical hashes.

    >>> nx.weisfeiler_lehman_graph_hash(G1)
    '7bc4dde9a09d0b94c5097b219891d81a'
    >>> nx.weisfeiler_lehman_graph_hash(G2)
    '7bc4dde9a09d0b94c5097b219891d81a'

    With edge labels, the graphs are no longer assigned
    the same hash digest.

    >>> nx.weisfeiler_lehman_graph_hash(G1, edge_attr="label")
    'c653d85538bcf041d88c011f4f905f10'
    >>> nx.weisfeiler_lehman_graph_hash(G2, edge_attr="label")
    '3dcd84af1ca855d0eff3c978d88e7ec7'

    Notes
    -----
    To return the WL hashes of each subgraph of a graph, use
    `weisfeiler_lehman_subgraph_hashes`

    Similarity between hashes does not imply similarity between graphs.

    References
    ----------
    .. [1] Shervashidze, Nino, Pascal Schweitzer, Erik Jan Van Leeuwen,
       Kurt Mehlhorn, and Karsten M. Borgwardt. Weisfeiler Lehman
       Graph Kernels. Journal of Machine Learning Research. 2011.
       http://www.jmlr.org/papers/volume12/shervashidze11a/shervashidze11a.pdf

    See also
    --------
    weisfeiler_lehman_subgraph_hashes
    """

    def weisfeiler_lehman_step(G, labels, edge_attr=None):
        # One WL refinement step: replace each node's label with the hash
        # of its 1-hop neighborhood aggregate.
        return {
            node: _hash_label(
                _neighborhood_aggregate(G, node, labels, edge_attr=edge_attr),
                digest_size,
            )
            for node in G.nodes()
        }

    # set initial node labels
    node_labels = _init_node_labels(G, edge_attr, node_attr)

    subgraph_hash_counts = []
    for _ in range(iterations):
        node_labels = weisfeiler_lehman_step(G, node_labels, edge_attr=edge_attr)
        # Accumulate a sorted label histogram per iteration so the final
        # hash reflects every refinement depth (label keys are unique, so
        # sorting items is equivalent to sorting by key).
        subgraph_hash_counts.extend(sorted(Counter(node_labels.values()).items()))

    # hash the final counter
    return _hash_label(str(tuple(subgraph_hash_counts)), digest_size)
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr")
def weisfeiler_lehman_subgraph_hashes(
    G,
    edge_attr=None,
    node_attr=None,
    iterations=3,
    digest_size=16,
    include_initial_labels=False,
):
    """
    Return a dictionary of subgraph hashes by node.

    Dictionary keys are nodes in `G`, and values are a list of hashes.
    Each hash corresponds to a subgraph rooted at a given node u in `G`.
    Lists of subgraph hashes are sorted in increasing order of depth from
    their root node, with the hash at index i corresponding to a subgraph
    of nodes at most i edges distance from u. Thus, each list will contain
    `iterations` elements - a hash for a subgraph at each depth. If
    `include_initial_labels` is set to `True`, each list will additionally
    contain a hash of the initial node label (or equivalently a
    subgraph of depth 0) prepended, totalling ``iterations + 1`` elements.

    The function iteratively aggregates and hashes neighborhoods of each node.
    This is achieved for each step by replacing for each node its label from
    the previous iteration with its hashed 1-hop neighborhood aggregate.
    The new node label is then appended to a list of node labels for each
    node.

    To aggregate neighborhoods for a node $u$ at each step, all labels of
    nodes adjacent to $u$ are concatenated. If the `edge_attr` parameter is set,
    labels for each neighboring node are prefixed with the value of this attribute
    along the connecting edge from this neighbor to node $u$. The resulting string
    is then hashed to compress this information into a fixed digest size.

    Thus, at the $i$-th iteration, nodes within $i$ hops influence any given
    hashed node label. We can therefore say that at depth $i$ for node $u$
    we have a hash for a subgraph induced by the $i$-hop neighborhood of $u$.

    The output can be used to create general Weisfeiler-Lehman graph kernels,
    or generate features for graphs or nodes - for example to generate 'words' in
    a graph as seen in the 'graph2vec' algorithm.
    See [1]_ & [2]_ respectively for details.

    Hashes are identical for isomorphic subgraphs and there exist strong
    guarantees that non-isomorphic graphs will get different hashes.
    See [1]_ for details.

    If no node or edge attributes are provided, the degree of each node
    is used as its initial label.
    Otherwise, node and/or edge labels are used to compute the hash.

    Parameters
    ----------
    G : graph
        The graph to be hashed.
        Can have node and/or edge attributes. Can also have no attributes.
    edge_attr : string, optional (default=None)
        The key in edge attribute dictionary to be used for hashing.
        If None, edge labels are ignored.
    node_attr : string, optional (default=None)
        The key in node attribute dictionary to be used for hashing.
        If None, and no edge_attr given, use the degrees of the nodes as labels.
        If None, and edge_attr is given, each node starts with an identical label.
    iterations : int, optional (default=3)
        Number of neighbor aggregations to perform.
        Should be larger for larger graphs.
    digest_size : int, optional (default=16)
        Size (in bytes) of the blake2b hash digest to use for hashing node
        labels.  The default size is 16 bytes (a 32-character hex string).
    include_initial_labels : bool, optional (default=False)
        If True, include the hashed initial node label as the first subgraph
        hash for each node.

    Returns
    -------
    node_subgraph_hashes : dict
        A dictionary with each key given by a node in G, and each value given
        by the subgraph hashes in order of depth from the key node.

    Examples
    --------
    Finding similar nodes in different graphs:

    >>> G1 = nx.Graph()
    >>> G1.add_edges_from([(1, 2), (2, 3), (2, 4), (3, 5), (4, 6), (5, 7), (6, 7)])
    >>> G2 = nx.Graph()
    >>> G2.add_edges_from([(1, 3), (2, 3), (1, 6), (1, 5), (4, 6)])
    >>> g1_hashes = nx.weisfeiler_lehman_subgraph_hashes(
    ...     G1, iterations=3, digest_size=8
    ... )
    >>> g2_hashes = nx.weisfeiler_lehman_subgraph_hashes(
    ...     G2, iterations=3, digest_size=8
    ... )

    Even though G1 and G2 are not isomorphic (they have different numbers of edges),
    the hash sequence of depth 3 for node 1 in G1 and node 5 in G2 are similar:

    >>> g1_hashes[1]
    ['a93b64973cfc8897', 'db1b43ae35a1878f', '57872a7d2059c1c0']
    >>> g2_hashes[5]
    ['a93b64973cfc8897', 'db1b43ae35a1878f', '1716d2a4012fa4bc']

    The first 2 WL subgraph hashes match. From this we can conclude that it's very
    likely the neighborhood of 2 hops around these nodes are isomorphic.

    However the 3-hop neighborhoods of ``G1`` and ``G2`` are not isomorphic since the
    3rd hashes in the lists above are not equal.

    These nodes may be candidates to be classified together since their local topology
    is similar.

    Notes
    -----
    To hash the full graph when subgraph hashes are not needed, use
    `weisfeiler_lehman_graph_hash` for efficiency.

    Similarity between hashes does not imply similarity between graphs.

    References
    ----------
    .. [1] Shervashidze, Nino, Pascal Schweitzer, Erik Jan Van Leeuwen,
       Kurt Mehlhorn, and Karsten M. Borgwardt. Weisfeiler Lehman
       Graph Kernels. Journal of Machine Learning Research. 2011.
       http://www.jmlr.org/papers/volume12/shervashidze11a/shervashidze11a.pdf
    .. [2] Annamalai Narayanan, Mahinthan Chandramohan, Rajasekar Venkatesan,
       Lihui Chen, Yang Liu and Shantanu Jaiswa. graph2vec: Learning
       Distributed Representations of Graphs. arXiv. 2017
       https://arxiv.org/pdf/1707.05005.pdf

    See also
    --------
    weisfeiler_lehman_graph_hash
    """
    node_labels = _init_node_labels(G, edge_attr, node_attr)
    if include_initial_labels:
        # Seed each node's hash list with the hash of its depth-0 label.
        node_subgraph_hashes = {
            node: [_hash_label(label, digest_size)]
            for node, label in node_labels.items()
        }
    else:
        node_subgraph_hashes = {node: [] for node in node_labels}

    for _ in range(iterations):
        # One WL refinement step, appending each node's new hashed label to
        # its running list of subgraph hashes.
        new_labels = {}
        for node in G.nodes():
            aggregate = _neighborhood_aggregate(
                G, node, node_labels, edge_attr=edge_attr
            )
            hashed = _hash_label(aggregate, digest_size)
            new_labels[node] = hashed
            node_subgraph_hashes[node].append(hashed)
        node_labels = new_labels

    return node_subgraph_hashes
|
phi4/lib/python3.10/site-packages/networkx/algorithms/graphical.py
ADDED
|
@@ -0,0 +1,483 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Test sequences for graphiness."""
|
| 2 |
+
|
| 3 |
+
import heapq
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
__all__ = [
|
| 8 |
+
"is_graphical",
|
| 9 |
+
"is_multigraphical",
|
| 10 |
+
"is_pseudographical",
|
| 11 |
+
"is_digraphical",
|
| 12 |
+
"is_valid_degree_sequence_erdos_gallai",
|
| 13 |
+
"is_valid_degree_sequence_havel_hakimi",
|
| 14 |
+
]
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
@nx._dispatchable(graphs=None)
def is_graphical(sequence, method="eg"):
    """Returns True if sequence is a valid degree sequence.

    A degree sequence is valid if some graph can realize it.

    Parameters
    ----------
    sequence : list or iterable container
        A sequence of integer node degrees

    method : "eg" | "hh" (default: 'eg')
        The method used to validate the degree sequence.
        "eg" corresponds to the Erdős-Gallai algorithm
        [EG1960]_, [choudum1986]_, and
        "hh" to the Havel-Hakimi algorithm
        [havel1955]_, [hakimi1962]_, [CL1996]_.

    Returns
    -------
    valid : bool
        True if the sequence is a valid degree sequence and False if not.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> sequence = (d for n, d in G.degree())
    >>> nx.is_graphical(sequence)
    True

    To test a non-graphical sequence:
    >>> sequence_list = [d for n, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_graphical(sequence_list)
    False

    References
    ----------
    .. [EG1960] Erdős and Gallai, Mat. Lapok 11 264, 1960.
    .. [choudum1986] S.A. Choudum. "A simple proof of the Erdős-Gallai theorem on
       graph sequences." Bulletin of the Australian Mathematical Society, 33,
       pp 67-70, 1986. https://doi.org/10.1017/S0004972700002872
    .. [havel1955] Havel, V. "A Remark on the Existence of Finite Graphs"
       Casopis Pest. Mat. 80, 477-480, 1955.
    .. [hakimi1962] Hakimi, S. "On the Realizability of a Set of Integers as
       Degrees of the Vertices of a Graph." SIAM J. Appl. Math. 10, 496-506, 1962.
    .. [CL1996] G. Chartrand and L. Lesniak, "Graphs and Digraphs",
       Chapman and Hall/CRC, 1996.
    """
    # Materialize the (possibly single-pass) sequence only once a valid
    # method has been selected for it.
    if method == "eg":
        return is_valid_degree_sequence_erdos_gallai(list(sequence))
    if method == "hh":
        return is_valid_degree_sequence_havel_hakimi(list(sequence))
    raise nx.NetworkXException("`method` must be 'eg' or 'hh'")
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def _basic_graphical_tests(deg_sequence):
    """Run cheap sanity checks on a degree sequence and return summary
    statistics ``(dmax, dmin, dsum, n, num_degs)`` over its non-zero
    entries, where ``num_degs`` counts occurrences of each degree.

    Raises ``nx.NetworkXUnfeasible`` if the sequence is clearly not
    realizable by any simple graph.
    """
    deg_sequence = nx.utils.make_list_of_ints(deg_sequence)
    length = len(deg_sequence)
    num_degs = [0] * length
    dmax, dmin, dsum, n = 0, length, 0, 0
    for deg in deg_sequence:
        # A simple graph on `length` nodes cannot host a negative degree
        # or a degree of `length` or more.
        if deg < 0 or deg >= length:
            raise nx.NetworkXUnfeasible
        if deg > 0:
            # Track extrema, running sum, and count of non-zero degrees.
            if deg > dmax:
                dmax = deg
            if deg < dmin:
                dmin = deg
            dsum += deg
            n += 1
            num_degs[deg] += 1
    # Handshake lemma: the degree sum must be even, and it cannot exceed
    # the endpoint capacity n*(n-1) of a simple graph on n nodes.
    if dsum % 2 or dsum > n * (n - 1):
        raise nx.NetworkXUnfeasible
    return dmax, dmin, dsum, n, num_degs
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
@nx._dispatchable(graphs=None)
def is_valid_degree_sequence_havel_hakimi(deg_sequence):
    r"""Returns True if deg_sequence can be realized by a simple graph.

    The validation proceeds using the Havel-Hakimi theorem
    [havel1955]_, [hakimi1962]_, [CL1996]_.
    Worst-case run time is $O(s)$ where $s$ is the sum of the sequence.

    Parameters
    ----------
    deg_sequence : list
        A list of integers where each element specifies the degree of a node
        in a graph.

    Returns
    -------
    valid : bool
        True if deg_sequence is graphical and False if not.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_valid_degree_sequence_havel_hakimi(sequence)
    True

    To test a non-valid sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_valid_degree_sequence_havel_hakimi(sequence_list)
    False

    Notes
    -----
    The ZZ condition says that for the sequence d if

    .. math::
        |d| >= \frac{(\max(d) + \min(d) + 1)^2}{4*\min(d)}

    then d is graphical. This was shown in Theorem 6 in [1]_.

    References
    ----------
    .. [1] I.E. Zverovich and V.E. Zverovich. "Contributions to the theory
       of graphic sequences", Discrete Mathematics, 105, pp. 292-303 (1992).
    .. [havel1955] Havel, V. "A Remark on the Existence of Finite Graphs"
       Casopis Pest. Mat. 80, 477-480, 1955.
    .. [hakimi1962] Hakimi, S. "On the Realizability of a Set of Integers as
       Degrees of the Vertices of a Graph." SIAM J. Appl. Math. 10, 496-506, 1962.
    .. [CL1996] G. Chartrand and L. Lesniak, "Graphs and Digraphs",
       Chapman and Hall/CRC, 1996.
    """
    try:
        dmax, dmin, dsum, n, num_degs = _basic_graphical_tests(deg_sequence)
    except nx.NetworkXUnfeasible:
        return False
    # Trivially graphical (no non-zero degrees), or graphical by the ZZ
    # sufficient condition [1]_.
    if n == 0 or 4 * dmin * n >= (dmax + dmin + 1) ** 2:
        return True

    # Havel-Hakimi reduction over a bucket histogram of degrees.
    reduced = [0] * (dmax + 1)  # scratch buffer for decremented stubs
    while n > 0:
        # Locate the current maximum remaining degree.
        while num_degs[dmax] == 0:
            dmax -= 1
        # Not enough other nodes left to absorb dmax edges.
        if dmax > n - 1:
            return False

        # Remove one node of maximum degree from the histogram...
        num_degs[dmax] -= 1
        n -= 1
        # ...and connect it to the dmax next-largest stubs, decrementing each.
        count = 0
        k = dmax
        for _ in range(dmax):
            while num_degs[k] == 0:
                k -= 1
            num_degs[k] -= 1
            n -= 1
            if k > 1:
                reduced[count] = k - 1
                count += 1
        # Reinsert the decremented stubs that are still non-zero.
        for idx in range(count):
            num_degs[reduced[idx]] += 1
            n += 1
    return True
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
@nx._dispatchable(graphs=None)
def is_valid_degree_sequence_erdos_gallai(deg_sequence):
    r"""Returns True if deg_sequence can be realized by a simple graph.

    The validation is done using the Erdős-Gallai theorem [EG1960]_.

    Parameters
    ----------
    deg_sequence : list
        A list of integers

    Returns
    -------
    valid : bool
        True if deg_sequence is graphical and False if not.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_valid_degree_sequence_erdos_gallai(sequence)
    True

    To test a non-valid sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_valid_degree_sequence_erdos_gallai(sequence_list)
    False

    Notes
    -----

    This implementation uses an equivalent form of the Erdős-Gallai criterion.
    Worst-case run time is $O(n)$ where $n$ is the length of the sequence.

    Specifically, a sequence d is graphical if and only if the
    sum of the sequence is even and for all strong indices k in the sequence,

    .. math::

       \sum_{i=1}^{k} d_i \leq k(k-1) + \sum_{j=k+1}^{n} \min(d_i,k)
             = k(n-1) - ( k \sum_{j=0}^{k-1} n_j - \sum_{j=0}^{k-1} j n_j )

    A strong index k is any index where d_k >= k and the value n_j is the
    number of occurrences of j in d. The maximal strong index is called the
    Durfee index.

    This particular rearrangement comes from the proof of Theorem 3 in [2]_.

    The ZZ condition says that for the sequence d if

    .. math::
        |d| >= \frac{(\max(d) + \min(d) + 1)^2}{4*\min(d)}

    then d is graphical. This was shown in Theorem 6 in [2]_.

    References
    ----------
    .. [1] A. Tripathi and S. Vijay. "A note on a theorem of Erdős & Gallai",
       Discrete Mathematics, 265, pp. 417-420 (2003).
    .. [2] I.E. Zverovich and V.E. Zverovich. "Contributions to the theory
       of graphic sequences", Discrete Mathematics, 105, pp. 292-303 (1992).
    .. [EG1960] Erdős and Gallai, Mat. Lapok 11 264, 1960.
    """
    # _basic_graphical_tests (defined earlier in this file) validates the
    # sequence and returns its max/min/sum, length n, and a degree-frequency
    # table num_degs; it raises NetworkXUnfeasible on obviously bad input.
    try:
        dmax, dmin, dsum, n, num_degs = _basic_graphical_tests(deg_sequence)
    except nx.NetworkXUnfeasible:
        return False
    # Accept if sequence has no non-zero degrees or passes the ZZ condition
    if n == 0 or 4 * dmin * n >= (dmax + dmin + 1) * (dmax + dmin + 1):
        return True

    # Perform the EG checks using the reformulation of Zverovich and Zverovich
    # k walks up toward the Durfee index while dk walks down through the
    # distinct degree values; runs of equal degrees are handled in bulk.
    k, sum_deg, sum_nj, sum_jnj = 0, 0, 0, 0
    for dk in range(dmax, dmin - 1, -1):
        if dk < k + 1:  # Check if already past Durfee index
            return True
        if num_degs[dk] > 0:
            run_size = num_degs[dk]  # Process a run of identical-valued degrees
            if dk < k + run_size:  # Check if end of run is past Durfee index
                run_size = dk - k  # Adjust back to Durfee index
            sum_deg += run_size * dk
            for v in range(run_size):
                sum_nj += num_degs[k + v]
                sum_jnj += (k + v) * num_degs[k + v]
            k += run_size
            # Right-hand side of the rearranged EG inequality (Theorem 3, [2]).
            if sum_deg > k * (n - 1) - k * sum_nj + sum_jnj:
                return False
    return True
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
@nx._dispatchable(graphs=None)
def is_multigraphical(sequence):
    """Returns True if some multigraph can realize the sequence.

    Parameters
    ----------
    sequence : list
        A list of integers

    Returns
    -------
    valid : bool
        True if deg_sequence is a multigraphic degree sequence and False if not.

    Examples
    --------
    >>> G = nx.MultiGraph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_multigraphical(sequence)
    True

    To test a non-multigraphical sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_multigraphical(sequence_list)
    False

    Notes
    -----
    The worst-case run time is $O(n)$ where $n$ is the length of the sequence.

    References
    ----------
    .. [1] S. L. Hakimi. "On the realizability of a set of integers as
       degrees of the vertices of a linear graph", J. SIAM, 10, pp. 496-506
       (1962).
    """
    try:
        degrees = nx.utils.make_list_of_ints(sequence)
    except nx.NetworkXError:
        return False
    # Negative entries can never be vertex degrees.
    if any(d < 0 for d in degrees):
        return False
    # Hakimi's criterion for multigraphs: the degree sum must be even
    # (handshaking lemma) and at least twice the largest degree, so the
    # largest-degree vertex can be saturated by the remaining stubs.
    total = sum(degrees)
    largest = max(degrees, default=0)
    return total % 2 == 0 and total >= 2 * largest
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
@nx._dispatchable(graphs=None)
def is_pseudographical(sequence):
    """Returns True if some pseudograph can realize the sequence.

    Every nonnegative integer sequence with an even sum is pseudographical
    (see [1]_).

    Parameters
    ----------
    sequence : list or iterable container
        A sequence of integer node degrees

    Returns
    -------
    valid : bool
        True if the sequence is a pseudographic degree sequence and False if not.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_pseudographical(sequence)
    True

    To test a non-pseudographical sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_pseudographical(sequence_list)
    False

    Notes
    -----
    The worst-case run time is $O(n)$ where n is the length of the sequence.

    References
    ----------
    .. [1] F. Boesch and F. Harary. "Line removal algorithms for graphs
       and their degree lists", IEEE Trans. Circuits and Systems, CAS-23(12),
       pp. 778-782 (1976).
    """
    try:
        deg_sequence = nx.utils.make_list_of_ints(sequence)
    except nx.NetworkXError:
        return False
    # BUG FIX: guard the empty sequence. Previously ``min(deg_sequence)``
    # raised ValueError on empty input; an empty sequence is trivially
    # realized by the null pseudograph, matching is_multigraphical's
    # behavior of returning True for empty input.
    if not deg_sequence:
        return True
    return sum(deg_sequence) % 2 == 0 and min(deg_sequence) >= 0
|
| 373 |
+
|
| 374 |
+
|
| 375 |
+
@nx._dispatchable(graphs=None)
def is_digraphical(in_sequence, out_sequence):
    r"""Returns True if some directed graph can realize the in- and out-degree
    sequences.

    Parameters
    ----------
    in_sequence : list or iterable container
        A sequence of integer node in-degrees

    out_sequence : list or iterable container
        A sequence of integer node out-degrees

    Returns
    -------
    valid : bool
        True if in and out-sequences are digraphic False if not.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> in_seq = (d for n, d in G.in_degree())
    >>> out_seq = (d for n, d in G.out_degree())
    >>> nx.is_digraphical(in_seq, out_seq)
    True

    To test a non-digraphical scenario:
    >>> in_seq_list = [d for n, d in G.in_degree()]
    >>> in_seq_list[-1] += 1
    >>> nx.is_digraphical(in_seq_list, out_seq)
    False

    Notes
    -----
    This algorithm is from Kleitman and Wang [1]_.
    The worst case runtime is $O(s \times \log n)$ where $s$ and $n$ are the
    sum and length of the sequences respectively.

    References
    ----------
    .. [1] D.J. Kleitman and D.L. Wang
       Algorithms for Constructing Graphs and Digraphs with Given Valences
       and Factors, Discrete Mathematics, 6(1), pp. 79-88 (1973)
    """
    try:
        in_deg_sequence = nx.utils.make_list_of_ints(in_sequence)
        out_deg_sequence = nx.utils.make_list_of_ints(out_sequence)
    except nx.NetworkXError:
        return False
    # Process the sequences and form two heaps to store degree pairs with
    # either zero or non-zero out degrees.
    # Values are negated because heapq provides a min-heap and the
    # algorithm needs max-heap behavior.
    sumin, sumout, nin, nout = 0, 0, len(in_deg_sequence), len(out_deg_sequence)
    maxn = max(nin, nout)
    maxin = 0
    if maxn == 0:
        return True
    stubheap, zeroheap = [], []
    for n in range(maxn):
        in_deg, out_deg = 0, 0
        if n < nout:
            out_deg = out_deg_sequence[n]
        if n < nin:
            in_deg = in_deg_sequence[n]
        if in_deg < 0 or out_deg < 0:
            return False
        sumin, sumout, maxin = sumin + in_deg, sumout + out_deg, max(maxin, in_deg)
        if in_deg > 0:
            stubheap.append((-1 * out_deg, -1 * in_deg))
        elif out_deg > 0:
            zeroheap.append(-1 * out_deg)
    # Every arc contributes one in-stub and one out-stub, so the totals
    # must match for any realization to exist.
    if sumin != sumout:
        return False
    heapq.heapify(stubheap)
    heapq.heapify(zeroheap)

    # Scratch buffer reused on every iteration to hold nodes whose stub
    # counts were decremented and that must be pushed back onto a heap.
    modstubs = [(0, 0)] * (maxin + 1)
    # Successively reduce degree sequence by removing the maximum out degree
    while stubheap:
        # Take the first value in the sequence with non-zero in degree
        (freeout, freein) = heapq.heappop(stubheap)
        freein *= -1
        if freein > len(stubheap) + len(zeroheap):
            return False

        # Attach out stubs to the nodes with the most in stubs
        mslen = 0
        for i in range(freein):
            if zeroheap and (not stubheap or stubheap[0][0] > zeroheap[0]):
                stubout = heapq.heappop(zeroheap)
                stubin = 0
            else:
                (stubout, stubin) = heapq.heappop(stubheap)
                if stubout == 0:
                    return False
            # Check if target is now totally connected
            if stubout + 1 < 0 or stubin < 0:
                modstubs[mslen] = (stubout + 1, stubin)
                mslen += 1

        # Add back the nodes to the heap that still have available stubs
        for i in range(mslen):
            stub = modstubs[i]
            if stub[1] < 0:
                heapq.heappush(stubheap, stub)
            else:
                heapq.heappush(zeroheap, stub[0])
        if freeout < 0:
            heapq.heappush(zeroheap, freeout)
    return True
|
phi4/lib/python3.10/site-packages/networkx/algorithms/hierarchy.py
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Flow Hierarchy.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
__all__ = ["flow_hierarchy"]
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@nx._dispatchable(edge_attrs="weight")
def flow_hierarchy(G, weight=None):
    """Returns the flow hierarchy of a directed network.

    Flow hierarchy is defined as the fraction of edges not participating
    in cycles in a directed graph [1]_.

    Parameters
    ----------
    G : DiGraph or MultiDiGraph
        A directed graph

    weight : string, optional (default=None)
        Attribute to use for edge weights. If None the weight defaults to 1.

    Returns
    -------
    h : float
        Flow hierarchy value

    Raises
    ------
    NetworkXError
        If `G` is not a directed graph or if `G` has no edges.

    Notes
    -----
    The algorithm described in [1]_ computes the flow hierarchy through
    exponentiation of the adjacency matrix. This function implements an
    alternative approach that finds strongly connected components.
    An edge is in a cycle if and only if it is in a strongly connected
    component, which can be found in $O(m)$ time using Tarjan's algorithm.

    References
    ----------
    .. [1] Luo, J.; Magee, C.L. (2011),
       Detecting evolving patterns of self-organizing networks by flow
       hierarchy measurement, Complexity, Volume 16 Issue 6 53-61.
       DOI: 10.1002/cplx.20368
       http://web.mit.edu/~cmagee/www/documents/28-DetectingEvolvingPatterns_FlowHierarchy.pdf
    """
    # corner case: G has no edges
    if nx.is_empty(G):
        raise nx.NetworkXError("flow_hierarchy not applicable to empty graphs")
    if not G.is_directed():
        raise nx.NetworkXError("G must be a digraph in flow_hierarchy")
    # An edge lies on a cycle iff both endpoints share a strongly connected
    # component, so the cyclic weight is the total weight inside the SCCs.
    cyclic_weight = 0
    for component in nx.strongly_connected_components(G):
        cyclic_weight += G.subgraph(component).size(weight)
    return 1 - cyclic_weight / G.size(weight)
|
phi4/lib/python3.10/site-packages/networkx/algorithms/hybrid.py
ADDED
|
@@ -0,0 +1,196 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Provides functions for finding and testing for locally `(k, l)`-connected
|
| 3 |
+
graphs.
|
| 4 |
+
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import copy
|
| 8 |
+
|
| 9 |
+
import networkx as nx
|
| 10 |
+
|
| 11 |
+
__all__ = ["kl_connected_subgraph", "is_kl_connected"]
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@nx._dispatchable(returns_graph=True)
def kl_connected_subgraph(G, k, l, low_memory=False, same_as_graph=False):
    """Returns the maximum locally `(k, l)`-connected subgraph of `G`.

    A graph is locally `(k, l)`-connected if for each edge `(u, v)` in the
    graph there are at least `l` edge-disjoint paths of length at most `k`
    joining `u` to `v`.

    Parameters
    ----------
    G : NetworkX graph
        The graph in which to find a maximum locally `(k, l)`-connected
        subgraph.

    k : integer
        The maximum length of paths to consider. A higher number means a looser
        connectivity requirement.

    l : integer
        The number of edge-disjoint paths. A higher number means a stricter
        connectivity requirement.

    low_memory : bool
        If this is True, this function uses an algorithm that uses slightly
        more time but less memory.

    same_as_graph : bool
        If True then return a tuple of the form `(H, is_same)`,
        where `H` is the maximum locally `(k, l)`-connected subgraph and
        `is_same` is a Boolean representing whether `G` is locally `(k,
        l)`-connected (and hence, whether `H` is simply a copy of the input
        graph `G`).

    Returns
    -------
    NetworkX graph or two-tuple
        If `same_as_graph` is True, then this function returns a
        two-tuple as described above. Otherwise, it returns only the maximum
        locally `(k, l)`-connected subgraph.

    See also
    --------
    is_kl_connected

    References
    ----------
    .. [1] Chung, Fan and Linyuan Lu. "The Small World Phenomenon in Hybrid
       Power Law Graphs." *Complex Networks*. Springer Berlin Heidelberg,
       2004. 89--104.

    """
    H = copy.deepcopy(G)  # subgraph we construct by removing from G

    graphOK = True
    deleted_some = True  # hack to start off the while loop
    # Keep sweeping over the remaining edges until a full pass removes
    # nothing: deleting one edge can invalidate others, so a fixed point
    # is required.
    while deleted_some:
        deleted_some = False
        # We use `for edge in list(H.edges()):` instead of
        # `for edge in H.edges():` because we edit the graph `H` in
        # the loop. Hence using an iterator will result in
        # `RuntimeError: dictionary changed size during iteration`
        for edge in list(H.edges()):
            (u, v) = edge
            # Get copy of graph needed for this search
            if low_memory:
                # Restrict the search to the k-step neighborhood of {u, v};
                # no path of length <= k between u and v can leave it.
                verts = {u, v}
                for i in range(k):
                    for w in verts.copy():
                        verts.update(G[w])
                G2 = G.subgraph(verts).copy()
            else:
                G2 = copy.deepcopy(G)
            ###
            path = [u, v]
            cnt = 0
            accept = 0
            # Count edge-disjoint u-v paths: each found path has its edges
            # removed from G2 before searching for the next one.
            while path:
                cnt += 1  # Found a path
                if cnt >= l:
                    accept = 1
                    break
                # record edges along this graph
                prev = u
                for w in path:
                    if prev != w:
                        G2.remove_edge(prev, w)
                    prev = w
                # path = shortest_path(G2, u, v, k) # ??? should "Cutoff" be k+1?
                try:
                    path = nx.shortest_path(G2, u, v)  # ??? should "Cutoff" be k+1?
                except nx.NetworkXNoPath:
                    path = False
            # No Other Paths
            if accept == 0:
                H.remove_edge(u, v)
                deleted_some = True
                if graphOK:
                    graphOK = False
    # We looked through all edges and removed none of them.
    # So, H is the maximal (k,l)-connected subgraph of G
    if same_as_graph:
        return (H, graphOK)
    return H
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
@nx._dispatchable
def is_kl_connected(G, k, l, low_memory=False):
    """Returns True if and only if `G` is locally `(k, l)`-connected.

    A graph is locally `(k, l)`-connected if for each edge `(u, v)` in the
    graph there are at least `l` edge-disjoint paths of length at most `k`
    joining `u` to `v`.

    Parameters
    ----------
    G : NetworkX graph
        The graph to test for local `(k, l)`-connectedness.

    k : integer
        The maximum length of paths to consider. A higher number means a looser
        connectivity requirement.

    l : integer
        The number of edge-disjoint paths. A higher number means a stricter
        connectivity requirement.

    low_memory : bool
        If this is True, this function uses an algorithm that uses slightly
        more time but less memory.

    Returns
    -------
    bool
        Whether the graph is locally `(k, l)`-connected subgraph.

    See also
    --------
    kl_connected_subgraph

    References
    ----------
    .. [1] Chung, Fan and Linyuan Lu. "The Small World Phenomenon in Hybrid
       Power Law Graphs." *Complex Networks*. Springer Berlin Heidelberg,
       2004. 89--104.

    """
    graphOK = True
    for edge in G.edges():
        (u, v) = edge
        # Get copy of graph needed for this search
        if low_memory:
            # Restrict the search to the k-step neighborhood of {u, v};
            # no path of length <= k between u and v can leave it.
            verts = {u, v}
            for i in range(k):
                for w in verts.copy():
                    verts.update(G.neighbors(w))
            # BUG FIX: ``G.subgraph`` returns a frozen SubGraph view, so the
            # ``G2.remove_edge`` calls below raised NetworkXError whenever
            # l >= 2. Take a mutable copy, exactly as the sibling
            # kl_connected_subgraph already does.
            G2 = G.subgraph(verts).copy()
        else:
            G2 = copy.deepcopy(G)
        ###
        path = [u, v]
        cnt = 0
        accept = 0
        # Count edge-disjoint u-v paths: each found path has its edges
        # removed from G2 before searching for the next one.
        while path:
            cnt += 1  # Found a path
            if cnt >= l:
                accept = 1
                break
            # record edges along this graph
            prev = u
            for w in path:
                if w != prev:
                    G2.remove_edge(prev, w)
                prev = w
            # path = shortest_path(G2, u, v, k) # ??? should "Cutoff" be k+1?
            try:
                path = nx.shortest_path(G2, u, v)  # ??? should "Cutoff" be k+1?
            except nx.NetworkXNoPath:
                path = False
        # No Other Paths
        if accept == 0:
            graphOK = False
            break
    # return status
    return graphOK
|
phi4/lib/python3.10/site-packages/networkx/algorithms/lowest_common_ancestors.py
ADDED
|
@@ -0,0 +1,269 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Algorithms for finding the lowest common ancestor of trees and DAGs."""
|
| 2 |
+
|
| 3 |
+
from collections import defaultdict
|
| 4 |
+
from collections.abc import Mapping, Set
|
| 5 |
+
from itertools import combinations_with_replacement
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
from networkx.utils import UnionFind, arbitrary_element, not_implemented_for
|
| 9 |
+
|
| 10 |
+
__all__ = [
|
| 11 |
+
"all_pairs_lowest_common_ancestor",
|
| 12 |
+
"tree_all_pairs_lowest_common_ancestor",
|
| 13 |
+
"lowest_common_ancestor",
|
| 14 |
+
]
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
@not_implemented_for("undirected")
@nx._dispatchable
def all_pairs_lowest_common_ancestor(G, pairs=None):
    """Return the lowest common ancestor of all pairs or the provided pairs

    Parameters
    ----------
    G : NetworkX directed graph

    pairs : iterable of pairs of nodes, optional (default: all pairs)
        The pairs of nodes of interest.
        If None, will find the LCA of all pairs of nodes.

    Yields
    ------
    ((node1, node2), lca) : 2-tuple
        Where lca is least common ancestor of node1 and node2.
        Note that for the default case, the order of the node pair is not considered,
        e.g. you will not get both ``(a, b)`` and ``(b, a)``

    Raises
    ------
    NetworkXPointlessConcept
        If `G` is null.
    NetworkXError
        If `G` is not a DAG.

    Examples
    --------
    The default behavior is to yield the lowest common ancestor for all
    possible combinations of nodes in `G`, including self-pairings:

    >>> G = nx.DiGraph([(0, 1), (0, 3), (1, 2)])
    >>> dict(nx.all_pairs_lowest_common_ancestor(G))
    {(0, 0): 0, (0, 1): 0, (0, 3): 0, (0, 2): 0, (1, 1): 1, (1, 3): 0, (1, 2): 1, (3, 3): 3, (3, 2): 0, (2, 2): 2}

    The pairs argument can be used to limit the output to only the
    specified node pairings:

    >>> dict(nx.all_pairs_lowest_common_ancestor(G, pairs=[(1, 2), (2, 3)]))
    {(1, 2): 1, (2, 3): 0}

    Notes
    -----
    Only defined on non-null directed acyclic graphs.

    See Also
    --------
    lowest_common_ancestor
    """
    # Validation is done eagerly (before returning the generator) so that
    # callers get errors at call time rather than on first iteration.
    if not nx.is_directed_acyclic_graph(G):
        raise nx.NetworkXError("LCA only defined on directed acyclic graphs.")
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("LCA meaningless on null graphs.")

    if pairs is None:
        pairs = combinations_with_replacement(G, 2)
    else:
        # Convert iterator to iterable, if necessary. Trim duplicates.
        pairs = dict.fromkeys(pairs)
        # Verify that each of the nodes in the provided pairs is in G
        nodeset = set(G)
        for pair in pairs:
            if set(pair) - nodeset:
                raise nx.NodeNotFound(
                    f"Node(s) {set(pair) - nodeset} from pair {pair} not in G."
                )

    # Once input validation is done, construct the generator
    def generate_lca_from_pairs(G, pairs):
        # Maps node -> its ancestor set (including itself), memoized across
        # pairs so each node's ancestors are computed at most once.
        ancestor_cache = {}

        for v, w in pairs:
            if v not in ancestor_cache:
                ancestor_cache[v] = nx.ancestors(G, v)
                ancestor_cache[v].add(v)
            if w not in ancestor_cache:
                ancestor_cache[w] = nx.ancestors(G, w)
                ancestor_cache[w].add(w)

            common_ancestors = ancestor_cache[v] & ancestor_cache[w]

            if common_ancestors:
                # Start from an arbitrary common ancestor and repeatedly step
                # to a successor that is still a common ancestor; in a DAG
                # this walk terminates at a lowest common ancestor.
                common_ancestor = next(iter(common_ancestors))
                while True:
                    successor = None
                    for lower_ancestor in G.successors(common_ancestor):
                        if lower_ancestor in common_ancestors:
                            successor = lower_ancestor
                            break
                    if successor is None:
                        break
                    common_ancestor = successor
                yield ((v, w), common_ancestor)

    return generate_lca_from_pairs(G, pairs)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
@not_implemented_for("undirected")
@nx._dispatchable
def lowest_common_ancestor(G, node1, node2, default=None):
    """Compute the lowest common ancestor of the given pair of nodes.

    Parameters
    ----------
    G : NetworkX directed graph

    node1, node2 : nodes in the graph.

    default : object
        Returned if no common ancestor between `node1` and `node2`

    Returns
    -------
    The lowest common ancestor of node1 and node2,
    or default if they have no common ancestors.

    Examples
    --------
    >>> G = nx.DiGraph()
    >>> nx.add_path(G, (0, 1, 2, 3))
    >>> nx.add_path(G, (0, 4, 3))
    >>> nx.lowest_common_ancestor(G, 2, 4)
    0

    See Also
    --------
    all_pairs_lowest_common_ancestor"""

    # Delegate to the all-pairs routine with a single pair; it yields at
    # most one ((node1, node2), lca) entry.
    results = list(all_pairs_lowest_common_ancestor(G, pairs=[(node1, node2)]))
    if not results:
        return default
    assert len(results) == 1
    ((_, _), ancestor) = results[0]
    return ancestor
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
@not_implemented_for("undirected")
@nx._dispatchable
def tree_all_pairs_lowest_common_ancestor(G, root=None, pairs=None):
    r"""Yield the lowest common ancestor for sets of pairs in a tree.

    Parameters
    ----------
    G : NetworkX directed graph (must be a tree)

    root : node, optional (default: None)
        The root of the subtree to operate on.
        If None, assume the entire graph has exactly one source and use that.

    pairs : iterable or iterator of pairs of nodes, optional (default: None)
        The pairs of interest. If None, Defaults to all pairs of nodes
        under `root` that have a lowest common ancestor.

    Returns
    -------
    lcas : generator of tuples `((u, v), lca)` where `u` and `v` are nodes
        in `pairs` and `lca` is their lowest common ancestor.

    Examples
    --------
    >>> import pprint
    >>> G = nx.DiGraph([(1, 3), (2, 4), (1, 2)])
    >>> pprint.pprint(dict(nx.tree_all_pairs_lowest_common_ancestor(G)))
    {(1, 1): 1,
     (2, 1): 1,
     (2, 2): 2,
     (3, 1): 1,
     (3, 2): 1,
     (3, 3): 3,
     (3, 4): 1,
     (4, 1): 1,
     (4, 2): 2,
     (4, 4): 4}

    We can also use `pairs` argument to specify the pairs of nodes for which we
    want to compute lowest common ancestors. Here is an example:

    >>> dict(nx.tree_all_pairs_lowest_common_ancestor(G, pairs=[(1, 4), (2, 3)]))
    {(2, 3): 1, (1, 4): 1}

    Notes
    -----
    Only defined on non-null trees represented with directed edges from
    parents to children. Uses Tarjan's off-line lowest-common-ancestors
    algorithm. Runs in time $O(4 \times (V + E + P))$ time, where 4 is the largest
    value of the inverse Ackermann function likely to ever come up in actual
    use, and $P$ is the number of pairs requested (or $V^2$ if all are needed).

    Tarjan, R. E. (1979), "Applications of path compression on balanced trees",
    Journal of the ACM 26 (4): 690-715, doi:10.1145/322154.322161.

    See Also
    --------
    all_pairs_lowest_common_ancestor: similar routine for general DAGs
    lowest_common_ancestor: just a single pair for general DAGs
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("LCA meaningless on null graphs.")

    # Index the requested pairs from both endpoints so that, while walking
    # the tree, every pair touching the current node is found in O(1).
    if pairs is not None:
        pair_dict = defaultdict(set)
        # See note on all_pairs_lowest_common_ancestor.
        if not isinstance(pairs, Mapping | Set):
            pairs = set(pairs)
        for u, v in pairs:
            for endpoint in (u, v):
                if endpoint not in G:
                    msg = f"The node {str(endpoint)} is not in the digraph."
                    raise nx.NodeNotFound(msg)
            pair_dict[u].add(v)
            pair_dict[v].add(u)

    # If no root was given, locate the unique in-degree-0 node and use it.
    # Raise if there is no such node (cycle) or more than one (forest).
    # A node with more than one distinct predecessor also means G is not a
    # tree.
    if root is None:
        for candidate, indeg in G.in_degree:
            if indeg == 0:
                if root is not None:
                    msg = "No root specified and tree has multiple sources."
                    raise nx.NetworkXError(msg)
                root = candidate
            # indeg > 1 alone is inconclusive for MultiDiGraphs, where
            # parallel edges from a single parent inflate the in-degree.
            elif indeg > 1 and len(G.pred[candidate]) > 1:
                msg = "Tree LCA only defined on trees; use DAG routine."
                raise nx.NetworkXError(msg)
    if root is None:
        raise nx.NetworkXError("Graph contains a cycle.")

    # Iterative implementation of Tarjan's offline lca algorithm
    # as described in CLRS on page 521 (2nd edition)/page 584 (3rd edition)
    dsu = UnionFind()
    ancestors = {node: dsu[node] for node in G}

    visited = defaultdict(bool)
    for node in nx.dfs_postorder_nodes(G, root):
        visited[node] = True
        candidates = G if pairs is None else pair_dict[node]
        for other in candidates:
            if not visited[other]:
                continue
            # If the user requested both directions of a pair, give it.
            # Otherwise, just give one.
            if pairs is not None and (node, other) in pairs:
                yield (node, other), ancestors[dsu[other]]
            if pairs is None or (other, node) in pairs:
                yield (other, node), ancestors[dsu[other]]
        if node != root:
            parent = arbitrary_element(G.pred[node])
            dsu.union(parent, node)
            ancestors[dsu[parent]] = parent
|