Add files using upload-large-folder tool
Browse files — this view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
- llava/lib/python3.10/site-packages/setuptools/_entry_points.py +90 -0
- llava/lib/python3.10/site-packages/setuptools/_normalization.py +144 -0
- llava/lib/python3.10/site-packages/setuptools/build_meta.py +560 -0
- llava/lib/python3.10/site-packages/setuptools/cli-64.exe +0 -0
- llava/lib/python3.10/site-packages/setuptools/config/NOTICE +10 -0
- llava/lib/python3.10/site-packages/setuptools/config/__init__.py +43 -0
- llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__init__.py +34 -0
- llava/lib/python3.10/site-packages/setuptools/config/distutils.schema.json +26 -0
- llava/lib/python3.10/site-packages/setuptools/config/pyprojecttoml.py +468 -0
- llava/lib/python3.10/site-packages/setuptools/config/setupcfg.py +780 -0
- llava/lib/python3.10/site-packages/setuptools/config/setuptools.schema.json +433 -0
- llava/lib/python3.10/site-packages/setuptools/depends.py +185 -0
- llava/lib/python3.10/site-packages/setuptools/dist.py +1004 -0
- llava/lib/python3.10/site-packages/setuptools/gui.exe +0 -0
- llava/lib/python3.10/site-packages/setuptools/installer.py +150 -0
- llava/lib/python3.10/site-packages/setuptools/logging.py +40 -0
- llava/lib/python3.10/site-packages/setuptools/namespaces.py +106 -0
- llava/lib/python3.10/site-packages/setuptools/sandbox.py +536 -0
- llava/lib/python3.10/site-packages/setuptools/warnings.py +110 -0
- llava/lib/python3.10/site-packages/setuptools/windows_support.py +30 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/atlas.dat.gz +3 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/cographs.py +68 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/ego.py +66 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/expanders.py +474 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/geometric.py +1048 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/joint_degree_seq.py +664 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/line.py +500 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/random_graphs.py +1400 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/small.py +993 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__init__.py +0 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_atlas.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_classic.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_cographs.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_duplication.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_internet_as_graphs.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_intersection.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_interval_graph.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_lattice.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_random_graphs.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_small.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_stochastic.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_time_series.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/test_atlas.py +75 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/test_classic.py +640 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/test_cographs.py +18 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/test_community.py +362 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/test_degree_seq.py +230 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/test_directed.py +163 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/test_duplication.py +103 -0
- minigpt2/lib/python3.10/site-packages/networkx/generators/tests/test_ego.py +39 -0
llava/lib/python3.10/site-packages/setuptools/_entry_points.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import itertools
|
| 3 |
+
import operator
|
| 4 |
+
|
| 5 |
+
from jaraco.functools import pass_none
|
| 6 |
+
from jaraco.text import yield_lines
|
| 7 |
+
from more_itertools import consume
|
| 8 |
+
|
| 9 |
+
from ._importlib import metadata
|
| 10 |
+
from ._itertools import ensure_unique
|
| 11 |
+
from .errors import OptionError
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def ensure_valid(ep):
    """
    Touch one of the entry point's dynamic properties so that a malformed
    value fails loudly here, with a helpful message, instead of later.
    """
    try:
        ep.extras
    except (AttributeError, AssertionError) as exc:
        # Why both? See https://github.com/python/importlib_metadata/issues/488
        msg = (
            f"Problems to parse {ep}.\nPlease ensure entry-point follows the spec: "
            "https://packaging.python.org/en/latest/specifications/entry-points/"
        )
        raise OptionError(msg) from exc
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def load_group(value, group):
    """
    Coerce *value* (one entry point or a series of them) into EntryPoint
    objects belonging to *group*.
    """
    body = '\n'.join(yield_lines(value))
    text = f'[{group}]\n' + body
    return metadata.EntryPoints._from_text(text)
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def by_group_and_name(ep):
    """Uniqueness key for an entry point: its (group, name) pair."""
    return (ep.group, ep.name)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def validate(eps: metadata.EntryPoints):
    """
    Ensure entry points are unique by group and name and validate each.
    """
    deduped = ensure_unique(eps, key=by_group_and_name)
    consume(map(ensure_valid, deduped))
    return eps
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
@functools.singledispatch
def load(eps):
    """
    Given a Distribution.entry_points, produce EntryPoints.
    """
    parsed = [
        ep
        for group, value in eps.items()
        for ep in load_group(value, group)
    ]
    return validate(metadata.EntryPoints(parsed))
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
@load.register(str)
def _(eps):
    r"""
    >>> ep, = load('[console_scripts]\nfoo=bar')
    >>> ep.group
    'console_scripts'
    >>> ep.name
    'foo'
    >>> ep.value
    'bar'
    """
    parsed = metadata.EntryPoints._from_text(eps)
    # Re-wrap so validation operates on a fresh EntryPoints collection.
    return validate(metadata.EntryPoints(parsed))
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
# A missing entry_points attribute (None) passes through unchanged.
load.register(type(None), lambda x: x)
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
@pass_none
def render(eps: metadata.EntryPoints):
    """Serialize entry points as INI-style text, one section per group."""
    key = operator.attrgetter('group')
    sections = itertools.groupby(sorted(eps, key=key), key)
    return '\n'.join(
        f'[{group}]\n{render_items(items)}\n' for group, items in sections
    )
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
def render_items(eps):
    """Render sorted ``name = value`` lines for the given entry points."""
    lines = [f'{ep.name} = {ep.value}' for ep in sorted(eps)]
    return '\n'.join(lines)
|
llava/lib/python3.10/site-packages/setuptools/_normalization.py
ADDED
|
@@ -0,0 +1,144 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Helpers for normalization as expected in wheel/sdist/module file names
|
| 3 |
+
and core metadata
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import re
|
| 7 |
+
|
| 8 |
+
import packaging
|
| 9 |
+
|
| 10 |
+
# https://packaging.python.org/en/latest/specifications/core-metadata/#name
# All patterns are compiled case-insensitively (re.I).
_VALID_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.I)
# Any run of characters not permitted in a project name.
_UNSAFE_NAME_CHARS = re.compile(r"[^A-Z0-9._-]+", re.I)
# Any run of non-alphanumeric characters (used for extras / local versions).
_NON_ALPHANUMERIC = re.compile(r"[^A-Z0-9]+", re.I)
# Leading PEP 440-ish prefix: optional "v", optional epoch, dotted release.
_PEP440_FALLBACK = re.compile(r"^v?(?P<safe>(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I)
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def safe_identifier(name: str) -> str:
    """Make a string safe to be used as Python identifier.
    >>> safe_identifier("12abc")
    '_12abc'
    >>> safe_identifier("__editable__.myns.pkg-78.9.3_local")
    '__editable___myns_pkg_78_9_3_local'
    """
    # Replace every non-word character, and prefix a leading digit, with "_".
    candidate = re.sub(r'\W|^(?=\d)', '_', name)
    assert candidate.isidentifier()
    return candidate
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def safe_name(component: str) -> str:
    """Escape a component used as a project name according to Core Metadata.
    >>> safe_name("hello world")
    'hello-world'
    >>> safe_name("hello?world")
    'hello-world'
    >>> safe_name("hello_world")
    'hello_world'
    """
    # See pkg_resources.safe_name
    # Each run of disallowed characters collapses into a single dash.
    return _UNSAFE_NAME_CHARS.sub("-", component)
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def safe_version(version: str) -> str:
    """Convert an arbitrary string into a valid version string.
    Can still raise an ``InvalidVersion`` exception.
    To avoid exceptions use ``best_effort_version``.
    >>> safe_version("1988 12 25")
    '1988.12.25'
    >>> safe_version("v0.2.1")
    '0.2.1'
    >>> safe_version("v0.2?beta")
    '0.2b0'
    >>> safe_version("v0.2 beta")
    '0.2b0'
    >>> safe_version("ubuntu lts")
    Traceback (most recent call last):
    ...
    packaging.version.InvalidVersion: Invalid version: 'ubuntu.lts'
    """
    candidate = version.replace(' ', '.')
    try:
        return str(packaging.version.Version(candidate))
    except packaging.version.InvalidVersion:
        # Second chance: collapse unsafe characters into dashes and retry.
        sanitized = _UNSAFE_NAME_CHARS.sub("-", candidate)
        return str(packaging.version.Version(sanitized))
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def best_effort_version(version: str) -> str:
    """Convert an arbitrary string into a version-like string.
    Fallback when ``safe_version`` is not safe enough.
    >>> best_effort_version("v0.2 beta")
    '0.2b0'
    >>> best_effort_version("ubuntu lts")
    '0.dev0+sanitized.ubuntu.lts'
    >>> best_effort_version("0.23ubuntu1")
    '0.23.dev0+sanitized.ubuntu1'
    >>> best_effort_version("0.23-")
    '0.23.dev0+sanitized'
    >>> best_effort_version("0.-_")
    '0.dev0+sanitized'
    >>> best_effort_version("42.+?1")
    '42.dev0+sanitized.1'
    """
    # See pkg_resources._forgiving_version
    try:
        return safe_version(version)
    except packaging.version.InvalidVersion:
        normalized = version.replace(' ', '.')
        match = _PEP440_FALLBACK.search(normalized)
        # Keep any leading PEP 440-compatible prefix; the remainder is
        # demoted into a "+sanitized..." local-version segment.
        safe, rest = "0", version
        if match:
            safe = match["safe"]
            rest = normalized[len(safe):]
        safe_rest = _NON_ALPHANUMERIC.sub(".", rest).strip(".")
        local = f"sanitized.{safe_rest}".strip(".")
        return safe_version(f"{safe}.dev0+{local}")
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
def safe_extra(extra: str) -> str:
    """Normalize extra name according to PEP 685
    >>> safe_extra("_FrIeNdLy-._.-bArD")
    'friendly-bard'
    >>> safe_extra("FrIeNdLy-._.-bArD__._-")
    'friendly-bard'
    """
    # Runs of non-alphanumerics become a single dash; result is lowercased
    # with dangling dashes trimmed.
    return _NON_ALPHANUMERIC.sub("-", extra).strip("-").lower()
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def filename_component(value: str) -> str:
    """Normalize each component of a filename (e.g. distribution/version part of wheel)
    Note: ``value`` needs to be already normalized.
    >>> filename_component("my-pkg")
    'my_pkg'
    """
    underscored = value.replace("-", "_")
    return underscored.strip("_")
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def filename_component_broken(value: str) -> str:
    """
    Produce the incorrect filename component for compatibility.

    See pypa/setuptools#4167 for detailed analysis.

    TODO: replace this with filename_component after pip 24 is
    nearly-ubiquitous.

    >>> filename_component_broken('foo_bar-baz')
    'foo-bar-baz'
    """
    # Historical (wrong) behavior: underscores become dashes, nothing else.
    return value.replace('_', '-')
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
def safer_name(value: str) -> str:
    """Like ``safe_name`` but can be used as filename component for wheel"""
    # See bdist_wheel.safer_name
    # Dashes produced by safe_name are converted to underscores for filenames.
    return filename_component(safe_name(value))
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def safer_best_effort_version(value: str) -> str:
    """Like ``best_effort_version`` but can be used as filename component for wheel"""
    # See bdist_wheel.safer_version
    # TODO: Replace with only safe_version in the future (no need for best effort)
    return filename_component(best_effort_version(value))
|
llava/lib/python3.10/site-packages/setuptools/build_meta.py
ADDED
|
@@ -0,0 +1,560 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""A PEP 517 interface to setuptools
|
| 2 |
+
|
| 3 |
+
Previously, when a user or a command line tool (let's call it a "frontend")
|
| 4 |
+
needed to make a request of setuptools to take a certain action, for
|
| 5 |
+
example, generating a list of installation requirements, the frontend
|
| 6 |
+
would call "setup.py egg_info" or "setup.py bdist_wheel" on the command line.
|
| 7 |
+
|
| 8 |
+
PEP 517 defines a different method of interfacing with setuptools. Rather
|
| 9 |
+
than calling "setup.py" directly, the frontend should:
|
| 10 |
+
|
| 11 |
+
1. Set the current directory to the directory with a setup.py file
|
| 12 |
+
2. Import this module into a safe python interpreter (one in which
|
| 13 |
+
setuptools can potentially set global variables or crash hard).
|
| 14 |
+
3. Call one of the functions defined in PEP 517.
|
| 15 |
+
|
| 16 |
+
What each function does is defined in PEP 517. However, here is a "casual"
|
| 17 |
+
definition of the functions (this definition should not be relied on for
|
| 18 |
+
bug reports or API stability):
|
| 19 |
+
|
| 20 |
+
- `build_wheel`: build a wheel in the folder and return the basename
|
| 21 |
+
- `get_requires_for_build_wheel`: get the `setup_requires` to build
|
| 22 |
+
- `prepare_metadata_for_build_wheel`: get the `install_requires`
|
| 23 |
+
- `build_sdist`: build an sdist in the folder and return the basename
|
| 24 |
+
- `get_requires_for_build_sdist`: get the `setup_requires` to build
|
| 25 |
+
|
| 26 |
+
Again, this is not a formal definition! Just a "taste" of the module.
|
| 27 |
+
"""
|
| 28 |
+
|
| 29 |
+
from __future__ import annotations
|
| 30 |
+
|
| 31 |
+
import contextlib
|
| 32 |
+
import io
|
| 33 |
+
import os
|
| 34 |
+
import shlex
|
| 35 |
+
import shutil
|
| 36 |
+
import sys
|
| 37 |
+
import tempfile
|
| 38 |
+
import tokenize
|
| 39 |
+
import warnings
|
| 40 |
+
from collections.abc import Iterable, Iterator, Mapping
|
| 41 |
+
from pathlib import Path
|
| 42 |
+
from typing import TYPE_CHECKING, Union
|
| 43 |
+
|
| 44 |
+
import setuptools
|
| 45 |
+
|
| 46 |
+
from . import errors
|
| 47 |
+
from ._path import StrPath, same_path
|
| 48 |
+
from ._reqs import parse_strings
|
| 49 |
+
from .warnings import SetuptoolsDeprecationWarning
|
| 50 |
+
|
| 51 |
+
import distutils
|
| 52 |
+
from distutils.util import strtobool
|
| 53 |
+
|
| 54 |
+
if TYPE_CHECKING:
|
| 55 |
+
from typing_extensions import TypeAlias
|
| 56 |
+
|
| 57 |
+
__all__ = [
|
| 58 |
+
'get_requires_for_build_sdist',
|
| 59 |
+
'get_requires_for_build_wheel',
|
| 60 |
+
'prepare_metadata_for_build_wheel',
|
| 61 |
+
'build_wheel',
|
| 62 |
+
'build_sdist',
|
| 63 |
+
'get_requires_for_build_editable',
|
| 64 |
+
'prepare_metadata_for_build_editable',
|
| 65 |
+
'build_editable',
|
| 66 |
+
'__legacy__',
|
| 67 |
+
'SetupRequirementsError',
|
| 68 |
+
]
|
| 69 |
+
|
| 70 |
+
# Opt-in feature flags supplied via environment, compared case-insensitively.
SETUPTOOLS_ENABLE_FEATURES = os.getenv("SETUPTOOLS_ENABLE_FEATURES", "").lower()
# Accept both "legacy-editable" and "legacy_editable" spellings.
LEGACY_EDITABLE = "legacy-editable" in SETUPTOOLS_ENABLE_FEATURES.replace("_", "-")
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
class SetupRequirementsError(BaseException):
    """Raised to interrupt ``setup()`` once build requirements are known.

    Derives from ``BaseException`` (not ``Exception``) so that broad
    ``except Exception`` handlers inside a setup script cannot swallow it.
    """

    def __init__(self, specifiers) -> None:
        # Stash the raw requirement specifiers for the caller to collect.
        self.specifiers = specifiers
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
class Distribution(setuptools.dist.Distribution):
    # Variant of setuptools' Distribution used only while collecting build
    # requirements: any attempt to install setup_requires is intercepted.

    def fetch_build_eggs(self, specifiers):
        # Instead of downloading eggs, surface the requested specifiers to
        # the PEP 517 frontend by aborting with SetupRequirementsError.
        specifier_list = list(parse_strings(specifiers))

        raise SetupRequirementsError(specifier_list)

    @classmethod
    @contextlib.contextmanager
    def patch(cls):
        """
        Replace
        distutils.dist.Distribution with this class
        for the duration of this context.
        """
        orig = distutils.core.Distribution
        distutils.core.Distribution = cls  # type: ignore[misc] # monkeypatching
        try:
            yield
        finally:
            # Always restore the original class, even if setup() raised.
            distutils.core.Distribution = orig  # type: ignore[misc] # monkeypatching
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
@contextlib.contextmanager
def no_install_setup_requires():
    """Temporarily disable installing setup_requires

    Under PEP 517, the backend reports build dependencies to the frontend,
    and the frontend is responsible for ensuring they're installed.
    So setuptools (acting as a backend) should not try to install them.
    """
    orig = setuptools._install_setup_requires
    # Swap in a no-op so setup() skips the install step entirely.
    setuptools._install_setup_requires = lambda attrs: None
    try:
        yield
    finally:
        # Restore the real installer even if the body raised.
        setuptools._install_setup_requires = orig
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
def _get_immediate_subdirectories(a_dir):
|
| 118 |
+
return [
|
| 119 |
+
name for name in os.listdir(a_dir) if os.path.isdir(os.path.join(a_dir, name))
|
| 120 |
+
]
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
def _file_with_extension(directory: StrPath, extension: str | tuple[str, ...]):
|
| 124 |
+
matching = (f for f in os.listdir(directory) if f.endswith(extension))
|
| 125 |
+
try:
|
| 126 |
+
(file,) = matching
|
| 127 |
+
except ValueError:
|
| 128 |
+
raise ValueError(
|
| 129 |
+
'No distribution was found. Ensure that `setup.py` '
|
| 130 |
+
'is not empty and that it calls `setup()`.'
|
| 131 |
+
) from None
|
| 132 |
+
return file
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def _open_setup_script(setup_script):
|
| 136 |
+
if not os.path.exists(setup_script):
|
| 137 |
+
# Supply a default setup.py
|
| 138 |
+
return io.StringIO("from setuptools import setup; setup()")
|
| 139 |
+
|
| 140 |
+
return tokenize.open(setup_script)
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
@contextlib.contextmanager
def suppress_known_deprecation():
    # The backend intentionally drives setup.py itself; silence the known
    # "setup.py install is deprecated" warning while doing so.
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', 'setup.py install is deprecated')
        yield
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
# Shape of the PEP 517 ``config_settings`` argument as received from frontends.
_ConfigSettings: TypeAlias = Union[Mapping[str, Union[str, list[str], None]], None]
"""
Currently the user can run::

    pip install -e . --config-settings key=value
    python -m build -C--key=value -C key=value

- pip will pass both key and value as strings and overwriting repeated keys
  (pypa/pip#11059).
- build will accumulate values associated with repeated keys in a list.
  It will also accept keys with no associated value.
  This means that an option passed by build can be ``str | list[str] | None``.
- PEP 517 specifies that ``config_settings`` is an optional dict.
"""
"""
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
class _ConfigSettingsTranslator:
    """Translate ``config_settings`` into distutils-style command arguments.
    Only a limited number of options is currently supported.
    """

    # See pypa/setuptools#1928 pypa/setuptools#2491

    def _get_config(self, key: str, config_settings: _ConfigSettings) -> list[str]:
        """
        Get the value of a specific key in ``config_settings`` as a list of strings.

        >>> fn = _ConfigSettingsTranslator()._get_config
        >>> fn("--global-option", None)
        []
        >>> fn("--global-option", {})
        []
        >>> fn("--global-option", {'--global-option': 'foo'})
        ['foo']
        >>> fn("--global-option", {'--global-option': ['foo']})
        ['foo']
        >>> fn("--global-option", {'--global-option': 'foo'})
        ['foo']
        >>> fn("--global-option", {'--global-option': 'foo bar'})
        ['foo', 'bar']
        """
        cfg = config_settings or {}
        opts = cfg.get(key) or []
        # A plain string is shell-split; a list is passed through as-is.
        return shlex.split(opts) if isinstance(opts, str) else opts

    def _global_args(self, config_settings: _ConfigSettings) -> Iterator[str]:
        """
        Let the user specify ``verbose`` or ``quiet`` + escape hatch via
        ``--global-option``.
        Note: ``-v``, ``-vv``, ``-vvv`` have similar effects in setuptools,
        so we just have to cover the basic scenario ``-v``.

        >>> fn = _ConfigSettingsTranslator()._global_args
        >>> list(fn(None))
        []
        >>> list(fn({"verbose": "False"}))
        ['-q']
        >>> list(fn({"verbose": "1"}))
        ['-v']
        >>> list(fn({"--verbose": None}))
        ['-v']
        >>> list(fn({"verbose": "true", "--global-option": "-q --no-user-cfg"}))
        ['-v', '-q', '--no-user-cfg']
        >>> list(fn({"--quiet": None}))
        ['-q']
        """
        cfg = config_settings or {}
        falsey = {"false", "no", "0", "off"}
        if "verbose" in cfg or "--verbose" in cfg:
            # A key present with no value (None) counts as enabled ("1").
            level = str(cfg.get("verbose") or cfg.get("--verbose") or "1")
            yield ("-q" if level.lower() in falsey else "-v")
        if "quiet" in cfg or "--quiet" in cfg:
            level = str(cfg.get("quiet") or cfg.get("--quiet") or "1")
            yield ("-v" if level.lower() in falsey else "-q")

        yield from self._get_config("--global-option", config_settings)

    def __dist_info_args(self, config_settings: _ConfigSettings) -> Iterator[str]:
        """
        The ``dist_info`` command accepts ``tag-date`` and ``tag-build``.

        .. warning::
           We cannot use this yet as it requires the ``sdist`` and ``bdist_wheel``
           commands run in ``build_sdist`` and ``build_wheel`` to reuse the egg-info
           directory created in ``prepare_metadata_for_build_wheel``.

        >>> fn = _ConfigSettingsTranslator()._ConfigSettingsTranslator__dist_info_args
        >>> list(fn(None))
        []
        >>> list(fn({"tag-date": "False"}))
        ['--no-date']
        >>> list(fn({"tag-date": None}))
        ['--no-date']
        >>> list(fn({"tag-date": "true", "tag-build": ".a"}))
        ['--tag-date', '--tag-build', '.a']
        """
        cfg = config_settings or {}
        if "tag-date" in cfg:
            val = strtobool(str(cfg["tag-date"] or "false"))
            yield ("--tag-date" if val else "--no-date")
        if "tag-build" in cfg:
            yield from ["--tag-build", str(cfg["tag-build"])]

    def _editable_args(self, config_settings: _ConfigSettings) -> Iterator[str]:
        """
        The ``editable_wheel`` command accepts ``editable-mode=strict``.

        >>> fn = _ConfigSettingsTranslator()._editable_args
        >>> list(fn(None))
        []
        >>> list(fn({"editable-mode": "strict"}))
        ['--mode', 'strict']
        """
        cfg = config_settings or {}
        # Accept both dash and underscore spellings of the key.
        mode = cfg.get("editable-mode") or cfg.get("editable_mode")
        if not mode:
            return
        yield from ["--mode", str(mode)]

    def _arbitrary_args(self, config_settings: _ConfigSettings) -> Iterator[str]:
        """
        Users may expect to pass arbitrary lists of arguments to a command
        via "--global-option" (example provided in PEP 517 of a "escape hatch").

        >>> fn = _ConfigSettingsTranslator()._arbitrary_args
        >>> list(fn(None))
        []
        >>> list(fn({}))
        []
        >>> list(fn({'--build-option': 'foo'}))
        ['foo']
        >>> list(fn({'--build-option': ['foo']}))
        ['foo']
        >>> list(fn({'--build-option': 'foo'}))
        ['foo']
        >>> list(fn({'--build-option': 'foo bar'}))
        ['foo', 'bar']
        >>> list(fn({'--global-option': 'foo'}))
        []
        """
        yield from self._get_config("--build-option", config_settings)
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
class _BuildMetaBackend(_ConfigSettingsTranslator):
|
| 294 |
+
    def _get_build_requires(
        self, config_settings: _ConfigSettings, requirements: list[str]
    ):
        """Run ``setup.py egg_info`` under the patched Distribution and append
        any ``setup_requires`` specifiers it declares to *requirements*.
        """
        # Rewrite argv so the setup script believes it was invoked as
        # ``setup.py [global opts] egg_info``.
        sys.argv = [
            *sys.argv[:1],
            *self._global_args(config_settings),
            "egg_info",
        ]
        try:
            with Distribution.patch():
                self.run_setup()
        except SetupRequirementsError as e:
            # The patched Distribution aborts setup() by raising; its payload
            # is the list of setup_requires specifiers.
            requirements += e.specifiers

        return requirements
|
| 309 |
+
|
| 310 |
+
    def run_setup(self, setup_script: str = 'setup.py'):
        """Execute *setup_script* in-process, as if it were run directly."""
        # Note that we can reuse our build directory between calls
        # Correctness comes first, then optimization later
        __file__ = os.path.abspath(setup_script)
        __name__ = '__main__'

        with _open_setup_script(__file__) as f:
            code = f.read().replace(r'\r\n', r'\n')

        try:
            # Use this frame's locals as the globals so the script sees the
            # __file__ / __name__ bindings set above.
            exec(code, locals())
        except SystemExit as e:
            if e.code:
                raise
            # We ignore exit code indicating success
            SetuptoolsDeprecationWarning.emit(
                "Running `setup.py` directly as CLI tool is deprecated.",
                "Please avoid using `sys.exit(0)` or similar statements "
                "that don't fit in the paradigm of a configuration file.",
                see_url="https://blog.ganssle.io/articles/2021/10/"
                "setup-py-deprecated.html",
            )
|
| 332 |
+
|
| 333 |
+
def get_requires_for_build_wheel(self, config_settings: _ConfigSettings = None):
|
| 334 |
+
return self._get_build_requires(config_settings, requirements=[])
|
| 335 |
+
|
| 336 |
+
def get_requires_for_build_sdist(self, config_settings: _ConfigSettings = None):
|
| 337 |
+
return self._get_build_requires(config_settings, requirements=[])
|
| 338 |
+
|
| 339 |
+
def _bubble_up_info_directory(
|
| 340 |
+
self, metadata_directory: StrPath, suffix: str
|
| 341 |
+
) -> str:
|
| 342 |
+
"""
|
| 343 |
+
PEP 517 requires that the .dist-info directory be placed in the
|
| 344 |
+
metadata_directory. To comply, we MUST copy the directory to the root.
|
| 345 |
+
|
| 346 |
+
Returns the basename of the info directory, e.g. `proj-0.0.0.dist-info`.
|
| 347 |
+
"""
|
| 348 |
+
info_dir = self._find_info_directory(metadata_directory, suffix)
|
| 349 |
+
if not same_path(info_dir.parent, metadata_directory):
|
| 350 |
+
shutil.move(str(info_dir), metadata_directory)
|
| 351 |
+
# PEP 517 allow other files and dirs to exist in metadata_directory
|
| 352 |
+
return info_dir.name
|
| 353 |
+
|
| 354 |
+
def _find_info_directory(self, metadata_directory: StrPath, suffix: str) -> Path:
|
| 355 |
+
for parent, dirs, _ in os.walk(metadata_directory):
|
| 356 |
+
candidates = [f for f in dirs if f.endswith(suffix)]
|
| 357 |
+
|
| 358 |
+
if len(candidates) != 0 or len(dirs) != 1:
|
| 359 |
+
assert len(candidates) == 1, f"Multiple {suffix} directories found"
|
| 360 |
+
return Path(parent, candidates[0])
|
| 361 |
+
|
| 362 |
+
msg = f"No {suffix} directory found in {metadata_directory}"
|
| 363 |
+
raise errors.InternalError(msg)
|
| 364 |
+
|
| 365 |
+
def prepare_metadata_for_build_wheel(
|
| 366 |
+
self, metadata_directory: StrPath, config_settings: _ConfigSettings = None
|
| 367 |
+
):
|
| 368 |
+
sys.argv = [
|
| 369 |
+
*sys.argv[:1],
|
| 370 |
+
*self._global_args(config_settings),
|
| 371 |
+
"dist_info",
|
| 372 |
+
"--output-dir",
|
| 373 |
+
str(metadata_directory),
|
| 374 |
+
"--keep-egg-info",
|
| 375 |
+
]
|
| 376 |
+
with no_install_setup_requires():
|
| 377 |
+
self.run_setup()
|
| 378 |
+
|
| 379 |
+
self._bubble_up_info_directory(metadata_directory, ".egg-info")
|
| 380 |
+
return self._bubble_up_info_directory(metadata_directory, ".dist-info")
|
| 381 |
+
|
| 382 |
+
def _build_with_temp_dir(
|
| 383 |
+
self,
|
| 384 |
+
setup_command: Iterable[str],
|
| 385 |
+
result_extension: str | tuple[str, ...],
|
| 386 |
+
result_directory: StrPath,
|
| 387 |
+
config_settings: _ConfigSettings,
|
| 388 |
+
arbitrary_args: Iterable[str] = (),
|
| 389 |
+
):
|
| 390 |
+
result_directory = os.path.abspath(result_directory)
|
| 391 |
+
|
| 392 |
+
# Build in a temporary directory, then copy to the target.
|
| 393 |
+
os.makedirs(result_directory, exist_ok=True)
|
| 394 |
+
|
| 395 |
+
with tempfile.TemporaryDirectory(
|
| 396 |
+
prefix=".tmp-", dir=result_directory
|
| 397 |
+
) as tmp_dist_dir:
|
| 398 |
+
sys.argv = [
|
| 399 |
+
*sys.argv[:1],
|
| 400 |
+
*self._global_args(config_settings),
|
| 401 |
+
*setup_command,
|
| 402 |
+
"--dist-dir",
|
| 403 |
+
tmp_dist_dir,
|
| 404 |
+
*arbitrary_args,
|
| 405 |
+
]
|
| 406 |
+
with no_install_setup_requires():
|
| 407 |
+
self.run_setup()
|
| 408 |
+
|
| 409 |
+
result_basename = _file_with_extension(tmp_dist_dir, result_extension)
|
| 410 |
+
result_path = os.path.join(result_directory, result_basename)
|
| 411 |
+
if os.path.exists(result_path):
|
| 412 |
+
# os.rename will fail overwriting on non-Unix.
|
| 413 |
+
os.remove(result_path)
|
| 414 |
+
os.rename(os.path.join(tmp_dist_dir, result_basename), result_path)
|
| 415 |
+
|
| 416 |
+
return result_basename
|
| 417 |
+
|
| 418 |
+
def build_wheel(
|
| 419 |
+
self,
|
| 420 |
+
wheel_directory: StrPath,
|
| 421 |
+
config_settings: _ConfigSettings = None,
|
| 422 |
+
metadata_directory: StrPath | None = None,
|
| 423 |
+
):
|
| 424 |
+
def _build(cmd: list[str]):
|
| 425 |
+
with suppress_known_deprecation():
|
| 426 |
+
return self._build_with_temp_dir(
|
| 427 |
+
cmd,
|
| 428 |
+
'.whl',
|
| 429 |
+
wheel_directory,
|
| 430 |
+
config_settings,
|
| 431 |
+
self._arbitrary_args(config_settings),
|
| 432 |
+
)
|
| 433 |
+
|
| 434 |
+
if metadata_directory is None:
|
| 435 |
+
return _build(['bdist_wheel'])
|
| 436 |
+
|
| 437 |
+
try:
|
| 438 |
+
return _build(['bdist_wheel', '--dist-info-dir', str(metadata_directory)])
|
| 439 |
+
except SystemExit as ex: # pragma: nocover
|
| 440 |
+
# pypa/setuptools#4683
|
| 441 |
+
if "--dist-info-dir not recognized" not in str(ex):
|
| 442 |
+
raise
|
| 443 |
+
_IncompatibleBdistWheel.emit()
|
| 444 |
+
return _build(['bdist_wheel'])
|
| 445 |
+
|
| 446 |
+
def build_sdist(
|
| 447 |
+
self, sdist_directory: StrPath, config_settings: _ConfigSettings = None
|
| 448 |
+
):
|
| 449 |
+
return self._build_with_temp_dir(
|
| 450 |
+
['sdist', '--formats', 'gztar'], '.tar.gz', sdist_directory, config_settings
|
| 451 |
+
)
|
| 452 |
+
|
| 453 |
+
def _get_dist_info_dir(self, metadata_directory: StrPath | None) -> str | None:
|
| 454 |
+
if not metadata_directory:
|
| 455 |
+
return None
|
| 456 |
+
dist_info_candidates = list(Path(metadata_directory).glob("*.dist-info"))
|
| 457 |
+
assert len(dist_info_candidates) <= 1
|
| 458 |
+
return str(dist_info_candidates[0]) if dist_info_candidates else None
|
| 459 |
+
|
| 460 |
+
if not LEGACY_EDITABLE:
|
| 461 |
+
# PEP660 hooks:
|
| 462 |
+
# build_editable
|
| 463 |
+
# get_requires_for_build_editable
|
| 464 |
+
# prepare_metadata_for_build_editable
|
| 465 |
+
def build_editable(
|
| 466 |
+
self,
|
| 467 |
+
wheel_directory: StrPath,
|
| 468 |
+
config_settings: _ConfigSettings = None,
|
| 469 |
+
metadata_directory: StrPath | None = None,
|
| 470 |
+
):
|
| 471 |
+
# XXX can or should we hide our editable_wheel command normally?
|
| 472 |
+
info_dir = self._get_dist_info_dir(metadata_directory)
|
| 473 |
+
opts = ["--dist-info-dir", info_dir] if info_dir else []
|
| 474 |
+
cmd = ["editable_wheel", *opts, *self._editable_args(config_settings)]
|
| 475 |
+
with suppress_known_deprecation():
|
| 476 |
+
return self._build_with_temp_dir(
|
| 477 |
+
cmd, ".whl", wheel_directory, config_settings
|
| 478 |
+
)
|
| 479 |
+
|
| 480 |
+
def get_requires_for_build_editable(
|
| 481 |
+
self, config_settings: _ConfigSettings = None
|
| 482 |
+
):
|
| 483 |
+
return self.get_requires_for_build_wheel(config_settings)
|
| 484 |
+
|
| 485 |
+
def prepare_metadata_for_build_editable(
|
| 486 |
+
self, metadata_directory: StrPath, config_settings: _ConfigSettings = None
|
| 487 |
+
):
|
| 488 |
+
return self.prepare_metadata_for_build_wheel(
|
| 489 |
+
metadata_directory, config_settings
|
| 490 |
+
)
|
| 491 |
+
|
| 492 |
+
|
| 493 |
+
class _BuildMetaLegacyBackend(_BuildMetaBackend):
|
| 494 |
+
"""Compatibility backend for setuptools
|
| 495 |
+
|
| 496 |
+
This is a version of setuptools.build_meta that endeavors
|
| 497 |
+
to maintain backwards
|
| 498 |
+
compatibility with pre-PEP 517 modes of invocation. It
|
| 499 |
+
exists as a temporary
|
| 500 |
+
bridge between the old packaging mechanism and the new
|
| 501 |
+
packaging mechanism,
|
| 502 |
+
and will eventually be removed.
|
| 503 |
+
"""
|
| 504 |
+
|
| 505 |
+
def run_setup(self, setup_script: str = 'setup.py'):
|
| 506 |
+
# In order to maintain compatibility with scripts assuming that
|
| 507 |
+
# the setup.py script is in a directory on the PYTHONPATH, inject
|
| 508 |
+
# '' into sys.path. (pypa/setuptools#1642)
|
| 509 |
+
sys_path = list(sys.path) # Save the original path
|
| 510 |
+
|
| 511 |
+
script_dir = os.path.dirname(os.path.abspath(setup_script))
|
| 512 |
+
if script_dir not in sys.path:
|
| 513 |
+
sys.path.insert(0, script_dir)
|
| 514 |
+
|
| 515 |
+
# Some setup.py scripts (e.g. in pygame and numpy) use sys.argv[0] to
|
| 516 |
+
# get the directory of the source code. They expect it to refer to the
|
| 517 |
+
# setup.py script.
|
| 518 |
+
sys_argv_0 = sys.argv[0]
|
| 519 |
+
sys.argv[0] = setup_script
|
| 520 |
+
|
| 521 |
+
try:
|
| 522 |
+
super().run_setup(setup_script=setup_script)
|
| 523 |
+
finally:
|
| 524 |
+
# While PEP 517 frontends should be calling each hook in a fresh
|
| 525 |
+
# subprocess according to the standard (and thus it should not be
|
| 526 |
+
# strictly necessary to restore the old sys.path), we'll restore
|
| 527 |
+
# the original path so that the path manipulation does not persist
|
| 528 |
+
# within the hook after run_setup is called.
|
| 529 |
+
sys.path[:] = sys_path
|
| 530 |
+
sys.argv[0] = sys_argv_0
|
| 531 |
+
|
| 532 |
+
|
| 533 |
+
class _IncompatibleBdistWheel(SetuptoolsDeprecationWarning):
|
| 534 |
+
_SUMMARY = "wheel.bdist_wheel is deprecated, please import it from setuptools"
|
| 535 |
+
_DETAILS = """
|
| 536 |
+
Ensure that any custom bdist_wheel implementation is a subclass of
|
| 537 |
+
setuptools.command.bdist_wheel.bdist_wheel.
|
| 538 |
+
"""
|
| 539 |
+
_DUE_DATE = (2025, 10, 15)
|
| 540 |
+
# Initially introduced in 2024/10/15, but maybe too disruptive to be enforced?
|
| 541 |
+
_SEE_URL = "https://github.com/pypa/wheel/pull/631"
|
| 542 |
+
|
| 543 |
+
|
| 544 |
+
# The primary backend
|
| 545 |
+
_BACKEND = _BuildMetaBackend()
|
| 546 |
+
|
| 547 |
+
get_requires_for_build_wheel = _BACKEND.get_requires_for_build_wheel
|
| 548 |
+
get_requires_for_build_sdist = _BACKEND.get_requires_for_build_sdist
|
| 549 |
+
prepare_metadata_for_build_wheel = _BACKEND.prepare_metadata_for_build_wheel
|
| 550 |
+
build_wheel = _BACKEND.build_wheel
|
| 551 |
+
build_sdist = _BACKEND.build_sdist
|
| 552 |
+
|
| 553 |
+
if not LEGACY_EDITABLE:
|
| 554 |
+
get_requires_for_build_editable = _BACKEND.get_requires_for_build_editable
|
| 555 |
+
prepare_metadata_for_build_editable = _BACKEND.prepare_metadata_for_build_editable
|
| 556 |
+
build_editable = _BACKEND.build_editable
|
| 557 |
+
|
| 558 |
+
|
| 559 |
+
# The legacy backend
|
| 560 |
+
__legacy__ = _BuildMetaLegacyBackend()
|
llava/lib/python3.10/site-packages/setuptools/cli-64.exe
ADDED
|
Binary file (14.3 kB). View file
|
|
|
llava/lib/python3.10/site-packages/setuptools/config/NOTICE
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
The following files include code from opensource projects
|
| 2 |
+
(either as direct copies or modified versions):
|
| 3 |
+
|
| 4 |
+
- `setuptools.schema.json`, `distutils.schema.json`:
|
| 5 |
+
- project: `validate-pyproject` - licensed under MPL-2.0
|
| 6 |
+
(https://github.com/abravalheri/validate-pyproject):
|
| 7 |
+
|
| 8 |
+
This Source Code Form is subject to the terms of the Mozilla Public
|
| 9 |
+
License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
| 10 |
+
You can obtain one at https://mozilla.org/MPL/2.0/.
|
llava/lib/python3.10/site-packages/setuptools/config/__init__.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""For backward compatibility, expose main functions from
|
| 2 |
+
``setuptools.config.setupcfg``
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from functools import wraps
|
| 6 |
+
from typing import Callable, TypeVar, cast
|
| 7 |
+
|
| 8 |
+
from ..warnings import SetuptoolsDeprecationWarning
|
| 9 |
+
from . import setupcfg
|
| 10 |
+
|
| 11 |
+
Fn = TypeVar("Fn", bound=Callable)
|
| 12 |
+
|
| 13 |
+
__all__ = ('parse_configuration', 'read_configuration')
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def _deprecation_notice(fn: Fn) -> Fn:
|
| 17 |
+
@wraps(fn)
|
| 18 |
+
def _wrapper(*args, **kwargs):
|
| 19 |
+
SetuptoolsDeprecationWarning.emit(
|
| 20 |
+
"Deprecated API usage.",
|
| 21 |
+
f"""
|
| 22 |
+
As setuptools moves its configuration towards `pyproject.toml`,
|
| 23 |
+
`{__name__}.{fn.__name__}` became deprecated.
|
| 24 |
+
|
| 25 |
+
For the time being, you can use the `{setupcfg.__name__}` module
|
| 26 |
+
to access a backward compatible API, but this module is provisional
|
| 27 |
+
and might be removed in the future.
|
| 28 |
+
|
| 29 |
+
To read project metadata, consider using
|
| 30 |
+
``build.util.project_wheel_metadata`` (https://pypi.org/project/build/).
|
| 31 |
+
For simple scenarios, you can also try parsing the file directly
|
| 32 |
+
with the help of ``configparser``.
|
| 33 |
+
""",
|
| 34 |
+
# due_date not defined yet, because the community still heavily relies on it
|
| 35 |
+
# Warning introduced in 24 Mar 2022
|
| 36 |
+
)
|
| 37 |
+
return fn(*args, **kwargs)
|
| 38 |
+
|
| 39 |
+
return cast(Fn, _wrapper)
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
read_configuration = _deprecation_notice(setupcfg.read_configuration)
|
| 43 |
+
parse_configuration = _deprecation_notice(setupcfg.parse_configuration)
|
llava/lib/python3.10/site-packages/setuptools/config/_validate_pyproject/__init__.py
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from functools import reduce
|
| 2 |
+
from typing import Any, Callable, Dict
|
| 3 |
+
|
| 4 |
+
from . import formats
|
| 5 |
+
from .error_reporting import detailed_errors, ValidationError
|
| 6 |
+
from .extra_validations import EXTRA_VALIDATIONS
|
| 7 |
+
from .fastjsonschema_exceptions import JsonSchemaException, JsonSchemaValueException
|
| 8 |
+
from .fastjsonschema_validations import validate as _validate
|
| 9 |
+
|
| 10 |
+
__all__ = [
|
| 11 |
+
"validate",
|
| 12 |
+
"FORMAT_FUNCTIONS",
|
| 13 |
+
"EXTRA_VALIDATIONS",
|
| 14 |
+
"ValidationError",
|
| 15 |
+
"JsonSchemaException",
|
| 16 |
+
"JsonSchemaValueException",
|
| 17 |
+
]
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
FORMAT_FUNCTIONS: Dict[str, Callable[[str], bool]] = {
|
| 21 |
+
fn.__name__.replace("_", "-"): fn
|
| 22 |
+
for fn in formats.__dict__.values()
|
| 23 |
+
if callable(fn) and not fn.__name__.startswith("_")
|
| 24 |
+
}
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def validate(data: Any) -> bool:
|
| 28 |
+
"""Validate the given ``data`` object using JSON Schema
|
| 29 |
+
This function raises ``ValidationError`` if ``data`` is invalid.
|
| 30 |
+
"""
|
| 31 |
+
with detailed_errors():
|
| 32 |
+
_validate(data, custom_formats=FORMAT_FUNCTIONS)
|
| 33 |
+
reduce(lambda acc, fn: fn(acc), EXTRA_VALIDATIONS, data)
|
| 34 |
+
return True
|
llava/lib/python3.10/site-packages/setuptools/config/distutils.schema.json
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"$schema": "http://json-schema.org/draft-07/schema#",
|
| 3 |
+
|
| 4 |
+
"$id": "https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html",
|
| 5 |
+
"title": "``tool.distutils`` table",
|
| 6 |
+
"$$description": [
|
| 7 |
+
"**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``",
|
| 8 |
+
"subtables to configure arguments for ``distutils`` commands.",
|
| 9 |
+
"Originally, ``distutils`` allowed developers to configure arguments for",
|
| 10 |
+
"``setup.py`` commands via `distutils configuration files",
|
| 11 |
+
"<https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html>`_.",
|
| 12 |
+
"See also `the old Python docs <https://docs.python.org/3.11/install/>_`."
|
| 13 |
+
],
|
| 14 |
+
|
| 15 |
+
"type": "object",
|
| 16 |
+
"properties": {
|
| 17 |
+
"global": {
|
| 18 |
+
"type": "object",
|
| 19 |
+
"description": "Global options applied to all ``distutils`` commands"
|
| 20 |
+
}
|
| 21 |
+
},
|
| 22 |
+
"patternProperties": {
|
| 23 |
+
".+": {"type": "object"}
|
| 24 |
+
},
|
| 25 |
+
"$comment": "TODO: Is there a practical way of making this schema more specific?"
|
| 26 |
+
}
|
llava/lib/python3.10/site-packages/setuptools/config/pyprojecttoml.py
ADDED
|
@@ -0,0 +1,468 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Load setuptools configuration from ``pyproject.toml`` files.
|
| 3 |
+
|
| 4 |
+
**PRIVATE MODULE**: API reserved for setuptools internal usage only.
|
| 5 |
+
|
| 6 |
+
To read project metadata, consider using
|
| 7 |
+
``build.util.project_wheel_metadata`` (https://pypi.org/project/build/).
|
| 8 |
+
For simple scenarios, you can also try parsing the file directly
|
| 9 |
+
with the help of ``tomllib`` or ``tomli``.
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
from __future__ import annotations
|
| 13 |
+
|
| 14 |
+
import logging
|
| 15 |
+
import os
|
| 16 |
+
from collections.abc import Mapping
|
| 17 |
+
from contextlib import contextmanager
|
| 18 |
+
from functools import partial
|
| 19 |
+
from types import TracebackType
|
| 20 |
+
from typing import TYPE_CHECKING, Any, Callable
|
| 21 |
+
|
| 22 |
+
from .._path import StrPath
|
| 23 |
+
from ..errors import FileError, InvalidConfigError
|
| 24 |
+
from ..warnings import SetuptoolsWarning
|
| 25 |
+
from . import expand as _expand
|
| 26 |
+
from ._apply_pyprojecttoml import _PREVIOUSLY_DEFINED, _MissingDynamic, apply as _apply
|
| 27 |
+
|
| 28 |
+
if TYPE_CHECKING:
|
| 29 |
+
from typing_extensions import Self
|
| 30 |
+
|
| 31 |
+
from setuptools.dist import Distribution
|
| 32 |
+
|
| 33 |
+
_logger = logging.getLogger(__name__)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def load_file(filepath: StrPath) -> dict:
|
| 37 |
+
from ..compat.py310 import tomllib
|
| 38 |
+
|
| 39 |
+
with open(filepath, "rb") as file:
|
| 40 |
+
return tomllib.load(file)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def validate(config: dict, filepath: StrPath) -> bool:
|
| 44 |
+
from . import _validate_pyproject as validator
|
| 45 |
+
|
| 46 |
+
trove_classifier = validator.FORMAT_FUNCTIONS.get("trove-classifier")
|
| 47 |
+
if hasattr(trove_classifier, "_disable_download"):
|
| 48 |
+
# Improve reproducibility by default. See abravalheri/validate-pyproject#31
|
| 49 |
+
trove_classifier._disable_download() # type: ignore[union-attr]
|
| 50 |
+
|
| 51 |
+
try:
|
| 52 |
+
return validator.validate(config)
|
| 53 |
+
except validator.ValidationError as ex:
|
| 54 |
+
summary = f"configuration error: {ex.summary}"
|
| 55 |
+
if ex.name.strip("`") != "project":
|
| 56 |
+
# Probably it is just a field missing/misnamed, not worthy the verbosity...
|
| 57 |
+
_logger.debug(summary)
|
| 58 |
+
_logger.debug(ex.details)
|
| 59 |
+
|
| 60 |
+
error = f"invalid pyproject.toml config: {ex.name}."
|
| 61 |
+
raise ValueError(f"{error}\n{summary}") from None
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def apply_configuration(
|
| 65 |
+
dist: Distribution,
|
| 66 |
+
filepath: StrPath,
|
| 67 |
+
ignore_option_errors: bool = False,
|
| 68 |
+
) -> Distribution:
|
| 69 |
+
"""Apply the configuration from a ``pyproject.toml`` file into an existing
|
| 70 |
+
distribution object.
|
| 71 |
+
"""
|
| 72 |
+
config = read_configuration(filepath, True, ignore_option_errors, dist)
|
| 73 |
+
return _apply(dist, config, filepath)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def read_configuration(
|
| 77 |
+
filepath: StrPath,
|
| 78 |
+
expand: bool = True,
|
| 79 |
+
ignore_option_errors: bool = False,
|
| 80 |
+
dist: Distribution | None = None,
|
| 81 |
+
) -> dict[str, Any]:
|
| 82 |
+
"""Read given configuration file and returns options from it as a dict.
|
| 83 |
+
|
| 84 |
+
:param str|unicode filepath: Path to configuration file in the ``pyproject.toml``
|
| 85 |
+
format.
|
| 86 |
+
|
| 87 |
+
:param bool expand: Whether to expand directives and other computed values
|
| 88 |
+
(i.e. post-process the given configuration)
|
| 89 |
+
|
| 90 |
+
:param bool ignore_option_errors: Whether to silently ignore
|
| 91 |
+
options, values of which could not be resolved (e.g. due to exceptions
|
| 92 |
+
in directives such as file:, attr:, etc.).
|
| 93 |
+
If False exceptions are propagated as expected.
|
| 94 |
+
|
| 95 |
+
:param Distribution|None: Distribution object to which the configuration refers.
|
| 96 |
+
If not given a dummy object will be created and discarded after the
|
| 97 |
+
configuration is read. This is used for auto-discovery of packages and in the
|
| 98 |
+
case a dynamic configuration (e.g. ``attr`` or ``cmdclass``) is expanded.
|
| 99 |
+
When ``expand=False`` this object is simply ignored.
|
| 100 |
+
|
| 101 |
+
:rtype: dict
|
| 102 |
+
"""
|
| 103 |
+
filepath = os.path.abspath(filepath)
|
| 104 |
+
|
| 105 |
+
if not os.path.isfile(filepath):
|
| 106 |
+
raise FileError(f"Configuration file {filepath!r} does not exist.")
|
| 107 |
+
|
| 108 |
+
asdict = load_file(filepath) or {}
|
| 109 |
+
project_table = asdict.get("project", {})
|
| 110 |
+
tool_table = asdict.get("tool", {})
|
| 111 |
+
setuptools_table = tool_table.get("setuptools", {})
|
| 112 |
+
if not asdict or not (project_table or setuptools_table):
|
| 113 |
+
return {} # User is not using pyproject to configure setuptools
|
| 114 |
+
|
| 115 |
+
if "setuptools" in asdict.get("tools", {}):
|
| 116 |
+
# let the user know they probably have a typo in their metadata
|
| 117 |
+
_ToolsTypoInMetadata.emit()
|
| 118 |
+
|
| 119 |
+
if "distutils" in tool_table:
|
| 120 |
+
_ExperimentalConfiguration.emit(subject="[tool.distutils]")
|
| 121 |
+
|
| 122 |
+
# There is an overall sense in the community that making include_package_data=True
|
| 123 |
+
# the default would be an improvement.
|
| 124 |
+
# `ini2toml` backfills include_package_data=False when nothing is explicitly given,
|
| 125 |
+
# therefore setting a default here is backwards compatible.
|
| 126 |
+
if dist and dist.include_package_data is not None:
|
| 127 |
+
setuptools_table.setdefault("include-package-data", dist.include_package_data)
|
| 128 |
+
else:
|
| 129 |
+
setuptools_table.setdefault("include-package-data", True)
|
| 130 |
+
# Persist changes:
|
| 131 |
+
asdict["tool"] = tool_table
|
| 132 |
+
tool_table["setuptools"] = setuptools_table
|
| 133 |
+
|
| 134 |
+
if "ext-modules" in setuptools_table:
|
| 135 |
+
_ExperimentalConfiguration.emit(subject="[tool.setuptools.ext-modules]")
|
| 136 |
+
|
| 137 |
+
with _ignore_errors(ignore_option_errors):
|
| 138 |
+
# Don't complain about unrelated errors (e.g. tools not using the "tool" table)
|
| 139 |
+
subset = {"project": project_table, "tool": {"setuptools": setuptools_table}}
|
| 140 |
+
validate(subset, filepath)
|
| 141 |
+
|
| 142 |
+
if expand:
|
| 143 |
+
root_dir = os.path.dirname(filepath)
|
| 144 |
+
return expand_configuration(asdict, root_dir, ignore_option_errors, dist)
|
| 145 |
+
|
| 146 |
+
return asdict
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
def expand_configuration(
|
| 150 |
+
config: dict,
|
| 151 |
+
root_dir: StrPath | None = None,
|
| 152 |
+
ignore_option_errors: bool = False,
|
| 153 |
+
dist: Distribution | None = None,
|
| 154 |
+
) -> dict:
|
| 155 |
+
"""Given a configuration with unresolved fields (e.g. dynamic, cmdclass, ...)
|
| 156 |
+
find their final values.
|
| 157 |
+
|
| 158 |
+
:param dict config: Dict containing the configuration for the distribution
|
| 159 |
+
:param str root_dir: Top-level directory for the distribution/project
|
| 160 |
+
(the same directory where ``pyproject.toml`` is place)
|
| 161 |
+
:param bool ignore_option_errors: see :func:`read_configuration`
|
| 162 |
+
:param Distribution|None: Distribution object to which the configuration refers.
|
| 163 |
+
If not given a dummy object will be created and discarded after the
|
| 164 |
+
configuration is read. Used in the case a dynamic configuration
|
| 165 |
+
(e.g. ``attr`` or ``cmdclass``).
|
| 166 |
+
|
| 167 |
+
:rtype: dict
|
| 168 |
+
"""
|
| 169 |
+
return _ConfigExpander(config, root_dir, ignore_option_errors, dist).expand()
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
class _ConfigExpander:
|
| 173 |
+
def __init__(
|
| 174 |
+
self,
|
| 175 |
+
config: dict,
|
| 176 |
+
root_dir: StrPath | None = None,
|
| 177 |
+
ignore_option_errors: bool = False,
|
| 178 |
+
dist: Distribution | None = None,
|
| 179 |
+
) -> None:
|
| 180 |
+
self.config = config
|
| 181 |
+
self.root_dir = root_dir or os.getcwd()
|
| 182 |
+
self.project_cfg = config.get("project", {})
|
| 183 |
+
self.dynamic = self.project_cfg.get("dynamic", [])
|
| 184 |
+
self.setuptools_cfg = config.get("tool", {}).get("setuptools", {})
|
| 185 |
+
self.dynamic_cfg = self.setuptools_cfg.get("dynamic", {})
|
| 186 |
+
self.ignore_option_errors = ignore_option_errors
|
| 187 |
+
self._dist = dist
|
| 188 |
+
self._referenced_files = set[str]()
|
| 189 |
+
|
| 190 |
+
def _ensure_dist(self) -> Distribution:
|
| 191 |
+
from setuptools.dist import Distribution
|
| 192 |
+
|
| 193 |
+
attrs = {"src_root": self.root_dir, "name": self.project_cfg.get("name", None)}
|
| 194 |
+
return self._dist or Distribution(attrs)
|
| 195 |
+
|
| 196 |
+
def _process_field(self, container: dict, field: str, fn: Callable):
|
| 197 |
+
if field in container:
|
| 198 |
+
with _ignore_errors(self.ignore_option_errors):
|
| 199 |
+
container[field] = fn(container[field])
|
| 200 |
+
|
| 201 |
+
def _canonic_package_data(self, field="package-data"):
|
| 202 |
+
package_data = self.setuptools_cfg.get(field, {})
|
| 203 |
+
return _expand.canonic_package_data(package_data)
|
| 204 |
+
|
| 205 |
+
def expand(self):
|
| 206 |
+
self._expand_packages()
|
| 207 |
+
self._canonic_package_data()
|
| 208 |
+
self._canonic_package_data("exclude-package-data")
|
| 209 |
+
|
| 210 |
+
# A distribution object is required for discovering the correct package_dir
|
| 211 |
+
dist = self._ensure_dist()
|
| 212 |
+
ctx = _EnsurePackagesDiscovered(dist, self.project_cfg, self.setuptools_cfg)
|
| 213 |
+
with ctx as ensure_discovered:
|
| 214 |
+
package_dir = ensure_discovered.package_dir
|
| 215 |
+
self._expand_data_files()
|
| 216 |
+
self._expand_cmdclass(package_dir)
|
| 217 |
+
self._expand_all_dynamic(dist, package_dir)
|
| 218 |
+
|
| 219 |
+
dist._referenced_files.update(self._referenced_files)
|
| 220 |
+
return self.config
|
| 221 |
+
|
| 222 |
+
def _expand_packages(self):
|
| 223 |
+
packages = self.setuptools_cfg.get("packages")
|
| 224 |
+
if packages is None or isinstance(packages, (list, tuple)):
|
| 225 |
+
return
|
| 226 |
+
|
| 227 |
+
find = packages.get("find")
|
| 228 |
+
if isinstance(find, dict):
|
| 229 |
+
find["root_dir"] = self.root_dir
|
| 230 |
+
find["fill_package_dir"] = self.setuptools_cfg.setdefault("package-dir", {})
|
| 231 |
+
with _ignore_errors(self.ignore_option_errors):
|
| 232 |
+
self.setuptools_cfg["packages"] = _expand.find_packages(**find)
|
| 233 |
+
|
| 234 |
+
def _expand_data_files(self):
|
| 235 |
+
data_files = partial(_expand.canonic_data_files, root_dir=self.root_dir)
|
| 236 |
+
self._process_field(self.setuptools_cfg, "data-files", data_files)
|
| 237 |
+
|
| 238 |
+
def _expand_cmdclass(self, package_dir: Mapping[str, str]):
|
| 239 |
+
root_dir = self.root_dir
|
| 240 |
+
cmdclass = partial(_expand.cmdclass, package_dir=package_dir, root_dir=root_dir)
|
| 241 |
+
self._process_field(self.setuptools_cfg, "cmdclass", cmdclass)
|
| 242 |
+
|
| 243 |
+
def _expand_all_dynamic(self, dist: Distribution, package_dir: Mapping[str, str]):
|
| 244 |
+
special = ( # need special handling
|
| 245 |
+
"version",
|
| 246 |
+
"readme",
|
| 247 |
+
"entry-points",
|
| 248 |
+
"scripts",
|
| 249 |
+
"gui-scripts",
|
| 250 |
+
"classifiers",
|
| 251 |
+
"dependencies",
|
| 252 |
+
"optional-dependencies",
|
| 253 |
+
)
|
| 254 |
+
# `_obtain` functions are assumed to raise appropriate exceptions/warnings.
|
| 255 |
+
obtained_dynamic = {
|
| 256 |
+
field: self._obtain(dist, field, package_dir)
|
| 257 |
+
for field in self.dynamic
|
| 258 |
+
if field not in special
|
| 259 |
+
}
|
| 260 |
+
obtained_dynamic.update(
|
| 261 |
+
self._obtain_entry_points(dist, package_dir) or {},
|
| 262 |
+
version=self._obtain_version(dist, package_dir),
|
| 263 |
+
readme=self._obtain_readme(dist),
|
| 264 |
+
classifiers=self._obtain_classifiers(dist),
|
| 265 |
+
dependencies=self._obtain_dependencies(dist),
|
| 266 |
+
optional_dependencies=self._obtain_optional_dependencies(dist),
|
| 267 |
+
)
|
| 268 |
+
# `None` indicates there is nothing in `tool.setuptools.dynamic` but the value
|
| 269 |
+
# might have already been set by setup.py/extensions, so avoid overwriting.
|
| 270 |
+
updates = {k: v for k, v in obtained_dynamic.items() if v is not None}
|
| 271 |
+
self.project_cfg.update(updates)
|
| 272 |
+
|
| 273 |
+
def _ensure_previously_set(self, dist: Distribution, field: str):
|
| 274 |
+
previous = _PREVIOUSLY_DEFINED[field](dist)
|
| 275 |
+
if previous is None and not self.ignore_option_errors:
|
| 276 |
+
msg = (
|
| 277 |
+
f"No configuration found for dynamic {field!r}.\n"
|
| 278 |
+
"Some dynamic fields need to be specified via `tool.setuptools.dynamic`"
|
| 279 |
+
"\nothers must be specified via the equivalent attribute in `setup.py`."
|
| 280 |
+
)
|
| 281 |
+
raise InvalidConfigError(msg)
|
| 282 |
+
|
| 283 |
+
def _expand_directive(
|
| 284 |
+
self, specifier: str, directive, package_dir: Mapping[str, str]
|
| 285 |
+
):
|
| 286 |
+
from more_itertools import always_iterable
|
| 287 |
+
|
| 288 |
+
with _ignore_errors(self.ignore_option_errors):
|
| 289 |
+
root_dir = self.root_dir
|
| 290 |
+
if "file" in directive:
|
| 291 |
+
self._referenced_files.update(always_iterable(directive["file"]))
|
| 292 |
+
return _expand.read_files(directive["file"], root_dir)
|
| 293 |
+
if "attr" in directive:
|
| 294 |
+
return _expand.read_attr(directive["attr"], package_dir, root_dir)
|
| 295 |
+
raise ValueError(f"invalid `{specifier}`: {directive!r}")
|
| 296 |
+
return None
|
| 297 |
+
|
| 298 |
+
def _obtain(self, dist: Distribution, field: str, package_dir: Mapping[str, str]):
|
| 299 |
+
if field in self.dynamic_cfg:
|
| 300 |
+
return self._expand_directive(
|
| 301 |
+
f"tool.setuptools.dynamic.{field}",
|
| 302 |
+
self.dynamic_cfg[field],
|
| 303 |
+
package_dir,
|
| 304 |
+
)
|
| 305 |
+
self._ensure_previously_set(dist, field)
|
| 306 |
+
return None
|
| 307 |
+
|
| 308 |
+
def _obtain_version(self, dist: Distribution, package_dir: Mapping[str, str]):
|
| 309 |
+
# Since plugins can set version, let's silently skip if it cannot be obtained
|
| 310 |
+
if "version" in self.dynamic and "version" in self.dynamic_cfg:
|
| 311 |
+
return _expand.version(
|
| 312 |
+
# We already do an early check for the presence of "version"
|
| 313 |
+
self._obtain(dist, "version", package_dir) # pyright: ignore[reportArgumentType]
|
| 314 |
+
)
|
| 315 |
+
return None
|
| 316 |
+
|
| 317 |
+
def _obtain_readme(self, dist: Distribution) -> dict[str, str] | None:
|
| 318 |
+
if "readme" not in self.dynamic:
|
| 319 |
+
return None
|
| 320 |
+
|
| 321 |
+
dynamic_cfg = self.dynamic_cfg
|
| 322 |
+
if "readme" in dynamic_cfg:
|
| 323 |
+
return {
|
| 324 |
+
# We already do an early check for the presence of "readme"
|
| 325 |
+
"text": self._obtain(dist, "readme", {}),
|
| 326 |
+
"content-type": dynamic_cfg["readme"].get("content-type", "text/x-rst"),
|
| 327 |
+
} # pyright: ignore[reportReturnType]
|
| 328 |
+
|
| 329 |
+
self._ensure_previously_set(dist, "readme")
|
| 330 |
+
return None
|
| 331 |
+
|
| 332 |
+
def _obtain_entry_points(
|
| 333 |
+
self, dist: Distribution, package_dir: Mapping[str, str]
|
| 334 |
+
) -> dict[str, dict[str, Any]] | None:
|
| 335 |
+
fields = ("entry-points", "scripts", "gui-scripts")
|
| 336 |
+
if not any(field in self.dynamic for field in fields):
|
| 337 |
+
return None
|
| 338 |
+
|
| 339 |
+
text = self._obtain(dist, "entry-points", package_dir)
|
| 340 |
+
if text is None:
|
| 341 |
+
return None
|
| 342 |
+
|
| 343 |
+
groups = _expand.entry_points(text)
|
| 344 |
+
# Any is str | dict[str, str], but causes variance issues
|
| 345 |
+
expanded: dict[str, dict[str, Any]] = {"entry-points": groups}
|
| 346 |
+
|
| 347 |
+
def _set_scripts(field: str, group: str):
|
| 348 |
+
if group in groups:
|
| 349 |
+
value = groups.pop(group)
|
| 350 |
+
if field not in self.dynamic:
|
| 351 |
+
raise InvalidConfigError(_MissingDynamic.details(field, value))
|
| 352 |
+
expanded[field] = value
|
| 353 |
+
|
| 354 |
+
_set_scripts("scripts", "console_scripts")
|
| 355 |
+
_set_scripts("gui-scripts", "gui_scripts")
|
| 356 |
+
|
| 357 |
+
return expanded
|
| 358 |
+
|
| 359 |
+
def _obtain_classifiers(self, dist: Distribution):
|
| 360 |
+
if "classifiers" in self.dynamic:
|
| 361 |
+
value = self._obtain(dist, "classifiers", {})
|
| 362 |
+
if value:
|
| 363 |
+
return value.splitlines()
|
| 364 |
+
return None
|
| 365 |
+
|
| 366 |
+
def _obtain_dependencies(self, dist: Distribution):
|
| 367 |
+
if "dependencies" in self.dynamic:
|
| 368 |
+
value = self._obtain(dist, "dependencies", {})
|
| 369 |
+
if value:
|
| 370 |
+
return _parse_requirements_list(value)
|
| 371 |
+
return None
|
| 372 |
+
|
| 373 |
+
def _obtain_optional_dependencies(self, dist: Distribution):
|
| 374 |
+
if "optional-dependencies" not in self.dynamic:
|
| 375 |
+
return None
|
| 376 |
+
if "optional-dependencies" in self.dynamic_cfg:
|
| 377 |
+
optional_dependencies_map = self.dynamic_cfg["optional-dependencies"]
|
| 378 |
+
assert isinstance(optional_dependencies_map, dict)
|
| 379 |
+
return {
|
| 380 |
+
group: _parse_requirements_list(
|
| 381 |
+
self._expand_directive(
|
| 382 |
+
f"tool.setuptools.dynamic.optional-dependencies.{group}",
|
| 383 |
+
directive,
|
| 384 |
+
{},
|
| 385 |
+
)
|
| 386 |
+
)
|
| 387 |
+
for group, directive in optional_dependencies_map.items()
|
| 388 |
+
}
|
| 389 |
+
self._ensure_previously_set(dist, "optional-dependencies")
|
| 390 |
+
return None
|
| 391 |
+
|
| 392 |
+
|
| 393 |
+
def _parse_requirements_list(value):
|
| 394 |
+
return [
|
| 395 |
+
line
|
| 396 |
+
for line in value.splitlines()
|
| 397 |
+
if line.strip() and not line.strip().startswith("#")
|
| 398 |
+
]
|
| 399 |
+
|
| 400 |
+
|
| 401 |
+
@contextmanager
|
| 402 |
+
def _ignore_errors(ignore_option_errors: bool):
|
| 403 |
+
if not ignore_option_errors:
|
| 404 |
+
yield
|
| 405 |
+
return
|
| 406 |
+
|
| 407 |
+
try:
|
| 408 |
+
yield
|
| 409 |
+
except Exception as ex:
|
| 410 |
+
_logger.debug(f"ignored error: {ex.__class__.__name__} - {ex}")
|
| 411 |
+
|
| 412 |
+
|
| 413 |
+
class _EnsurePackagesDiscovered(_expand.EnsurePackagesDiscovered):
|
| 414 |
+
def __init__(
|
| 415 |
+
self, distribution: Distribution, project_cfg: dict, setuptools_cfg: dict
|
| 416 |
+
) -> None:
|
| 417 |
+
super().__init__(distribution)
|
| 418 |
+
self._project_cfg = project_cfg
|
| 419 |
+
self._setuptools_cfg = setuptools_cfg
|
| 420 |
+
|
| 421 |
+
def __enter__(self) -> Self:
|
| 422 |
+
"""When entering the context, the values of ``packages``, ``py_modules`` and
|
| 423 |
+
``package_dir`` that are missing in ``dist`` are copied from ``setuptools_cfg``.
|
| 424 |
+
"""
|
| 425 |
+
dist, cfg = self._dist, self._setuptools_cfg
|
| 426 |
+
package_dir: dict[str, str] = cfg.setdefault("package-dir", {})
|
| 427 |
+
package_dir.update(dist.package_dir or {})
|
| 428 |
+
dist.package_dir = package_dir # needs to be the same object
|
| 429 |
+
|
| 430 |
+
dist.set_defaults._ignore_ext_modules() # pyproject.toml-specific behaviour
|
| 431 |
+
|
| 432 |
+
# Set `name`, `py_modules` and `packages` in dist to short-circuit
|
| 433 |
+
# auto-discovery, but avoid overwriting empty lists purposefully set by users.
|
| 434 |
+
if dist.metadata.name is None:
|
| 435 |
+
dist.metadata.name = self._project_cfg.get("name")
|
| 436 |
+
if dist.py_modules is None:
|
| 437 |
+
dist.py_modules = cfg.get("py-modules")
|
| 438 |
+
if dist.packages is None:
|
| 439 |
+
dist.packages = cfg.get("packages")
|
| 440 |
+
|
| 441 |
+
return super().__enter__()
|
| 442 |
+
|
| 443 |
+
def __exit__(
|
| 444 |
+
self,
|
| 445 |
+
exc_type: type[BaseException] | None,
|
| 446 |
+
exc_value: BaseException | None,
|
| 447 |
+
traceback: TracebackType | None,
|
| 448 |
+
) -> None:
|
| 449 |
+
"""When exiting the context, if values of ``packages``, ``py_modules`` and
|
| 450 |
+
``package_dir`` are missing in ``setuptools_cfg``, copy from ``dist``.
|
| 451 |
+
"""
|
| 452 |
+
# If anything was discovered set them back, so they count in the final config.
|
| 453 |
+
self._setuptools_cfg.setdefault("packages", self._dist.packages)
|
| 454 |
+
self._setuptools_cfg.setdefault("py-modules", self._dist.py_modules)
|
| 455 |
+
return super().__exit__(exc_type, exc_value, traceback)
|
| 456 |
+
|
| 457 |
+
|
| 458 |
+
class _ExperimentalConfiguration(SetuptoolsWarning):
|
| 459 |
+
_SUMMARY = (
|
| 460 |
+
"`{subject}` in `pyproject.toml` is still *experimental* "
|
| 461 |
+
"and likely to change in future releases."
|
| 462 |
+
)
|
| 463 |
+
|
| 464 |
+
|
| 465 |
+
class _ToolsTypoInMetadata(SetuptoolsWarning):
|
| 466 |
+
_SUMMARY = (
|
| 467 |
+
"Ignoring [tools.setuptools] in pyproject.toml, did you mean [tool.setuptools]?"
|
| 468 |
+
)
|
llava/lib/python3.10/site-packages/setuptools/config/setupcfg.py
ADDED
|
@@ -0,0 +1,780 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Load setuptools configuration from ``setup.cfg`` files.
|
| 3 |
+
|
| 4 |
+
**API will be made private in the future**
|
| 5 |
+
|
| 6 |
+
To read project metadata, consider using
|
| 7 |
+
``build.util.project_wheel_metadata`` (https://pypi.org/project/build/).
|
| 8 |
+
For simple scenarios, you can also try parsing the file directly
|
| 9 |
+
with the help of ``configparser``.
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
from __future__ import annotations
|
| 13 |
+
|
| 14 |
+
import contextlib
|
| 15 |
+
import functools
|
| 16 |
+
import os
|
| 17 |
+
from collections import defaultdict
|
| 18 |
+
from collections.abc import Iterable, Iterator
|
| 19 |
+
from functools import partial, wraps
|
| 20 |
+
from typing import TYPE_CHECKING, Any, Callable, ClassVar, Generic, TypeVar, cast
|
| 21 |
+
|
| 22 |
+
from packaging.markers import default_environment as marker_env
|
| 23 |
+
from packaging.requirements import InvalidRequirement, Requirement
|
| 24 |
+
from packaging.version import InvalidVersion, Version
|
| 25 |
+
|
| 26 |
+
from .. import _static
|
| 27 |
+
from .._path import StrPath
|
| 28 |
+
from ..errors import FileError, OptionError
|
| 29 |
+
from ..warnings import SetuptoolsDeprecationWarning
|
| 30 |
+
from . import expand
|
| 31 |
+
|
| 32 |
+
if TYPE_CHECKING:
|
| 33 |
+
from typing_extensions import TypeAlias
|
| 34 |
+
|
| 35 |
+
from setuptools.dist import Distribution
|
| 36 |
+
|
| 37 |
+
from distutils.dist import DistributionMetadata
|
| 38 |
+
|
| 39 |
+
SingleCommandOptions: TypeAlias = dict[str, tuple[str, Any]]
|
| 40 |
+
"""Dict that associate the name of the options of a particular command to a
|
| 41 |
+
tuple. The first element of the tuple indicates the origin of the option value
|
| 42 |
+
(e.g. the name of the configuration file where it was read from),
|
| 43 |
+
while the second element of the tuple is the option value itself
|
| 44 |
+
"""
|
| 45 |
+
AllCommandOptions: TypeAlias = dict[str, SingleCommandOptions]
|
| 46 |
+
"""cmd name => its options"""
|
| 47 |
+
Target = TypeVar("Target", "Distribution", "DistributionMetadata")
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def read_configuration(
|
| 51 |
+
filepath: StrPath, find_others: bool = False, ignore_option_errors: bool = False
|
| 52 |
+
) -> dict:
|
| 53 |
+
"""Read given configuration file and returns options from it as a dict.
|
| 54 |
+
|
| 55 |
+
:param str|unicode filepath: Path to configuration file
|
| 56 |
+
to get options from.
|
| 57 |
+
|
| 58 |
+
:param bool find_others: Whether to search for other configuration files
|
| 59 |
+
which could be on in various places.
|
| 60 |
+
|
| 61 |
+
:param bool ignore_option_errors: Whether to silently ignore
|
| 62 |
+
options, values of which could not be resolved (e.g. due to exceptions
|
| 63 |
+
in directives such as file:, attr:, etc.).
|
| 64 |
+
If False exceptions are propagated as expected.
|
| 65 |
+
|
| 66 |
+
:rtype: dict
|
| 67 |
+
"""
|
| 68 |
+
from setuptools.dist import Distribution
|
| 69 |
+
|
| 70 |
+
dist = Distribution()
|
| 71 |
+
filenames = dist.find_config_files() if find_others else []
|
| 72 |
+
handlers = _apply(dist, filepath, filenames, ignore_option_errors)
|
| 73 |
+
return configuration_to_dict(handlers)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def apply_configuration(dist: Distribution, filepath: StrPath) -> Distribution:
|
| 77 |
+
"""Apply the configuration from a ``setup.cfg`` file into an existing
|
| 78 |
+
distribution object.
|
| 79 |
+
"""
|
| 80 |
+
_apply(dist, filepath)
|
| 81 |
+
dist._finalize_requires()
|
| 82 |
+
return dist
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def _apply(
|
| 86 |
+
dist: Distribution,
|
| 87 |
+
filepath: StrPath,
|
| 88 |
+
other_files: Iterable[StrPath] = (),
|
| 89 |
+
ignore_option_errors: bool = False,
|
| 90 |
+
) -> tuple[ConfigMetadataHandler, ConfigOptionsHandler]:
|
| 91 |
+
"""Read configuration from ``filepath`` and applies to the ``dist`` object."""
|
| 92 |
+
from setuptools.dist import _Distribution
|
| 93 |
+
|
| 94 |
+
filepath = os.path.abspath(filepath)
|
| 95 |
+
|
| 96 |
+
if not os.path.isfile(filepath):
|
| 97 |
+
raise FileError(f'Configuration file {filepath} does not exist.')
|
| 98 |
+
|
| 99 |
+
current_directory = os.getcwd()
|
| 100 |
+
os.chdir(os.path.dirname(filepath))
|
| 101 |
+
filenames = [*other_files, filepath]
|
| 102 |
+
|
| 103 |
+
try:
|
| 104 |
+
# TODO: Temporary cast until mypy 1.12 is released with upstream fixes from typeshed
|
| 105 |
+
_Distribution.parse_config_files(dist, filenames=cast(list[str], filenames))
|
| 106 |
+
handlers = parse_configuration(
|
| 107 |
+
dist, dist.command_options, ignore_option_errors=ignore_option_errors
|
| 108 |
+
)
|
| 109 |
+
dist._finalize_license_files()
|
| 110 |
+
finally:
|
| 111 |
+
os.chdir(current_directory)
|
| 112 |
+
|
| 113 |
+
return handlers
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
def _get_option(target_obj: Distribution | DistributionMetadata, key: str):
|
| 117 |
+
"""
|
| 118 |
+
Given a target object and option key, get that option from
|
| 119 |
+
the target object, either through a get_{key} method or
|
| 120 |
+
from an attribute directly.
|
| 121 |
+
"""
|
| 122 |
+
getter_name = f'get_{key}'
|
| 123 |
+
by_attribute = functools.partial(getattr, target_obj, key)
|
| 124 |
+
getter = getattr(target_obj, getter_name, by_attribute)
|
| 125 |
+
return getter()
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def configuration_to_dict(
|
| 129 |
+
handlers: Iterable[
|
| 130 |
+
ConfigHandler[Distribution] | ConfigHandler[DistributionMetadata]
|
| 131 |
+
],
|
| 132 |
+
) -> dict:
|
| 133 |
+
"""Returns configuration data gathered by given handlers as a dict.
|
| 134 |
+
|
| 135 |
+
:param Iterable[ConfigHandler] handlers: Handlers list,
|
| 136 |
+
usually from parse_configuration()
|
| 137 |
+
|
| 138 |
+
:rtype: dict
|
| 139 |
+
"""
|
| 140 |
+
config_dict: dict = defaultdict(dict)
|
| 141 |
+
|
| 142 |
+
for handler in handlers:
|
| 143 |
+
for option in handler.set_options:
|
| 144 |
+
value = _get_option(handler.target_obj, option)
|
| 145 |
+
config_dict[handler.section_prefix][option] = value
|
| 146 |
+
|
| 147 |
+
return config_dict
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
def parse_configuration(
|
| 151 |
+
distribution: Distribution,
|
| 152 |
+
command_options: AllCommandOptions,
|
| 153 |
+
ignore_option_errors: bool = False,
|
| 154 |
+
) -> tuple[ConfigMetadataHandler, ConfigOptionsHandler]:
|
| 155 |
+
"""Performs additional parsing of configuration options
|
| 156 |
+
for a distribution.
|
| 157 |
+
|
| 158 |
+
Returns a list of used option handlers.
|
| 159 |
+
|
| 160 |
+
:param Distribution distribution:
|
| 161 |
+
:param dict command_options:
|
| 162 |
+
:param bool ignore_option_errors: Whether to silently ignore
|
| 163 |
+
options, values of which could not be resolved (e.g. due to exceptions
|
| 164 |
+
in directives such as file:, attr:, etc.).
|
| 165 |
+
If False exceptions are propagated as expected.
|
| 166 |
+
:rtype: list
|
| 167 |
+
"""
|
| 168 |
+
with expand.EnsurePackagesDiscovered(distribution) as ensure_discovered:
|
| 169 |
+
options = ConfigOptionsHandler(
|
| 170 |
+
distribution,
|
| 171 |
+
command_options,
|
| 172 |
+
ignore_option_errors,
|
| 173 |
+
ensure_discovered,
|
| 174 |
+
)
|
| 175 |
+
|
| 176 |
+
options.parse()
|
| 177 |
+
if not distribution.package_dir:
|
| 178 |
+
distribution.package_dir = options.package_dir # Filled by `find_packages`
|
| 179 |
+
|
| 180 |
+
meta = ConfigMetadataHandler(
|
| 181 |
+
distribution.metadata,
|
| 182 |
+
command_options,
|
| 183 |
+
ignore_option_errors,
|
| 184 |
+
ensure_discovered,
|
| 185 |
+
distribution.package_dir,
|
| 186 |
+
distribution.src_root,
|
| 187 |
+
)
|
| 188 |
+
meta.parse()
|
| 189 |
+
distribution._referenced_files.update(
|
| 190 |
+
options._referenced_files, meta._referenced_files
|
| 191 |
+
)
|
| 192 |
+
|
| 193 |
+
return meta, options
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
def _warn_accidental_env_marker_misconfig(label: str, orig_value: str, parsed: list):
|
| 197 |
+
"""Because users sometimes misinterpret this configuration:
|
| 198 |
+
|
| 199 |
+
[options.extras_require]
|
| 200 |
+
foo = bar;python_version<"4"
|
| 201 |
+
|
| 202 |
+
It looks like one requirement with an environment marker
|
| 203 |
+
but because there is no newline, it's parsed as two requirements
|
| 204 |
+
with a semicolon as separator.
|
| 205 |
+
|
| 206 |
+
Therefore, if:
|
| 207 |
+
* input string does not contain a newline AND
|
| 208 |
+
* parsed result contains two requirements AND
|
| 209 |
+
* parsing of the two parts from the result ("<first>;<second>")
|
| 210 |
+
leads in a valid Requirement with a valid marker
|
| 211 |
+
a UserWarning is shown to inform the user about the possible problem.
|
| 212 |
+
"""
|
| 213 |
+
if "\n" in orig_value or len(parsed) != 2:
|
| 214 |
+
return
|
| 215 |
+
|
| 216 |
+
markers = marker_env().keys()
|
| 217 |
+
|
| 218 |
+
try:
|
| 219 |
+
req = Requirement(parsed[1])
|
| 220 |
+
if req.name in markers:
|
| 221 |
+
_AmbiguousMarker.emit(field=label, req=parsed[1])
|
| 222 |
+
except InvalidRequirement as ex:
|
| 223 |
+
if any(parsed[1].startswith(marker) for marker in markers):
|
| 224 |
+
msg = _AmbiguousMarker.message(field=label, req=parsed[1])
|
| 225 |
+
raise InvalidRequirement(msg) from ex
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
class ConfigHandler(Generic[Target]):
|
| 229 |
+
"""Handles metadata supplied in configuration files."""
|
| 230 |
+
|
| 231 |
+
section_prefix: str
|
| 232 |
+
"""Prefix for config sections handled by this handler.
|
| 233 |
+
Must be provided by class heirs.
|
| 234 |
+
|
| 235 |
+
"""
|
| 236 |
+
|
| 237 |
+
aliases: ClassVar[dict[str, str]] = {}
|
| 238 |
+
"""Options aliases.
|
| 239 |
+
For compatibility with various packages. E.g.: d2to1 and pbr.
|
| 240 |
+
Note: `-` in keys is replaced with `_` by config parser.
|
| 241 |
+
|
| 242 |
+
"""
|
| 243 |
+
|
| 244 |
+
def __init__(
|
| 245 |
+
self,
|
| 246 |
+
target_obj: Target,
|
| 247 |
+
options: AllCommandOptions,
|
| 248 |
+
ignore_option_errors,
|
| 249 |
+
ensure_discovered: expand.EnsurePackagesDiscovered,
|
| 250 |
+
) -> None:
|
| 251 |
+
self.ignore_option_errors = ignore_option_errors
|
| 252 |
+
self.target_obj: Target = target_obj
|
| 253 |
+
self.sections = dict(self._section_options(options))
|
| 254 |
+
self.set_options: list[str] = []
|
| 255 |
+
self.ensure_discovered = ensure_discovered
|
| 256 |
+
self._referenced_files = set[str]()
|
| 257 |
+
"""After parsing configurations, this property will enumerate
|
| 258 |
+
all files referenced by the "file:" directive. Private API for setuptools only.
|
| 259 |
+
"""
|
| 260 |
+
|
| 261 |
+
@classmethod
|
| 262 |
+
def _section_options(
|
| 263 |
+
cls, options: AllCommandOptions
|
| 264 |
+
) -> Iterator[tuple[str, SingleCommandOptions]]:
|
| 265 |
+
for full_name, value in options.items():
|
| 266 |
+
pre, _sep, name = full_name.partition(cls.section_prefix)
|
| 267 |
+
if pre:
|
| 268 |
+
continue
|
| 269 |
+
yield name.lstrip('.'), value
|
| 270 |
+
|
| 271 |
+
@property
|
| 272 |
+
def parsers(self):
|
| 273 |
+
"""Metadata item name to parser function mapping."""
|
| 274 |
+
raise NotImplementedError(
|
| 275 |
+
f'{self.__class__.__name__} must provide .parsers property'
|
| 276 |
+
)
|
| 277 |
+
|
| 278 |
+
def __setitem__(self, option_name, value) -> None:
|
| 279 |
+
target_obj = self.target_obj
|
| 280 |
+
|
| 281 |
+
# Translate alias into real name.
|
| 282 |
+
option_name = self.aliases.get(option_name, option_name)
|
| 283 |
+
|
| 284 |
+
try:
|
| 285 |
+
current_value = getattr(target_obj, option_name)
|
| 286 |
+
except AttributeError as e:
|
| 287 |
+
raise KeyError(option_name) from e
|
| 288 |
+
|
| 289 |
+
if current_value:
|
| 290 |
+
# Already inhabited. Skipping.
|
| 291 |
+
return
|
| 292 |
+
|
| 293 |
+
try:
|
| 294 |
+
parsed = self.parsers.get(option_name, lambda x: x)(value)
|
| 295 |
+
except (Exception,) * self.ignore_option_errors:
|
| 296 |
+
return
|
| 297 |
+
|
| 298 |
+
simple_setter = functools.partial(target_obj.__setattr__, option_name)
|
| 299 |
+
setter = getattr(target_obj, f"set_{option_name}", simple_setter)
|
| 300 |
+
setter(parsed)
|
| 301 |
+
|
| 302 |
+
self.set_options.append(option_name)
|
| 303 |
+
|
| 304 |
+
@classmethod
|
| 305 |
+
def _parse_list(cls, value, separator=','):
|
| 306 |
+
"""Represents value as a list.
|
| 307 |
+
|
| 308 |
+
Value is split either by separator (defaults to comma) or by lines.
|
| 309 |
+
|
| 310 |
+
:param value:
|
| 311 |
+
:param separator: List items separator character.
|
| 312 |
+
:rtype: list
|
| 313 |
+
"""
|
| 314 |
+
if isinstance(value, list): # _get_parser_compound case
|
| 315 |
+
return value
|
| 316 |
+
|
| 317 |
+
if '\n' in value:
|
| 318 |
+
value = value.splitlines()
|
| 319 |
+
else:
|
| 320 |
+
value = value.split(separator)
|
| 321 |
+
|
| 322 |
+
return [chunk.strip() for chunk in value if chunk.strip()]
|
| 323 |
+
|
| 324 |
+
@classmethod
|
| 325 |
+
def _parse_dict(cls, value):
|
| 326 |
+
"""Represents value as a dict.
|
| 327 |
+
|
| 328 |
+
:param value:
|
| 329 |
+
:rtype: dict
|
| 330 |
+
"""
|
| 331 |
+
separator = '='
|
| 332 |
+
result = {}
|
| 333 |
+
for line in cls._parse_list(value):
|
| 334 |
+
key, sep, val = line.partition(separator)
|
| 335 |
+
if sep != separator:
|
| 336 |
+
raise OptionError(f"Unable to parse option value to dict: {value}")
|
| 337 |
+
result[key.strip()] = val.strip()
|
| 338 |
+
|
| 339 |
+
return result
|
| 340 |
+
|
| 341 |
+
@classmethod
|
| 342 |
+
def _parse_bool(cls, value):
|
| 343 |
+
"""Represents value as boolean.
|
| 344 |
+
|
| 345 |
+
:param value:
|
| 346 |
+
:rtype: bool
|
| 347 |
+
"""
|
| 348 |
+
value = value.lower()
|
| 349 |
+
return value in ('1', 'true', 'yes')
|
| 350 |
+
|
| 351 |
+
@classmethod
|
| 352 |
+
def _exclude_files_parser(cls, key):
|
| 353 |
+
"""Returns a parser function to make sure field inputs
|
| 354 |
+
are not files.
|
| 355 |
+
|
| 356 |
+
Parses a value after getting the key so error messages are
|
| 357 |
+
more informative.
|
| 358 |
+
|
| 359 |
+
:param key:
|
| 360 |
+
:rtype: callable
|
| 361 |
+
"""
|
| 362 |
+
|
| 363 |
+
def parser(value):
|
| 364 |
+
exclude_directive = 'file:'
|
| 365 |
+
if value.startswith(exclude_directive):
|
| 366 |
+
raise ValueError(
|
| 367 |
+
f'Only strings are accepted for the {key} field, '
|
| 368 |
+
'files are not accepted'
|
| 369 |
+
)
|
| 370 |
+
return _static.Str(value)
|
| 371 |
+
|
| 372 |
+
return parser
|
| 373 |
+
|
| 374 |
+
def _parse_file(self, value, root_dir: StrPath | None):
|
| 375 |
+
"""Represents value as a string, allowing including text
|
| 376 |
+
from nearest files using `file:` directive.
|
| 377 |
+
|
| 378 |
+
Directive is sandboxed and won't reach anything outside
|
| 379 |
+
directory with setup.py.
|
| 380 |
+
|
| 381 |
+
Examples:
|
| 382 |
+
file: README.rst, CHANGELOG.md, src/file.txt
|
| 383 |
+
|
| 384 |
+
:param str value:
|
| 385 |
+
:rtype: str
|
| 386 |
+
"""
|
| 387 |
+
include_directive = 'file:'
|
| 388 |
+
|
| 389 |
+
if not isinstance(value, str):
|
| 390 |
+
return value
|
| 391 |
+
|
| 392 |
+
if not value.startswith(include_directive):
|
| 393 |
+
return _static.Str(value)
|
| 394 |
+
|
| 395 |
+
spec = value[len(include_directive) :]
|
| 396 |
+
filepaths = [path.strip() for path in spec.split(',')]
|
| 397 |
+
self._referenced_files.update(filepaths)
|
| 398 |
+
# XXX: Is marking as static contents coming from files too optimistic?
|
| 399 |
+
return _static.Str(expand.read_files(filepaths, root_dir))
|
| 400 |
+
|
| 401 |
+
def _parse_attr(self, value, package_dir, root_dir: StrPath):
|
| 402 |
+
"""Represents value as a module attribute.
|
| 403 |
+
|
| 404 |
+
Examples:
|
| 405 |
+
attr: package.attr
|
| 406 |
+
attr: package.module.attr
|
| 407 |
+
|
| 408 |
+
:param str value:
|
| 409 |
+
:rtype: str
|
| 410 |
+
"""
|
| 411 |
+
attr_directive = 'attr:'
|
| 412 |
+
if not value.startswith(attr_directive):
|
| 413 |
+
return _static.Str(value)
|
| 414 |
+
|
| 415 |
+
attr_desc = value.replace(attr_directive, '')
|
| 416 |
+
|
| 417 |
+
# Make sure package_dir is populated correctly, so `attr:` directives can work
|
| 418 |
+
package_dir.update(self.ensure_discovered.package_dir)
|
| 419 |
+
return expand.read_attr(attr_desc, package_dir, root_dir)
|
| 420 |
+
|
| 421 |
+
@classmethod
|
| 422 |
+
def _get_parser_compound(cls, *parse_methods):
|
| 423 |
+
"""Returns parser function to represents value as a list.
|
| 424 |
+
|
| 425 |
+
Parses a value applying given methods one after another.
|
| 426 |
+
|
| 427 |
+
:param parse_methods:
|
| 428 |
+
:rtype: callable
|
| 429 |
+
"""
|
| 430 |
+
|
| 431 |
+
def parse(value):
|
| 432 |
+
parsed = value
|
| 433 |
+
|
| 434 |
+
for method in parse_methods:
|
| 435 |
+
parsed = method(parsed)
|
| 436 |
+
|
| 437 |
+
return parsed
|
| 438 |
+
|
| 439 |
+
return parse
|
| 440 |
+
|
| 441 |
+
@classmethod
|
| 442 |
+
def _parse_section_to_dict_with_key(cls, section_options, values_parser):
|
| 443 |
+
"""Parses section options into a dictionary.
|
| 444 |
+
|
| 445 |
+
Applies a given parser to each option in a section.
|
| 446 |
+
|
| 447 |
+
:param dict section_options:
|
| 448 |
+
:param callable values_parser: function with 2 args corresponding to key, value
|
| 449 |
+
:rtype: dict
|
| 450 |
+
"""
|
| 451 |
+
value = {}
|
| 452 |
+
for key, (_, val) in section_options.items():
|
| 453 |
+
value[key] = values_parser(key, val)
|
| 454 |
+
return value
|
| 455 |
+
|
| 456 |
+
@classmethod
|
| 457 |
+
def _parse_section_to_dict(cls, section_options, values_parser=None):
|
| 458 |
+
"""Parses section options into a dictionary.
|
| 459 |
+
|
| 460 |
+
Optionally applies a given parser to each value.
|
| 461 |
+
|
| 462 |
+
:param dict section_options:
|
| 463 |
+
:param callable values_parser: function with 1 arg corresponding to option value
|
| 464 |
+
:rtype: dict
|
| 465 |
+
"""
|
| 466 |
+
parser = (lambda _, v: values_parser(v)) if values_parser else (lambda _, v: v)
|
| 467 |
+
return cls._parse_section_to_dict_with_key(section_options, parser)
|
| 468 |
+
|
| 469 |
+
def parse_section(self, section_options) -> None:
|
| 470 |
+
"""Parses configuration file section.
|
| 471 |
+
|
| 472 |
+
:param dict section_options:
|
| 473 |
+
"""
|
| 474 |
+
for name, (_, value) in section_options.items():
|
| 475 |
+
with contextlib.suppress(KeyError):
|
| 476 |
+
# Keep silent for a new option may appear anytime.
|
| 477 |
+
self[name] = value
|
| 478 |
+
|
| 479 |
+
def parse(self) -> None:
|
| 480 |
+
"""Parses configuration file items from one
|
| 481 |
+
or more related sections.
|
| 482 |
+
|
| 483 |
+
"""
|
| 484 |
+
for section_name, section_options in self.sections.items():
|
| 485 |
+
method_postfix = ''
|
| 486 |
+
if section_name: # [section.option] variant
|
| 487 |
+
method_postfix = f"_{section_name}"
|
| 488 |
+
|
| 489 |
+
section_parser_method: Callable | None = getattr(
|
| 490 |
+
self,
|
| 491 |
+
# Dots in section names are translated into dunderscores.
|
| 492 |
+
f'parse_section{method_postfix}'.replace('.', '__'),
|
| 493 |
+
None,
|
| 494 |
+
)
|
| 495 |
+
|
| 496 |
+
if section_parser_method is None:
|
| 497 |
+
raise OptionError(
|
| 498 |
+
"Unsupported distribution option section: "
|
| 499 |
+
f"[{self.section_prefix}.{section_name}]"
|
| 500 |
+
)
|
| 501 |
+
|
| 502 |
+
section_parser_method(section_options)
|
| 503 |
+
|
| 504 |
+
def _deprecated_config_handler(self, func, msg, **kw):
    """Wrap a parser for a deprecated option so each use emits a warning.

    :param func: parser function to be wrapped
    :param msg: deprecation message shown to the user
    :return: wrapper with the same signature/metadata as ``func``
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        # Point the warning at the caller's caller by default.
        kw.setdefault("stacklevel", 2)
        _DeprecatedConfig.emit("Deprecated config in `setup.cfg`", msg, **kw)
        return func(*args, **kwargs)

    return wrapper
|
| 518 |
+
|
| 519 |
+
|
| 520 |
+
class ConfigMetadataHandler(ConfigHandler["DistributionMetadata"]):
    # Handles the ``[metadata]`` section of ``setup.cfg``.
    section_prefix = 'metadata'

    # Legacy/alternative option spellings mapped to canonical attribute names.
    aliases = {
        'home_page': 'url',
        'summary': 'description',
        'classifier': 'classifiers',
        'platform': 'platforms',
    }

    strict_mode = False
    """We need to keep it loose, to be partially compatible with
    `pbr` and `d2to1` packages which also uses `metadata` section.

    """

    def __init__(
        self,
        target_obj: DistributionMetadata,
        options: AllCommandOptions,
        ignore_option_errors: bool,
        ensure_discovered: expand.EnsurePackagesDiscovered,
        package_dir: dict | None = None,
        root_dir: StrPath | None = os.curdir,
    ) -> None:
        super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
        # Used when resolving ``attr:``/``file:`` directives relative to the project.
        self.package_dir = package_dir
        self.root_dir = root_dir

    @property
    def parsers(self):
        """Metadata item name to parser function mapping."""
        # ``*_static`` compounds also wrap the parsed result in a ``_static``
        # container, marking it as non-dynamic Core Metadata.
        parse_list_static = self._get_parser_compound(self._parse_list, _static.List)
        parse_dict_static = self._get_parser_compound(self._parse_dict, _static.Dict)
        parse_file = partial(self._parse_file, root_dir=self.root_dir)
        exclude_files_parser = self._exclude_files_parser

        return {
            'author': _static.Str,
            'author_email': _static.Str,
            'maintainer': _static.Str,
            'maintainer_email': _static.Str,
            'platforms': parse_list_static,
            'keywords': parse_list_static,
            'provides': parse_list_static,
            'obsoletes': parse_list_static,
            'classifiers': self._get_parser_compound(parse_file, parse_list_static),
            'license': exclude_files_parser('license'),
            'license_files': parse_list_static,
            'description': parse_file,
            'long_description': parse_file,
            'long_description_content_type': _static.Str,
            'version': self._parse_version,  # Cannot be marked as dynamic
            'url': _static.Str,
            'project_urls': parse_dict_static,
        }

    def _parse_version(self, value):
        """Parses `version` option value.

        Supports both ``file:`` and ``attr:`` directives, as well as a
        plain version string.

        :param value:
        :rtype: str

        """
        version = self._parse_file(value, self.root_dir)

        if version != value:
            # ``value`` was a ``file:`` directive and was read from disk.
            version = version.strip()
            # Be strict about versions loaded from file because it's easy to
            # accidentally include newlines and other unintended content
            try:
                Version(version)
            except InvalidVersion as e:
                raise OptionError(
                    f'Version loaded from {value} does not '
                    f'comply with PEP 440: {version}'
                ) from e

            return version

        # Not a ``file:`` directive: resolve as ``attr:`` (or literal string).
        return expand.version(self._parse_attr(value, self.package_dir, self.root_dir))
|
| 601 |
+
|
| 602 |
+
|
| 603 |
+
class ConfigOptionsHandler(ConfigHandler["Distribution"]):
    # Handles the ``[options]`` section (and its sub-sections) of ``setup.cfg``.
    section_prefix = 'options'

    def __init__(
        self,
        target_obj: Distribution,
        options: AllCommandOptions,
        ignore_option_errors: bool,
        ensure_discovered: expand.EnsurePackagesDiscovered,
    ) -> None:
        super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
        self.root_dir = target_obj.src_root
        self.package_dir: dict[str, str] = {}  # To be filled by `find_packages`

    @classmethod
    def _parse_list_semicolon(cls, value):
        """Parse a list whose items are separated by ``;`` (requirement lines)."""
        return cls._parse_list(value, separator=';')

    def _parse_file_in_root(self, value):
        """Resolve a ``file:`` directive relative to the project root."""
        return self._parse_file(value, root_dir=self.root_dir)

    def _parse_requirements_list(self, label: str, value: str):
        # Parse a requirements list, either by reading in a `file:`, or a list.
        parsed = self._parse_list_semicolon(self._parse_file_in_root(value))
        _warn_accidental_env_marker_misconfig(label, value, parsed)
        # Filter it to only include lines that are not comments. `parse_list`
        # will have stripped each line and filtered out empties.
        return _static.List(line for line in parsed if not line.startswith("#"))
        # ^-- Use `_static.List` to mark a non-`Dynamic` Core Metadata

    @property
    def parsers(self):
        """Metadata item name to parser function mapping."""
        parse_list = self._parse_list
        parse_bool = self._parse_bool
        parse_cmdclass = self._parse_cmdclass

        return {
            'zip_safe': parse_bool,
            'include_package_data': parse_bool,
            'package_dir': self._parse_dict,
            'scripts': parse_list,
            'eager_resources': parse_list,
            'dependency_links': parse_list,
            'namespace_packages': self._deprecated_config_handler(
                parse_list,
                "The namespace_packages parameter is deprecated, "
                "consider using implicit namespaces instead (PEP 420).",
                # TODO: define due date, see setuptools.dist:check_nsp.
            ),
            'install_requires': partial(  # Core Metadata
                self._parse_requirements_list, "install_requires"
            ),
            'setup_requires': self._parse_list_semicolon,
            'packages': self._parse_packages,
            'entry_points': self._parse_file_in_root,
            'py_modules': parse_list,
            'python_requires': _static.SpecifierSet,  # Core Metadata
            'cmdclass': parse_cmdclass,
        }

    def _parse_cmdclass(self, value):
        """Resolve ``cmdclass`` entries (qualified names) into command classes."""
        package_dir = self.ensure_discovered.package_dir
        return expand.cmdclass(self._parse_dict(value), package_dir, self.root_dir)

    def _parse_packages(self, value):
        """Parses `packages` option value.

        Either an explicit list of packages, or one of the ``find:`` /
        ``find_namespace:`` directives triggering automatic discovery.

        :param value:
        :rtype: list
        """
        find_directives = ['find:', 'find_namespace:']
        trimmed_value = value.strip()

        if trimmed_value not in find_directives:
            return self._parse_list(value)

        # Read function arguments from a dedicated section.
        find_kwargs = self.parse_section_packages__find(
            self.sections.get('packages.find', {})
        )

        find_kwargs.update(
            namespaces=(trimmed_value == find_directives[1]),
            root_dir=self.root_dir,
            fill_package_dir=self.package_dir,
        )

        return expand.find_packages(**find_kwargs)

    def parse_section_packages__find(self, section_options):
        """Parses `packages.find` configuration file section.

        To be used in conjunction with _parse_packages().

        :param dict section_options:
        """
        section_data = self._parse_section_to_dict(section_options, self._parse_list)

        # Only forward the keyword arguments ``find_packages`` understands.
        valid_keys = ['where', 'include', 'exclude']
        find_kwargs = {k: v for k, v in section_data.items() if k in valid_keys and v}

        where = find_kwargs.get('where')
        if where is not None:
            find_kwargs['where'] = where[0]  # cast list to single val

        return find_kwargs

    def parse_section_entry_points(self, section_options) -> None:
        """Parses `entry_points` configuration file section.

        :param dict section_options:
        """
        parsed = self._parse_section_to_dict(section_options, self._parse_list)
        self['entry_points'] = parsed

    def _parse_package_data(self, section_options):
        """Parse a package-data-style section and normalize its globs."""
        package_data = self._parse_section_to_dict(section_options, self._parse_list)
        return expand.canonic_package_data(package_data)

    def parse_section_package_data(self, section_options) -> None:
        """Parses `package_data` configuration file section.

        :param dict section_options:
        """
        self['package_data'] = self._parse_package_data(section_options)

    def parse_section_exclude_package_data(self, section_options) -> None:
        """Parses `exclude_package_data` configuration file section.

        :param dict section_options:
        """
        self['exclude_package_data'] = self._parse_package_data(section_options)

    def parse_section_extras_require(self, section_options) -> None:  # Core Metadata
        """Parses `extras_require` configuration file section.

        :param dict section_options:
        """
        # Each key is an extra name; the label helps warnings point at it.
        parsed = self._parse_section_to_dict_with_key(
            section_options,
            lambda k, v: self._parse_requirements_list(f"extras_require[{k}]", v),
        )

        self['extras_require'] = _static.Dict(parsed)
        # ^-- Use `_static.Dict` to mark a non-`Dynamic` Core Metadata

    def parse_section_data_files(self, section_options) -> None:
        """Parses `data_files` configuration file section.

        :param dict section_options:
        """
        parsed = self._parse_section_to_dict(section_options, self._parse_list)
        self['data_files'] = expand.canonic_data_files(parsed, self.root_dir)
|
| 757 |
+
|
| 758 |
+
|
| 759 |
+
class _AmbiguousMarker(SetuptoolsDeprecationWarning):
    # Warning emitted when a parsed requirement line looks like a bare
    # environment marker (commonly a mis-indented continuation line).
    _SUMMARY = "Ambiguous requirement marker."
    _DETAILS = """
    One of the parsed requirements in `{field}` looks like a valid environment marker:

        {req!r}

    Please make sure that the configuration file is correct.
    You can use dangling lines to avoid this problem.
    """
    _SEE_DOCS = "userguide/declarative_config.html#opt-2"
    # TODO: should we include due_date here? Initially introduced in 6 Aug 2022.
    # Does this make sense with latest version of packaging?

    @classmethod
    def message(cls, **kw):
        """Render the full warning text, interpolating ``field``/``req`` kwargs."""
        docs = f"https://setuptools.pypa.io/en/latest/{cls._SEE_DOCS}"
        return cls._format(cls._SUMMARY, cls._DETAILS, see_url=docs, format_args=kw)
|
| 777 |
+
|
| 778 |
+
|
| 779 |
+
class _DeprecatedConfig(SetuptoolsDeprecationWarning):
    # Base warning for deprecated `setup.cfg` options; points readers at
    # the declarative-config documentation page.
    _SEE_DOCS = "userguide/declarative_config.html"
|
llava/lib/python3.10/site-packages/setuptools/config/setuptools.schema.json
ADDED
|
@@ -0,0 +1,433 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"$schema": "http://json-schema.org/draft-07/schema#",
|
| 3 |
+
|
| 4 |
+
"$id": "https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html",
|
| 5 |
+
"title": "``tool.setuptools`` table",
|
| 6 |
+
"$$description": [
|
| 7 |
+
"``setuptools``-specific configurations that can be set by users that require",
|
| 8 |
+
"customization.",
|
| 9 |
+
"These configurations are completely optional and probably can be skipped when",
|
| 10 |
+
"creating simple packages. They are equivalent to some of the `Keywords",
|
| 11 |
+
"<https://setuptools.pypa.io/en/latest/references/keywords.html>`_",
|
| 12 |
+
"used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.",
|
| 13 |
+
"It considers only ``setuptools`` `parameters",
|
| 14 |
+
"<https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html#setuptools-specific-configuration>`_",
|
| 15 |
+
"that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``",
|
| 16 |
+
"and ``setup_requires`` (incompatible with modern workflows/standards)."
|
| 17 |
+
],
|
| 18 |
+
|
| 19 |
+
"type": "object",
|
| 20 |
+
"additionalProperties": false,
|
| 21 |
+
"properties": {
|
| 22 |
+
"platforms": {
|
| 23 |
+
"type": "array",
|
| 24 |
+
"items": {"type": "string"}
|
| 25 |
+
},
|
| 26 |
+
"provides": {
|
| 27 |
+
"$$description": [
|
| 28 |
+
"Package and virtual package names contained within this package",
|
| 29 |
+
"**(not supported by pip)**"
|
| 30 |
+
],
|
| 31 |
+
"type": "array",
|
| 32 |
+
"items": {"type": "string", "format": "pep508-identifier"}
|
| 33 |
+
},
|
| 34 |
+
"obsoletes": {
|
| 35 |
+
"$$description": [
|
| 36 |
+
"Packages which this package renders obsolete",
|
| 37 |
+
"**(not supported by pip)**"
|
| 38 |
+
],
|
| 39 |
+
"type": "array",
|
| 40 |
+
"items": {"type": "string", "format": "pep508-identifier"}
|
| 41 |
+
},
|
| 42 |
+
"zip-safe": {
|
| 43 |
+
"$$description": [
|
| 44 |
+
"Whether the project can be safely installed and run from a zip file.",
|
| 45 |
+
"**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and",
|
| 46 |
+
"``setup.py install`` in the context of ``eggs`` (**DEPRECATED**)."
|
| 47 |
+
],
|
| 48 |
+
"type": "boolean"
|
| 49 |
+
},
|
| 50 |
+
"script-files": {
|
| 51 |
+
"$$description": [
|
| 52 |
+
"Legacy way of defining scripts (entry-points are preferred).",
|
| 53 |
+
"Equivalent to the ``script`` keyword in ``setup.py``",
|
| 54 |
+
"(it was renamed to avoid confusion with entry-point based ``project.scripts``",
|
| 55 |
+
"defined in :pep:`621`).",
|
| 56 |
+
"**DISCOURAGED**: generic script wrappers are tricky and may not work properly.",
|
| 57 |
+
"Whenever possible, please use ``project.scripts`` instead."
|
| 58 |
+
],
|
| 59 |
+
"type": "array",
|
| 60 |
+
"items": {"type": "string"},
|
| 61 |
+
"$comment": "TODO: is this field deprecated/should be removed?"
|
| 62 |
+
},
|
| 63 |
+
"eager-resources": {
|
| 64 |
+
"$$description": [
|
| 65 |
+
"Resources that should be extracted together, if any of them is needed,",
|
| 66 |
+
"or if any C extensions included in the project are imported.",
|
| 67 |
+
"**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and",
|
| 68 |
+
"``setup.py install`` in the context of ``eggs`` (**DEPRECATED**)."
|
| 69 |
+
],
|
| 70 |
+
"type": "array",
|
| 71 |
+
"items": {"type": "string"}
|
| 72 |
+
},
|
| 73 |
+
"packages": {
|
| 74 |
+
"$$description": [
|
| 75 |
+
"Packages that should be included in the distribution.",
|
| 76 |
+
"It can be given either as a list of package identifiers",
|
| 77 |
+
"or as a ``dict``-like structure with a single key ``find``",
|
| 78 |
+
"which corresponds to a dynamic call to",
|
| 79 |
+
"``setuptools.config.expand.find_packages`` function.",
|
| 80 |
+
"The ``find`` key is associated with a nested ``dict``-like structure that can",
|
| 81 |
+
"contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,",
|
| 82 |
+
"mimicking the keyword arguments of the associated function."
|
| 83 |
+
],
|
| 84 |
+
"oneOf": [
|
| 85 |
+
{
|
| 86 |
+
"title": "Array of Python package identifiers",
|
| 87 |
+
"type": "array",
|
| 88 |
+
"items": {"$ref": "#/definitions/package-name"}
|
| 89 |
+
},
|
| 90 |
+
{"$ref": "#/definitions/find-directive"}
|
| 91 |
+
]
|
| 92 |
+
},
|
| 93 |
+
"package-dir": {
|
| 94 |
+
"$$description": [
|
| 95 |
+
":class:`dict`-like structure mapping from package names to directories where their",
|
| 96 |
+
"code can be found.",
|
| 97 |
+
"The empty string (as key) means that all packages contained inside",
|
| 98 |
+
"the given directory will be included in the distribution."
|
| 99 |
+
],
|
| 100 |
+
"type": "object",
|
| 101 |
+
"additionalProperties": false,
|
| 102 |
+
"propertyNames": {
|
| 103 |
+
"anyOf": [{"const": ""}, {"$ref": "#/definitions/package-name"}]
|
| 104 |
+
},
|
| 105 |
+
"patternProperties": {
|
| 106 |
+
"^.*$": {"type": "string" }
|
| 107 |
+
}
|
| 108 |
+
},
|
| 109 |
+
"package-data": {
|
| 110 |
+
"$$description": [
|
| 111 |
+
"Mapping from package names to lists of glob patterns.",
|
| 112 |
+
"Usually this option is not needed when using ``include-package-data = true``",
|
| 113 |
+
"For more information on how to include data files, check ``setuptools`` `docs",
|
| 114 |
+
"<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_."
|
| 115 |
+
],
|
| 116 |
+
"type": "object",
|
| 117 |
+
"additionalProperties": false,
|
| 118 |
+
"propertyNames": {
|
| 119 |
+
"anyOf": [{"type": "string", "format": "python-module-name"}, {"const": "*"}]
|
| 120 |
+
},
|
| 121 |
+
"patternProperties": {
|
| 122 |
+
"^.*$": {"type": "array", "items": {"type": "string"}}
|
| 123 |
+
}
|
| 124 |
+
},
|
| 125 |
+
"include-package-data": {
|
| 126 |
+
"$$description": [
|
| 127 |
+
"Automatically include any data files inside the package directories",
|
| 128 |
+
"that are specified by ``MANIFEST.in``",
|
| 129 |
+
"For more information on how to include data files, check ``setuptools`` `docs",
|
| 130 |
+
"<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_."
|
| 131 |
+
],
|
| 132 |
+
"type": "boolean"
|
| 133 |
+
},
|
| 134 |
+
"exclude-package-data": {
|
| 135 |
+
"$$description": [
|
| 136 |
+
"Mapping from package names to lists of glob patterns that should be excluded",
|
| 137 |
+
"For more information on how to include data files, check ``setuptools`` `docs",
|
| 138 |
+
"<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_."
|
| 139 |
+
],
|
| 140 |
+
"type": "object",
|
| 141 |
+
"additionalProperties": false,
|
| 142 |
+
"propertyNames": {
|
| 143 |
+
"anyOf": [{"type": "string", "format": "python-module-name"}, {"const": "*"}]
|
| 144 |
+
},
|
| 145 |
+
"patternProperties": {
|
| 146 |
+
"^.*$": {"type": "array", "items": {"type": "string"}}
|
| 147 |
+
}
|
| 148 |
+
},
|
| 149 |
+
"namespace-packages": {
|
| 150 |
+
"type": "array",
|
| 151 |
+
"items": {"type": "string", "format": "python-module-name-relaxed"},
|
| 152 |
+
"$comment": "https://setuptools.pypa.io/en/latest/userguide/package_discovery.html",
|
| 153 |
+
"description": "**DEPRECATED**: use implicit namespaces instead (:pep:`420`)."
|
| 154 |
+
},
|
| 155 |
+
"py-modules": {
|
| 156 |
+
"description": "Modules that setuptools will manipulate",
|
| 157 |
+
"type": "array",
|
| 158 |
+
"items": {"type": "string", "format": "python-module-name-relaxed"},
|
| 159 |
+
"$comment": "TODO: clarify the relationship with ``packages``"
|
| 160 |
+
},
|
| 161 |
+
"ext-modules": {
|
| 162 |
+
"description": "Extension modules to be compiled by setuptools",
|
| 163 |
+
"type": "array",
|
| 164 |
+
"items": {"$ref": "#/definitions/ext-module"}
|
| 165 |
+
},
|
| 166 |
+
"data-files": {
|
| 167 |
+
"$$description": [
|
| 168 |
+
"``dict``-like structure where each key represents a directory and",
|
| 169 |
+
"the value is a list of glob patterns that should be installed in them.",
|
| 170 |
+
"**DISCOURAGED**: please notice this might not work as expected with wheels.",
|
| 171 |
+
"Whenever possible, consider using data files inside the package directories",
|
| 172 |
+
"(or create a new namespace package that only contains data files).",
|
| 173 |
+
"See `data files support",
|
| 174 |
+
"<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_."
|
| 175 |
+
],
|
| 176 |
+
"type": "object",
|
| 177 |
+
"patternProperties": {
|
| 178 |
+
"^.*$": {"type": "array", "items": {"type": "string"}}
|
| 179 |
+
}
|
| 180 |
+
},
|
| 181 |
+
"cmdclass": {
|
| 182 |
+
"$$description": [
|
| 183 |
+
"Mapping of distutils-style command names to ``setuptools.Command`` subclasses",
|
| 184 |
+
"which in turn should be represented by strings with a qualified class name",
|
| 185 |
+
"(i.e., \"dotted\" form with module), e.g.::\n\n",
|
| 186 |
+
" cmdclass = {mycmd = \"pkg.subpkg.module.CommandClass\"}\n\n",
|
| 187 |
+
"The command class should be directly defined at the top-level of the",
|
| 188 |
+
"containing module (no class nesting)."
|
| 189 |
+
],
|
| 190 |
+
"type": "object",
|
| 191 |
+
"patternProperties": {
|
| 192 |
+
"^.*$": {"type": "string", "format": "python-qualified-identifier"}
|
| 193 |
+
}
|
| 194 |
+
},
|
| 195 |
+
"license-files": {
|
| 196 |
+
"type": "array",
|
| 197 |
+
"items": {"type": "string"},
|
| 198 |
+
"$$description": [
|
| 199 |
+
"**PROVISIONAL**: list of glob patterns for all license files being distributed.",
|
| 200 |
+
"(likely to become standard with :pep:`639`).",
|
| 201 |
+
"By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"
|
| 202 |
+
],
|
| 203 |
+
"$comment": "TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?"
|
| 204 |
+
},
|
| 205 |
+
"dynamic": {
|
| 206 |
+
"type": "object",
|
| 207 |
+
"description": "Instructions for loading :pep:`621`-related metadata dynamically",
|
| 208 |
+
"additionalProperties": false,
|
| 209 |
+
"properties": {
|
| 210 |
+
"version": {
|
| 211 |
+
"$$description": [
|
| 212 |
+
"A version dynamically loaded via either the ``attr:`` or ``file:``",
|
| 213 |
+
"directives. Please make sure the given file or attribute respects :pep:`440`.",
|
| 214 |
+
"Also ensure to set ``project.dynamic`` accordingly."
|
| 215 |
+
],
|
| 216 |
+
"oneOf": [
|
| 217 |
+
{"$ref": "#/definitions/attr-directive"},
|
| 218 |
+
{"$ref": "#/definitions/file-directive"}
|
| 219 |
+
]
|
| 220 |
+
},
|
| 221 |
+
"classifiers": {"$ref": "#/definitions/file-directive"},
|
| 222 |
+
"description": {"$ref": "#/definitions/file-directive"},
|
| 223 |
+
"entry-points": {"$ref": "#/definitions/file-directive"},
|
| 224 |
+
"dependencies": {"$ref": "#/definitions/file-directive-for-dependencies"},
|
| 225 |
+
"optional-dependencies": {
|
| 226 |
+
"type": "object",
|
| 227 |
+
"propertyNames": {"type": "string", "format": "pep508-identifier"},
|
| 228 |
+
"additionalProperties": false,
|
| 229 |
+
"patternProperties": {
|
| 230 |
+
".+": {"$ref": "#/definitions/file-directive-for-dependencies"}
|
| 231 |
+
}
|
| 232 |
+
},
|
| 233 |
+
"readme": {
|
| 234 |
+
"type": "object",
|
| 235 |
+
"anyOf": [
|
| 236 |
+
{"$ref": "#/definitions/file-directive"},
|
| 237 |
+
{
|
| 238 |
+
"type": "object",
|
| 239 |
+
"properties": {
|
| 240 |
+
"content-type": {"type": "string"},
|
| 241 |
+
"file": { "$ref": "#/definitions/file-directive/properties/file" }
|
| 242 |
+
},
|
| 243 |
+
"additionalProperties": false}
|
| 244 |
+
],
|
| 245 |
+
"required": ["file"]
|
| 246 |
+
}
|
| 247 |
+
}
|
| 248 |
+
}
|
| 249 |
+
},
|
| 250 |
+
|
| 251 |
+
"definitions": {
|
| 252 |
+
"package-name": {
|
| 253 |
+
"$id": "#/definitions/package-name",
|
| 254 |
+
"title": "Valid package name",
|
| 255 |
+
"description": "Valid package name (importable or :pep:`561`).",
|
| 256 |
+
"type": "string",
|
| 257 |
+
"anyOf": [
|
| 258 |
+
{"type": "string", "format": "python-module-name-relaxed"},
|
| 259 |
+
{"type": "string", "format": "pep561-stub-name"}
|
| 260 |
+
]
|
| 261 |
+
},
|
| 262 |
+
"ext-module": {
|
| 263 |
+
"$id": "#/definitions/ext-module",
|
| 264 |
+
"title": "Extension module",
|
| 265 |
+
"description": "Parameters to construct a :class:`setuptools.Extension` object",
|
| 266 |
+
"type": "object",
|
| 267 |
+
"required": ["name", "sources"],
|
| 268 |
+
"additionalProperties": false,
|
| 269 |
+
"properties": {
|
| 270 |
+
"name": {
|
| 271 |
+
"type": "string",
|
| 272 |
+
"format": "python-module-name-relaxed"
|
| 273 |
+
},
|
| 274 |
+
"sources": {
|
| 275 |
+
"type": "array",
|
| 276 |
+
"items": {"type": "string"}
|
| 277 |
+
},
|
| 278 |
+
"include-dirs":{
|
| 279 |
+
"type": "array",
|
| 280 |
+
"items": {"type": "string"}
|
| 281 |
+
},
|
| 282 |
+
"define-macros": {
|
| 283 |
+
"type": "array",
|
| 284 |
+
"items": {
|
| 285 |
+
"type": "array",
|
| 286 |
+
"items": [
|
| 287 |
+
{"description": "macro name", "type": "string"},
|
| 288 |
+
{"description": "macro value", "oneOf": [{"type": "string"}, {"type": "null"}]}
|
| 289 |
+
],
|
| 290 |
+
"additionalItems": false
|
| 291 |
+
}
|
| 292 |
+
},
|
| 293 |
+
"undef-macros": {
|
| 294 |
+
"type": "array",
|
| 295 |
+
"items": {"type": "string"}
|
| 296 |
+
},
|
| 297 |
+
"library-dirs": {
|
| 298 |
+
"type": "array",
|
| 299 |
+
"items": {"type": "string"}
|
| 300 |
+
},
|
| 301 |
+
"libraries": {
|
| 302 |
+
"type": "array",
|
| 303 |
+
"items": {"type": "string"}
|
| 304 |
+
},
|
| 305 |
+
"runtime-library-dirs": {
|
| 306 |
+
"type": "array",
|
| 307 |
+
"items": {"type": "string"}
|
| 308 |
+
},
|
| 309 |
+
"extra-objects": {
|
| 310 |
+
"type": "array",
|
| 311 |
+
"items": {"type": "string"}
|
| 312 |
+
},
|
| 313 |
+
"extra-compile-args": {
|
| 314 |
+
"type": "array",
|
| 315 |
+
"items": {"type": "string"}
|
| 316 |
+
},
|
| 317 |
+
"extra-link-args": {
|
| 318 |
+
"type": "array",
|
| 319 |
+
"items": {"type": "string"}
|
| 320 |
+
},
|
| 321 |
+
"export-symbols": {
|
| 322 |
+
"type": "array",
|
| 323 |
+
"items": {"type": "string"}
|
| 324 |
+
},
|
| 325 |
+
"swig-opts": {
|
| 326 |
+
"type": "array",
|
| 327 |
+
"items": {"type": "string"}
|
| 328 |
+
},
|
| 329 |
+
"depends": {
|
| 330 |
+
"type": "array",
|
| 331 |
+
"items": {"type": "string"}
|
| 332 |
+
},
|
| 333 |
+
"language": {"type": "string"},
|
| 334 |
+
"optional": {"type": "boolean"},
|
| 335 |
+
"py-limited-api": {"type": "boolean"}
|
| 336 |
+
}
|
| 337 |
+
},
|
| 338 |
+
"file-directive": {
|
| 339 |
+
"$id": "#/definitions/file-directive",
|
| 340 |
+
"title": "'file:' directive",
|
| 341 |
+
"description":
|
| 342 |
+
"Value is read from a file (or list of files and then concatenated)",
|
| 343 |
+
"type": "object",
|
| 344 |
+
"additionalProperties": false,
|
| 345 |
+
"properties": {
|
| 346 |
+
"file": {
|
| 347 |
+
"oneOf": [
|
| 348 |
+
{"type": "string"},
|
| 349 |
+
{"type": "array", "items": {"type": "string"}}
|
| 350 |
+
]
|
| 351 |
+
}
|
| 352 |
+
},
|
| 353 |
+
"required": ["file"]
|
| 354 |
+
},
|
| 355 |
+
"file-directive-for-dependencies": {
|
| 356 |
+
"title": "'file:' directive for dependencies",
|
| 357 |
+
"allOf": [
|
| 358 |
+
{
|
| 359 |
+
"$$description": [
|
| 360 |
+
"**BETA**: subset of the ``requirements.txt`` format",
|
| 361 |
+
"without ``pip`` flags and options",
|
| 362 |
+
"(one :pep:`508`-compliant string per line,",
|
| 363 |
+
"lines that are blank or start with ``#`` are excluded).",
|
| 364 |
+
"See `dynamic metadata",
|
| 365 |
+
"<https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html#dynamic-metadata>`_."
|
| 366 |
+
]
|
| 367 |
+
},
|
| 368 |
+
{"$ref": "#/definitions/file-directive"}
|
| 369 |
+
]
|
| 370 |
+
},
|
| 371 |
+
"attr-directive": {
|
| 372 |
+
"title": "'attr:' directive",
|
| 373 |
+
"$id": "#/definitions/attr-directive",
|
| 374 |
+
"$$description": [
|
| 375 |
+
"Value is read from a module attribute. Supports callables and iterables;",
|
| 376 |
+
"unsupported types are cast via ``str()``"
|
| 377 |
+
],
|
| 378 |
+
"type": "object",
|
| 379 |
+
"additionalProperties": false,
|
| 380 |
+
"properties": {
|
| 381 |
+
"attr": {"type": "string", "format": "python-qualified-identifier"}
|
| 382 |
+
},
|
| 383 |
+
"required": ["attr"]
|
| 384 |
+
},
|
| 385 |
+
"find-directive": {
|
| 386 |
+
"$id": "#/definitions/find-directive",
|
| 387 |
+
"title": "'find:' directive",
|
| 388 |
+
"type": "object",
|
| 389 |
+
"additionalProperties": false,
|
| 390 |
+
"properties": {
|
| 391 |
+
"find": {
|
| 392 |
+
"type": "object",
|
| 393 |
+
"$$description": [
|
| 394 |
+
"Dynamic `package discovery",
|
| 395 |
+
"<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_."
|
| 396 |
+
],
|
| 397 |
+
"additionalProperties": false,
|
| 398 |
+
"properties": {
|
| 399 |
+
"where": {
|
| 400 |
+
"description":
|
| 401 |
+
"Directories to be searched for packages (Unix-style relative path)",
|
| 402 |
+
"type": "array",
|
| 403 |
+
"items": {"type": "string"}
|
| 404 |
+
},
|
| 405 |
+
"exclude": {
|
| 406 |
+
"type": "array",
|
| 407 |
+
"$$description": [
|
| 408 |
+
"Exclude packages that match the values listed in this field.",
|
| 409 |
+
"Can contain shell-style wildcards (e.g. ``'pkg.*'``)"
|
| 410 |
+
],
|
| 411 |
+
"items": {"type": "string"}
|
| 412 |
+
},
|
| 413 |
+
"include": {
|
| 414 |
+
"type": "array",
|
| 415 |
+
"$$description": [
|
| 416 |
+
"Restrict the found packages to just the ones listed in this field.",
|
| 417 |
+
"Can contain shell-style wildcards (e.g. ``'pkg.*'``)"
|
| 418 |
+
],
|
| 419 |
+
"items": {"type": "string"}
|
| 420 |
+
},
|
| 421 |
+
"namespaces": {
|
| 422 |
+
"type": "boolean",
|
| 423 |
+
"$$description": [
|
| 424 |
+
"When ``True``, directories without a ``__init__.py`` file will also",
|
| 425 |
+
"be scanned for :pep:`420`-style implicit namespaces"
|
| 426 |
+
]
|
| 427 |
+
}
|
| 428 |
+
}
|
| 429 |
+
}
|
| 430 |
+
}
|
| 431 |
+
}
|
| 432 |
+
}
|
| 433 |
+
}
|
llava/lib/python3.10/site-packages/setuptools/depends.py
ADDED
|
@@ -0,0 +1,185 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import contextlib
|
| 4 |
+
import dis
|
| 5 |
+
import marshal
|
| 6 |
+
import sys
|
| 7 |
+
from types import CodeType
|
| 8 |
+
from typing import Any, Literal, TypeVar
|
| 9 |
+
|
| 10 |
+
from packaging.version import Version
|
| 11 |
+
|
| 12 |
+
from . import _imp
|
| 13 |
+
from ._imp import PY_COMPILED, PY_FROZEN, PY_SOURCE, find_module
|
| 14 |
+
|
| 15 |
+
_T = TypeVar("_T")
|
| 16 |
+
|
| 17 |
+
__all__ = ['Require', 'find_module']
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class Require:
    """A prerequisite to building or installing a distribution.

    Describes a module that must be importable (optionally at a minimum
    version, read from a module attribute such as ``__version__``).
    """

    def __init__(
        self,
        name,
        requested_version,
        module,
        homepage: str = '',
        attribute=None,
        format=None,
    ) -> None:
        # Default the version parser to packaging's Version when a specific
        # version was requested but no parser was supplied.
        if format is None and requested_version is not None:
            format = Version

        if format is not None:
            requested_version = format(requested_version)
            # Versions are comparable, so default to probing ``__version__``.
            if attribute is None:
                attribute = '__version__'

        # Store every constructor argument as an instance attribute in one
        # shot; ``locals()`` also contains ``self``, which is deleted next.
        self.__dict__.update(locals())
        del self.self

    def full_name(self):
        """Return full package/distribution name, w/version"""
        if self.requested_version is not None:
            return f'{self.name}-{self.requested_version}'
        return self.name

    def version_ok(self, version):
        """Is 'version' sufficiently up-to-date?"""
        # NOTE: ``and`` binds tighter than ``or``; this evaluates as
        #   attribute is None
        #   OR format is None
        #   OR (version != "unknown" AND parsed version >= requested).
        return (
            self.attribute is None
            or self.format is None
            or str(version) != "unknown"
            and self.format(version) >= self.requested_version
        )

    def get_version(
        self, paths=None, default: _T | Literal["unknown"] = "unknown"
    ) -> _T | Literal["unknown"] | None | Any:
        """Get version number of installed module, 'None', or 'default'

        Search 'paths' for module. If not found, return 'None'. If found,
        return the extracted version attribute, or 'default' if no version
        attribute was specified, or the value cannot be determined without
        importing the module. The version is formatted according to the
        requirement's version format (if any), unless it is 'None' or the
        supplied 'default'.
        """

        if self.attribute is None:
            # No attribute to probe: mere presence of the module suffices.
            try:
                f, _p, _i = find_module(self.module, paths)
            except ImportError:
                return None
            if f:
                f.close()
            return default

        v = get_module_constant(self.module, self.attribute, default, paths)

        # Only format real extracted values, never the sentinels.
        if v is not None and v is not default and self.format is not None:
            return self.format(v)

        return v

    def is_present(self, paths=None):
        """Return true if dependency is present on 'paths'"""
        return self.get_version(paths) is not None

    def is_current(self, paths=None):
        """Return true if dependency is present and up-to-date on 'paths'"""
        version = self.get_version(paths)
        if version is None:
            return False
        return self.version_ok(str(version))
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def maybe_close(f):
    """Return a context manager that closes *f* on exit.

    When *f* is falsy (e.g. ``None``), a no-op context manager is returned
    instead, so callers can always write ``with maybe_close(f):`` without
    special-casing a missing file object.
    """
    if f:
        return contextlib.closing(f)
    return contextlib.nullcontext()
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
# Some objects are not available on some platforms.
|
| 112 |
+
# XXX it'd be better to test assertions about bytecode instead.
|
| 113 |
+
if not sys.platform.startswith('java') and sys.platform != 'cli':
|
| 114 |
+
|
| 115 |
+
def get_module_constant(
    module, symbol, default: _T | int = -1, paths=None
) -> _T | int | None | Any:
    """Find 'module' by searching 'paths', and extract 'symbol'

    Return 'None' if 'module' does not exist on 'paths', or it does not define
    'symbol'. If the module defines 'symbol' as a constant, return the
    constant. Otherwise, return 'default'."""

    try:
        f, path, (_suffix, _mode, kind) = info = find_module(module, paths)
    except ImportError:
        # Module doesn't exist
        return None

    # Obtain a code object without importing the module whenever possible;
    # falls back to a real import only for unparsable kinds (e.g. extensions).
    with maybe_close(f):
        if kind == PY_COMPILED:
            # NOTE(review): since Python 3.7 (PEP 552) the .pyc header is 16
            # bytes (magic + flags + mtime + size), not 8 -- confirm whether
            # this branch is still reachable and correct for modern pycs.
            f.read(8)  # skip magic & date
            code = marshal.load(f)
        elif kind == PY_FROZEN:
            # Frozen module: fetch its already-compiled code object.
            code = _imp.get_frozen_object(module, paths)
        elif kind == PY_SOURCE:
            # Plain .py source: compile without executing it.
            code = compile(f.read(), path, 'exec')
        else:
            # Not something we can parse; we'll have to import it. :(
            imported = _imp.get_module(module, paths, info)
            return getattr(imported, symbol, None)

    return extract_constant(code, symbol, default)
|
| 144 |
+
|
| 145 |
+
def extract_constant(
    code: CodeType, symbol: str, default: _T | int = -1
) -> _T | int | None | Any:
    """Scan the bytecode of *code* for the first assignment to *symbol*.

    Returns the constant assigned to *symbol* if the binding instruction is
    immediately preceded by a ``LOAD_CONST``; returns *default* if *symbol*
    is bound to the result of an expression instead; returns ``None`` if
    *symbol* is never stored at all.

    Only ``STORE_NAME`` and ``STORE_GLOBAL`` are recognized, so *symbol*
    must appear in ``code.co_names`` (globals / non-"fast" locals).
    """
    if symbol not in code.co_names:
        # Never referenced as a name -> cannot be assigned here.
        return None

    target = list(code.co_names).index(symbol)
    load_const = dis.opmap['LOAD_CONST']
    store_ops = {dis.opmap['STORE_NAME'], dis.opmap['STORE_GLOBAL']}

    # Track the most recent LOAD_CONST value; any other intervening
    # instruction invalidates it back to *default*.
    value = default
    for instruction in dis.Bytecode(code):
        if instruction.opcode == load_const:
            assert instruction.arg is not None
            value = code.co_consts[instruction.arg]
        elif instruction.opcode in store_ops and instruction.arg == target:
            return value
        else:
            value = default

    return None
|
| 184 |
+
|
| 185 |
+
__all__ += ['get_module_constant', 'extract_constant']
|
llava/lib/python3.10/site-packages/setuptools/dist.py
ADDED
|
@@ -0,0 +1,1004 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import io
|
| 4 |
+
import itertools
|
| 5 |
+
import numbers
|
| 6 |
+
import os
|
| 7 |
+
import re
|
| 8 |
+
import sys
|
| 9 |
+
from collections.abc import Iterable, MutableMapping, Sequence
|
| 10 |
+
from glob import iglob
|
| 11 |
+
from pathlib import Path
|
| 12 |
+
from typing import TYPE_CHECKING, Any, Union
|
| 13 |
+
|
| 14 |
+
from more_itertools import partition, unique_everseen
|
| 15 |
+
from packaging.markers import InvalidMarker, Marker
|
| 16 |
+
from packaging.specifiers import InvalidSpecifier, SpecifierSet
|
| 17 |
+
from packaging.version import Version
|
| 18 |
+
|
| 19 |
+
from . import (
|
| 20 |
+
_entry_points,
|
| 21 |
+
_reqs,
|
| 22 |
+
_static,
|
| 23 |
+
command as _, # noqa: F401 # imported for side-effects
|
| 24 |
+
)
|
| 25 |
+
from ._importlib import metadata
|
| 26 |
+
from ._path import StrPath
|
| 27 |
+
from ._reqs import _StrOrIter
|
| 28 |
+
from .config import pyprojecttoml, setupcfg
|
| 29 |
+
from .discovery import ConfigDiscovery
|
| 30 |
+
from .monkey import get_unpatched
|
| 31 |
+
from .warnings import InformationOnly, SetuptoolsDeprecationWarning
|
| 32 |
+
|
| 33 |
+
import distutils.cmd
|
| 34 |
+
import distutils.command
|
| 35 |
+
import distutils.core
|
| 36 |
+
import distutils.dist
|
| 37 |
+
import distutils.log
|
| 38 |
+
from distutils.debug import DEBUG
|
| 39 |
+
from distutils.errors import DistutilsOptionError, DistutilsSetupError
|
| 40 |
+
from distutils.fancy_getopt import translate_longopt
|
| 41 |
+
from distutils.util import strtobool
|
| 42 |
+
|
| 43 |
+
if TYPE_CHECKING:
|
| 44 |
+
from typing_extensions import TypeAlias
|
| 45 |
+
|
| 46 |
+
from pkg_resources import Distribution as _pkg_resources_Distribution
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
__all__ = ['Distribution']
|
| 50 |
+
|
| 51 |
+
_sequence = tuple, list
|
| 52 |
+
"""
|
| 53 |
+
:meta private:
|
| 54 |
+
|
| 55 |
+
Supported iterable types that are known to be:
|
| 56 |
+
- ordered (which `set` isn't)
|
| 57 |
+
- not match a str (which `Sequence[str]` does)
|
| 58 |
+
- not imply a nested type (like `dict`)
|
| 59 |
+
for use with `isinstance`.
|
| 60 |
+
"""
|
| 61 |
+
_Sequence: TypeAlias = Union[tuple[str, ...], list[str]]
|
| 62 |
+
# This is how stringifying _Sequence would look in Python 3.10
|
| 63 |
+
_sequence_type_repr = "tuple[str, ...] | list[str]"
|
| 64 |
+
_OrderedStrSequence: TypeAlias = Union[str, dict[str, Any], Sequence[str]]
|
| 65 |
+
"""
|
| 66 |
+
:meta private:
|
| 67 |
+
Avoid single-use iterable. Disallow sets.
|
| 68 |
+
A poor approximation of an OrderedSequence (dict doesn't match a Sequence).
|
| 69 |
+
"""
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
def __getattr__(name: str) -> Any:  # pragma: no cover
    """Module-level fallback: serve the deprecated ``sequence`` alias."""
    if name != "sequence":
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
    SetuptoolsDeprecationWarning.emit(
        "`setuptools.dist.sequence` is an internal implementation detail.",
        "Please define your own `sequence = tuple, list` instead.",
        due_date=(2025, 8, 28),  # Originally added on 2024-08-27
    )
    return _sequence
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def check_importable(dist, attr, value):
    # Validate that ``value`` looks like an importable "module:attrs"
    # reference by parsing it as an entry-point value; entry points with
    # extras are rejected via the assert.
    try:
        ep = metadata.EntryPoint(value=value, name=None, group=None)
        assert not ep.extras
    except (TypeError, ValueError, AttributeError, AssertionError) as e:
        raise DistutilsSetupError(
            f"{attr!r} must be importable 'module:attrs' string (got {value!r})"
        ) from e
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def assert_string_list(dist, attr: str, value: _Sequence) -> None:
    """Verify that value is a string list"""
    try:
        # verify that value is a list or tuple to exclude unordered
        # or single-use iterables
        assert isinstance(value, _sequence)
        # verify that elements of value are strings: ``''.join`` raises
        # TypeError for any non-str element (caught below), while a list or
        # tuple of strings never compares equal to the joined str.
        assert ''.join(value) != value
    except (TypeError, ValueError, AttributeError, AssertionError) as e:
        raise DistutilsSetupError(
            f"{attr!r} must be of type <{_sequence_type_repr}> (got {value!r})"
        ) from e
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def check_nsp(dist, attr, value):
    """Validate the (deprecated) ``namespace_packages`` setup keyword."""
    assert_string_list(dist, attr, value)
    for nsp in value:
        if not dist.has_contents_for(nsp):
            raise DistutilsSetupError(
                f"Distribution contains no modules or packages for namespace package {nsp!r}"
            )
        parent = nsp.rpartition('.')[0]
        if parent and parent not in value:
            distutils.log.warn(
                "WARNING: %r is declared as a package namespace, but %r"
                " is not: please correct this in setup.py",
                nsp,
                parent,
            )
    SetuptoolsDeprecationWarning.emit(
        "The namespace_packages parameter is deprecated.",
        "Please replace its usage with implicit namespaces (PEP 420).",
        see_docs="references/keywords.html#keyword-namespace-packages",
        # TODO: define due_date, it may break old packages that are no longer
        # maintained (e.g. sphinxcontrib extensions) when installed from source.
        # Warning officially introduced in May 2022, however the deprecation
        # was mentioned much earlier in the docs (May 2020, see #2149).
    )
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def check_extras(dist, attr, value):
    """Validate the ``extras_require`` mapping, name and requirements alike."""
    try:
        # Any failure -- not a mapping, bad marker, bad requirement -- is
        # surfaced uniformly as a DistutilsSetupError below.
        for extra, reqs in value.items():
            _check_extra(extra, reqs)
    except (TypeError, ValueError, AttributeError) as e:
        raise DistutilsSetupError(
            "'extras_require' must be a dictionary whose values are "
            "strings or lists of strings containing valid project/version "
            "requirement specifiers."
        ) from e
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
def _check_extra(extra, reqs):
    """Check one ``extras_require`` entry: its marker suffix and its reqs."""
    # Setuptools supports the legacy "<name>:<env marker>" extra syntax;
    # everything after the first ':' is the marker (may be empty).
    marker = extra.partition(':')[2]
    try:
        _check_marker(marker)
    except InvalidMarker:
        raise DistutilsSetupError(
            f"Invalid environment marker: {marker} ({extra!r})"
        ) from None
    list(_reqs.parse(reqs))
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
def _check_marker(marker):
    """Parse and evaluate *marker*; a falsy marker is accepted as a no-op."""
    if marker:
        Marker(marker).evaluate()
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
def assert_bool(dist, attr, value):
    """Verify that value is True, False, 0, or 1"""
    # bool(value) == value holds exactly for True/False and the ints 0/1.
    if bool(value) == value:
        return
    raise DistutilsSetupError(f"{attr!r} must be a boolean value (got {value!r})")
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
def invalid_unless_false(dist, attr, value):
    """Reject any truthy value for *attr*; falsy values only warn."""
    if value:
        raise DistutilsSetupError(f"{attr} is invalid.")
    DistDeprecationWarning.emit(f"{attr} is ignored.")
    # TODO: should there be a `due_date` here?
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
def check_requirements(dist, attr: str, value: _OrderedStrSequence) -> None:
    """Verify that install_requires is a valid requirements list"""
    try:
        # Parse eagerly so malformed specifiers raise inside the ``try``.
        list(_reqs.parse(value))
        # Sets parse fine but lose the author's ordering; rejected here,
        # after parsing, so parse errors take precedence in the message.
        if isinstance(value, set):
            raise TypeError("Unordered types are not allowed")
    except (TypeError, ValueError) as error:
        msg = (
            f"{attr!r} must be a string or iterable of strings "
            f"containing valid project/version requirement specifiers; {error}"
        )
        raise DistutilsSetupError(msg) from error
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
def check_specifier(dist, attr, value):
    """Check that *value* parses as a version specifier set (PEP 440)."""
    try:
        SpecifierSet(value)
    except (InvalidSpecifier, AttributeError) as error:
        raise DistutilsSetupError(
            f"{attr!r} must be a string containing valid version specifiers; {error}"
        ) from error
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
def check_entry_points(dist, attr, value):
    """Verify that entry_points map is parseable"""
    try:
        _entry_points.load(value)
    except Exception as e:
        # Normalize any load/parse failure into the error type that
        # setup() callers expect, chaining the original for context.
        raise DistutilsSetupError(e) from e
|
| 207 |
+
|
| 208 |
+
|
| 209 |
+
def check_package_data(dist, attr, value):
    """Check the package_data mapping: str keys, string-list values."""
    if not isinstance(value, dict):
        raise DistutilsSetupError(
            f"{attr!r} must be a dictionary mapping package names to lists of "
            "string wildcard patterns"
        )
    for package, patterns in value.items():
        if not isinstance(package, str):
            raise DistutilsSetupError(
                f"keys of {attr!r} dict must be strings (got {package!r})"
            )
        assert_string_list(dist, f'values of {attr!r} dict', patterns)
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
def check_packages(dist, attr, value):
    """Warn (without failing the build) about malformed package names."""
    for pkgname in value:
        if re.match(r'\w+(\.\w+)*', pkgname):
            continue
        distutils.log.warn(
            "WARNING: %r not a valid package name; please use only "
            ".-separated package names in setup.py",
            pkgname,
        )
|
| 232 |
+
|
| 233 |
+
|
| 234 |
+
if TYPE_CHECKING:
|
| 235 |
+
# Work around a mypy issue where type[T] can't be used as a base: https://github.com/python/mypy/issues/10962
|
| 236 |
+
from distutils.core import Distribution as _Distribution
|
| 237 |
+
else:
|
| 238 |
+
_Distribution = get_unpatched(distutils.core.Distribution)
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
class Distribution(_Distribution):
|
| 242 |
+
"""Distribution with support for tests and package data
|
| 243 |
+
|
| 244 |
+
This is an enhanced version of 'distutils.dist.Distribution' that
|
| 245 |
+
effectively adds the following new optional keyword arguments to 'setup()':
|
| 246 |
+
|
| 247 |
+
'install_requires' -- a string or sequence of strings specifying project
|
| 248 |
+
versions that the distribution requires when installed, in the format
|
| 249 |
+
used by 'pkg_resources.require()'. They will be installed
|
| 250 |
+
automatically when the package is installed. If you wish to use
|
| 251 |
+
packages that are not available in PyPI, or want to give your users an
|
| 252 |
+
alternate download location, you can add a 'find_links' option to the
|
| 253 |
+
'[easy_install]' section of your project's 'setup.cfg' file, and then
|
| 254 |
+
setuptools will scan the listed web pages for links that satisfy the
|
| 255 |
+
requirements.
|
| 256 |
+
|
| 257 |
+
'extras_require' -- a dictionary mapping names of optional "extras" to the
|
| 258 |
+
additional requirement(s) that using those extras incurs. For example,
|
| 259 |
+
this::
|
| 260 |
+
|
| 261 |
+
extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])
|
| 262 |
+
|
| 263 |
+
indicates that the distribution can optionally provide an extra
|
| 264 |
+
capability called "reST", but it can only be used if docutils and
|
| 265 |
+
reSTedit are installed. If the user installs your package using
|
| 266 |
+
EasyInstall and requests one of your extras, the corresponding
|
| 267 |
+
additional requirements will be installed if needed.
|
| 268 |
+
|
| 269 |
+
'package_data' -- a dictionary mapping package names to lists of filenames
|
| 270 |
+
or globs to use to find data files contained in the named packages.
|
| 271 |
+
If the dictionary has filenames or globs listed under '""' (the empty
|
| 272 |
+
string), those names will be searched for in every package, in addition
|
| 273 |
+
to any names for the specific package. Data files found using these
|
| 274 |
+
names/globs will be installed along with the package, in the same
|
| 275 |
+
location as the package. Note that globs are allowed to reference
|
| 276 |
+
the contents of non-package subdirectories, as long as you use '/' as
|
| 277 |
+
a path separator. (Globs are automatically converted to
|
| 278 |
+
platform-specific paths at runtime.)
|
| 279 |
+
|
| 280 |
+
In addition to these new keywords, this class also has several new methods
|
| 281 |
+
for manipulating the distribution's contents. For example, the 'include()'
|
| 282 |
+
and 'exclude()' methods can be thought of as in-place add and subtract
|
| 283 |
+
commands that add or remove packages, modules, extensions, and so on from
|
| 284 |
+
the distribution.
|
| 285 |
+
"""
|
| 286 |
+
|
| 287 |
+
_DISTUTILS_UNSUPPORTED_METADATA = {
|
| 288 |
+
'long_description_content_type': lambda: None,
|
| 289 |
+
'project_urls': dict,
|
| 290 |
+
'provides_extras': dict, # behaves like an ordered set
|
| 291 |
+
'license_file': lambda: None,
|
| 292 |
+
'license_files': lambda: None,
|
| 293 |
+
'install_requires': list,
|
| 294 |
+
'extras_require': dict,
|
| 295 |
+
}
|
| 296 |
+
|
| 297 |
+
# Used by build_py, editable_wheel and install_lib commands for legacy namespaces
|
| 298 |
+
namespace_packages: list[str] #: :meta private: DEPRECATED
|
| 299 |
+
|
| 300 |
+
# Any: Dynamic assignment results in Incompatible types in assignment
|
| 301 |
+
def __init__(self, attrs: MutableMapping[str, Any] | None = None) -> None:
|
| 302 |
+
have_package_data = hasattr(self, "package_data")
|
| 303 |
+
if not have_package_data:
|
| 304 |
+
self.package_data: dict[str, list[str]] = {}
|
| 305 |
+
attrs = attrs or {}
|
| 306 |
+
self.dist_files: list[tuple[str, str, str]] = []
|
| 307 |
+
self.include_package_data: bool | None = None
|
| 308 |
+
self.exclude_package_data: dict[str, list[str]] | None = None
|
| 309 |
+
# Filter-out setuptools' specific options.
|
| 310 |
+
self.src_root: str | None = attrs.pop("src_root", None)
|
| 311 |
+
self.dependency_links: list[str] = attrs.pop('dependency_links', [])
|
| 312 |
+
self.setup_requires: list[str] = attrs.pop('setup_requires', [])
|
| 313 |
+
for ep in metadata.entry_points(group='distutils.setup_keywords'):
|
| 314 |
+
vars(self).setdefault(ep.name, None)
|
| 315 |
+
|
| 316 |
+
metadata_only = set(self._DISTUTILS_UNSUPPORTED_METADATA)
|
| 317 |
+
metadata_only -= {"install_requires", "extras_require"}
|
| 318 |
+
dist_attrs = {k: v for k, v in attrs.items() if k not in metadata_only}
|
| 319 |
+
_Distribution.__init__(self, dist_attrs)
|
| 320 |
+
|
| 321 |
+
# Private API (setuptools-use only, not restricted to Distribution)
|
| 322 |
+
# Stores files that are referenced by the configuration and need to be in the
|
| 323 |
+
# sdist (e.g. `version = file: VERSION.txt`)
|
| 324 |
+
self._referenced_files = set[str]()
|
| 325 |
+
|
| 326 |
+
self.set_defaults = ConfigDiscovery(self)
|
| 327 |
+
|
| 328 |
+
self._set_metadata_defaults(attrs)
|
| 329 |
+
|
| 330 |
+
self.metadata.version = self._normalize_version(self.metadata.version)
|
| 331 |
+
self._finalize_requires()
|
| 332 |
+
|
| 333 |
+
def _validate_metadata(self):
|
| 334 |
+
required = {"name"}
|
| 335 |
+
provided = {
|
| 336 |
+
key
|
| 337 |
+
for key in vars(self.metadata)
|
| 338 |
+
if getattr(self.metadata, key, None) is not None
|
| 339 |
+
}
|
| 340 |
+
missing = required - provided
|
| 341 |
+
|
| 342 |
+
if missing:
|
| 343 |
+
msg = f"Required package metadata is missing: {missing}"
|
| 344 |
+
raise DistutilsSetupError(msg)
|
| 345 |
+
|
| 346 |
+
def _set_metadata_defaults(self, attrs):
|
| 347 |
+
"""
|
| 348 |
+
Fill-in missing metadata fields not supported by distutils.
|
| 349 |
+
Some fields may have been set by other tools (e.g. pbr).
|
| 350 |
+
Those fields (vars(self.metadata)) take precedence to
|
| 351 |
+
supplied attrs.
|
| 352 |
+
"""
|
| 353 |
+
for option, default in self._DISTUTILS_UNSUPPORTED_METADATA.items():
|
| 354 |
+
vars(self.metadata).setdefault(option, attrs.get(option, default()))
|
| 355 |
+
|
| 356 |
+
@staticmethod
|
| 357 |
+
def _normalize_version(version):
|
| 358 |
+
from . import sic
|
| 359 |
+
|
| 360 |
+
if isinstance(version, numbers.Number):
|
| 361 |
+
# Some people apparently take "version number" too literally :)
|
| 362 |
+
version = str(version)
|
| 363 |
+
elif isinstance(version, sic) or version is None:
|
| 364 |
+
return version
|
| 365 |
+
|
| 366 |
+
normalized = str(Version(version))
|
| 367 |
+
if version != normalized:
|
| 368 |
+
InformationOnly.emit(f"Normalizing '{version}' to '{normalized}'")
|
| 369 |
+
return normalized
|
| 370 |
+
return version
|
| 371 |
+
|
| 372 |
+
def _finalize_requires(self):
|
| 373 |
+
"""
|
| 374 |
+
Set `metadata.python_requires` and fix environment markers
|
| 375 |
+
in `install_requires` and `extras_require`.
|
| 376 |
+
"""
|
| 377 |
+
if getattr(self, 'python_requires', None):
|
| 378 |
+
self.metadata.python_requires = self.python_requires
|
| 379 |
+
|
| 380 |
+
self._normalize_requires()
|
| 381 |
+
self.metadata.install_requires = self.install_requires
|
| 382 |
+
self.metadata.extras_require = self.extras_require
|
| 383 |
+
|
| 384 |
+
if self.extras_require:
|
| 385 |
+
for extra in self.extras_require.keys():
|
| 386 |
+
# Setuptools allows a weird "<name>:<env markers> syntax for extras
|
| 387 |
+
extra = extra.split(':')[0]
|
| 388 |
+
if extra:
|
| 389 |
+
self.metadata.provides_extras.setdefault(extra)
|
| 390 |
+
|
| 391 |
+
def _normalize_requires(self):
    """Make sure requirement-related attributes exist and are normalized."""
    install_requires = getattr(self, "install_requires", None) or []
    extras_require = getattr(self, "extras_require", None) or {}

    # Preserve the "static"-ness of values parsed from config files.
    if _static.is_static(install_requires):
        list_factory = _static.List
    else:
        list_factory = list
    self.install_requires = list_factory(map(str, _reqs.parse(install_requires)))

    dict_factory = _static.Dict if _static.is_static(extras_require) else dict
    normalized_pairs = (
        (name, [str(req) for req in _reqs.parse(deps or [])])
        for name, deps in extras_require.items()
    )
    self.extras_require = dict_factory(normalized_pairs)
def _finalize_license_files(self) -> None:
    """Compute names of all license files which should be included.

    Combines the configured ``license_files`` patterns with the single
    ``license_file`` entry; when neither is configured, falls back to
    the default patterns used by ``wheel``.
    """
    configured_patterns: list[str] | None = self.metadata.license_files
    single_file: str | None = self.metadata.license_file

    patterns = configured_patterns or []
    if single_file and single_file not in patterns:
        patterns.append(single_file)

    if configured_patterns is None and single_file is None:
        # Default patterns match the ones wheel uses, see
        # https://wheel.readthedocs.io/en/stable/user_guide.html
        # -> 'Including license files in the generated wheel file'
        patterns = ['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']

    matches = self._expand_patterns(patterns)
    self.metadata.license_files = list(unique_everseen(matches))
@staticmethod
def _expand_patterns(patterns):
    """Expand glob ``patterns``, yielding existing regular files.

    Matches for each pattern are produced in sorted order; backup files
    (ending in ``~``) and non-files are skipped.

    >>> list(Distribution._expand_patterns(['LICENSE']))
    ['LICENSE']
    >>> list(Distribution._expand_patterns(['pyproject.toml', 'LIC*']))
    ['pyproject.toml', 'LICENSE']
    """
    for pattern in patterns:
        for match in sorted(iglob(pattern)):
            if match.endswith('~') or not os.path.isfile(match):
                continue
            yield match
# FIXME: 'Distribution._parse_config_files' is too complex (14)
def _parse_config_files(self, filenames=None):  # noqa: C901
    """
    Adapted from distutils.dist.Distribution.parse_config_files,
    this method provides the same functionality in subtly-improved
    ways.

    :param filenames: explicit iterable of config files to read; when
        ``None``, ``self.find_config_files()`` supplies the defaults.
    """
    from configparser import ConfigParser

    # Ignore install directory options if we have a venv
    ignore_options = (
        []
        if sys.prefix == sys.base_prefix
        else [
            'install-base',
            'install-platbase',
            'install-lib',
            'install-platlib',
            'install-purelib',
            'install-headers',
            'install-scripts',
            'install-data',
            'prefix',
            'exec-prefix',
            'home',
            'user',
            'root',
        ]
    )

    ignore_options = frozenset(ignore_options)

    if filenames is None:
        filenames = self.find_config_files()

    if DEBUG:
        self.announce("Distribution.parse_config_files():")

    parser = ConfigParser()
    parser.optionxform = str  # preserve option-name case
    for filename in filenames:
        with open(filename, encoding='utf-8') as reader:
            if DEBUG:
                # BUG FIX: the previous message had no format placeholder
                # ('" reading (unknown)".format(**locals())'), so the
                # filename being read was never shown.
                self.announce(f"  reading {filename}")
            parser.read_file(reader)
        for section in parser.sections():
            options = parser.options(section)
            opt_dict = self.get_option_dict(section)

            for opt in options:
                if opt == '__name__' or opt in ignore_options:
                    continue

                val = parser.get(section, opt)
                opt = self.warn_dash_deprecation(opt, section)
                opt = self.make_option_lowercase(opt, section)
                opt_dict[opt] = (filename, val)

        # Make the ConfigParser forget everything (so we retain
        # the original filenames that options come from)
        parser.__init__()

    if 'global' not in self.command_options:
        return

    # If there was a "global" section in the config file, use it
    # to set Distribution options.

    for opt, (src, val) in self.command_options['global'].items():
        alias = self.negative_opt.get(opt)
        if alias:
            val = not strtobool(val)
        elif opt in ('verbose', 'dry_run'):  # ugh!
            val = strtobool(val)

        try:
            setattr(self, alias or opt, val)
        except ValueError as e:
            raise DistutilsOptionError(e) from e
def warn_dash_deprecation(self, opt: str, section: str) -> str:
    """Translate a dash-separated option to its underscore form.

    Emits a deprecation warning for dash-separated option names in
    sections setuptools owns; returns the name unchanged for sections
    whose keys legitimately contain dashes.
    """
    # Extras/data-files sections use arbitrary user-chosen keys, so a
    # dash there is not an option-name spelling issue.
    if section in (
        'options.extras_require',
        'options.data_files',
    ):
        return opt

    underscore_opt = opt.replace('-', '_')
    commands = list(
        itertools.chain(
            distutils.command.__all__,
            self._setuptools_commands(),
        )
    )
    # Unknown sections (not 'options*', 'metadata', nor a command name)
    # are normalized silently, without a warning.
    if (
        not section.startswith('options')
        and section != 'metadata'
        and section not in commands
    ):
        return underscore_opt

    if '-' in opt:
        SetuptoolsDeprecationWarning.emit(
            "Invalid dash-separated options",
            f"""
            Usage of dash-separated {opt!r} will not be supported in future
            versions. Please use the underscore name {underscore_opt!r} instead.
            """,
            see_docs="userguide/declarative_config.html",
            due_date=(2025, 3, 3),
            # Warning initially introduced in 3 Mar 2021
        )
    return underscore_opt
def _setuptools_commands(self):
    """Return the names of all entry points advertised by setuptools."""
    try:
        eps = metadata.distribution('setuptools').entry_points
    except metadata.PackageNotFoundError:
        # during bootstrapping, distribution doesn't exist
        return []
    # Avoid newer entry-points API for compatibility.
    return {ep.name for ep in eps}
def make_option_lowercase(self, opt: str, section: str) -> str:
    """Lower-case ``opt`` when it appears in the ``[metadata]`` section.

    Options in other sections, or already lowercase, are returned
    unchanged; uppercase metadata keys trigger a deprecation warning.
    """
    if opt.islower() or section != 'metadata':
        return opt

    lowercase_opt = opt.lower()
    SetuptoolsDeprecationWarning.emit(
        "Invalid uppercase configuration",
        f"""
        Usage of uppercase key {opt!r} in {section!r} will not be supported in
        future versions. Please use lowercase {lowercase_opt!r} instead.
        """,
        see_docs="userguide/declarative_config.html",
        due_date=(2025, 3, 3),
        # Warning initially introduced in 6 Mar 2021
    )
    return lowercase_opt
# FIXME: 'Distribution._set_command_options' is too complex (14)
def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
    """
    Set the options for 'command_obj' from 'option_dict'. Basically
    this means copying elements of a dictionary ('option_dict') to
    attributes of an instance ('command').

    'command_obj' must be a Command instance. If 'option_dict' is not
    supplied, uses the standard option dictionary for this command
    (from 'self.command_options').

    (Adopted from distutils.dist.Distribution._set_command_options)
    """
    command_name = command_obj.get_command_name()
    if option_dict is None:
        option_dict = self.get_option_dict(command_name)

    if DEBUG:
        self.announce(f"  setting options for '{command_name}' command:")
    for option, (source, value) in option_dict.items():
        if DEBUG:
            self.announce(f"    {option} = {value} (from {source})")
        # Commands may declare boolean options and negative aliases;
        # absent declarations default to empty.
        try:
            bool_opts = [translate_longopt(o) for o in command_obj.boolean_options]
        except AttributeError:
            bool_opts = []
        try:
            neg_opt = command_obj.negative_opt
        except AttributeError:
            neg_opt = {}

        try:
            is_string = isinstance(value, str)
            # String values for negative options invert the parsed bool;
            # string values for boolean options are parsed with strtobool.
            if option in neg_opt and is_string:
                setattr(command_obj, neg_opt[option], not strtobool(value))
            elif option in bool_opts and is_string:
                setattr(command_obj, option, strtobool(value))
            elif hasattr(command_obj, option):
                setattr(command_obj, option, value)
            else:
                raise DistutilsOptionError(
                    f"error in {source}: command '{command_name}' has no such option '{option}'"
                )
        except ValueError as e:
            raise DistutilsOptionError(e) from e
def _get_project_config_files(self, filenames: Iterable[StrPath] | None):
    """Add the default ``pyproject.toml`` and split INI vs TOML files."""
    tomlfiles = []
    default_toml = Path(self.src_root or os.curdir, "pyproject.toml")
    if filenames is None:
        # No explicit files: pick up the standard pyproject.toml if present.
        if default_toml.exists():
            tomlfiles = [default_toml]
    else:
        non_toml, toml = partition(lambda f: Path(f).suffix == ".toml", filenames)
        filenames = list(non_toml)  # predicate False => INI-style files
        tomlfiles = list(toml)  # predicate True => TOML files
    return filenames, tomlfiles
def parse_config_files(
    self,
    filenames: Iterable[StrPath] | None = None,
    ignore_option_errors: bool = False,
) -> None:
    """Parses configuration files from various levels
    and loads configuration.

    :param filenames: optional explicit config files; split into
        INI-style and TOML files by ``_get_project_config_files``.
    :param ignore_option_errors: forwarded to the setup.cfg and
        pyproject.toml parsers.
    """
    inifiles, tomlfiles = self._get_project_config_files(filenames)

    # INI-style (setup.cfg / distutils cfg) files are parsed first...
    self._parse_config_files(filenames=inifiles)

    setupcfg.parse_configuration(
        self, self.command_options, ignore_option_errors=ignore_option_errors
    )
    # ...then any pyproject.toml files are applied.
    for filename in tomlfiles:
        pyprojecttoml.apply_configuration(self, filename, ignore_option_errors)

    # Re-derive dependent metadata after all config sources are merged.
    self._finalize_requires()
    self._finalize_license_files()
def fetch_build_eggs(
    self, requires: _StrOrIter
) -> list[_pkg_resources_Distribution]:
    """Resolve pre-setup requirements"""
    # Lazy import: the (deprecated) installer machinery is only needed
    # when eggs are actually fetched.
    from .installer import _fetch_build_eggs

    return _fetch_build_eggs(self, requires)
def finalize_options(self) -> None:
    """
    Allow plugins to apply arbitrary operations to the
    distribution. Each hook may optionally define a 'order'
    to influence the order of execution. Smaller numbers
    go first and the default is 0.
    """
    group = 'setuptools.finalize_distribution_options'

    hooks = [
        ep.load()
        for ep in metadata.entry_points(group=group)
        if not self._removed(ep)
    ]
    hooks.sort(key=lambda hook: getattr(hook, 'order', 0))
    for hook in hooks:
        hook(self)
@staticmethod
def _removed(ep):
    """
    When removing an entry point, if metadata is loaded
    from an older version of Setuptools, that removed
    entry point will attempt to be loaded and will fail.
    See #2765 for more details.
    """
    removed_names = frozenset({
        '2to3_doctests',  # removed 2021-09-05
    })
    return ep.name in removed_names
def _finalize_setup_keywords(self):
    """Run each 'distutils.setup_keywords' hook whose keyword was set."""
    for ep in metadata.entry_points(group='distutils.setup_keywords'):
        value = getattr(self, ep.name, None)
        if value is None:
            continue
        ep.load()(self, ep.name, value)
def get_egg_cache_dir(self):
    """Return the '.eggs' cache directory, creating it on first use."""
    from . import windows_support

    egg_cache_dir = os.path.join(os.curdir, '.eggs')
    if not os.path.exists(egg_cache_dir):
        os.mkdir(egg_cache_dir)
        windows_support.hide_file(egg_cache_dir)
        readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt')
        readme_lines = [
            'This directory contains eggs that were downloaded '
            'by setuptools to build, test, and run plug-ins.\n\n',
            'This directory caches those eggs to prevent '
            'repeated downloads.\n\n',
            'However, it is safe to delete this directory.\n\n',
        ]
        with open(readme_txt_filename, 'w', encoding="utf-8") as f:
            f.writelines(readme_lines)

    return egg_cache_dir
def fetch_build_egg(self, req):
    """Fetch an egg needed for building"""
    # Lazy import of the (deprecated) installer machinery.
    from .installer import fetch_build_egg

    return fetch_build_egg(self, req)
def get_command_class(self, command: str) -> type[distutils.cmd.Command]:  # type: ignore[override] # Not doing complex overrides yet
    """Pluggable version of get_command_class()"""
    # Explicitly registered command classes always win.
    if command in self.cmdclass:
        return self.cmdclass[command]

    # Special case bdist_wheel so it's never loaded from "wheel"
    if command == 'bdist_wheel':
        from .command.bdist_wheel import bdist_wheel

        return bdist_wheel

    eps = metadata.entry_points(group='distutils.commands', name=command)
    for ep in eps:
        # First matching entry point wins; cache it for later lookups.
        self.cmdclass[command] = cmdclass = ep.load()
        return cmdclass
    else:
        # NOTE: for/else — this branch runs only when no entry point
        # matched; fall back to the plain distutils lookup.
        return _Distribution.get_command_class(self, command)
def print_commands(self):
    """Load plugin-provided commands, then defer to distutils printing."""
    for ep in metadata.entry_points(group='distutils.commands'):
        if ep.name in self.cmdclass:
            continue
        # Cache each command class so distutils can list plugin commands.
        self.cmdclass[ep.name] = ep.load()
    return _Distribution.print_commands(self)
def get_command_list(self):
    """Load plugin-provided commands, then defer to distutils listing."""
    for ep in metadata.entry_points(group='distutils.commands'):
        if ep.name in self.cmdclass:
            continue
        # Cache each command class so distutils can see plugin commands.
        self.cmdclass[ep.name] = ep.load()
    return _Distribution.get_command_list(self)
def include(self, **attrs) -> None:
    """Add items to distribution that are named in keyword arguments

    For example, 'dist.include(py_modules=["x"])' would add 'x' to
    the distribution's 'py_modules' attribute, if it was not already
    there.

    Only list/tuple attributes are supported generically.  To support
    inclusion for another attribute 'X' in this or a subclass, define
    an '_include_X' method; it receives the value passed to 'include()'.
    So 'dist.include(foo={"bar":"baz"})' will try to call
    'dist._include_foo({"bar":"baz"})', which can then handle whatever
    special inclusion logic is needed.
    """
    for attr_name, attr_value in attrs.items():
        handler = getattr(self, f'_include_{attr_name}', None)
        if handler is None:
            self._include_misc(attr_name, attr_value)
        else:
            handler(attr_value)
def exclude_package(self, package: str) -> None:
    """Remove packages, modules, and extensions in named package"""
    prefix = package + '.'

    def keep(name: str) -> bool:
        # Keep anything that is neither the package nor inside it.
        return name != package and not name.startswith(prefix)

    if self.packages:
        self.packages = [p for p in self.packages if keep(p)]

    if self.py_modules:
        self.py_modules = [m for m in self.py_modules if keep(m)]

    if self.ext_modules:
        self.ext_modules = [ext for ext in self.ext_modules if keep(ext.name)]
def has_contents_for(self, package: str) -> bool:
    """Return true if 'exclude_package(package)' would do something"""
    prefix = package + '.'
    return any(
        name == package or name.startswith(prefix)
        for name in self.iter_distribution_names()
    )
def _exclude_misc(self, name: str, value: _Sequence) -> None:
    """Handle 'exclude()' for list/tuple attrs without a special handler"""
    if not isinstance(value, _sequence):
        raise DistutilsSetupError(
            f"{name}: setting must be of type <{_sequence_type_repr}> (got {value!r})"
        )
    try:
        old = getattr(self, name)
    except AttributeError as e:
        raise DistutilsSetupError(f"{name}: No such distribution setting") from e
    # Only list/tuple settings can be filtered; a non-sequence value
    # (other than an unset None) is a hard error.
    if old is not None and not isinstance(old, _sequence):
        raise DistutilsSetupError(
            name + ": this setting cannot be changed via include/exclude"
        )
    elif old:
        # Keep only the items not named in 'value'.
        setattr(self, name, [item for item in old if item not in value])
def _include_misc(self, name: str, value: _Sequence) -> None:
    """Handle 'include()' for list/tuple attrs without a special handler"""

    if not isinstance(value, _sequence):
        raise DistutilsSetupError(
            f"{name}: setting must be of type <{_sequence_type_repr}> (got {value!r})"
        )
    try:
        old = getattr(self, name)
    except AttributeError as e:
        raise DistutilsSetupError(f"{name}: No such distribution setting") from e
    if old is None:
        # Unset attribute: adopt the new value wholesale.
        setattr(self, name, value)
    elif not isinstance(old, _sequence):
        raise DistutilsSetupError(
            name + ": this setting cannot be changed via include/exclude"
        )
    else:
        # Append only the items not already present, preserving order.
        new = [item for item in value if item not in old]
        setattr(self, name, list(old) + new)
def exclude(self, **attrs) -> None:
    """Remove items from distribution that are named in keyword arguments

    For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
    the distribution's 'py_modules' attribute.  Excluding packages uses
    the 'exclude_package()' method, so all of the package's contained
    packages, modules, and extensions are also excluded.

    Only list/tuple attributes are supported generically.  To support
    exclusion for another attribute 'X' in this or a subclass, define
    an '_exclude_X' method; it receives the value passed to 'exclude()'.
    So 'dist.exclude(foo={"bar":"baz"})' will try to call
    'dist._exclude_foo({"bar":"baz"})', which can then handle whatever
    special exclusion logic is needed.
    """
    for attr_name, attr_value in attrs.items():
        handler = getattr(self, f'_exclude_{attr_name}', None)
        if handler is None:
            self._exclude_misc(attr_name, attr_value)
        else:
            handler(attr_value)
def _exclude_packages(self, packages: _Sequence) -> None:
    """Validate ``packages`` and exclude each via exclude_package()."""
    if not isinstance(packages, _sequence):
        raise DistutilsSetupError(
            f"packages: setting must be of type <{_sequence_type_repr}> (got {packages!r})"
        )
    for package in packages:
        self.exclude_package(package)
def _parse_command_opts(self, parser, args):
    """Process command-line options for a single command.

    Wraps the distutils implementation with alias expansion and support
    for commands that consume all remaining arguments.
    """
    # Remove --with-X/--without-X options when processing command args
    self.global_options = self.__class__.global_options
    self.negative_opt = self.__class__.negative_opt

    # First, expand any aliases
    command = args[0]
    aliases = self.get_option_dict('aliases')
    while command in aliases:
        _src, alias = aliases[command]
        del aliases[command]  # ensure each alias can expand only once!
        import shlex

        # Replace the alias token with its (possibly multi-word) expansion.
        args[:1] = shlex.split(alias, True)
        command = args[0]

    nargs = _Distribution._parse_command_opts(self, parser, args)

    # Handle commands that want to consume all remaining arguments
    cmd_class = self.get_command_class(command)
    if getattr(cmd_class, 'command_consumes_arguments', None):
        self.get_option_dict(command)['args'] = ("command line", nargs)
        if nargs is not None:
            # All remaining args were consumed by this command.
            return []

    return nargs
def get_cmdline_options(self) -> dict[str, dict[str, str | None]]:
    """Return a '{cmd: {opt:val}}' map of all command-line options

    Option names are all long, but do not include the leading '--', and
    contain dashes rather than underscores. If the option doesn't take
    an argument (e.g. '--quiet'), the 'val' is 'None'.

    Note that options provided by config files are intentionally excluded.
    """

    d: dict[str, dict[str, str | None]] = {}

    for cmd, opts in self.command_options.items():
        val: str | None
        for opt, (src, val) in opts.items():
            if src != "command line":
                continue

            opt = opt.replace('_', '-')

            if val == 0:
                # A zero value means a negative flag was used; recover
                # the original '--no-X'-style name from the negative-opt
                # maps of the distribution and the command object.
                cmdobj = self.get_command_obj(cmd)
                neg_opt = self.negative_opt.copy()
                neg_opt.update(getattr(cmdobj, 'negative_opt', {}))
                for neg, pos in neg_opt.items():
                    if pos == opt:
                        opt = neg
                        val = None
                        break
                else:
                    # Every 0-valued option should map to a negative alias.
                    raise AssertionError("Shouldn't be able to get here")

            elif val == 1:
                # A one means a plain boolean flag: report it with no value.
                val = None

            d.setdefault(cmd, {})[opt] = val

    return d
def iter_distribution_names(self):
    """Yield all packages, modules, and extension names in distribution"""
    yield from self.packages or ()
    yield from self.py_modules or ()

    for ext in self.ext_modules or ():
        # Extensions may be (name, build_info) tuples or Extension objects.
        if isinstance(ext, tuple):
            ext_name, _info = ext
        else:
            ext_name = ext.name
        # Strip the legacy 'module' suffix from extension names.
        if ext_name.endswith('module'):
            ext_name = ext_name[: -len('module')]
        yield ext_name
def handle_display_options(self, option_order):
    """If there were any non-global "display-only" options
    (--help-commands or the metadata display options) on the command
    line, display the requested info and return true; else return
    false.
    """
    import sys

    if self.help_commands:
        return _Distribution.handle_display_options(self, option_order)

    # Stdout may be StringIO (e.g. in tests)
    if not isinstance(sys.stdout, io.TextIOWrapper):
        return _Distribution.handle_display_options(self, option_order)

    # Don't wrap stdout if utf-8 is already the encoding. Provides
    # workaround for #334.
    if sys.stdout.encoding.lower() in ('utf-8', 'utf8'):
        return _Distribution.handle_display_options(self, option_order)

    # Print metadata in UTF-8 no matter the platform
    encoding = sys.stdout.encoding
    sys.stdout.reconfigure(encoding='utf-8')
    try:
        return _Distribution.handle_display_options(self, option_order)
    finally:
        # Always restore the original stdout encoding afterwards.
        sys.stdout.reconfigure(encoding=encoding)
def run_command(self, command) -> None:
    """Run ``command`` after applying deferred defaults.

    Defaults are postponed until all explicit configuration has been
    considered (setup() args, config files, command line and plugins).
    """
    self.set_defaults()
    super().run_command(command)
|
| 1002 |
+
class DistDeprecationWarning(SetuptoolsDeprecationWarning):
    """Warning for deprecations in setuptools' ``dist`` module.

    Unlike the builtin ``DeprecationWarning``, this is not ignored by
    default.
    """
|
llava/lib/python3.10/site-packages/setuptools/gui.exe
ADDED
|
Binary file (11.8 kB). View file
|
|
|
llava/lib/python3.10/site-packages/setuptools/installer.py
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import glob
|
| 4 |
+
import os
|
| 5 |
+
import subprocess
|
| 6 |
+
import sys
|
| 7 |
+
import tempfile
|
| 8 |
+
from functools import partial
|
| 9 |
+
|
| 10 |
+
from pkg_resources import Distribution
|
| 11 |
+
|
| 12 |
+
from . import _reqs
|
| 13 |
+
from ._reqs import _StrOrIter
|
| 14 |
+
from .warnings import SetuptoolsDeprecationWarning
|
| 15 |
+
from .wheel import Wheel
|
| 16 |
+
|
| 17 |
+
from distutils import log
|
| 18 |
+
from distutils.errors import DistutilsError
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def _fixup_find_links(find_links):
    """Ensure find-links option end-up being a list of strings."""
    if not isinstance(find_links, str):
        # Already a sequence of links; anything else is a caller bug.
        assert isinstance(find_links, (tuple, list))
        return find_links
    # A whitespace-separated string becomes a list of link strings.
    return find_links.split()
| 29 |
+
def fetch_build_egg(dist, req):
    """Fetch an egg needed for building.

    Use pip/wheel to fetch/build a wheel.

    :param dist: Distribution whose options (easy_install section,
        dependency links, egg cache dir) drive the fetch.
    :param req: the requirement to satisfy.
    """
    _DeprecatedInstaller.emit()  # this machinery is deprecated
    _warn_wheel_not_available(dist)
    return _fetch_build_egg_no_warn(dist, req)
| 38 |
+
def _fetch_build_eggs(dist, requires: _StrOrIter) -> list[Distribution]:
    """Resolve ``requires``, fetching missing eggs, and activate them.

    Each resolved distribution is added to the active working set so it
    is importable during the build.
    """
    import pkg_resources  # Delay import to avoid unnecessary side-effects

    _DeprecatedInstaller.emit(stacklevel=3)
    _warn_wheel_not_available(dist)

    resolved_dists = pkg_resources.working_set.resolve(
        _reqs.parse(requires, pkg_resources.Requirement),  # required for compatibility
        installer=partial(_fetch_build_egg_no_warn, dist),  # avoid warning twice
        replace_conflicting=True,
    )
    for dist in resolved_dists:
        pkg_resources.working_set.add(dist, replace=True)
    return resolved_dists
| 54 |
+
def _fetch_build_egg_no_warn(dist, req):  # noqa: C901 # is too complex (16) # FIXME
    """Fetch ``req`` with pip, install it as an egg, and return it.

    Checks the distribution's egg cache first; on a miss, builds a wheel
    via ``pip wheel`` in a temporary directory and converts it to an egg
    inside the cache directory.
    """
    import pkg_resources  # Delay import to avoid unnecessary side-effects

    # Ignore environment markers; if supplied, it is required.
    req = strip_marker(req)
    # Take easy_install options into account, but do not override relevant
    # pip environment variables (like PIP_INDEX_URL or PIP_QUIET); they'll
    # take precedence.
    opts = dist.get_option_dict('easy_install')
    if 'allow_hosts' in opts:
        raise DistutilsError(
            'the `allow-hosts` option is not supported '
            'when using pip to install requirements.'
        )
    quiet = 'PIP_QUIET' not in os.environ and 'PIP_VERBOSE' not in os.environ
    if 'PIP_INDEX_URL' in os.environ:
        index_url = None
    elif 'index_url' in opts:
        index_url = opts['index_url'][1]
    else:
        index_url = None
    find_links = (
        _fixup_find_links(opts['find_links'][1])[:] if 'find_links' in opts else []
    )
    if dist.dependency_links:
        find_links.extend(dist.dependency_links)
    eggs_dir = os.path.realpath(dist.get_egg_cache_dir())
    environment = pkg_resources.Environment()
    # Reuse a previously-fetched egg when one satisfies the requirement.
    for egg_dist in pkg_resources.find_distributions(eggs_dir):
        if egg_dist in req and environment.can_add(egg_dist):
            return egg_dist
    with tempfile.TemporaryDirectory() as tmpdir:
        cmd = [
            sys.executable,
            '-m',
            'pip',
            '--disable-pip-version-check',
            'wheel',
            '--no-deps',
            '-w',
            tmpdir,
        ]
        if quiet:
            cmd.append('--quiet')
        if index_url is not None:
            cmd.extend(('--index-url', index_url))
        for link in find_links or []:
            cmd.extend(('--find-links', link))
        # If requirement is a PEP 508 direct URL, directly pass
        # the URL to pip, as `req @ url` does not work on the
        # command line.
        cmd.append(req.url or str(req))
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError as e:
            raise DistutilsError(str(e)) from e
        # Convert the freshly-built wheel into an egg in the cache dir.
        wheel = Wheel(glob.glob(os.path.join(tmpdir, '*.whl'))[0])
        dist_location = os.path.join(eggs_dir, wheel.egg_name())
        wheel.install_as_egg(dist_location)
        dist_metadata = pkg_resources.PathMetadata(
            dist_location, os.path.join(dist_location, 'EGG-INFO')
        )
        return pkg_resources.Distribution.from_filename(
            dist_location, metadata=dist_metadata
        )
| 121 |
+
def strip_marker(req):
    """
    Return a new requirement without the environment marker to avoid
    calling pip with something like `babel; extra == "i18n"`, which
    would always be ignored.
    """
    import pkg_resources  # Delay import to avoid unnecessary side-effects

    # Re-parse to obtain a copy, so the caller's requirement is not mutated.
    clone = pkg_resources.Requirement.parse(str(req))
    clone.marker = None
    return clone
| 134 |
+
|
| 135 |
+
def _warn_wheel_not_available(dist):
    """Emit a warning through *dist* if the ``wheel`` package is missing.

    :param dist: a distribution object exposing ``announce(msg, level)``
        (presumably a ``setuptools.dist.Distribution`` — confirm at callers).
    """
    import pkg_resources  # Delay import to avoid unnecessary side-effects

    try:
        pkg_resources.get_distribution('wheel')
    except pkg_resources.DistributionNotFound:
        dist.announce('WARNING: The wheel package is not available.', log.WARN)
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
class _DeprecatedInstaller(SetuptoolsDeprecationWarning):
    """Deprecation warning for the legacy egg-fetching installer machinery."""

    _SUMMARY = "setuptools.installer and fetch_build_eggs are deprecated."
    _DETAILS = """
    Requirements should be satisfied by a PEP 517 installer.
    If you are using pip, you can try `pip install --use-pep517`.
    """
    # _DUE_DATE not decided yet
|
llava/lib/python3.10/site-packages/setuptools/logging.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import inspect
|
| 2 |
+
import logging
|
| 3 |
+
import sys
|
| 4 |
+
|
| 5 |
+
from . import monkey
|
| 6 |
+
|
| 7 |
+
import distutils.log
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def _not_warning(record):
|
| 11 |
+
return record.levelno < logging.WARNING
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def configure() -> None:
    """
    Configure logging to emit warning and above to stderr
    and everything else to stdout. This behavior is provided
    for compatibility with distutils.log but may change in
    the future.
    """
    # stderr handler only sees WARNING and above...
    err_handler = logging.StreamHandler()
    err_handler.setLevel(logging.WARNING)
    # ...while the stdout handler filters those same records out,
    # so each record goes to exactly one stream.
    out_handler = logging.StreamHandler(sys.stdout)
    out_handler.addFilter(_not_warning)
    handlers = err_handler, out_handler
    logging.basicConfig(
        format="{message}", style='{', handlers=handlers, level=logging.DEBUG
    )
    # Only patch when distutils.dist.log is still the legacy module object
    # (on newer distutils it is already a logging.Logger).
    if inspect.ismodule(distutils.dist.log):
        monkey.patch_func(set_threshold, distutils.log, 'set_threshold')
        # For some reason `distutils.log` module is getting cached in `distutils.dist`
        # and then loaded again when patched,
        # implying: id(distutils.log) != id(distutils.dist.log).
        # Make sure the same module object is used everywhere:
        distutils.dist.log = distutils.log
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def set_threshold(level: int) -> int:
    """Mirror a distutils verbosity threshold onto the root logger.

    :param level: distutils log level (distutils levels are 1/10 of the
        stdlib logging levels, hence the ``* 10`` scaling).
    :return: whatever the original (unpatched) ``set_threshold`` returns.

    NOTE: ``set_threshold.unpatched`` is attached by ``monkey.patch_func``
    in :func:`configure`; calling this before patching would fail.
    """
    logging.root.setLevel(level * 10)
    return set_threshold.unpatched(level)
|
llava/lib/python3.10/site-packages/setuptools/namespaces.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import itertools
|
| 2 |
+
import os
|
| 3 |
+
|
| 4 |
+
from .compat import py312
|
| 5 |
+
|
| 6 |
+
from distutils import log
|
| 7 |
+
|
| 8 |
+
flatten = itertools.chain.from_iterable
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class Installer:
    """Mixin that installs/uninstalls ``*-nspkg.pth`` files for legacy
    (pkg_resources-style) namespace packages.

    Expects the host class to provide ``outputs``, ``dry_run``, ``target``
    and ``distribution`` attributes (presumably a setuptools install
    command — confirm at the mixin's users).
    """

    # suffix appended to the target's stem to form the .pth file name
    nspkg_ext = '-nspkg.pth'

    def install_namespaces(self) -> None:
        """Write the -nspkg.pth file for all declared namespace packages."""
        nsp = self._get_all_ns_packages()
        if not nsp:
            return
        filename = self._get_nspkg_file()
        self.outputs.append(filename)
        log.info("Installing %s", filename)
        lines = map(self._gen_nspkg_line, nsp)

        if self.dry_run:
            # always generate the lines, even in dry run
            list(lines)
            return

        with open(filename, 'wt', encoding=py312.PTH_ENCODING) as f:
            # Python<3.13 requires encoding="locale" instead of "utf-8"
            # See: python/cpython#77102
            f.writelines(lines)

    def uninstall_namespaces(self) -> None:
        """Remove the -nspkg.pth file if it exists."""
        filename = self._get_nspkg_file()
        if not os.path.exists(filename):
            return
        log.info("Removing %s", filename)
        os.remove(filename)

    def _get_nspkg_file(self):
        # <target stem> + '-nspkg.pth'
        filename, _ = os.path.splitext(self._get_target())
        return filename + self.nspkg_ext

    def _get_target(self):
        # overridden by DevelopInstaller to point at the egg-link instead
        return self.target

    # Template statements joined with ';' into ONE .pth line; executed by
    # site.py at startup. %(root)s / %(pth)r / %(pkg)r are filled from
    # locals() in _gen_nspkg_line.
    _nspkg_tmpl = (
        "import sys, types, os",
        "p = os.path.join(%(root)s, *%(pth)r)",
        "importlib = __import__('importlib.util')",
        "__import__('importlib.machinery')",
        (
            "m = "
            "sys.modules.setdefault(%(pkg)r, "
            "importlib.util.module_from_spec("
            "importlib.machinery.PathFinder.find_spec(%(pkg)r, "
            "[os.path.dirname(p)])))"
        ),
        ("m = m or sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))"),
        "mp = (m or []) and m.__dict__.setdefault('__path__',[])",
        "(p not in mp) and mp.append(p)",
    )
    "lines for the namespace installer"

    _nspkg_tmpl_multi = ('m and setattr(sys.modules[%(parent)r], %(child)r, m)',)
    "additional line(s) when a parent package is indicated"

    def _get_root(self):
        # expression evaluated inside the .pth line; site.py exposes
        # 'sitedir' in the calling frame when executing .pth code
        return "sys._getframe(1).f_locals['sitedir']"

    def _gen_nspkg_line(self, pkg):
        # generate one .pth line for namespace package *pkg*;
        # the template interpolates from locals() (pth, root, pkg, parent, child)
        pth = tuple(pkg.split('.'))
        root = self._get_root()
        tmpl_lines = self._nspkg_tmpl
        parent, sep, child = pkg.rpartition('.')
        if parent:
            tmpl_lines += self._nspkg_tmpl_multi
        return ';'.join(tmpl_lines) % locals() + '\n'

    def _get_all_ns_packages(self):
        """Return sorted list of all package namespaces"""
        pkgs = self.distribution.namespace_packages or []
        return sorted(set(flatten(map(self._pkg_names, pkgs))))

    @staticmethod
    def _pkg_names(pkg):
        """
        Given a namespace package, yield the components of that
        package.

        >>> names = Installer._pkg_names('a.b.c')
        >>> set(names) == set(['a', 'a.b', 'a.b.c'])
        True
        """
        parts = pkg.split('.')
        while parts:
            yield '.'.join(parts)
            parts.pop()
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
class DevelopInstaller(Installer):
    """Installer variant for ``develop`` (editable) installs: the .pth file
    sits next to the egg-link and roots paths at the source checkout."""

    def _get_root(self):
        # literal repr of the development egg path, baked into the .pth line
        return repr(str(self.egg_path))

    def _get_target(self):
        # the egg-link file determines the -nspkg.pth location
        return self.egg_link
|
llava/lib/python3.10/site-packages/setuptools/sandbox.py
ADDED
|
@@ -0,0 +1,536 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import builtins
|
| 4 |
+
import contextlib
|
| 5 |
+
import functools
|
| 6 |
+
import itertools
|
| 7 |
+
import operator
|
| 8 |
+
import os
|
| 9 |
+
import pickle
|
| 10 |
+
import re
|
| 11 |
+
import sys
|
| 12 |
+
import tempfile
|
| 13 |
+
import textwrap
|
| 14 |
+
from types import TracebackType
|
| 15 |
+
from typing import TYPE_CHECKING, Any, ClassVar
|
| 16 |
+
|
| 17 |
+
import pkg_resources
|
| 18 |
+
from pkg_resources import working_set
|
| 19 |
+
|
| 20 |
+
from distutils.errors import DistutilsError
|
| 21 |
+
|
| 22 |
+
# Keep pristine references to the real os module and open() builtin:
# the sandbox later monkey-patches both, and the wrappers below must
# delegate to the originals.
if TYPE_CHECKING:
    import os as _os
elif sys.platform.startswith('java'):
    # Jython exposes POSIX functionality through a Java module
    import org.python.modules.posix.PosixModule as _os  # pyright: ignore[reportMissingImports]
else:
    _os = sys.modules[os.name]
_open = open


if TYPE_CHECKING:
    from typing_extensions import Self

__all__ = [
    "AbstractSandbox",
    "DirectorySandbox",
    "SandboxViolation",
    "run_setup",
]
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def _execfile(filename, globals, locals=None):
|
| 43 |
+
"""
|
| 44 |
+
Python 3 implementation of execfile.
|
| 45 |
+
"""
|
| 46 |
+
mode = 'rb'
|
| 47 |
+
with open(filename, mode) as stream:
|
| 48 |
+
script = stream.read()
|
| 49 |
+
if locals is None:
|
| 50 |
+
locals = globals
|
| 51 |
+
code = compile(script, filename, 'exec')
|
| 52 |
+
exec(code, globals, locals)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
@contextlib.contextmanager
def save_argv(repl=None):
    """Snapshot ``sys.argv`` and restore it on exit.

    If *repl* is given, ``sys.argv`` is replaced (in place) with it for
    the duration of the context.  Yields the saved original argv.
    """
    snapshot = sys.argv[:]
    if repl is not None:
        sys.argv[:] = repl
    try:
        yield snapshot
    finally:
        sys.argv[:] = snapshot
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
@contextlib.contextmanager
def save_path():
    """Snapshot ``sys.path`` and restore it (in place) when the context exits."""
    snapshot = sys.path[:]
    try:
        yield snapshot
    finally:
        sys.path[:] = snapshot
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
@contextlib.contextmanager
def override_temp(replacement):
    """
    Monkey-patch tempfile.tempdir with replacement, ensuring it exists
    """
    os.makedirs(replacement, exist_ok=True)
    previous = tempfile.tempdir
    tempfile.tempdir = replacement
    try:
        yield
    finally:
        tempfile.tempdir = previous
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
@contextlib.contextmanager
def pushd(target):
    """chdir into *target* for the duration of the context; yield the prior cwd."""
    previous = os.getcwd()
    os.chdir(target)
    try:
        yield previous
    finally:
        os.chdir(previous)
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
class UnpickleableException(Exception):
    """
    An exception representing another Exception that could not be pickled.
    """

    @staticmethod
    def dump(type, exc):
        """
        Always return a dumped (pickled) type and exc. If exc can't be pickled,
        wrap it in UnpickleableException first.
        """
        try:
            return pickle.dumps(type), pickle.dumps(exc)
        except Exception:
            # get UnpickleableException inside the sandbox
            # (the sandboxed interpreter may have its own copy of this module,
            # so re-import by name rather than using this class object directly)
            from setuptools.sandbox import UnpickleableException as cls

            return cls.dump(cls, cls(repr(exc)))
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
class ExceptionSaver:
    """
    A Context Manager that will save an exception, serialize, and restore it
    later.
    """

    def __enter__(self) -> Self:
        return self

    def __exit__(
        self,
        type: type[BaseException] | None,
        exc: BaseException | None,
        tb: TracebackType | None,
    ) -> bool:
        if not exc:
            # nothing raised: let the context exit normally
            return False

        # dump the exception
        # (pickled so it survives the module-state restoration done by
        # save_modules before resume() re-raises it)
        self._saved = UnpickleableException.dump(type, exc)
        self._tb = tb

        # suppress the exception
        return True

    def resume(self):
        "restore and re-raise any exception"

        if '_saved' not in vars(self):
            # __exit__ never captured anything
            return

        _type, exc = map(pickle.loads, self._saved)
        raise exc.with_traceback(self._tb)
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
@contextlib.contextmanager
def save_modules():
    """
    Context in which imported modules are saved.

    Translates exceptions internal to the context into the equivalent exception
    outside the context.
    """
    saved = sys.modules.copy()
    # exceptions raised in the body are pickled away and re-raised only
    # AFTER sys.modules has been restored below
    with ExceptionSaver() as saved_exc:
        yield saved

    sys.modules.update(saved)
    # remove any modules imported since
    del_modules = (
        mod_name
        for mod_name in sys.modules
        if mod_name not in saved
        # exclude any encodings modules. See #285
        and not mod_name.startswith('encodings.')
    )
    _clear_modules(del_modules)

    saved_exc.resume()
|
| 181 |
+
|
| 182 |
+
|
| 183 |
+
def _clear_modules(module_names):
|
| 184 |
+
for mod_name in list(module_names):
|
| 185 |
+
del sys.modules[mod_name]
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
@contextlib.contextmanager
def save_pkg_resources_state():
    """Snapshot pkg_resources' global state and restore it on exit.

    Uses pkg_resources' own __getstate__/__setstate__ hooks; yields the
    captured state object.
    """
    saved = pkg_resources.__getstate__()
    try:
        yield saved
    finally:
        pkg_resources.__setstate__(saved)
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
@contextlib.contextmanager
def setup_context(setup_dir):
    """Assemble the full sandbox environment for running a setup script.

    Nesting order matters: module/path state is saved before setuptools is
    hidden, and argv/tempdir/cwd are redirected innermost so their
    restoration happens first on exit.
    """
    temp_dir = os.path.join(setup_dir, 'temp')
    with save_pkg_resources_state():
        with save_modules():
            with save_path():
                hide_setuptools()
                with save_argv():
                    with override_temp(temp_dir):
                        with pushd(setup_dir):
                            # ensure setuptools commands are available
                            __import__('setuptools')
                            yield
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
# Top-level package names whose cached modules must be evicted before a
# sandboxed setup script runs (see hide_setuptools / _needs_hiding).
_MODULES_TO_HIDE = {
    'setuptools',
    'distutils',
    'pkg_resources',
    'Cython',
    '_distutils_hack',
}
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
def _needs_hiding(mod_name):
    """
    >>> _needs_hiding('setuptools')
    True
    >>> _needs_hiding('pkg_resources')
    True
    >>> _needs_hiding('setuptools_plugin')
    False
    >>> _needs_hiding('setuptools.__init__')
    True
    >>> _needs_hiding('distutils')
    True
    >>> _needs_hiding('os')
    False
    >>> _needs_hiding('Cython')
    True
    """
    root, _, _ = mod_name.partition('.')
    return root in _MODULES_TO_HIDE
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
def hide_setuptools():
    """
    Remove references to setuptools' modules from sys.modules to allow the
    invocation to import the most appropriate setuptools. This technique is
    necessary to avoid issues such as #315 where setuptools upgrading itself
    would fail to find a function declared in the metadata.
    """
    # drop the distutils import shim first so re-imports resolve cleanly
    _distutils_hack = sys.modules.get('_distutils_hack', None)
    if _distutils_hack is not None:
        _distutils_hack._remove_shim()

    modules = filter(_needs_hiding, sys.modules)
    _clear_modules(modules)
|
| 255 |
+
|
| 256 |
+
|
| 257 |
+
def run_setup(setup_script, args):
    """Run a distutils setup script, sandboxed in its directory

    :param setup_script: path to the setup.py to execute.
    :param args: command-line arguments passed to the script via sys.argv.
    :raises SystemExit: only when the script exits with a non-empty,
        truthy exit argument (i.e. a failure).
    """
    setup_dir = os.path.abspath(os.path.dirname(setup_script))
    with setup_context(setup_dir):
        try:
            sys.argv[:] = [setup_script] + list(args)
            sys.path.insert(0, setup_dir)
            # reset to include setup dir, w/clean callback list
            working_set.__init__()
            working_set.callbacks.append(lambda dist: dist.activate())

            with DirectorySandbox(setup_dir):
                # run as if it were the __main__ module
                ns = dict(__file__=setup_script, __name__='__main__')
                _execfile(setup_script, ns)
        except SystemExit as v:
            if v.args and v.args[0]:
                raise
            # Normal exit, just return
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
class AbstractSandbox:
    """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""

    # True only while the sandbox context is entered; wrappers fall through
    # to the real os functions when False
    _active = False

    def __init__(self) -> None:
        # every public os attribute that this class also defines gets
        # swapped in/out of the os module on __enter__/__exit__
        self._attrs = [
            name
            for name in dir(_os)
            if not name.startswith('_') and hasattr(self, name)
        ]

    def _copy(self, source):
        # install source's attributes onto the real os module
        for name in self._attrs:
            setattr(os, name, getattr(source, name))

    def __enter__(self) -> None:
        self._copy(self)
        builtins.open = self._open
        self._active = True

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ):
        # restore the pristine os module and open() builtin
        self._active = False
        builtins.open = _open
        self._copy(_os)

    def run(self, func):
        """Run 'func' under os sandboxing"""
        with self:
            return func()

    # NOTE: the _mk_* functions below are class-body helpers (no `self`);
    # they build wrapper methods at class-definition time via locals().
    def _mk_dual_path_wrapper(name: str):  # type: ignore[misc] # https://github.com/pypa/setuptools/pull/4099
        # wrapper for os functions taking (src, dst) path pairs
        original = getattr(_os, name)

        def wrap(self, src, dst, *args, **kw):
            if self._active:
                src, dst = self._remap_pair(name, src, dst, *args, **kw)
            return original(src, dst, *args, **kw)

        return wrap

    for __name in ["rename", "link", "symlink"]:
        if hasattr(_os, __name):
            locals()[__name] = _mk_dual_path_wrapper(__name)

    def _mk_single_path_wrapper(name: str, original=None):  # type: ignore[misc] # https://github.com/pypa/setuptools/pull/4099
        # wrapper for os functions taking a single path argument
        original = original or getattr(_os, name)

        def wrap(self, path, *args, **kw):
            if self._active:
                path = self._remap_input(name, path, *args, **kw)
            return original(path, *args, **kw)

        return wrap

    _open = _mk_single_path_wrapper('open', _open)
    for __name in [
        "stat",
        "listdir",
        "chdir",
        "open",
        "chmod",
        "chown",
        "mkdir",
        "remove",
        "unlink",
        "rmdir",
        "utime",
        "lchown",
        "chroot",
        "lstat",
        "startfile",
        "mkfifo",
        "mknod",
        "pathconf",
        "access",
    ]:
        if hasattr(_os, __name):
            locals()[__name] = _mk_single_path_wrapper(__name)

    def _mk_single_with_return(name: str):  # type: ignore[misc] # https://github.com/pypa/setuptools/pull/4099
        # wrapper for os functions whose path-valued RESULT must also be remapped
        original = getattr(_os, name)

        def wrap(self, path, *args, **kw):
            if self._active:
                path = self._remap_input(name, path, *args, **kw)
                return self._remap_output(name, original(path, *args, **kw))
            return original(path, *args, **kw)

        return wrap

    for __name in ['readlink', 'tempnam']:
        if hasattr(_os, __name):
            locals()[__name] = _mk_single_with_return(__name)

    def _mk_query(name: str):  # type: ignore[misc] # https://github.com/pypa/setuptools/pull/4099
        # wrapper for os functions with no path input but a path-valued result
        original = getattr(_os, name)

        def wrap(self, *args, **kw):
            retval = original(*args, **kw)
            if self._active:
                return self._remap_output(name, retval)
            return retval

        return wrap

    for __name in ['getcwd', 'tmpnam']:
        if hasattr(_os, __name):
            locals()[__name] = _mk_query(__name)

    def _validate_path(self, path):
        """Called to remap or validate any path, whether input or output"""
        return path

    def _remap_input(self, operation, path, *args, **kw):
        """Called for path inputs"""
        return self._validate_path(path)

    def _remap_output(self, operation, path):
        """Called for path outputs"""
        return self._validate_path(path)

    def _remap_pair(self, operation, src, dst, *args, **kw):
        """Called for path pairs like rename, link, and symlink operations"""
        return (
            self._remap_input(operation + '-from', src, *args, **kw),
            self._remap_input(operation + '-to', dst, *args, **kw),
        )

    if TYPE_CHECKING:
        # This is a catch-all for all the dynamically created attributes.
        # This isn't public API anyway
        def __getattribute__(self, name: str) -> Any: ...
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
# Paths always writable inside a DirectorySandbox; os.devnull is harmless
# and may be absent on exotic platforms.
if hasattr(os, 'devnull'):
    _EXCEPTIONS = [os.devnull]
else:
    _EXCEPTIONS = []
|
| 421 |
+
|
| 422 |
+
|
| 423 |
+
class DirectorySandbox(AbstractSandbox):
    """Restrict operations to a single subdirectory - pseudo-chroot"""

    # os-level operations considered writes; dict used purely as an
    # ordered set of names
    write_ops: ClassVar[dict[str, None]] = dict.fromkeys([
        "open",
        "chmod",
        "chown",
        "mkdir",
        "remove",
        "unlink",
        "rmdir",
        "utime",
        "lchown",
        "chroot",
        "mkfifo",
        "mknod",
        "tempnam",
    ])

    _exception_patterns: list[str | re.Pattern] = []
    "exempt writing to paths that match the pattern"

    def __init__(self, sandbox, exceptions=_EXCEPTIONS) -> None:
        # normalize once so later prefix comparisons are case/symlink-safe
        self._sandbox = os.path.normcase(os.path.realpath(sandbox))
        self._prefix = os.path.join(self._sandbox, '')
        self._exceptions = [
            os.path.normcase(os.path.realpath(path)) for path in exceptions
        ]
        AbstractSandbox.__init__(self)

    def _violation(self, operation, *args, **kw):
        # re-import by name: the sandboxed interpreter may hold its own
        # copy of this module
        from setuptools.sandbox import SandboxViolation

        raise SandboxViolation(operation, args, kw)

    def _open(self, path, mode='r', *args, **kw):
        # only read-only modes bypass the sandbox check
        if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
            self._violation("open", path, mode, *args, **kw)
        return _open(path, mode, *args, **kw)

    def tmpnam(self) -> None:
        # unconditionally forbidden: creates names outside the sandbox
        self._violation("tmpnam")

    def _ok(self, path):
        # temporarily deactivate so realpath's own os calls aren't re-wrapped
        active = self._active
        try:
            self._active = False
            realpath = os.path.normcase(os.path.realpath(path))
            return (
                self._exempted(realpath)
                or realpath == self._sandbox
                or realpath.startswith(self._prefix)
            )
        finally:
            self._active = active

    def _exempted(self, filepath):
        start_matches = (
            filepath.startswith(exception) for exception in self._exceptions
        )
        pattern_matches = (
            re.match(pattern, filepath) for pattern in self._exception_patterns
        )
        candidates = itertools.chain(start_matches, pattern_matches)
        return any(candidates)

    def _remap_input(self, operation, path, *args, **kw):
        """Called for path inputs"""
        if operation in self.write_ops and not self._ok(path):
            self._violation(operation, os.path.realpath(path), *args, **kw)
        return path

    def _remap_pair(self, operation, src, dst, *args, **kw):
        """Called for path pairs like rename, link, and symlink operations"""
        if not self._ok(src) or not self._ok(dst):
            self._violation(operation, src, dst, *args, **kw)
        return (src, dst)

    def open(self, file, flags, mode: int = 0o777, *args, **kw) -> int:
        """Called for low-level os.open()"""
        if flags & WRITE_FLAGS and not self._ok(file):
            self._violation("os.open", file, flags, mode, *args, **kw)
        return _os.open(file, flags, mode, *args, **kw)
|
| 506 |
+
|
| 507 |
+
|
| 508 |
+
# Bitmask of os.open flags that imply writing; getattr(..., 0) tolerates
# flags missing on this platform (e.g. O_TEMPORARY is Windows-only).
WRITE_FLAGS = functools.reduce(
    operator.or_,
    [
        getattr(_os, a, 0)
        for a in "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()
    ],
)
|
| 515 |
+
|
| 516 |
+
|
| 517 |
+
class SandboxViolation(DistutilsError):
    """A setup script attempted to modify the filesystem outside the sandbox"""

    # user-facing message; filled by __str__ from self.args
    tmpl = textwrap.dedent(
        """
        SandboxViolation: {cmd}{args!r} {kwargs}

        The package setup script has attempted to modify files on your system
        that are not within the EasyInstall build area, and has been aborted.

        This package cannot be safely installed by EasyInstall, and may not
        support alternate installation locations even if you run its setup
        script by hand. Please inform the package's author and the EasyInstall
        maintainers to find out if a fix or workaround is available.
        """
    ).lstrip()

    def __str__(self) -> str:
        # self.args is (operation, positional args, keyword args) as raised
        # by DirectorySandbox._violation
        cmd, args, kwargs = self.args
        return self.tmpl.format(**locals())
|
llava/lib/python3.10/site-packages/setuptools/warnings.py
ADDED
|
@@ -0,0 +1,110 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Provide basic warnings used by setuptools modules.
|
| 2 |
+
|
| 3 |
+
Using custom classes (other than ``UserWarning``) allow users to set
|
| 4 |
+
``PYTHONWARNINGS`` filters to run tests and prepare for upcoming changes in
|
| 5 |
+
setuptools.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
from __future__ import annotations
|
| 9 |
+
|
| 10 |
+
import os
|
| 11 |
+
import warnings
|
| 12 |
+
from datetime import date
|
| 13 |
+
from inspect import cleandoc
|
| 14 |
+
from textwrap import indent
|
| 15 |
+
from typing import TYPE_CHECKING
|
| 16 |
+
|
| 17 |
+
if TYPE_CHECKING:
    from typing_extensions import TypeAlias

# (year, month, day) triple identifying a deprecation due date
_DueDate: TypeAlias = tuple[int, int, int]  # time tuple
_INDENT = 8 * " "
# banner of 80 asterisks above and below the warning details
_TEMPLATE = f"""{80 * '*'}\n{{details}}\n{80 * '*'}"""
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class SetuptoolsWarning(UserWarning):
    """Base class in ``setuptools`` warning hierarchy."""

    @classmethod
    def emit(
        cls,
        summary: str | None = None,
        details: str | None = None,
        due_date: _DueDate | None = None,
        see_docs: str | None = None,
        see_url: str | None = None,
        stacklevel: int = 2,
        **kwargs,
    ) -> None:
        """Private: reserved for ``setuptools`` internal use only"""
        # Default values:
        # explicit arguments win over class-level _SUMMARY/_DETAILS/etc.
        summary_ = summary or getattr(cls, "_SUMMARY", None) or ""
        details_ = details or getattr(cls, "_DETAILS", None) or ""
        due_date = due_date or getattr(cls, "_DUE_DATE", None)
        docs_ref = see_docs or getattr(cls, "_SEE_DOCS", None)
        docs_url = docs_ref and f"https://setuptools.pypa.io/en/latest/{docs_ref}"
        see_url = see_url or getattr(cls, "_SEE_URL", None)
        due = date(*due_date) if due_date else None

        text = cls._format(summary_, details_, due, see_url or docs_url, kwargs)
        # past-due deprecations escalate to an error when enforcement is on
        if due and due < date.today() and _should_enforce():
            raise cls(text)
        # +1 so the warning points at emit()'s caller, not emit() itself
        warnings.warn(text, cls, stacklevel=stacklevel + 1)

    @classmethod
    def _format(
        cls,
        summary: str,
        details: str,
        due_date: date | None = None,
        see_url: str | None = None,
        format_args: dict | None = None,
    ) -> str:
        """Private: reserved for ``setuptools`` internal use only"""
        today = date.today()
        summary = cleandoc(summary).format_map(format_args or {})
        # optional sections; None entries are dropped below
        possible_parts = [
            cleandoc(details).format_map(format_args or {}),
            (
                f"\nBy {due_date:%Y-%b-%d}, you need to update your project and remove "
                "deprecated calls\nor your builds will no longer be supported."
                if due_date and due_date > today
                else None
            ),
            (
                "\nThis deprecation is overdue, please update your project and remove "
                "deprecated\ncalls to avoid build errors in the future."
                if due_date and due_date < today
                else None
            ),
            (f"\nSee {see_url} for details." if see_url else None),
        ]
        parts = [x for x in possible_parts if x]
        if parts:
            body = indent(_TEMPLATE.format(details="\n".join(parts)), _INDENT)
            return "\n".join([summary, "!!\n", body, "\n!!"])
        return summary
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
class InformationOnly(SetuptoolsWarning):
    """Warning class used merely to convey information to users.

    There is currently no clear way of displaying messages to users that
    build via ``pip`` with the setuptools backend directly; a warning is
    the only mechanism that reliably surfaces, even though it is not the
    most appropriate tool for the job...

    See pypa/packaging-problems#558.
    """
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
class SetuptoolsDeprecationWarning(SetuptoolsWarning):
    """
    Base class for warning deprecations in ``setuptools``

    Note that this class deliberately does not derive from
    ``DeprecationWarning``, so it remains visible by default.
    """
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def _should_enforce():
|
| 109 |
+
enforce = os.getenv("SETUPTOOLS_ENFORCE_DEPRECATION", "false").lower()
|
| 110 |
+
return enforce in ("true", "on", "ok", "1")
|
llava/lib/python3.10/site-packages/setuptools/windows_support.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import platform
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def windows_only(func):
    """Decorator: keep *func* as-is on Windows; elsewhere replace it with a
    no-op that accepts any arguments and returns ``None``."""
    if platform.system() == 'Windows':
        return func
    return lambda *args, **kwargs: None
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@windows_only
def hide_file(path: str) -> None:
    """
    Set the hidden attribute on a file or directory.

    From https://stackoverflow.com/questions/19622133/

    `path` must be text.
    """
    # ctypes is only needed (and only usable) on Windows, so import lazily.
    import ctypes
    import ctypes.wintypes

    FILE_ATTRIBUTE_HIDDEN = 0x02

    set_attributes = ctypes.windll.kernel32.SetFileAttributesW
    set_attributes.argtypes = (ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD)
    set_attributes.restype = ctypes.wintypes.BOOL

    if not set_attributes(path, FILE_ATTRIBUTE_HIDDEN):
        raise ctypes.WinError()
|
minigpt2/lib/python3.10/site-packages/networkx/generators/atlas.dat.gz
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:73fc416df0164923607751cb759f4ae81deb5f6550bf25be59c86de3b747e41d
|
| 3 |
+
size 8887
|
minigpt2/lib/python3.10/site-packages/networkx/generators/cographs.py
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
r"""Generators for cographs
|
| 2 |
+
|
| 3 |
+
A cograph is a graph containing no path on four vertices.
|
| 4 |
+
Cographs or $P_4$-free graphs can be obtained from a single vertex
|
| 5 |
+
by disjoint union and complementation operations.
|
| 6 |
+
|
| 7 |
+
References
|
| 8 |
+
----------
|
| 9 |
+
.. [0] D.G. Corneil, H. Lerchs, L.Stewart Burlingham,
|
| 10 |
+
"Complement reducible graphs",
|
| 11 |
+
Discrete Applied Mathematics, Volume 3, Issue 3, 1981, Pages 163-174,
|
| 12 |
+
ISSN 0166-218X.
|
| 13 |
+
"""
|
| 14 |
+
|
| 15 |
+
import networkx as nx
|
| 16 |
+
from networkx.utils import py_random_state
|
| 17 |
+
|
| 18 |
+
__all__ = ["random_cograph"]
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
@py_random_state(1)
@nx._dispatchable(graphs=None, returns_graph=True)
def random_cograph(n, seed=None):
    r"""Returns a random cograph with $2 ^ n$ nodes.

    A cograph is a graph containing no path on four vertices.
    Cographs or $P_4$-free graphs can be obtained from a single vertex
    by disjoint union and complementation operations.

    This generator starts off from a single vertex and performs disjoint
    union and full join operations on itself; which operation happens at
    each step is chosen at random.

    Parameters
    ----------
    n : int
        The order of the cograph.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    G : A random graph containing no path on four vertices.

    See Also
    --------
    full_join
    union

    References
    ----------
    .. [1] D.G. Corneil, H. Lerchs, L.Stewart Burlingham,
       "Complement reducible graphs",
       Discrete Applied Mathematics, Volume 3, Issue 3, 1981, Pages 163-174,
       ISSN 0166-218X.
    """
    G = nx.empty_graph(1)

    for _ in range(n):
        # Shift the copy's labels past the current range so both halves
        # are disjoint before they are combined.
        offset = len(G)
        mirror = nx.relabel_nodes(G.copy(), lambda v: v + offset)

        # Coin flip: fully join the halves, or leave them disconnected.
        if seed.randint(0, 1) == 0:
            G = nx.full_join(G, mirror)
        else:
            G = nx.disjoint_union(G, mirror)

    return G
|
minigpt2/lib/python3.10/site-packages/networkx/generators/ego.py
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Ego graph.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
__all__ = ["ego_graph"]
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def ego_graph(G, n, radius=1, center=True, undirected=False, distance=None):
    """Returns induced subgraph of neighbors centered at node n within
    a given radius.

    Parameters
    ----------
    G : graph
      A NetworkX Graph or DiGraph

    n : node
      A single node

    radius : number, optional
      Include all neighbors of distance<=radius from n.

    center : bool, optional
      If False, do not include center node in graph

    undirected : bool, optional
      If True use both in- and out-neighbors of directed graphs.

    distance : key, optional
      Use specified edge data key as distance.  For example, setting
      distance='weight' will use the edge weight to measure the
      distance from the node n.

    Notes
    -----
    For directed graphs D this produces the "out" neighborhood
    or successors.  If you want the neighborhood of predecessors
    first reverse the graph with D.reverse().  If you want both
    directions use the keyword argument undirected=True.

    Node, edge, and graph attributes are copied to the returned subgraph.
    """
    # Search an undirected view when asked to ignore edge direction.
    source = G.to_undirected() if undirected else G

    if distance is not None:
        # Weighted reach: Dijkstra returns (distances, paths); keep distances.
        reached, _ = nx.single_source_dijkstra(
            source, n, cutoff=radius, weight=distance
        )
    else:
        # Unweighted reach: hop counts within the radius.
        reached = dict(
            nx.single_source_shortest_path_length(source, n, cutoff=radius)
        )

    # Induce the subgraph on the original G so directedness is preserved.
    H = G.subgraph(reached).copy()
    if not center:
        H.remove_node(n)
    return H
|
minigpt2/lib/python3.10/site-packages/networkx/generators/expanders.py
ADDED
|
@@ -0,0 +1,474 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Provides explicit constructions of expander graphs."""
|
| 2 |
+
|
| 3 |
+
import itertools
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
__all__ = [
|
| 8 |
+
"margulis_gabber_galil_graph",
|
| 9 |
+
"chordal_cycle_graph",
|
| 10 |
+
"paley_graph",
|
| 11 |
+
"maybe_regular_expander",
|
| 12 |
+
"is_regular_expander",
|
| 13 |
+
"random_regular_expander_graph",
|
| 14 |
+
]
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
# Other discrete torus expanders can be constructed by using the following edge
|
| 18 |
+
# sets. For more information, see Chapter 4, "Expander Graphs", in
|
| 19 |
+
# "Pseudorandomness", by Salil Vadhan.
|
| 20 |
+
#
|
| 21 |
+
# For a directed expander, add edges from (x, y) to:
|
| 22 |
+
#
|
| 23 |
+
# (x, y),
|
| 24 |
+
# ((x + 1) % n, y),
|
| 25 |
+
# (x, (y + 1) % n),
|
| 26 |
+
# (x, (x + y) % n),
|
| 27 |
+
# (-y % n, x)
|
| 28 |
+
#
|
| 29 |
+
# For an undirected expander, add the reverse edges.
|
| 30 |
+
#
|
| 31 |
+
# Also appearing in the paper of Gabber and Galil:
|
| 32 |
+
#
|
| 33 |
+
# (x, y),
|
| 34 |
+
# (x, (x + y) % n),
|
| 35 |
+
# (x, (x + y + 1) % n),
|
| 36 |
+
# ((x + y) % n, y),
|
| 37 |
+
# ((x + y + 1) % n, y)
|
| 38 |
+
#
|
| 39 |
+
# and:
|
| 40 |
+
#
|
| 41 |
+
# (x, y),
|
| 42 |
+
# ((x + 2*y) % n, y),
|
| 43 |
+
# ((x + (2*y + 1)) % n, y),
|
| 44 |
+
# ((x + (2*y + 2)) % n, y),
|
| 45 |
+
# (x, (y + 2*x) % n),
|
| 46 |
+
# (x, (y + (2*x + 1)) % n),
|
| 47 |
+
# (x, (y + (2*x + 2)) % n),
|
| 48 |
+
#
|
| 49 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def margulis_gabber_galil_graph(n, create_using=None):
    r"""Returns the Margulis-Gabber-Galil undirected MultiGraph on `n^2` nodes.

    The undirected MultiGraph is regular with degree `8`.  Nodes are integer
    pairs.  The second-largest eigenvalue of the adjacency matrix of the graph
    is at most `5 \sqrt{2}`, regardless of `n`.

    Parameters
    ----------
    n : int
        Determines the number of nodes in the graph: `n^2`.
    create_using : NetworkX graph constructor, optional (default MultiGraph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : graph
        The constructed undirected multigraph.

    Raises
    ------
    NetworkXError
        If the graph is directed or not a multigraph.

    """
    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
    if G.is_directed() or not G.is_multigraph():
        raise nx.NetworkXError("`create_using` must be an undirected multigraph.")

    for x, y in itertools.product(range(n), repeat=2):
        # The four Margulis-Gabber-Galil expander edges out of (x, y).
        targets = (
            ((x + 2 * y) % n, y),
            ((x + (2 * y + 1)) % n, y),
            (x, (y + 2 * x) % n),
            (x, (y + (2 * x + 1)) % n),
        )
        for target in targets:
            G.add_edge((x, y), target)
    G.graph["name"] = f"margulis_gabber_galil_graph({n})"
    return G
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def chordal_cycle_graph(p, create_using=None):
    """Returns the chordal cycle graph on `p` nodes.

    The returned graph is a cycle graph on `p` nodes with chords joining each
    vertex `x` to its inverse modulo `p`. This graph is a (mildly explicit)
    3-regular expander [1]_.

    `p` *must* be a prime number.

    Parameters
    ----------
    p : a prime number
        The number of vertices in the graph. This also indicates where the
        chordal edges in the cycle will be created.

    create_using : NetworkX graph constructor, optional (default=nx.MultiGraph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : graph
        The constructed undirected multigraph.

    Raises
    ------
    NetworkXError
        If `create_using` indicates directed or not a multigraph.

    References
    ----------
    .. [1] Theorem 4.4.2 in A. Lubotzky. "Discrete groups, expanding graphs and
           invariant measures", volume 125 of Progress in Mathematics.
           Birkhäuser Verlag, Basel, 1994.

    """
    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
    if G.is_directed() or not G.is_multigraph():
        raise nx.NetworkXError("`create_using` must be an undirected multigraph.")

    for x in range(p):
        # The multiplicative inverse of x in Z/pZ via Fermat's Little Theorem:
        # x^p = x (mod p), hence x * x^(p - 2) = 1 (mod p).
        # Zero has no inverse, so it is mapped to itself.
        inverse = pow(x, p - 2, p) if x > 0 else 0
        for neighbor in ((x - 1) % p, (x + 1) % p, inverse):
            G.add_edge(x, neighbor)
    G.graph["name"] = f"chordal_cycle_graph({p})"
    return G
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def paley_graph(p, create_using=None):
    r"""Returns the Paley $\frac{(p-1)}{2}$ -regular graph on $p$ nodes.

    The returned graph is a graph on $\mathbb{Z}/p\mathbb{Z}$ with edges between $x$ and $y$
    if and only if $x-y$ is a nonzero square in $\mathbb{Z}/p\mathbb{Z}$.

    If $p \equiv 1 \pmod 4$, $-1$ is a square in $\mathbb{Z}/p\mathbb{Z}$ and therefore $x-y$ is a square if and
    only if $y-x$ is also a square, i.e the edges in the Paley graph are symmetric.

    If $p \equiv 3 \pmod 4$, $-1$ is not a square in $\mathbb{Z}/p\mathbb{Z}$ and therefore either $x-y$ or $y-x$
    is a square in $\mathbb{Z}/p\mathbb{Z}$ but not both.

    Note that a more general definition of Paley graphs extends this construction
    to graphs over $q=p^n$ vertices, by using the finite field $F_q$ instead of $\mathbb{Z}/p\mathbb{Z}$.
    This construction requires to compute squares in general finite fields and is
    not what is implemented here (i.e `paley_graph(25)` does not return the true
    Paley graph associated with $5^2$).

    Parameters
    ----------
    p : int, an odd prime number.

    create_using : NetworkX graph constructor, optional (default=nx.DiGraph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : graph
        The constructed directed graph.

    Raises
    ------
    NetworkXError
        If the graph is a multigraph.

    References
    ----------
    Chapter 13 in B. Bollobas, Random Graphs. Second edition.
    Cambridge Studies in Advanced Mathematics, 73.
    Cambridge University Press, Cambridge (2001).
    """
    G = nx.empty_graph(0, create_using, default=nx.DiGraph)
    if G.is_multigraph():
        raise nx.NetworkXError("`create_using` cannot be a multigraph.")

    # The nonzero quadratic residues modulo p; using a set de-duplicates
    # (there are exactly (p - 1) / 2 of them when p is prime).
    residues = {(x**2) % p for x in range(1, p) if (x**2) % p != 0}

    for node in range(p):
        for residue in residues:
            G.add_edge(node, (node + residue) % p)
    G.graph["name"] = f"paley({p})"
    return G
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
@nx.utils.decorators.np_random_state("seed")
@nx._dispatchable(graphs=None, returns_graph=True)
def maybe_regular_expander(n, d, *, create_using=None, max_tries=100, seed=None):
    r"""Utility for creating a random regular expander.

    Returns a random $d$-regular graph on $n$ nodes which is an expander
    graph with very good probability.

    Parameters
    ----------
    n : int
      The number of nodes.
    d : int
      The degree of each node.
    create_using : Graph Instance or Constructor
      Indicator of type of graph to return.
      If a Graph-type instance, then clear and use it.
      If a constructor, call it to create an empty graph.
      Use the Graph constructor by default.
    max_tries : int. (default: 100)
      The number of allowed loops when generating each independent cycle
    seed : (default: None)
      Seed used to set random number generation state. See :ref:`Randomness<randomness>`.

    Notes
    -----
    The nodes are numbered from $0$ to $n - 1$.

    The graph is generated by taking $d / 2$ random independent cycles.

    Joel Friedman proved that in this model the resulting
    graph is an expander with probability
    $1 - O(n^{-\tau})$ where $\tau = \lceil (\sqrt{d - 1}) / 2 \rceil - 1$. [1]_

    Examples
    --------
    >>> G = nx.maybe_regular_expander(n=200, d=6, seed=8020)

    Returns
    -------
    G : graph
        The constructed undirected graph.

    Raises
    ------
    NetworkXError
        If $d % 2 != 0$ as the degree must be even.
        If $n - 1$ is less than $ 2d $ as the graph is complete at most.
        If max_tries is reached

    See Also
    --------
    is_regular_expander
    random_regular_expander_graph

    References
    ----------
    .. [1] Joel Friedman,
       A Proof of Alon’s Second Eigenvalue Conjecture and Related Problems, 2004
       https://arxiv.org/abs/cs/0405020

    """
    # NOTE: the original body imported numpy as np but never used it; the
    # `seed` argument already arrives as a numpy RandomState courtesy of the
    # np_random_state decorator, so the import has been removed.

    if n < 1:
        raise nx.NetworkXError("n must be a positive integer")

    if d < 2:
        raise nx.NetworkXError("d must be greater than or equal to 2")

    if d % 2 != 0:
        raise nx.NetworkXError("d must be even")

    if n - 1 < d:
        raise nx.NetworkXError(
            f"Need n-1>= d to have room for {d//2} independent cycles with {n} nodes"
        )

    G = nx.empty_graph(n, create_using)

    # A single node admits no cycle; the empty graph is trivially d-regular-ish.
    if n < 2:
        return G

    cycles = []
    edges = set()

    # Build d / 2 edge-disjoint Hamiltonian cycles; their union is d-regular.
    for i in range(d // 2):
        iterations = max_tries
        # Make sure the cycles are independent to have a regular graph
        while len(edges) != (i + 1) * n:
            iterations -= 1
            # Faster than random.permutation(n) since there are only
            # (n-1)! distinct cycles against n! permutations of size n
            cycle = seed.permutation(n - 1).tolist()
            cycle.append(n - 1)

            new_edges = {
                (u, v)
                for u, v in nx.utils.pairwise(cycle, cyclic=True)
                if (u, v) not in edges and (v, u) not in edges
            }
            # If the new cycle has no edges in common with previous cycles
            # then add it to the list otherwise try again
            if len(new_edges) == n:
                cycles.append(cycle)
                edges.update(new_edges)

            if iterations == 0:
                raise nx.NetworkXError("Too many iterations in maybe_regular_expander")

    G.add_edges_from(edges)

    return G
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
@nx.utils.not_implemented_for("directed")
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable(preserve_edge_attrs={"G": {"weight": 1}})
def is_regular_expander(G, *, epsilon=0):
    r"""Determines whether the graph G is a regular expander. [1]_

    An expander graph is a sparse graph with strong connectivity properties.

    More precisely, this helper checks whether the graph is a
    regular $(n, d, \lambda)$-expander with $\lambda$ close to
    the Alon-Boppana bound and given by
    $\lambda = 2 \sqrt{d - 1} + \epsilon$. [2]_

    In the case where $\epsilon = 0$ then if the graph successfully passes the test
    it is a Ramanujan graph. [3]_

    A Ramanujan graph has spectral gap almost as large as possible, which makes them
    excellent expanders.

    Parameters
    ----------
    G : NetworkX graph
    epsilon : int, float, default=0

    Returns
    -------
    bool
        Whether the given graph is a regular $(n, d, \lambda)$-expander
        where $\lambda = 2 \sqrt{d - 1} + \epsilon$.

    Examples
    --------
    >>> G = nx.random_regular_expander_graph(20, 4)
    >>> nx.is_regular_expander(G)
    True

    See Also
    --------
    maybe_regular_expander
    random_regular_expander_graph

    References
    ----------
    .. [1] Expander graph, https://en.wikipedia.org/wiki/Expander_graph
    .. [2] Alon-Boppana bound, https://en.wikipedia.org/wiki/Alon%E2%80%93Boppana_bound
    .. [3] Ramanujan graphs, https://en.wikipedia.org/wiki/Ramanujan_graph

    """

    import numpy as np
    from scipy.sparse.linalg import eigsh

    if epsilon < 0:
        raise nx.NetworkXError("epsilon must be non negative")

    if not nx.is_regular(G):
        return False

    # The graph is regular, so any node's degree is the common degree d.
    _, d = nx.utils.arbitrary_element(G.degree)

    A = nx.adjacency_matrix(G, dtype=float)
    # The two largest-magnitude eigenvalues; for a d-regular graph the
    # largest is d itself, so min() below picks out lambda2.
    lams = eigsh(A, which="LM", k=2, return_eigenvectors=False)

    # lambda2 is the second biggest eigenvalue
    lambda2 = min(lams)

    # BUGFIX: the documented bound is lambda = 2 * sqrt(d - 1) + epsilon
    # (Alon-Boppana); the previous code computed 2 ** np.sqrt(d - 1)
    # (exponentiation), which does not match the docstring.
    # Use bool() to convert numpy scalar to Python Boolean
    return bool(abs(lambda2) < 2 * np.sqrt(d - 1) + epsilon)
|
| 400 |
+
|
| 401 |
+
|
| 402 |
+
@nx.utils.decorators.np_random_state("seed")
@nx._dispatchable(graphs=None, returns_graph=True)
def random_regular_expander_graph(
    n, d, *, epsilon=0, create_using=None, max_tries=100, seed=None
):
    r"""Returns a random regular expander graph on $n$ nodes with degree $d$.

    An expander graph is a sparse graph with strong connectivity properties. [1]_

    More precisely the returned graph is a $(n, d, \lambda)$-expander with
    $\lambda = 2 \sqrt{d - 1} + \epsilon$, close to the Alon-Boppana bound. [2]_

    In the case where $\epsilon = 0$ it returns a Ramanujan graph.
    A Ramanujan graph has spectral gap almost as large as possible,
    which makes them excellent expanders. [3]_

    Parameters
    ----------
    n : int
      The number of nodes.
    d : int
      The degree of each node.
    epsilon : int, float, default=0
    max_tries : int, (default: 100)
      The number of allowed loops, also used in the maybe_regular_expander utility
    seed : (default: None)
      Seed used to set random number generation state. See :ref:`Randomness<randomness>`.

    Raises
    ------
    NetworkXError
        If max_tries is reached

    Examples
    --------
    >>> G = nx.random_regular_expander_graph(20, 4)
    >>> nx.is_regular_expander(G)
    True

    Notes
    -----
    This loops over `maybe_regular_expander` and can be slow when
    $n$ is too big or $\epsilon$ too small.

    See Also
    --------
    maybe_regular_expander
    is_regular_expander

    References
    ----------
    .. [1] Expander graph, https://en.wikipedia.org/wiki/Expander_graph
    .. [2] Alon-Boppana bound, https://en.wikipedia.org/wiki/Alon%E2%80%93Boppana_bound
    .. [3] Ramanujan graphs, https://en.wikipedia.org/wiki/Ramanujan_graph

    """
    remaining = max_tries
    G = maybe_regular_expander(
        n, d, create_using=create_using, max_tries=max_tries, seed=seed
    )

    # Resample candidate d-regular graphs until one passes the expander test.
    while not is_regular_expander(G, epsilon=epsilon):
        remaining -= 1
        G = maybe_regular_expander(
            n=n, d=d, create_using=create_using, max_tries=max_tries, seed=seed
        )

        if remaining == 0:
            raise nx.NetworkXError(
                "Too many iterations in random_regular_expander_graph"
            )

    return G
|
minigpt2/lib/python3.10/site-packages/networkx/generators/geometric.py
ADDED
|
@@ -0,0 +1,1048 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Generators for geometric graphs."""
|
| 2 |
+
|
| 3 |
+
import math
|
| 4 |
+
from bisect import bisect_left
|
| 5 |
+
from itertools import accumulate, combinations, product
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
from networkx.utils import py_random_state
|
| 9 |
+
|
| 10 |
+
# Public API of this module: the geometric-graph generators re-exported by
# ``networkx`` and available via ``from ... import *``.
__all__ = [
    "geometric_edges",
    "geographical_threshold_graph",
    "navigable_small_world_graph",
    "random_geometric_graph",
    "soft_random_geometric_graph",
    "thresholded_random_geometric_graph",
    "waxman_graph",
    "geometric_soft_configuration_graph",
]
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
@nx._dispatchable(node_attrs="pos_name")
def geometric_edges(G, radius, p=2, *, pos_name="pos"):
    """Returns edge list of node pairs within `radius` of each other.

    Parameters
    ----------
    G : networkx graph
        Graph whose nodes carry a position attribute (``pos_name``); the
        positions are used to compute pairwise distances.
    radius : scalar
        Distance threshold: a pair of nodes appears in the result when the
        distance between them is at most `radius`.
    p : scalar, default=2
        The `Minkowski distance metric
        <https://en.wikipedia.org/wiki/Minkowski_distance>`_ used to compute
        distances. The default value is 2, i.e. Euclidean distance.
    pos_name : string, default="pos"
        Name of the node attribute holding each node's coordinates. Every
        node in the graph must have this attribute.

    Returns
    -------
    edges : list
        List of node pairs whose distance does not exceed `radius`.

    Raises
    ------
    NetworkXError
        If any node lacks the ``pos_name`` attribute.

    Notes
    -----
    Distances use the Minkowski metric with exponent `p`. When SciPy is
    installed, a ``scipy.spatial.cKDTree`` accelerates the computation.

    Examples
    --------
    Create a graph with nodes that have a "pos" attribute representing 2D
    coordinates.

    >>> G = nx.Graph()
    >>> G.add_nodes_from(
    ...     [
    ...         (0, {"pos": (0, 0)}),
    ...         (1, {"pos": (3, 0)}),
    ...         (2, {"pos": (8, 0)}),
    ...     ]
    ... )
    >>> nx.geometric_edges(G, radius=1)
    []
    >>> nx.geometric_edges(G, radius=4)
    [(0, 1)]
    >>> nx.geometric_edges(G, radius=6)
    [(0, 1), (1, 2)]
    >>> nx.geometric_edges(G, radius=9)
    [(0, 1), (0, 2), (1, 2)]
    """
    # Validate up front: every node must carry a position; report the first
    # offender by name so the error is actionable.
    for n, coord in G.nodes(data=pos_name):
        if coord is None:
            raise nx.NetworkXError(
                f"Node {n} (and all nodes) must have a '{pos_name}' attribute."
            )

    # The real work happens in _geometric_edges; keeping it separate lets the
    # other generators in this module skip the validation pass above.
    return _geometric_edges(G, radius, p, pos_name)
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
def _geometric_edges(G, radius, p, pos_name):
|
| 90 |
+
"""
|
| 91 |
+
Implements `geometric_edges` without input validation. See `geometric_edges`
|
| 92 |
+
for complete docstring.
|
| 93 |
+
"""
|
| 94 |
+
nodes_pos = G.nodes(data=pos_name)
|
| 95 |
+
try:
|
| 96 |
+
import scipy as sp
|
| 97 |
+
except ImportError:
|
| 98 |
+
# no scipy KDTree so compute by for-loop
|
| 99 |
+
radius_p = radius**p
|
| 100 |
+
edges = [
|
| 101 |
+
(u, v)
|
| 102 |
+
for (u, pu), (v, pv) in combinations(nodes_pos, 2)
|
| 103 |
+
if sum(abs(a - b) ** p for a, b in zip(pu, pv)) <= radius_p
|
| 104 |
+
]
|
| 105 |
+
return edges
|
| 106 |
+
# scipy KDTree is available
|
| 107 |
+
nodes, coords = list(zip(*nodes_pos))
|
| 108 |
+
kdtree = sp.spatial.cKDTree(coords) # Cannot provide generator.
|
| 109 |
+
edge_indexes = kdtree.query_pairs(radius, p)
|
| 110 |
+
edges = [(nodes[u], nodes[v]) for u, v in sorted(edge_indexes)]
|
| 111 |
+
return edges
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
@py_random_state(5)
@nx._dispatchable(graphs=None, returns_graph=True)
def random_geometric_graph(
    n, radius, dim=2, pos=None, p=2, seed=None, *, pos_name="pos"
):
    """Returns a random geometric graph in the unit cube of dimensions `dim`.

    Places `n` nodes uniformly at random in the unit cube and joins two
    nodes by an edge whenever their distance is at most `radius`.

    When SciPy is available a KDTree computes the edges, reducing the time
    complexity from $O(n^2)$ to $O(n)$.

    Parameters
    ----------
    n : int or iterable
        Number of nodes or iterable of nodes
    radius: float
        Distance threshold value
    dim : int, optional
        Dimension of graph
    pos : dict, optional
        A dictionary keyed by node with node positions as values.
    p : float, optional
        Which Minkowski distance metric to use. `p` has to meet the condition
        ``1 <= p <= infinity``.

        If this argument is not specified, the :math:`L^2` metric
        (the Euclidean distance metric), p = 2 is used.
        This should not be confused with the `p` of an Erdős-Rényi random
        graph, which represents probability.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    pos_name : string, default="pos"
        The name of the node attribute which represents the position
        in 2D coordinates of the node in the returned graph.

    Returns
    -------
    Graph
        A random geometric graph, undirected and without self-loops.
        Each node carries a position attribute (``pos_name``) storing its
        location in Euclidean space, either as supplied via ``pos`` or as
        drawn by this function.

    Examples
    --------
    Create a random geometric graph on twenty nodes where nodes are joined by
    an edge if their distance is at most 0.1::

        >>> G = nx.random_geometric_graph(20, 0.1)

    Notes
    -----
    This uses a *k*-d tree to build the graph.

    The `pos` keyword argument can be used to specify node positions so you
    can create an arbitrary distribution and domain for positions.

    For example, to use a 2D Gaussian distribution of node positions with mean
    (0, 0) and standard deviation 2::

        >>> import random
        >>> n = 20
        >>> pos = {i: (random.gauss(0, 2), random.gauss(0, 2)) for i in range(n)}
        >>> G = nx.random_geometric_graph(n, 0.2, pos=pos)

    References
    ----------
    .. [1] Penrose, Mathew, *Random Geometric Graphs*,
           Oxford Studies in Probability, 5, 2003.
    """
    # NOTE: this may be expressible as a special case of
    # geographical_threshold_graph with per-node weight radius / 2.
    G = nx.empty_graph(n)
    # Only draw positions when the caller did not supply any: one uniform
    # coordinate per dimension, per node.
    if pos is None:
        pos = {node: [seed.random() for _ in range(dim)] for node in G}
    nx.set_node_attributes(G, pos, pos_name)

    # Connect every pair within the distance threshold.
    G.add_edges_from(_geometric_edges(G, radius, p, pos_name))
    return G
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
@py_random_state(6)
@nx._dispatchable(graphs=None, returns_graph=True)
def soft_random_geometric_graph(
    n, radius, dim=2, pos=None, p=2, p_dist=None, seed=None, *, pos_name="pos"
):
    r"""Returns a soft random geometric graph in the unit cube.

    The soft random geometric graph [1] model places `n` nodes uniformly at
    random in the unit cube in dimension `dim`. Two nodes whose
    `p`-Minkowski distance is at most `radius` are joined by an edge with
    probability given by `p_dist` evaluated at that distance; pairs farther
    apart are never joined.

    Edges within `radius` of each other are determined using a KDTree when
    SciPy is available. This reduces the time complexity from :math:`O(n^2)`
    to :math:`O(n)`.

    Parameters
    ----------
    n : int or iterable
        Number of nodes or iterable of nodes
    radius: float
        Distance threshold value
    dim : int, optional
        Dimension of graph
    pos : dict, optional
        A dictionary keyed by node with node positions as values.
    p : float, optional
        Which Minkowski distance metric to use.
        `p` has to meet the condition ``1 <= p <= infinity``.

        If this argument is not specified, the :math:`L^2` metric
        (the Euclidean distance metric), p = 2 is used.

        This should not be confused with the `p` of an Erdős-Rényi random
        graph, which represents probability.
    p_dist : function, optional
        A probability density function computing the probability of
        connecting two nodes at a given Minkowski distance. It must accept
        the metric value and return a single probability in [0, 1]. The
        ``.pdf`` method of :mod:`scipy.stats` distributions may be passed
        here [2]. If not supplied, an exponential distribution with rate
        parameter :math:`\lambda=1` is used.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    pos_name : string, default="pos"
        The name of the node attribute which represents the position
        in 2D coordinates of the node in the returned graph.

    Returns
    -------
    Graph
        A soft random geometric graph, undirected and without self-loops.
        Each node carries a position attribute (``pos_name``) storing its
        location in Euclidean space, either as supplied via ``pos`` or as
        drawn by this function.

    Examples
    --------
    Default Graph:

    G = nx.soft_random_geometric_graph(50, 0.2)

    Custom Graph:

    Create a soft random geometric graph on 100 uniformly distributed nodes
    where nodes are joined by an edge with probability computed from an
    exponential distribution with rate parameter :math:`\lambda=1` if their
    Euclidean distance is at most 0.2.

    Notes
    -----
    This uses a *k*-d tree to build the graph.

    The `pos` keyword argument can be used to specify node positions so you
    can create an arbitrary distribution and domain for positions.

    For example, to use a 2D Gaussian distribution of node positions with mean
    (0, 0) and standard deviation 2

    The scipy.stats package can be used to define the probability distribution
    with the .pdf method used as `p_dist`.

    ::

        >>> import random
        >>> import math
        >>> n = 100
        >>> pos = {i: (random.gauss(0, 2), random.gauss(0, 2)) for i in range(n)}
        >>> p_dist = lambda dist: math.exp(-dist)
        >>> G = nx.soft_random_geometric_graph(n, 0.2, pos=pos, p_dist=p_dist)

    References
    ----------
    .. [1] Penrose, Mathew D. "Connectivity of soft random geometric graphs."
           The Annals of Applied Probability 26.2 (2016): 986-1028.
    .. [2] scipy.stats -
           https://docs.scipy.org/doc/scipy/reference/tutorial/stats.html

    """
    G = nx.empty_graph(n)
    G.name = f"soft_random_geometric_graph({n}, {radius}, {dim})"
    # Draw uniform positions in the unit cube unless the caller provided them.
    if pos is None:
        pos = {node: [seed.random() for _ in range(dim)] for node in G}
    nx.set_node_attributes(G, pos, pos_name)

    # Default connection probability: exponential decay with rate 1.
    if p_dist is None:

        def p_dist(dist):
            return math.exp(-dist)

    def _minkowski(u, v):
        # p-Minkowski distance between the two node positions.
        return sum(abs(a - b) ** p for a, b in zip(pos[u], pos[v])) ** (1 / p)

    def _accept(edge):
        # Keep a candidate edge with probability p_dist(distance).
        u, v = edge
        dist = _minkowski(u, v)
        return seed.random() < p_dist(dist)

    # Candidate edges are the pairs within ``radius``; each is kept randomly.
    G.add_edges_from(
        edge for edge in _geometric_edges(G, radius, p, pos_name) if _accept(edge)
    )
    return G
|
| 336 |
+
|
| 337 |
+
|
| 338 |
+
@py_random_state(7)
@nx._dispatchable(graphs=None, returns_graph=True)
def geographical_threshold_graph(
    n,
    theta,
    dim=2,
    pos=None,
    weight=None,
    metric=None,
    p_dist=None,
    seed=None,
    *,
    pos_name="pos",
    weight_name="weight",
):
    r"""Returns a geographical threshold graph.

    The geographical threshold graph model places $n$ nodes uniformly at
    random in a rectangular domain. Each node $u$ is assigned a weight
    $w_u$. Two nodes $u$ and $v$ are joined by an edge if

    .. math::

       (w_u + w_v)p_{dist}(r) \ge \theta

    where `r` is the distance between `u` and `v`, `p_dist` is any function of
    `r`, and :math:`\theta` as the threshold parameter. `p_dist` is used to
    give weight to the distance between nodes when deciding whether or not
    they should be connected. The larger `p_dist` is, the more prone nodes
    separated by `r` are to be connected, and vice versa.

    Parameters
    ----------
    n : int or iterable
        Number of nodes or iterable of nodes
    theta: float
        Threshold value
    dim : int, optional
        Dimension of graph
    pos : dict
        Node positions as a dictionary of tuples keyed by node.
    weight : dict
        Node weights as a dictionary of numbers keyed by node.
    metric : function
        A metric on vectors of numbers (represented as lists or
        tuples). This must be a function that accepts two lists (or
        tuples) as input and yields a number as output. The function
        must also satisfy the four requirements of a `metric`_.
        Specifically, if $d$ is the function and $x$, $y$,
        and $z$ are vectors in the graph, then $d$ must satisfy

        1. $d(x, y) \ge 0$,
        2. $d(x, y) = 0$ if and only if $x = y$,
        3. $d(x, y) = d(y, x)$,
        4. $d(x, z) \le d(x, y) + d(y, z)$.

        If this argument is not specified, the Euclidean distance metric is
        used.

        .. _metric: https://en.wikipedia.org/wiki/Metric_%28mathematics%29
    p_dist : function, optional
        Any function used to give weight to the distance between nodes when
        deciding whether or not they should be connected. `p_dist` was
        originally conceived as a probability density function giving the
        probability of connecting two nodes that are of metric distance `r`
        apart, but arbitrary functions that need not be valid densities are
        accepted. The ``.pdf`` method of :mod:`scipy.stats` distributions
        may be passed here. If ``p_dist=None`` (the default), the
        inverse-square function :math:`r^{-2}` is used.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    pos_name : string, default="pos"
        The name of the node attribute which represents the position
        in 2D coordinates of the node in the returned graph.
    weight_name : string, default="weight"
        The name of the node attribute which represents the weight
        of the node in the returned graph.

    Returns
    -------
    Graph
        A random geographic threshold graph, undirected and without
        self-loops.

        Each node carries a position attribute (``pos_name``) storing its
        location in Euclidean space, and a weight attribute
        (``weight_name``) storing its weight, each either as supplied by
        the caller or as drawn by this function.

    Examples
    --------
    Specify an alternate distance metric using the ``metric`` keyword
    argument. For example, to use the `taxicab metric`_ instead of the
    default `Euclidean metric`_::

        >>> dist = lambda x, y: sum(abs(a - b) for a, b in zip(x, y))
        >>> G = nx.geographical_threshold_graph(10, 0.1, metric=dist)

    .. _taxicab metric: https://en.wikipedia.org/wiki/Taxicab_geometry
    .. _Euclidean metric: https://en.wikipedia.org/wiki/Euclidean_distance

    Notes
    -----
    If weights are not specified they are assigned to nodes by drawing randomly
    from the exponential distribution with rate parameter $\lambda=1$.
    To specify weights from a different distribution, use the `weight` keyword
    argument::

        >>> import random
        >>> n = 20
        >>> w = {i: random.expovariate(5.0) for i in range(n)}
        >>> G = nx.geographical_threshold_graph(20, 50, weight=w)

    If node positions are not specified they are randomly assigned from the
    uniform distribution.

    References
    ----------
    .. [1] Masuda, N., Miwa, H., Konno, N.:
       Geographical threshold graphs with small-world and scale-free
       properties.
       Physical Review E 71, 036108 (2005)
    .. [2] Milan Bradonjić, Aric Hagberg and Allon G. Percus,
       Giant component and connectivity in geographical threshold graphs,
       in Algorithms and Models for the Web-Graph (WAW 2007),
       Antony Bonato and Fan Chung (Eds), pp. 209--216, 2007
    """
    G = nx.empty_graph(n)
    # Missing weights are drawn first (exponential, rate 1), then missing
    # positions (uniform in the unit cube) -- this ordering fixes the RNG
    # stream and must not change.
    if weight is None:
        weight = {node: seed.expovariate(1) for node in G}
    if pos is None:
        pos = {node: [seed.random() for _ in range(dim)] for node in G}
    # Euclidean distance is the default metric.
    if metric is None:
        metric = math.dist
    nx.set_node_attributes(G, weight, weight_name)
    nx.set_node_attributes(G, pos, pos_name)

    # Default distance weighting: inverse square of the distance.
    if p_dist is None:

        def p_dist(r):
            return r**-2

    def _joined(u, v):
        # Threshold condition: combined weight scaled by p_dist(distance)
        # must reach theta for the pair to be connected.
        return (weight[u] + weight[v]) * p_dist(metric(pos[u], pos[v])) >= theta

    G.add_edges_from(pair for pair in combinations(G, 2) if _joined(*pair))
    return G
|
| 503 |
+
|
| 504 |
+
|
| 505 |
+
@py_random_state(6)
@nx._dispatchable(graphs=None, returns_graph=True)
def waxman_graph(
    n,
    beta=0.4,
    alpha=0.1,
    L=None,
    domain=(0, 0, 1, 1),
    metric=None,
    seed=None,
    *,
    pos_name="pos",
):
    r"""Returns a Waxman random graph.

    The Waxman random graph model places `n` nodes uniformly at random
    in a rectangular domain. Each pair of nodes at distance `d` is
    joined by an edge with probability

    .. math::
            p = \beta \exp(-d / \alpha L).

    This function implements both Waxman models, using the `L` keyword
    argument.

    * Waxman-1: if `L` is not specified, it is set to be the maximum distance
      between any pair of nodes.
    * Waxman-2: if `L` is specified, the distance between a pair of nodes is
      chosen uniformly at random from the interval `[0, L]`.

    Parameters
    ----------
    n : int or iterable
        Number of nodes or iterable of nodes
    beta: float
        Model parameter
    alpha: float
        Model parameter
    L : float, optional
        Maximum distance between nodes. If not specified, the actual distance
        is calculated.
    domain : four-tuple of numbers, optional
        Domain size, given as a tuple of the form `(x_min, y_min, x_max,
        y_max)`.
    metric : function
        A metric on vectors of numbers (represented as lists or
        tuples). This must be a function that accepts two lists (or
        tuples) as input and yields a number as output. The function
        must also satisfy the four requirements of a `metric`_.
        Specifically, if $d$ is the function and $x$, $y$,
        and $z$ are vectors in the graph, then $d$ must satisfy

        1. $d(x, y) \ge 0$,
        2. $d(x, y) = 0$ if and only if $x = y$,
        3. $d(x, y) = d(y, x)$,
        4. $d(x, z) \le d(x, y) + d(y, z)$.

        If this argument is not specified, the Euclidean distance metric is
        used.

        .. _metric: https://en.wikipedia.org/wiki/Metric_%28mathematics%29

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    pos_name : string, default="pos"
        The name of the node attribute which represents the position
        in 2D coordinates of the node in the returned graph.

    Returns
    -------
    Graph
        A random Waxman graph, undirected and without self-loops. Each
        node carries a position attribute (``pos_name``) storing the
        location generated for it by this function.

    Examples
    --------
    Specify an alternate distance metric using the ``metric`` keyword
    argument. For example, to use the "`taxicab metric`_" instead of the
    default `Euclidean metric`_::

        >>> dist = lambda x, y: sum(abs(a - b) for a, b in zip(x, y))
        >>> G = nx.waxman_graph(10, 0.5, 0.1, metric=dist)

    .. _taxicab metric: https://en.wikipedia.org/wiki/Taxicab_geometry
    .. _Euclidean metric: https://en.wikipedia.org/wiki/Euclidean_distance

    Notes
    -----
    Starting in NetworkX 2.0 the parameters alpha and beta align with their
    usual roles in the probability distribution. In earlier versions their
    positions in the expression were reversed. Their position in the calling
    sequence reversed as well to minimize backward incompatibility.

    References
    ----------
    .. [1] B. M. Waxman, *Routing of multipoint connections*.
       IEEE J. Select. Areas Commun. 6(9),(1988) 1617--1622.
    """
    G = nx.empty_graph(n)
    xmin, ymin, xmax, ymax = domain
    # One uniform 2D position per node inside the rectangular domain
    # (x drawn before y, preserving the RNG stream).
    pos = {node: (seed.uniform(xmin, xmax), seed.uniform(ymin, ymax)) for node in G}
    nx.set_node_attributes(G, pos, pos_name)
    # Euclidean distance is the default metric.
    if metric is None:
        metric = math.dist
    if L is None:
        # Waxman-1: L is the largest inter-node distance, and each pair's
        # edge probability uses its actual distance.
        L = max(metric(a, b) for a, b in combinations(pos.values(), 2))

        def _edge_len(u, v):
            return metric(pos[u], pos[v])

    else:
        # Waxman-2: each candidate pair gets a fresh pseudo-distance drawn
        # uniformly from [0, L).
        def _edge_len(u, v):
            return seed.random() * L

    def _connect(pair):
        # Draw the acceptance threshold first, then (in Waxman-2) the random
        # distance -- matching the original per-pair RNG call order.
        draw = seed.random()
        return draw < beta * math.exp(-_edge_len(*pair) / (alpha * L))

    G.add_edges_from(pair for pair in combinations(G, 2) if _connect(pair))
    return G
|
| 636 |
+
|
| 637 |
+
|
| 638 |
+
@py_random_state(5)
@nx._dispatchable(graphs=None, returns_graph=True)
def navigable_small_world_graph(n, p=1, q=1, r=2, dim=2, seed=None):
    r"""Returns a navigable small-world graph.

    A navigable small-world graph is a directed grid with additional
    long-range connections that are chosen randomly.

    Following Kleinberg [1]_, the nodes are the lattice points of a
    ``dim``-dimensional grid with ``n`` points per side, and the *lattice
    distance* between two nodes is the number of lattice steps (grid
    moves) separating them.  Each node $u$ has a directed edge to every
    other node within lattice distance $p$ (its *local contacts*), plus
    $q$ long-range directed edges, where the $i$th long-range edge from
    $u$ has endpoint $v$ with probability proportional to
    $[d(u, v)]^{-r}$.

    Parameters
    ----------
    n : int
        The length of one side of the lattice; the number of nodes in
        the graph is therefore $n^{dim}$.
    p : int
        The diameter of short range connections.  Each node is joined
        with every other node within this lattice distance.
    q : int
        The number of long-range connections for each node.
    r : float
        Exponent for decaying probability of connections.  The
        probability of connecting to a node at lattice distance $d$ is
        $1/d^r$.
    dim : int
        Dimension of grid
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    References
    ----------
    .. [1] J. Kleinberg. The small-world phenomenon: An algorithmic
       perspective. Proc. 32nd ACM Symposium on Theory of Computing, 2000.
    """
    if p < 1:
        raise nx.NetworkXException("p must be >= 1")
    if q < 0:
        raise nx.NetworkXException("q must be >= 0")
    if r < 0:
        raise nx.NetworkXException("r must be >= 0")

    G = nx.DiGraph()
    nodes = list(product(range(n), repeat=dim))
    for source in nodes:
        # The leading 0 makes the running sums start at zero so that a
        # uniform draw in [0, total] maps back to an index via bisect_left.
        weights = [0]
        for other in nodes:
            if other == source:
                continue
            lattice_dist = sum(abs(b - a) for a, b in zip(source, other))
            # Local contacts: join every node within lattice distance p.
            if lattice_dist <= p:
                G.add_edge(source, other)
            weights.append(lattice_dist**-r)
        cumulative = list(accumulate(weights))
        total = cumulative[-1]
        # Long-range contacts: q independent draws, each endpoint chosen
        # with probability proportional to d(source, node) ** -r.
        for _ in range(q):
            idx = bisect_left(cumulative, seed.uniform(0, total))
            G.add_edge(source, nodes[idx])
    return G
|
| 709 |
+
|
| 710 |
+
|
| 711 |
+
@py_random_state(7)
@nx._dispatchable(graphs=None, returns_graph=True)
def thresholded_random_geometric_graph(
    n,
    radius,
    theta,
    dim=2,
    pos=None,
    weight=None,
    p=2,
    seed=None,
    *,
    pos_name="pos",
    weight_name="weight",
):
    r"""Returns a thresholded random geometric graph in the unit cube.

    The thresholded random geometric graph [1] model places `n` nodes
    uniformly at random in the unit cube of dimensions `dim`.  Each node
    `u` is assigned a weight :math:`w_u`.  Two nodes `u` and `v` are
    joined by an edge if they are within the maximum connection distance
    `radius`, computed by the `p`-Minkowski distance, and the summation
    of weights :math:`w_u` + :math:`w_v` is greater than or equal to the
    threshold parameter `theta`.

    Edges within `radius` of each other are determined using a KDTree
    when SciPy is available.  This reduces the time complexity from
    :math:`O(n^2)` to :math:`O(n)`.

    Parameters
    ----------
    n : int or iterable
        Number of nodes or iterable of nodes
    radius : float
        Distance threshold value
    theta : float
        Threshold value for the sum of the two endpoint weights
    dim : int, optional
        Dimension of graph
    pos : dict, optional
        A dictionary keyed by node with node positions as values.
    weight : dict, optional
        Node weights as a dictionary of numbers keyed by node.
    p : float, optional (default 2)
        Which Minkowski distance metric to use.  `p` has to meet the
        condition ``1 <= p <= infinity``.

        If this argument is not specified, the :math:`L^2` metric
        (the Euclidean distance metric), p = 2 is used.

        This should not be confused with the `p` of an Erdős-Rényi
        random graph, which represents probability.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    pos_name : string, default="pos"
        The name of the node attribute which represents the position
        in 2D coordinates of the node in the returned graph.
    weight_name : string, default="weight"
        The name of the node attribute which represents the weight
        of the node in the returned graph.

    Returns
    -------
    Graph
        A thresholded random geometric graph, undirected and without
        self-loops.

        Each node has a node attribute (named by ``pos_name``) that
        stores the position of that node in Euclidean space as provided
        by the ``pos`` keyword argument or, if ``pos`` was not provided,
        as generated by this function.  Similarly, each node has a node
        attribute (named by ``weight_name``) that stores the weight of
        that node as provided or as generated.

    Examples
    --------
    Default graph::

        G = nx.thresholded_random_geometric_graph(50, 0.2, 0.1)

    If weights are not specified they are assigned to nodes by drawing
    randomly from the exponential distribution with rate parameter
    :math:`\lambda=1`.  To specify weights from a different
    distribution, use the `weight` keyword argument.  For example, to
    create a graph on 50 nodes whose positions follow a 2D Gaussian with
    mean (0, 0) and standard deviation 2, and whose weights are drawn
    from an exponential distribution with rate 5::

        >>> import random
        >>> n = 50
        >>> pos = {i: (random.gauss(0, 2), random.gauss(0, 2)) for i in range(n)}
        >>> w = {i: random.expovariate(5.0) for i in range(n)}
        >>> G = nx.thresholded_random_geometric_graph(n, 0.2, 0.1, 2, pos, w)

    Notes
    -----
    This uses a *k*-d tree to build the graph.

    The `pos` keyword argument can be used to specify node positions so
    you can create an arbitrary distribution and domain for positions.

    References
    ----------
    .. [1] http://cole-maclean.github.io/blog/files/thesis.pdf

    """
    G = nx.empty_graph(n)
    G.name = f"thresholded_random_geometric_graph({n}, {radius}, {theta}, {dim})"
    # If no weights are provided, choose them from an exponential
    # distribution with rate parameter 1.
    if weight is None:
        weight = {v: seed.expovariate(1) for v in G}
    # If no positions are provided, choose uniformly random vectors in
    # Euclidean space of the specified dimension.
    if pos is None:
        pos = {v: [seed.random() for _ in range(dim)] for v in G}
    nx.set_node_attributes(G, weight, weight_name)
    nx.set_node_attributes(G, pos, pos_name)

    # Keep only candidate pairs within `radius` whose endpoint weights
    # sum to at least the threshold.
    edges = (
        (u, v)
        for u, v in _geometric_edges(G, radius, p, pos_name)
        if weight[u] + weight[v] >= theta
    )
    G.add_edges_from(edges)
    return G
|
| 849 |
+
|
| 850 |
+
|
| 851 |
+
@py_random_state(5)
@nx._dispatchable(graphs=None, returns_graph=True)
def geometric_soft_configuration_graph(
    *, beta, n=None, gamma=None, mean_degree=None, kappas=None, seed=None
):
    r"""Returns a random graph from the geometric soft configuration model.

    The $\mathbb{S}^1$ model [1]_ is the geometric soft configuration model
    which is able to explain many fundamental features of real networks such as
    small-world property, heteregenous degree distributions, high level of
    clustering, and self-similarity.

    In the geometric soft configuration model, a node $i$ is assigned two hidden
    variables: a hidden degree $\kappa_i$, quantifying its popularity, influence,
    or importance, and an angular position $\theta_i$ in a circle abstracting the
    similarity space, where angular distances between nodes are a proxy for their
    similarity.  Focusing on the angular position, this model is often called
    the $\mathbb{S}^1$ model (a one-dimensional sphere).  The circle's radius is
    adjusted to $R = N/2\pi$, where $N$ is the number of nodes, so that the
    density is set to 1 without loss of generality.

    The connection probability between any pair of nodes increases with
    the product of their hidden degrees (i.e., their combined popularities),
    and decreases with the angular distance between the two nodes.
    Specifically, nodes $i$ and $j$ are connected with the probability

    $p_{ij} = \frac{1}{1 + \frac{d_{ij}^\beta}{\left(\mu \kappa_i \kappa_j\right)^{\max(1, \beta)}}}$

    where $d_{ij} = R\Delta\theta_{ij}$ is the arc length of the circle between
    nodes $i$ and $j$ separated by an angular distance $\Delta\theta_{ij}$.
    Parameters $\mu$ and $\beta$ (also called inverse temperature) control the
    average degree and the clustering coefficient, respectively.

    It can be shown [2]_ that the model undergoes a structural phase transition
    at $\beta=1$ so that for $\beta<1$ networks are unclustered in the
    thermodynamic limit (when $N\to \infty$) whereas for $\beta>1$ the ensemble
    generates networks with finite clustering coefficient.

    The $\mathbb{S}^1$ model can be expressed as a purely geometric model
    $\mathbb{H}^2$ in the hyperbolic plane [3]_ by mapping the hidden degree of
    each node into a radial coordinate as

    $r_i = \hat{R} - \frac{2 \max(1, \beta)}{\beta \zeta} \ln \left(\frac{\kappa_i}{\kappa_0}\right)$

    where $\hat{R}$ is the radius of the hyperbolic disk and $\zeta$ is the
    curvature,

    $\hat{R} = \frac{2}{\zeta} \ln \left(\frac{N}{\pi}\right)
    - \frac{2\max(1, \beta)}{\beta \zeta} \ln (\mu \kappa_0^2)$

    For $\beta > 1$, the curvature $\zeta = 1$; for $\beta < 1$,
    $\zeta = \beta^{-1}$.

    Parameters
    ----------
    Either `n`, `gamma`, `mean_degree` are provided or `kappas`.  The values of
    `n`, `gamma`, `mean_degree` (if provided) are used to construct a random
    kappa-dict keyed by node with values sampled from a power-law distribution.

    beta : positive number
        Inverse temperature, controlling the clustering coefficient.
    n : int (default: None)
        Size of the network (number of nodes).
        If not provided, `kappas` must be provided and holds the nodes.
    gamma : float (default: None)
        Exponent of the power-law distribution for hidden degrees `kappas`.
        If not provided, `kappas` must be provided directly.
    mean_degree : float (default: None)
        The mean degree in the network.
        If not provided, `kappas` must be provided directly.
    kappas : dict (default: None)
        A dict keyed by node to its hidden degree value.
        If not provided, random values are computed based on a power-law
        distribution using `n`, `gamma` and `mean_degree`.
    seed : int, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    Graph
        A random geometric soft configuration graph (undirected with no
        self-loops).  Each node has three node-attributes:

        - ``kappa`` that represents the hidden degree.

        - ``theta`` the position in the similarity space ($\mathbb{S}^1$)
          which is also the angular position in the hyperbolic plane.

        - ``radius`` the radial position in the hyperbolic plane
          (based on the hidden degree).

    Examples
    --------
    Generate a network with specified parameters:

    >>> G = nx.geometric_soft_configuration_graph(
    ...     beta=1.5, n=100, gamma=2.7, mean_degree=5
    ... )

    Generate a network with predefined hidden degrees:

    >>> kappas = {i: 10 for i in range(100)}
    >>> G = nx.geometric_soft_configuration_graph(beta=2.5, kappas=kappas)

    References
    ----------
    .. [1] Serrano, M. Á., Krioukov, D., & Boguñá, M. (2008). Self-similarity
       of complex networks and hidden metric spaces. Physical review letters, 100(7), 078701.

    .. [2] van der Kolk, J., Serrano, M. Á., & Boguñá, M. (2022). An anomalous
       topological phase transition in spatial random graphs. Communications Physics, 5(1), 245.

    .. [3] Krioukov, D., Papadopoulos, F., Kitsak, M., Vahdat, A., & Boguná, M. (2010).
       Hyperbolic geometry of complex networks. Physical Review E, 82(3), 036106.

    """
    if beta <= 0:
        raise nx.NetworkXError("The parameter beta cannot be smaller or equal to 0.")

    if kappas is not None:
        if not all((n is None, gamma is None, mean_degree is None)):
            raise nx.NetworkXError(
                "When kappas is input, n, gamma and mean_degree must not be."
            )

        n = len(kappas)
        # Mean of the hidden-degree *values*.  Summing the dict directly
        # would sum the node labels (keys), which is wrong and fails for
        # non-numeric node labels.
        mean_degree = sum(kappas.values()) / len(kappas)
    else:
        if any((n is None, gamma is None, mean_degree is None)):
            raise nx.NetworkXError(
                "Please provide either kappas, or all 3 of: n, gamma and mean_degree."
            )

        # Generate `n` hidden degrees from a powerlaw distribution
        # with given exponent `gamma` and mean value `mean_degree`.
        gam_ratio = (gamma - 2) / (gamma - 1)
        kappa_0 = mean_degree * gam_ratio * (1 - 1 / n) / (1 - 1 / n**gam_ratio)
        base = 1 - 1 / n
        power = 1 / (1 - gamma)
        kappas = {i: kappa_0 * (1 - seed.random() * base) ** power for i in range(n)}

    G = nx.Graph()
    # Circle radius chosen so node density on the circle is 1.
    R = n / (2 * math.pi)

    # Approximate values for mu in the thermodynamic limit (when n -> infinity).
    if beta > 1:
        mu = beta * math.sin(math.pi / beta) / (2 * math.pi * mean_degree)
    elif beta == 1:
        mu = 1 / (2 * mean_degree * math.log(n))
    else:
        mu = (1 - beta) / (2**beta * mean_degree * n ** (1 - beta))

    # Generate random angular positions on the circle.
    thetas = {k: seed.uniform(0, 2 * math.pi) for k in kappas}

    for u in kappas:
        for v in list(G):
            # Angular separation folded into [0, pi].
            angle = math.pi - math.fabs(math.pi - math.fabs(thetas[u] - thetas[v]))
            dij = math.pow(R * angle, beta)
            mu_kappas = math.pow(mu * kappas[u] * kappas[v], max(1, beta))
            p_ij = 1 / (1 + dij / mu_kappas)

            # Create an edge with the model's connection probability.
            if seed.random() < p_ij:
                G.add_edge(u, v)
        # Ensure isolated nodes are still present in the graph.
        G.add_node(u)

    nx.set_node_attributes(G, thetas, "theta")
    nx.set_node_attributes(G, kappas, "kappa")

    # Map hidden degrees into the radial coordinates of the hyperbolic plane.
    zeta = 1 if beta > 1 else 1 / beta
    kappa_min = min(kappas.values())
    R_c = 2 * max(1, beta) / (beta * zeta)
    R_hat = (2 / zeta) * math.log(n / math.pi) - R_c * math.log(mu * kappa_min)
    radii = {node: R_hat - R_c * math.log(kappa) for node, kappa in kappas.items()}
    nx.set_node_attributes(G, radii, "radius")

    return G
|
minigpt2/lib/python3.10/site-packages/networkx/generators/joint_degree_seq.py
ADDED
|
@@ -0,0 +1,664 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Generate graphs with a given joint degree and directed joint degree"""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.utils import py_random_state
|
| 5 |
+
|
| 6 |
+
__all__ = [
|
| 7 |
+
"is_valid_joint_degree",
|
| 8 |
+
"is_valid_directed_joint_degree",
|
| 9 |
+
"joint_degree_graph",
|
| 10 |
+
"directed_joint_degree_graph",
|
| 11 |
+
]
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@nx._dispatchable(graphs=None)
def is_valid_joint_degree(joint_degrees):
    """Checks whether the given joint degree dictionary is realizable.

    A *joint degree dictionary* is a dictionary of dictionaries, in
    which entry ``joint_degrees[k][l]`` is an integer representing the
    number of edges joining nodes of degree *k* with nodes of degree
    *l*.  Such a dictionary is realizable as a simple graph if and only
    if the following conditions are satisfied.

    - each entry must be an integer,
    - the total number of nodes of degree *k*, computed by
      ``sum(joint_degrees[k].values()) / k``, must be an integer,
    - the total number of edges joining nodes of degree *k* with
      nodes of degree *l* cannot exceed the total number of possible edges,
    - each diagonal entry ``joint_degrees[k][k]`` must be even (this is
      a convention assumed by the :func:`joint_degree_graph` function).

    Parameters
    ----------
    joint_degrees : dictionary of dictionary of integers
        A joint degree dictionary in which entry ``joint_degrees[k][l]``
        is the number of edges joining nodes of degree *k* with nodes of
        degree *l*.

    Returns
    -------
    bool
        Whether the given joint degree dictionary is realizable as a
        simple graph.

    References
    ----------
    .. [1] M. Gjoka, M. Kurant, A. Markopoulou, "2.5K Graphs: from Sampling
       to Generation", IEEE Infocom, 2013.
    .. [2] I. Stanton, A. Pinar, "Constructing and sampling graphs with a
       prescribed joint degree distribution", Journal of Experimental
       Algorithmics, 2012.
    """
    # Row k counts edge endpoints at degree-k nodes, so dividing its sum
    # by k must yield a whole number of nodes of that degree.
    nodes_of_degree = {}
    for deg, row in joint_degrees.items():
        if deg <= 0:
            continue
        count = sum(row.values()) / deg
        if not count.is_integer():
            return False
        nodes_of_degree[deg] = count

    for deg, row in joint_degrees.items():
        for other, num_edges in row.items():
            # Every entry must be a whole number of edges.
            if not float(num_edges).is_integer():
                return False

            if deg == other:
                # Diagonal: cannot exceed the number of possible edges
                # among same-degree nodes, and must be even (each edge is
                # counted from both endpoints).
                cap = nodes_of_degree[deg] * (nodes_of_degree[deg] - 1)
                if num_edges > cap:
                    return False
                if num_edges % 2 != 0:
                    return False
            elif num_edges > nodes_of_degree[deg] * nodes_of_degree[other]:
                # Off-diagonal: bounded by the complete bipartite count.
                return False

    # All conditions satisfied: the joint degree dictionary is
    # realizable as a simple graph.
    return True
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def _neighbor_switch(G, w, unsat, h_node_residual, avoid_node_id=None):
|
| 82 |
+
"""Releases one free stub for ``w``, while preserving joint degree in G.
|
| 83 |
+
|
| 84 |
+
Parameters
|
| 85 |
+
----------
|
| 86 |
+
G : NetworkX graph
|
| 87 |
+
Graph in which the neighbor switch will take place.
|
| 88 |
+
w : integer
|
| 89 |
+
Node id for which we will execute this neighbor switch.
|
| 90 |
+
unsat : set of integers
|
| 91 |
+
Set of unsaturated node ids that have the same degree as w.
|
| 92 |
+
h_node_residual: dictionary of integers
|
| 93 |
+
Keeps track of the remaining stubs for a given node.
|
| 94 |
+
avoid_node_id: integer
|
| 95 |
+
Node id to avoid when selecting w_prime.
|
| 96 |
+
|
| 97 |
+
Notes
|
| 98 |
+
-----
|
| 99 |
+
First, it selects *w_prime*, an unsaturated node that has the same degree
|
| 100 |
+
as ``w``. Second, it selects *switch_node*, a neighbor node of ``w`` that
|
| 101 |
+
is not connected to *w_prime*. Then it executes an edge swap i.e. removes
|
| 102 |
+
(``w``,*switch_node*) and adds (*w_prime*,*switch_node*). Gjoka et. al. [1]
|
| 103 |
+
prove that such an edge swap is always possible.
|
| 104 |
+
|
| 105 |
+
References
|
| 106 |
+
----------
|
| 107 |
+
.. [1] M. Gjoka, B. Tillman, A. Markopoulou, "Construction of Simple
|
| 108 |
+
Graphs with a Target Joint Degree Matrix and Beyond", IEEE Infocom, '15
|
| 109 |
+
"""
|
| 110 |
+
|
| 111 |
+
if (avoid_node_id is None) or (h_node_residual[avoid_node_id] > 1):
|
| 112 |
+
# select unsaturated node w_prime that has the same degree as w
|
| 113 |
+
w_prime = next(iter(unsat))
|
| 114 |
+
else:
|
| 115 |
+
# assume that the node pair (v,w) has been selected for connection. if
|
| 116 |
+
# - neighbor_switch is called for node w,
|
| 117 |
+
# - nodes v and w have the same degree,
|
| 118 |
+
# - node v=avoid_node_id has only one stub left,
|
| 119 |
+
# then prevent v=avoid_node_id from being selected as w_prime.
|
| 120 |
+
|
| 121 |
+
iter_var = iter(unsat)
|
| 122 |
+
while True:
|
| 123 |
+
w_prime = next(iter_var)
|
| 124 |
+
if w_prime != avoid_node_id:
|
| 125 |
+
break
|
| 126 |
+
|
| 127 |
+
# select switch_node, a neighbor of w, that is not connected to w_prime
|
| 128 |
+
w_prime_neighbs = G[w_prime] # slightly faster declaring this variable
|
| 129 |
+
for v in G[w]:
|
| 130 |
+
if (v not in w_prime_neighbs) and (v != w_prime):
|
| 131 |
+
switch_node = v
|
| 132 |
+
break
|
| 133 |
+
|
| 134 |
+
# remove edge (w,switch_node), add edge (w_prime,switch_node) and update
|
| 135 |
+
# data structures
|
| 136 |
+
G.remove_edge(w, switch_node)
|
| 137 |
+
G.add_edge(w_prime, switch_node)
|
| 138 |
+
h_node_residual[w] += 1
|
| 139 |
+
h_node_residual[w_prime] -= 1
|
| 140 |
+
if h_node_residual[w_prime] == 0:
|
| 141 |
+
unsat.remove(w_prime)
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
@py_random_state(1)
@nx._dispatchable(graphs=None, returns_graph=True)
def joint_degree_graph(joint_degrees, seed=None):
    """Generates a random simple graph with the given joint degree dictionary.

    Parameters
    ----------
    joint_degrees : dictionary of dictionary of integers
        A joint degree dictionary in which entry ``joint_degrees[k][l]`` is the
        number of edges joining nodes of degree *k* with nodes of degree *l*.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    G : Graph
        A graph with the specified joint degree dictionary.

    Raises
    ------
    NetworkXError
        If *joint_degrees* dictionary is not realizable.

    Notes
    -----
    In each iteration of the "while loop" the algorithm picks two disconnected
    nodes *v* and *w*, of degree *k* and *l* correspondingly, for which
    ``joint_degrees[k][l]`` has not reached its target yet. It then adds
    edge (*v*, *w*) and increases the number of edges in graph G by one.

    The intelligence of the algorithm lies in the fact that it is always
    possible to add an edge between such disconnected nodes *v* and *w*,
    even if one or both nodes do not have free stubs. That is made possible by
    executing a "neighbor switch", an edge rewiring move that releases
    a free stub while keeping the joint degree of G the same.

    The algorithm continues for E (number of edges) iterations of
    the "while loop", at which point all entries of the given
    ``joint_degrees[k][l]`` have reached their target values and the
    construction is complete.

    References
    ----------
    .. [1] M. Gjoka, B. Tillman, A. Markopoulou, "Construction of Simple
       Graphs with a Target Joint Degree Matrix and Beyond", IEEE Infocom, '15

    Examples
    --------
    >>> joint_degrees = {
    ...     1: {4: 1},
    ...     2: {2: 2, 3: 2, 4: 2},
    ...     3: {2: 2, 4: 1},
    ...     4: {1: 1, 2: 2, 3: 1},
    ... }
    >>> G = nx.joint_degree_graph(joint_degrees)
    >>>
    """

    if not is_valid_joint_degree(joint_degrees):
        msg = "Input joint degree dict not realizable as a simple graph"
        raise nx.NetworkXError(msg)

    # compute degree count from joint_degrees:
    # sum(l.values()) counts every edge endpoint in degree group k, so
    # dividing by k gives the number of degree-k nodes.
    degree_count = {k: sum(l.values()) // k for k, l in joint_degrees.items() if k > 0}

    # start with empty N-node graph
    N = sum(degree_count.values())
    G = nx.empty_graph(N)

    # for a given degree group, keep the list of all node ids
    h_degree_nodelist = {}

    # for a given node, keep track of the remaining stubs
    h_node_residual = {}

    # populate h_degree_nodelist and h_node_residual:
    # node ids are assigned consecutively, one contiguous range per degree group
    nodeid = 0
    for degree, num_nodes in degree_count.items():
        h_degree_nodelist[degree] = range(nodeid, nodeid + num_nodes)
        for v in h_degree_nodelist[degree]:
            h_node_residual[v] = degree
        # int() coercion — presumably guards against non-int numeric counts
        # (e.g. numpy integers); TODO confirm against callers
        nodeid += int(num_nodes)

    # iterate over every degree pair (k,l) and add the number of edges given
    # for each pair
    for k in joint_degrees:
        for l in joint_degrees[k]:
            # n_edges_add is the number of edges to add for the
            # degree pair (k,l)
            n_edges_add = joint_degrees[k][l]

            # k >= l ensures each unordered degree pair is processed once
            if (n_edges_add > 0) and (k >= l):
                # number of nodes with degree k and l
                k_size = degree_count[k]
                l_size = degree_count[l]

                # k_nodes and l_nodes consist of all nodes of degree k and l
                k_nodes = h_degree_nodelist[k]
                l_nodes = h_degree_nodelist[l]

                # k_unsat and l_unsat consist of nodes of degree k and l that
                # are unsaturated (nodes that have at least 1 available stub)
                k_unsat = {v for v in k_nodes if h_node_residual[v] > 0}

                if k != l:
                    l_unsat = {w for w in l_nodes if h_node_residual[w] > 0}
                else:
                    l_unsat = k_unsat
                    # joint_degrees[k][k] counts both endpoints of each
                    # within-group edge, so halve it to get the edge count
                    n_edges_add = joint_degrees[k][l] // 2

                while n_edges_add > 0:
                    # randomly pick nodes v and w that have degrees k and l
                    v = k_nodes[seed.randrange(k_size)]
                    w = l_nodes[seed.randrange(l_size)]

                    # if nodes v and w are disconnected then attempt to connect
                    if not G.has_edge(v, w) and (v != w):
                        # if node v has no free stubs then do neighbor switch
                        if h_node_residual[v] == 0:
                            _neighbor_switch(G, v, k_unsat, h_node_residual)

                        # if node w has no free stubs then do neighbor switch
                        if h_node_residual[w] == 0:
                            if k != l:
                                _neighbor_switch(G, w, l_unsat, h_node_residual)
                            else:
                                # same-degree case: pass avoid_node_id so the
                                # switch cannot consume v's last stub, which
                                # is needed for the upcoming edge (v, w)
                                _neighbor_switch(
                                    G, w, l_unsat, h_node_residual, avoid_node_id=v
                                )

                        # add edge (v, w) and update data structures
                        G.add_edge(v, w)
                        h_node_residual[v] -= 1
                        h_node_residual[w] -= 1
                        n_edges_add -= 1

                        if h_node_residual[v] == 0:
                            k_unsat.discard(v)
                        if h_node_residual[w] == 0:
                            l_unsat.discard(w)
    return G
|
| 286 |
+
|
| 287 |
+
|
| 288 |
+
@nx._dispatchable(graphs=None)
def is_valid_directed_joint_degree(in_degrees, out_degrees, nkk):
    """Checks whether the given directed joint degree input is realizable

    Parameters
    ----------
    in_degrees : list of integers
        in degree sequence contains the in degrees of nodes.
    out_degrees : list of integers
        out degree sequence contains the out degrees of nodes.
    nkk : dictionary of dictionary of integers
        directed joint degree dictionary. for nodes of out degree k (first
        level of dict) and nodes of in degree l (second level of dict)
        describes the number of edges.

    Returns
    -------
    boolean
        returns true if given input is realizable, else returns false.

    Notes
    -----
    Realizability as a simple directed graph requires all of:

    - Condition 0: in_degrees and out_degrees have the same length
    - Condition 1: nkk[k][l] is integer for all k,l
    - Condition 2: sum(nkk[k])/k = number of nodes with partition id k, is an
      integer and matching degree sequence
    - Condition 3: number of edges and non-chords between k and l cannot exceed
      maximum possible number of edges

    References
    ----------
    [1] B. Tillman, A. Markopoulou, C. T. Butts & M. Gjoka,
        "Construction of Directed 2K Graphs". In Proc. of KDD 2017.
    """
    # condition 0: both sequences must describe the same node set.
    if len(in_degrees) != len(out_degrees):
        return False

    # node_counts[(d, 0)] / node_counts[(d, 1)]: number of nodes whose
    # in degree (side 0) / out degree (side 1) equals d.
    node_counts = {}
    # forbidden[(o, i)]: number of nodes with out degree o and in degree i —
    # these are the self-loop ("non-chord") positions that cannot carry edges.
    forbidden = {}
    for in_deg, out_deg in zip(in_degrees, out_degrees):
        node_counts[(in_deg, 0)] = node_counts.get((in_deg, 0), 0) + 1
        node_counts[(out_deg, 1)] = node_counts.get((out_deg, 1), 0) + 1
        forbidden[(out_deg, in_deg)] = forbidden.get((out_deg, in_deg), 0) + 1

    # stub_totals[(d, side)]: total edge endpoints demanded by nkk for the
    # degree group d on that side.
    stub_totals = {}
    for k, row in nkk.items():
        for l, val in row.items():
            # condition 1: every entry must be integral.
            if not float(val).is_integer():
                return False

            if val > 0:
                stub_totals[(k, 1)] = stub_totals.get((k, 1), 0) + val
                stub_totals[(l, 0)] = stub_totals.get((l, 0), 0) + val
                # condition 3: requested edges plus forbidden positions may
                # not exceed the number of available (out, in) node pairs.
                if val + forbidden.get((k, l), 0) > node_counts[(k, 1)] * node_counts[(l, 0)]:
                    return False

    # condition 2: stubs per group must divide evenly into the node counts.
    return all(stub_totals[key] / key[0] == node_counts[key] for key in stub_totals)
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
def _directed_neighbor_switch(
|
| 357 |
+
G, w, unsat, h_node_residual_out, chords, h_partition_in, partition
|
| 358 |
+
):
|
| 359 |
+
"""Releases one free stub for node w, while preserving joint degree in G.
|
| 360 |
+
|
| 361 |
+
Parameters
|
| 362 |
+
----------
|
| 363 |
+
G : networkx directed graph
|
| 364 |
+
graph within which the edge swap will take place.
|
| 365 |
+
w : integer
|
| 366 |
+
node id for which we need to perform a neighbor switch.
|
| 367 |
+
unsat: set of integers
|
| 368 |
+
set of node ids that have the same degree as w and are unsaturated.
|
| 369 |
+
h_node_residual_out: dict of integers
|
| 370 |
+
for a given node, keeps track of the remaining stubs to be added.
|
| 371 |
+
chords: set of tuples
|
| 372 |
+
keeps track of available positions to add edges.
|
| 373 |
+
h_partition_in: dict of integers
|
| 374 |
+
for a given node, keeps track of its partition id (in degree).
|
| 375 |
+
partition: integer
|
| 376 |
+
partition id to check if chords have to be updated.
|
| 377 |
+
|
| 378 |
+
Notes
|
| 379 |
+
-----
|
| 380 |
+
First, it selects node w_prime that (1) has the same degree as w and
|
| 381 |
+
(2) is unsaturated. Then, it selects node v, a neighbor of w, that is
|
| 382 |
+
not connected to w_prime and does an edge swap i.e. removes (w,v) and
|
| 383 |
+
adds (w_prime,v). If neighbor switch is not possible for w using
|
| 384 |
+
w_prime and v, then return w_prime; in [1] it's proven that
|
| 385 |
+
such unsaturated nodes can be used.
|
| 386 |
+
|
| 387 |
+
References
|
| 388 |
+
----------
|
| 389 |
+
[1] B. Tillman, A. Markopoulou, C. T. Butts & M. Gjoka,
|
| 390 |
+
"Construction of Directed 2K Graphs". In Proc. of KDD 2017.
|
| 391 |
+
"""
|
| 392 |
+
w_prime = unsat.pop()
|
| 393 |
+
unsat.add(w_prime)
|
| 394 |
+
# select node t, a neighbor of w, that is not connected to w_prime
|
| 395 |
+
w_neighbs = list(G.successors(w))
|
| 396 |
+
# slightly faster declaring this variable
|
| 397 |
+
w_prime_neighbs = list(G.successors(w_prime))
|
| 398 |
+
|
| 399 |
+
for v in w_neighbs:
|
| 400 |
+
if (v not in w_prime_neighbs) and w_prime != v:
|
| 401 |
+
# removes (w,v), add (w_prime,v) and update data structures
|
| 402 |
+
G.remove_edge(w, v)
|
| 403 |
+
G.add_edge(w_prime, v)
|
| 404 |
+
|
| 405 |
+
if h_partition_in[v] == partition:
|
| 406 |
+
chords.add((w, v))
|
| 407 |
+
chords.discard((w_prime, v))
|
| 408 |
+
|
| 409 |
+
h_node_residual_out[w] += 1
|
| 410 |
+
h_node_residual_out[w_prime] -= 1
|
| 411 |
+
if h_node_residual_out[w_prime] == 0:
|
| 412 |
+
unsat.remove(w_prime)
|
| 413 |
+
return None
|
| 414 |
+
|
| 415 |
+
# If neighbor switch didn't work, use unsaturated node
|
| 416 |
+
return w_prime
|
| 417 |
+
|
| 418 |
+
|
| 419 |
+
def _directed_neighbor_switch_rev(
|
| 420 |
+
G, w, unsat, h_node_residual_in, chords, h_partition_out, partition
|
| 421 |
+
):
|
| 422 |
+
"""The reverse of directed_neighbor_switch.
|
| 423 |
+
|
| 424 |
+
Parameters
|
| 425 |
+
----------
|
| 426 |
+
G : networkx directed graph
|
| 427 |
+
graph within which the edge swap will take place.
|
| 428 |
+
w : integer
|
| 429 |
+
node id for which we need to perform a neighbor switch.
|
| 430 |
+
unsat: set of integers
|
| 431 |
+
set of node ids that have the same degree as w and are unsaturated.
|
| 432 |
+
h_node_residual_in: dict of integers
|
| 433 |
+
for a given node, keeps track of the remaining stubs to be added.
|
| 434 |
+
chords: set of tuples
|
| 435 |
+
keeps track of available positions to add edges.
|
| 436 |
+
h_partition_out: dict of integers
|
| 437 |
+
for a given node, keeps track of its partition id (out degree).
|
| 438 |
+
partition: integer
|
| 439 |
+
partition id to check if chords have to be updated.
|
| 440 |
+
|
| 441 |
+
Notes
|
| 442 |
+
-----
|
| 443 |
+
Same operation as directed_neighbor_switch except it handles this operation
|
| 444 |
+
for incoming edges instead of outgoing.
|
| 445 |
+
"""
|
| 446 |
+
w_prime = unsat.pop()
|
| 447 |
+
unsat.add(w_prime)
|
| 448 |
+
# slightly faster declaring these as variables.
|
| 449 |
+
w_neighbs = list(G.predecessors(w))
|
| 450 |
+
w_prime_neighbs = list(G.predecessors(w_prime))
|
| 451 |
+
# select node v, a neighbor of w, that is not connected to w_prime.
|
| 452 |
+
for v in w_neighbs:
|
| 453 |
+
if (v not in w_prime_neighbs) and w_prime != v:
|
| 454 |
+
# removes (v,w), add (v,w_prime) and update data structures.
|
| 455 |
+
G.remove_edge(v, w)
|
| 456 |
+
G.add_edge(v, w_prime)
|
| 457 |
+
if h_partition_out[v] == partition:
|
| 458 |
+
chords.add((v, w))
|
| 459 |
+
chords.discard((v, w_prime))
|
| 460 |
+
|
| 461 |
+
h_node_residual_in[w] += 1
|
| 462 |
+
h_node_residual_in[w_prime] -= 1
|
| 463 |
+
if h_node_residual_in[w_prime] == 0:
|
| 464 |
+
unsat.remove(w_prime)
|
| 465 |
+
return None
|
| 466 |
+
|
| 467 |
+
# If neighbor switch didn't work, use the unsaturated node.
|
| 468 |
+
return w_prime
|
| 469 |
+
|
| 470 |
+
|
| 471 |
+
@py_random_state(3)
@nx._dispatchable(graphs=None, returns_graph=True)
def directed_joint_degree_graph(in_degrees, out_degrees, nkk, seed=None):
    """Generates a random simple directed graph with the joint degree.

    Parameters
    ----------
    in_degrees : list of integers
        in degree sequence contains the in degrees of nodes.
    out_degrees : list of integers
        out degree sequence contains the out degrees of nodes.
    nkk : dictionary of dictionary of integers
        directed joint degree dictionary, for nodes of out degree k (first
        level of dict) and nodes of in degree l (second level of dict)
        describes the number of edges.
    seed : hashable object, optional
        Seed for random number generator.

    Returns
    -------
    G : Graph
        A directed graph with the specified inputs.

    Raises
    ------
    NetworkXError
        If in_degrees, out_degrees and nkk are not realizable as a simple
        directed graph.


    Notes
    -----
    Similarly to the undirected version:
    In each iteration of the "while loop" the algorithm picks two disconnected
    nodes v and w, of degree k and l correspondingly, for which nkk[k][l] has
    not reached its target yet i.e. (for given k,l): n_edges_add < nkk[k][l].
    It then adds edge (v,w) and always increases the number of edges in graph G
    by one.

    The intelligence of the algorithm lies in the fact that it is always
    possible to add an edge between disconnected nodes v and w, for which
    nkk[degree(v)][degree(w)] has not reached its target, even if one or both
    nodes do not have free stubs. If either node v or w does not have a free
    stub, we perform a "neighbor switch", an edge rewiring move that releases a
    free stub while keeping nkk the same.

    The difference for the directed version lies in the fact that neighbor
    switches might not be able to rewire, but in these cases unsaturated nodes
    can be reassigned to use instead, see [1] for detailed description and
    proofs.

    The algorithm continues for E (number of edges in the graph) iterations of
    the "while loop", at which point all entries of the given nkk[k][l] have
    reached their target values and the construction is complete.

    References
    ----------
    [1] B. Tillman, A. Markopoulou, C. T. Butts & M. Gjoka,
        "Construction of Directed 2K Graphs". In Proc. of KDD 2017.

    Examples
    --------
    >>> in_degrees = [0, 1, 1, 2]
    >>> out_degrees = [1, 1, 1, 1]
    >>> nkk = {1: {1: 2, 2: 2}}
    >>> G = nx.directed_joint_degree_graph(in_degrees, out_degrees, nkk)
    >>>
    """
    if not is_valid_directed_joint_degree(in_degrees, out_degrees, nkk):
        msg = "Input is not realizable as a simple graph"
        raise nx.NetworkXError(msg)

    # start with an empty directed graph.
    G = nx.DiGraph()

    # for a given group, keep the list of all node ids.
    h_degree_nodelist_in = {}
    h_degree_nodelist_out = {}
    # for a given group, keep the list of all unsaturated node ids.
    h_degree_nodelist_in_unsat = {}
    h_degree_nodelist_out_unsat = {}
    # for a given node, keep track of the remaining stubs to be added.
    h_node_residual_out = {}
    h_node_residual_in = {}
    # for a given node, keep track of the partition id.
    h_partition_out = {}
    h_partition_in = {}
    # keep track of non-chords between pairs of partition ids.  A non-chord
    # (o, i) is a node with out degree o and in degree i — a position in the
    # (out, in) grid that cannot carry an edge (it would be a self-loop).
    non_chords = {}

    # populate data structures
    for idx, i in enumerate(in_degrees):
        # NOTE(review): int() coercion — presumably guards against non-int
        # numeric index types; TODO confirm this is needed
        idx = int(idx)
        if i > 0:
            h_degree_nodelist_in.setdefault(i, [])
            h_degree_nodelist_in_unsat.setdefault(i, set())
            h_degree_nodelist_in[i].append(idx)
            h_degree_nodelist_in_unsat[i].add(idx)
            h_node_residual_in[idx] = i
            h_partition_in[idx] = i

    for idx, o in enumerate(out_degrees):
        # NOTE(review): `o` is already bound by enumerate; this reassignment
        # is redundant
        o = out_degrees[idx]
        non_chords[(o, in_degrees[idx])] = non_chords.get((o, in_degrees[idx]), 0) + 1
        idx = int(idx)
        if o > 0:
            h_degree_nodelist_out.setdefault(o, [])
            h_degree_nodelist_out_unsat.setdefault(o, set())
            h_degree_nodelist_out[o].append(idx)
            h_degree_nodelist_out_unsat[o].add(idx)
            h_node_residual_out[idx] = o
            h_partition_out[idx] = o

        # every node is added, even those with out degree 0
        G.add_node(idx)

    # group sizes: number of nodes per in-degree / out-degree group.
    nk_in = {}
    nk_out = {}
    for p in h_degree_nodelist_in:
        nk_in[p] = len(h_degree_nodelist_in[p])
    for p in h_degree_nodelist_out:
        nk_out[p] = len(h_degree_nodelist_out[p])

    # iterate over every degree pair (k,l) and add the number of edges given
    # for each pair.
    for k in nkk:
        for l in nkk[k]:
            n_edges_add = nkk[k][l]

            if n_edges_add > 0:
                # chords contains a random set of potential edges.
                chords = set()

                k_len = nk_out[k]
                l_len = nk_in[l]
                # oversample by the number of non-chords so that enough valid
                # positions remain after self-loop positions are skipped.
                chords_sample = seed.sample(
                    range(k_len * l_len), n_edges_add + non_chords.get((k, l), 0)
                )

                # decode each sampled integer into an (out-node, in-node)
                # pair via div/mod over the k_len x l_len grid.
                num = 0
                while len(chords) < n_edges_add:
                    i = h_degree_nodelist_out[k][chords_sample[num] % k_len]
                    j = h_degree_nodelist_in[l][chords_sample[num] // k_len]
                    num += 1
                    if i != j:
                        chords.add((i, j))

                # k_unsat and l_unsat consist of nodes of in/out degree k and l
                # that are unsaturated i.e. those nodes that have at least one
                # available stub
                k_unsat = h_degree_nodelist_out_unsat[k]
                l_unsat = h_degree_nodelist_in_unsat[l]

                while n_edges_add > 0:
                    # peek at an arbitrary chord without removing it; the
                    # neighbor switches below may mutate `chords`.
                    v, w = chords.pop()
                    chords.add((v, w))

                    # if node v has no free stubs then do neighbor switch.
                    if h_node_residual_out[v] == 0:
                        _v = _directed_neighbor_switch(
                            G,
                            v,
                            k_unsat,
                            h_node_residual_out,
                            chords,
                            h_partition_in,
                            l,
                        )
                        # if the switch could not rewire, it returns an
                        # unsaturated replacement node to use instead of v.
                        if _v is not None:
                            v = _v

                    # if node w has no free stubs then do neighbor switch.
                    if h_node_residual_in[w] == 0:
                        _w = _directed_neighbor_switch_rev(
                            G,
                            w,
                            l_unsat,
                            h_node_residual_in,
                            chords,
                            h_partition_out,
                            k,
                        )
                        # same replacement logic as for v above.
                        if _w is not None:
                            w = _w

                    # add edge (v,w) and update data structures.
                    G.add_edge(v, w)
                    h_node_residual_out[v] -= 1
                    h_node_residual_in[w] -= 1
                    n_edges_add -= 1
                    chords.discard((v, w))

                    if h_node_residual_out[v] == 0:
                        k_unsat.discard(v)
                    if h_node_residual_in[w] == 0:
                        l_unsat.discard(w)
    return G
|
minigpt2/lib/python3.10/site-packages/networkx/generators/line.py
ADDED
|
@@ -0,0 +1,500 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for generating line graphs."""
|
| 2 |
+
|
| 3 |
+
from collections import defaultdict
|
| 4 |
+
from functools import partial
|
| 5 |
+
from itertools import combinations
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
from networkx.utils import arbitrary_element
|
| 9 |
+
from networkx.utils.decorators import not_implemented_for
|
| 10 |
+
|
| 11 |
+
__all__ = ["line_graph", "inverse_line_graph"]
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@nx._dispatchable(returns_graph=True)
def line_graph(G, create_using=None):
    r"""Returns the line graph of the graph or digraph `G`.

    The line graph of `G` contains one node per edge of `G`; two such nodes
    are joined when the underlying edges share an endpoint.  For directed
    graphs, two nodes are adjacent exactly when the edges they represent form
    a directed path of length two.

    Nodes of the line graph are 2-tuples of nodes of the original graph
    (3-tuples for multigraphs, where the edge key is the third element).

    See the **Notes** section below for a discussion of self-loops.

    Parameters
    ----------
    G : graph
        A NetworkX Graph, DiGraph, MultiGraph, or MultiDigraph.
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    L : graph
        The line graph of G.

    Examples
    --------
    >>> G = nx.star_graph(3)
    >>> L = nx.line_graph(G)
    >>> print(sorted(map(sorted, L.edges())))  # makes a 3-clique, K3
    [[(0, 1), (0, 2)], [(0, 1), (0, 3)], [(0, 2), (0, 3)]]

    Edge attributes from `G` are not copied over as node attributes in `L`, but
    attributes can be copied manually:

    >>> G = nx.path_graph(4)
    >>> G.add_edges_from((u, v, {"tot": u + v}) for u, v in G.edges)
    >>> H = nx.line_graph(G)
    >>> H.add_nodes_from((node, G.edges[node]) for node in H)

    Notes
    -----
    Graph, node, and edge data are not propagated to the new graph. For
    undirected graphs, the nodes in G must be sortable, otherwise the
    constructed line graph may not be correct.

    *Self-loops in undirected graphs*

    In an undirected graph every edge trivially intersects itself, which
    would give every line-graph node a self-loop.  Following the standard
    (simple-graph) definition, these trivial self-loops are excluded; only
    pairwise intersections of *distinct* edges produce edges in `L`.
    Self-loops and multiple edges in `G` simply add nodes to `L` and need no
    change to the definition.

    *Self-loops in directed graphs*

    For directed graphs, `(x, y)` is an edge of `L` only when the tail of
    `x` matches the head of `y` (e.g. `x = (a, b)` and `y = (b, c)`).
    A self-loop in `L` therefore arises only from a self-loop in `G` — it
    carries real topological information, so the historical convention for
    line digraphs is to *include* self-loops.

    References
    ----------
    * Harary, Frank, and Norman, Robert Z., "Some properties of line digraphs",
      Rend. Circ. Mat. Palermo, II. Ser. 9 (1960), 161--168.
    * Hemminger, R. L.; Beineke, L. W. (1978), "Line graphs and line digraphs",
      in Beineke, L. W.; Wilson, R. J., Selected Topics in Graph Theory,
      Academic Press Inc., pp. 271--305.

    """
    # Dispatch on directedness; the undirected builder follows the standard
    # definition and therefore excludes self-loops.
    if G.is_directed():
        return _lg_directed(G, create_using=create_using)
    return _lg_undirected(G, selfloops=False, create_using=create_using)
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
def _lg_directed(G, create_using=None):
    """Returns the line graph L of the (multi)digraph G.

    Each edge of G becomes a node of L — a tuple (u, v), or (u, v, key) when
    G is a multidigraph.  The node for edge (u, v) is connected to the node
    for every edge leaving v, i.e. every edge (v, w).

    Parameters
    ----------
    G : digraph
        A directed graph or directed multigraph.
    create_using : NetworkX graph constructor, optional
        Graph type to create. If graph instance, then cleared before populated.
        Default is to use the same graph class as `G`.

    """
    L = nx.empty_graph(0, create_using, default=G.__class__)

    # Multigraph edges need their key to be uniquely identified.
    if G.is_multigraph():
        edge_iter = partial(G.edges, keys=True)
    else:
        edge_iter = G.edges

    for source_edge in edge_iter():
        # source_edge is (u, v) or (u, v, key); its head is source_edge[1].
        L.add_node(source_edge)
        for target_edge in edge_iter(source_edge[1]):
            # Every edge leaving the head continues a length-2 directed path.
            L.add_edge(source_edge, target_edge)

    return L
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
def _lg_undirected(G, selfloops=False, create_using=None):
    """Returns the line graph L of the (multi)graph G.

    Edges in G appear as nodes in L, represented as sorted tuples of the form
    (u,v), or (u,v,key) if G is a multigraph. A node in L corresponding to
    the edge {u,v} is connected to every node corresponding to an edge that
    involves u or v.

    Parameters
    ----------
    G : graph
        An undirected graph or multigraph.
    selfloops : bool
        If `True`, then self-loops are included in the line graph. If `False`,
        they are excluded.
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Notes
    -----
    The standard algorithm for line graphs of undirected graphs does not
    produce self-loops.

    """
    L = nx.empty_graph(0, create_using, default=G.__class__)

    # For multigraphs include the edge key so parallel edges stay distinct.
    edge_iter = partial(G.edges, keys=True) if G.is_multigraph() else G.edges

    # Skipping the edge itself (offset 1) excludes self-loops in L.
    offset = 0 if selfloops else 1

    # Rank each node by an arbitrary but fixed enumeration; used to compare
    # nodes without requiring them to be mutually orderable.
    position = {node: rank for rank, node in enumerate(G)}

    def canonical_rank(edge):
        # Key used to order line-graph nodes deterministically.
        return (position[edge[0]], position[edge[1]])

    collected = set()
    for u in G:
        # Represent {u, v} canonically as (u, v) or (v, u) according to the
        # enumeration order, so the same edge of G always yields the same
        # node of L and values of different types are never compared.
        incident = [
            tuple(sorted(e[:2], key=position.get)) + e[2:] for e in edge_iter(u)
        ]

        if len(incident) == 1:
            # A pendant edge becomes an isolated node of the line graph.
            L.add_node(incident[0])

        # The edges meeting at u form a clique in L. Storing each pair in
        # canonical order in a set prevents double-adding edges, which is
        # especially important for multigraphs.
        for idx, first in enumerate(incident):
            collected.update(
                tuple(sorted((first, second), key=canonical_rank))
                for second in incident[idx + offset :]
            )

    L.add_edges_from(collected)
    return L
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(returns_graph=True)
def inverse_line_graph(G):
    """Returns the inverse line graph of graph G.

    If H is a graph, and G is the line graph of H, such that G = L(H),
    then H is the inverse line graph of G.

    Not all graphs are line graphs and these do not have an inverse line
    graph. In these cases this function raises a NetworkXError.

    Parameters
    ----------
    G : graph
        A NetworkX Graph

    Returns
    -------
    H : graph
        The inverse line graph of G.

    Raises
    ------
    NetworkXNotImplemented
        If G is directed or a multigraph

    NetworkXError
        If G is not a line graph

    Notes
    -----
    This is an implementation of the Roussopoulos algorithm [1]_.

    If G consists of multiple components, then the algorithm doesn't work.
    You should invert every component separately:

    >>> K5 = nx.complete_graph(5)
    >>> P4 = nx.Graph([("a", "b"), ("b", "c"), ("c", "d")])
    >>> G = nx.union(K5, P4)
    >>> root_graphs = []
    >>> for comp in nx.connected_components(G):
    ...     root_graphs.append(nx.inverse_line_graph(G.subgraph(comp)))
    >>> len(root_graphs)
    2

    References
    ----------
    .. [1] Roussopoulos, N.D., "A max {m, n} algorithm for determining the
       graph H from its line graph G", Information Processing Letters 2,
       (1973), 108--112, ISSN 0020-0190,
       `DOI link <https://doi.org/10.1016/0020-0190(73)90029-X>`_

    """
    num_nodes = G.number_of_nodes()
    if num_nodes == 0:
        return nx.empty_graph(1)
    if num_nodes == 1:
        # K2 is the root graph whose line graph is a single node.
        only_node = arbitrary_element(G)
        return nx.Graph([((only_node, 0), (only_node, 1))])
    if G.number_of_edges() == 0:
        raise nx.NetworkXError(
            "inverse_line_graph() doesn't work on an edgeless graph. "
            "Please use this function on each component separately."
        )

    if nx.number_of_selfloops(G) != 0:
        raise nx.NetworkXError(
            "A line graph as generated by NetworkX has no selfloops, so G has no "
            "inverse line graph. Please remove the selfloops from G and try again."
        )

    partition = _find_partition(G, _select_starting_cell(G))

    # Count how many partition cells each vertex of G belongs to.
    cell_count = {u: 0 for u in G.nodes}
    for cell in partition:
        for u in cell:
            cell_count[u] += 1

    if max(cell_count.values()) > 2:
        raise nx.NetworkXError(
            "G is not a line graph (vertex found in more than two partition cells)"
        )

    # Vertices in exactly one cell get a singleton cell of their own.
    singletons = tuple((u,) for u in cell_count if cell_count[u] == 1)

    H = nx.Graph()
    H.add_nodes_from(partition)
    H.add_nodes_from(singletons)
    # Two cells are adjacent in H exactly when they share a vertex of G.
    for cell_a, cell_b in combinations(H.nodes, 2):
        if any(member in cell_b for member in cell_a):
            H.add_edge(cell_a, cell_b)
    return H
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
def _triangles(G, e):
    """Return list of all triangles containing edge e"""
    u, v = e
    if u not in G:
        raise nx.NetworkXError(f"Vertex {u} not in graph")
    if v not in G[u]:
        raise nx.NetworkXError(f"Edge ({u}, {v}) not in graph")
    # Every common neighbor of u and v closes one triangle on edge e.
    return [(u, v, w) for w in G[u] if w in G[v]]
|
| 325 |
+
|
| 326 |
+
|
| 327 |
+
def _odd_triangle(G, T):
    """Test whether T is an odd triangle in G

    Parameters
    ----------
    G : NetworkX Graph
    T : 3-tuple of vertices forming triangle in G

    Returns
    -------
    True is T is an odd triangle
    False otherwise

    Raises
    ------
    NetworkXError
        T is not a triangle in G

    Notes
    -----
    An odd triangle is one in which there exists another vertex in G which is
    adjacent to either exactly one or exactly all three of the vertices in the
    triangle.

    """
    # Validate that T really is a triangle of G.
    for u in T:
        if u not in G.nodes():
            raise nx.NetworkXError(f"Vertex {u} not in graph")
    for e in combinations(T, 2):
        if e[0] not in G[e[1]]:
            raise nx.NetworkXError(f"Edge ({e[0]}, {e[1]}) not in graph")

    # For each vertex outside T, count how many triangle corners it touches.
    outside_adjacency = defaultdict(int)
    for corner in T:
        for neighbor in G[corner]:
            if neighbor not in T:
                outside_adjacency[neighbor] += 1
    # Odd: some external vertex is adjacent to exactly 1 or all 3 corners.
    return any(count in (1, 3) for count in outside_adjacency.values())
|
| 365 |
+
|
| 366 |
+
|
| 367 |
+
def _find_partition(G, starting_cell):
    """Find a partition of the vertices of G into cells of complete graphs

    Parameters
    ----------
    G : NetworkX Graph
    starting_cell : tuple of vertices in G which form a cell

    Returns
    -------
    List of tuples of vertices of G

    Raises
    ------
    NetworkXError
        If a cell is not a complete subgraph then G is not a line graph
    """
    remaining = G.copy()
    cells = [starting_cell]  # partition set
    remaining.remove_edges_from(list(combinations(starting_cell, 2)))
    # Partitioned vertices that might still have unassigned edges.
    frontier = list(starting_cell)
    while remaining.number_of_edges() > 0:
        # Edges remain, so more cells must be carved out.
        pivot = frontier.pop()
        if len(remaining[pivot]) != 0:
            # pivot still has edges, so its second cell is pivot plus all of
            # its remaining neighbors; that cell must be a complete subgraph,
            # otherwise G is not a line graph.
            new_cell = [pivot] + list(remaining[pivot])
            for a in new_cell:
                for b in new_cell:
                    if a != b and b not in remaining[a]:
                        raise nx.NetworkXError(
                            "G is not a line graph "
                            "(partition cell not a complete subgraph)"
                        )
            cells.append(tuple(new_cell))
            remaining.remove_edges_from(list(combinations(new_cell, 2)))
            frontier += new_cell
    return cells
|
| 410 |
+
|
| 411 |
+
|
| 412 |
+
def _select_starting_cell(G, starting_edge=None):
    """Select a cell to initiate _find_partition

    Parameters
    ----------
    G : NetworkX Graph
    starting_edge: an edge to build the starting cell from

    Returns
    -------
    Tuple of vertices in G

    Raises
    ------
    NetworkXError
        If it is determined that G is not a line graph

    Notes
    -----
    If starting edge not specified then pick an arbitrary edge - doesn't
    matter which. However, this function may call itself requiring a
    specific starting edge. Note that the r, s notation for counting
    triangles is the same as in the Roussopoulos paper cited above.
    """
    if starting_edge is None:
        e = arbitrary_element(G.edges())
    else:
        e = starting_edge
        if e[0] not in G.nodes():
            raise nx.NetworkXError(f"Vertex {e[0]} not in graph")
        if e[1] not in G[e[0]]:
            raise nx.NetworkXError(
                f"starting_edge ({e[0]}, {e[1]}) is not in the Graph"
            )

    e_triangles = _triangles(G, e)
    r = len(e_triangles)
    if r == 0:
        # No triangle contains e, so the starting cell is just e itself.
        starting_cell = e
    elif r == 1:
        # Exactly one triangle T contains e. T is the starting cell only if
        # each of its other two edges also lies in exactly one triangle;
        # otherwise restart from the offending edge.
        T = e_triangles[0]
        a, b, c = T
        # ab was the original edge, so check edges ac and bc.
        if len(_triangles(G, (a, c))) != 1:
            return _select_starting_cell(G, starting_edge=(a, c))
        if len(_triangles(G, (b, c))) != 1:
            return _select_starting_cell(G, starting_edge=(b, c))
        starting_cell = T
    else:
        # r >= 2, so count the odd triangles, s, among those containing e.
        odd_triangles = [T for T in e_triangles if _odd_triangle(G, T)]
        s = len(odd_triangles)
        if r == 2 and s == 0:
            # Either triangle works in this case; use the last one examined.
            starting_cell = e_triangles[-1]
        elif r - 1 <= s <= r:
            # The odd triangles containing e must form a complete subgraph,
            # which then serves as the starting cell.
            cell_nodes = {x for T in odd_triangles for x in T}
            for u in cell_nodes:
                for v in cell_nodes:
                    if u != v and v not in G[u]:
                        raise nx.NetworkXError(
                            "G is not a line graph (odd triangles "
                            "do not form complete subgraph)"
                        )
            starting_cell = tuple(cell_nodes)
        else:
            raise nx.NetworkXError(
                "G is not a line graph (incorrect number of "
                "odd triangles around starting edge)"
            )
    return starting_cell
|
minigpt2/lib/python3.10/site-packages/networkx/generators/random_graphs.py
ADDED
|
@@ -0,0 +1,1400 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Generators for random graphs.
|
| 3 |
+
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import itertools
|
| 7 |
+
import math
|
| 8 |
+
from collections import defaultdict
|
| 9 |
+
|
| 10 |
+
import networkx as nx
|
| 11 |
+
from networkx.utils import py_random_state
|
| 12 |
+
|
| 13 |
+
from ..utils.misc import check_create_using
|
| 14 |
+
from .classic import complete_graph, empty_graph, path_graph, star_graph
|
| 15 |
+
from .degree_seq import degree_sequence_tree
|
| 16 |
+
|
| 17 |
+
# Public API of this module.
__all__ = [
    "fast_gnp_random_graph",
    "gnp_random_graph",
    "dense_gnm_random_graph",
    "gnm_random_graph",
    "erdos_renyi_graph",
    "binomial_graph",
    "newman_watts_strogatz_graph",
    "watts_strogatz_graph",
    "connected_watts_strogatz_graph",
    "random_regular_graph",
    "barabasi_albert_graph",
    "dual_barabasi_albert_graph",
    "extended_barabasi_albert_graph",
    "powerlaw_cluster_graph",
    "random_lobster",
    "random_shell_graph",
    "random_powerlaw_tree",
    "random_powerlaw_tree_sequence",
    "random_kernel_graph",
]
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
@py_random_state(2)
@nx._dispatchable(graphs=None, returns_graph=True)
def fast_gnp_random_graph(n, p, seed=None, directed=False, *, create_using=None):
    """Returns a $G_{n,p}$ random graph, also known as an Erdős-Rényi graph or
    a binomial graph.

    Parameters
    ----------
    n : int
        The number of nodes.
    p : float
        Probability for edge creation.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    directed : bool, optional (default=False)
        If True, this function returns a directed graph.
    create_using : Graph constructor, optional (default=nx.Graph or nx.DiGraph)
        Graph type to create. If graph instance, then cleared before populated.
        Multigraph types are not supported and raise a ``NetworkXError``.
        By default NetworkX Graph or DiGraph are used depending on `directed`.

    Notes
    -----
    The $G_{n,p}$ graph algorithm chooses each of the $[n (n - 1)] / 2$
    (undirected) or $n (n - 1)$ (directed) possible edges with probability $p$.

    This algorithm [1]_ runs in $O(n + m)$ time, where `m` is the expected number of
    edges, which equals $p n (n - 1) / 2$. This should be faster than
    :func:`gnp_random_graph` when $p$ is small and the expected number of edges
    is small (that is, the graph is sparse).

    See Also
    --------
    gnp_random_graph

    References
    ----------
    .. [1] Vladimir Batagelj and Ulrik Brandes,
       "Efficient generation of large random networks",
       Phys. Rev. E, 71, 036113, 2005.
    """
    default = nx.DiGraph if directed else nx.Graph
    create_using = check_create_using(
        create_using, directed=directed, multigraph=False, default=default
    )
    # Degenerate probabilities: delegate to the quadratic generator, which
    # handles p <= 0 (empty) and p >= 1 (complete) directly.
    if p <= 0 or p >= 1:
        return nx.gnp_random_graph(
            n, p, seed=seed, directed=directed, create_using=create_using
        )

    G = empty_graph(n, create_using=create_using)

    # Geometric skipping (Batagelj & Brandes): jump straight between
    # successful edge trials instead of testing every candidate pair.
    log_q = math.log(1.0 - p)

    if directed:
        # First pass adds the arcs (w, v) with w < v.
        v = 1
        w = -1
        while v < n:
            gap = math.log(1.0 - seed.random())
            w = w + 1 + int(gap / log_q)
            while w >= v and v < n:
                w = w - v
                v = v + 1
            if v < n:
                G.add_edge(w, v)

    # Nodes in graph are from 0,n-1 (start with v as the second node index).
    # This pass adds the pairs (v, w) with w < v; for undirected graphs it
    # is the only pass.
    v = 1
    w = -1
    while v < n:
        gap = math.log(1.0 - seed.random())
        w = w + 1 + int(gap / log_q)
        while w >= v and v < n:
            w = w - v
            v = v + 1
        if v < n:
            G.add_edge(v, w)
    return G
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
@py_random_state(2)
@nx._dispatchable(graphs=None, returns_graph=True)
def gnp_random_graph(n, p, seed=None, directed=False, *, create_using=None):
    """Returns a $G_{n,p}$ random graph, also known as an Erdős-Rényi graph
    or a binomial graph.

    The $G_{n,p}$ model chooses each of the possible edges with probability $p$.

    Parameters
    ----------
    n : int
        The number of nodes.
    p : float
        Probability for edge creation.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    directed : bool, optional (default=False)
        If True, this function returns a directed graph.
    create_using : Graph constructor, optional (default=nx.Graph or nx.DiGraph)
        Graph type to create. If graph instance, then cleared before populated.
        Multigraph types are not supported and raise a ``NetworkXError``.
        By default NetworkX Graph or DiGraph are used depending on `directed`.

    See Also
    --------
    fast_gnp_random_graph

    Notes
    -----
    This algorithm [2]_ runs in $O(n^2)$ time. For sparse graphs (that is, for
    small values of $p$), :func:`fast_gnp_random_graph` is a faster algorithm.

    :func:`binomial_graph` and :func:`erdos_renyi_graph` are
    aliases for :func:`gnp_random_graph`.

    >>> nx.binomial_graph is nx.gnp_random_graph
    True
    >>> nx.erdos_renyi_graph is nx.gnp_random_graph
    True

    References
    ----------
    .. [1] P. Erdős and A. Rényi, On Random Graphs, Publ. Math. 6, 290 (1959).
    .. [2] E. N. Gilbert, Random Graphs, Ann. Math. Stat., 30, 1141 (1959).
    """
    default = nx.DiGraph if directed else nx.Graph
    create_using = check_create_using(
        create_using, directed=directed, multigraph=False, default=default
    )
    if p >= 1:
        return complete_graph(n, create_using=create_using)

    G = nx.empty_graph(n, create_using=create_using)
    if p <= 0:
        return G

    # Ordered pairs for digraphs, unordered pairs otherwise; each candidate
    # pair becomes an edge independently with probability p.
    pair_iter = itertools.permutations if directed else itertools.combinations
    for candidate in pair_iter(range(n), 2):
        if seed.random() < p:
            G.add_edge(*candidate)
    return G
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
# Aliases under the names commonly used in the literature for G(n, p).
binomial_graph = gnp_random_graph
erdos_renyi_graph = gnp_random_graph
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
@py_random_state(2)
@nx._dispatchable(graphs=None, returns_graph=True)
def dense_gnm_random_graph(n, m, seed=None, *, create_using=None):
    """Returns a $G_{n,m}$ random graph.

    In the $G_{n,m}$ model, a graph is chosen uniformly at random from the set
    of all graphs with $n$ nodes and $m$ edges.

    This algorithm should be faster than :func:`gnm_random_graph` for dense
    graphs.

    Parameters
    ----------
    n : int
        The number of nodes.
    m : int
        The number of edges.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    create_using : Graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
        Multigraph and directed types are not supported and raise a ``NetworkXError``.

    See Also
    --------
    gnm_random_graph

    Notes
    -----
    Algorithm by Keith M. Briggs Mar 31, 2006.
    Inspired by Knuth's Algorithm S (Selection sampling technique),
    in section 3.4.2 of [1]_.

    References
    ----------
    .. [1] Donald E. Knuth, The Art of Computer Programming,
       Volume 2/Seminumerical algorithms, Third Edition, Addison-Wesley, 1997.
    """
    create_using = check_create_using(create_using, directed=False, multigraph=False)
    mmax = n * (n - 1) // 2
    if m >= mmax:
        return complete_graph(n, create_using)
    G = empty_graph(n, create_using)

    # Nothing to sample: no candidate pairs (n <= 1) or no edges requested
    # (m <= 0). Without this guard the selection loop below would exhaust
    # every pair without ever reaching k == m and then call
    # seed.randrange(0), raising ValueError.
    if n == 1 or m <= 0:
        return G

    u = 0
    v = 1
    t = 0  # number of candidate pairs examined so far
    k = 0  # number of edges added so far
    while True:
        # Knuth's Algorithm S: select the current pair (u, v) with
        # probability (m - k) / (mmax - t), which yields a uniformly random
        # m-subset of all node pairs.
        if seed.randrange(mmax - t) < m - k:
            G.add_edge(u, v)
            k += 1
            if k == m:
                return G
        t += 1
        v += 1
        if v == n:  # go to next row of adjacency matrix
            u += 1
            v = u + 1
|
| 254 |
+
|
| 255 |
+
@py_random_state(2)
@nx._dispatchable(graphs=None, returns_graph=True)
def gnm_random_graph(n, m, seed=None, directed=False, *, create_using=None):
    """Returns a $G_{n,m}$ random graph.

    A graph is drawn uniformly at random from the set of all graphs with
    $n$ nodes and $m$ edges.  This rejection-sampling approach should be
    faster than :func:`dense_gnm_random_graph` for sparse graphs.

    Parameters
    ----------
    n : int
        The number of nodes.
    m : int
        The number of edges.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    directed : bool, optional (default=False)
        If True return a directed graph
    create_using : Graph constructor, optional (default=nx.Graph or nx.DiGraph)
        Graph type to create. If graph instance, then cleared before populated.
        Multigraph types are not supported and raise a ``NetworkXError``.
        By default NetworkX Graph or DiGraph are used depending on `directed`.

    See also
    --------
    dense_gnm_random_graph
    """
    default = nx.DiGraph if directed else nx.Graph
    create_using = check_create_using(
        create_using, directed=directed, multigraph=False, default=default
    )
    # A single node admits no edges at all.
    if n == 1:
        return nx.empty_graph(n, create_using=create_using)
    max_edges = n * (n - 1) if directed else n * (n - 1) / 2.0
    # Requesting every possible edge (or more) yields the complete graph.
    if m >= max_edges:
        return complete_graph(n, create_using=create_using)

    G = nx.empty_graph(n, create_using=create_using)
    nodes = list(G)
    added = 0
    # Rejection sampling: draw random endpoint pairs, discarding self-loops
    # and duplicate edges, until exactly m distinct edges have been placed.
    while added < m:
        u = seed.choice(nodes)
        v = seed.choice(nodes)
        if u == v or G.has_edge(u, v):
            continue
        G.add_edge(u, v)
        added += 1
    return G
|
| 310 |
+
|
| 311 |
+
|
| 312 |
+
@py_random_state(3)
@nx._dispatchable(graphs=None, returns_graph=True)
def newman_watts_strogatz_graph(n, k, p, seed=None, *, create_using=None):
    """Returns a Newman–Watts–Strogatz small-world graph.

    Parameters
    ----------
    n : int
        The number of nodes.
    k : int
        Each node is joined with its `k` nearest neighbors in a ring
        topology.
    p : float
        The probability of adding a new edge for each edge.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    create_using : Graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
        Multigraph and directed types are not supported and raise a ``NetworkXError``.

    Notes
    -----
    First create a ring over $n$ nodes [1]_, joining each node to its $k$
    nearest neighbors (or $k - 1$ if $k$ is odd).  Then, for each edge
    $(u, v)$ of that lattice, with probability $p$ add a shortcut edge
    $(u, w)$ to a randomly chosen existing node $w$.  In contrast with
    :func:`watts_strogatz_graph`, no edges are removed.

    See Also
    --------
    watts_strogatz_graph

    References
    ----------
    .. [1] M. E. J. Newman and D. J. Watts,
       Renormalization group analysis of the small-world network model,
       Physics Letters A, 263, 341, 1999.
       https://doi.org/10.1016/S0375-9601(99)00757-4
    """
    create_using = check_create_using(create_using, directed=False, multigraph=False)
    if k > n:
        raise nx.NetworkXError("k>=n, choose smaller k or larger n")

    # k == n yields a complete graph.
    if k == n:
        return nx.complete_graph(n, create_using)

    G = empty_graph(n, create_using)
    nodes = list(G.nodes())
    # Ring lattice: link every node to its j-th clockwise neighbor,
    # for j = 1 .. k // 2.
    for j in range(1, k // 2 + 1):
        shifted = nodes[j:] + nodes[0:j]  # rotate the ring by j positions
        for u, v in zip(nodes, shifted):
            G.add_edge(u, v)
    # Shortcut phase: each lattice edge (u, v) spawns, with probability p,
    # an extra edge (u, w) to a randomly chosen existing node w.
    for u, v in list(G.edges()):
        if seed.random() < p:
            w = seed.choice(nodes)
            # Resample until w is neither u nor an existing neighbor of u;
            # give up when u is already connected to every other node.
            saturated = False
            while w == u or G.has_edge(u, w):
                w = seed.choice(nodes)
                if G.degree(u) >= n - 1:
                    saturated = True
                    break  # skip this rewiring
            if not saturated:
                G.add_edge(u, w)
    return G
|
| 385 |
+
|
| 386 |
+
|
| 387 |
+
@py_random_state(3)
@nx._dispatchable(graphs=None, returns_graph=True)
def watts_strogatz_graph(n, k, p, seed=None, *, create_using=None):
    """Returns a Watts–Strogatz small-world graph.

    Parameters
    ----------
    n : int
        The number of nodes
    k : int
        Each node is joined with its `k` nearest neighbors in a ring
        topology.
    p : float
        The probability of rewiring each edge
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    create_using : Graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
        Multigraph and directed types are not supported and raise a ``NetworkXError``.

    See Also
    --------
    newman_watts_strogatz_graph
    connected_watts_strogatz_graph

    Notes
    -----
    First create a ring over $n$ nodes [1]_, joining each node to its $k$
    nearest neighbors (or $k - 1$ if $k$ is odd).  Then, for each lattice
    edge $(u, v)$, with probability $p$ replace it with a new edge
    $(u, w)$ where $w$ is chosen uniformly at random.  Unlike
    :func:`newman_watts_strogatz_graph`, rewiring does not change the edge
    count, and the result is not guaranteed to be connected as in
    :func:`connected_watts_strogatz_graph`.

    References
    ----------
    .. [1] Duncan J. Watts and Steven H. Strogatz,
       Collective dynamics of small-world networks,
       Nature, 393, pp. 440--442, 1998.
    """
    create_using = check_create_using(create_using, directed=False, multigraph=False)
    if k > n:
        raise nx.NetworkXError("k>n, choose smaller k or larger n")

    # k == n gives a complete graph, not a Watts-Strogatz lattice.
    if k == n:
        return nx.complete_graph(n, create_using)

    G = nx.empty_graph(n, create_using=create_using)
    nodes = list(range(n))  # nodes are labeled 0 to n-1
    # Build the ring lattice: connect each node to its j-th neighbor,
    # for j = 1 .. k // 2.
    for j in range(1, k // 2 + 1):
        shifted = nodes[j:] + nodes[0:j]  # rotate the ring by j positions
        G.add_edges_from(zip(nodes, shifted))
    # Rewire: visit edges in the same lattice order (neighbor distance
    # outer, node label inner); no self-loops or multi-edges allowed.
    for j in range(1, k // 2 + 1):
        shifted = nodes[j:] + nodes[0:j]
        for u, v in zip(nodes, shifted):
            if seed.random() >= p:
                continue
            w = seed.choice(nodes)
            # Resample until w is a valid new endpoint; give up when u is
            # already connected to every other node.
            saturated = False
            while w == u or G.has_edge(u, w):
                w = seed.choice(nodes)
                if G.degree(u) >= n - 1:
                    saturated = True
                    break  # skip this rewiring
            if not saturated:
                G.remove_edge(u, v)
                G.add_edge(u, w)
    return G
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
@py_random_state(4)
@nx._dispatchable(graphs=None, returns_graph=True)
def connected_watts_strogatz_graph(n, k, p, tries=100, seed=None, *, create_using=None):
    """Returns a connected Watts–Strogatz small-world graph.

    Repeatedly generates Watts–Strogatz small-world graphs until a
    connected one is produced.  Raises an exception once the maximum
    number of attempts is exhausted.

    Parameters
    ----------
    n : int
        The number of nodes
    k : int
        Each node is joined with its `k` nearest neighbors in a ring
        topology.
    p : float
        The probability of rewiring each edge
    tries : int
        Number of attempts to generate a connected graph.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    create_using : Graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
        Multigraph and directed types are not supported and raise a ``NetworkXError``.

    Raises
    ------
    NetworkXError
        If a connected graph is not produced within `tries` attempts.

    See Also
    --------
    newman_watts_strogatz_graph
    watts_strogatz_graph

    References
    ----------
    .. [1] Duncan J. Watts and Steven H. Strogatz,
       Collective dynamics of small-world networks,
       Nature, 393, pp. 440--442, 1998.
    """
    for _ in range(tries):
        # seed is an RNG, so each call advances its state and produces a
        # different candidate graph.
        candidate = watts_strogatz_graph(n, k, p, seed, create_using=create_using)
        if nx.is_connected(candidate):
            return candidate
    raise nx.NetworkXError("Maximum number of tries exceeded")
|
| 521 |
+
|
| 522 |
+
|
| 523 |
+
@py_random_state(2)
@nx._dispatchable(graphs=None, returns_graph=True)
def random_regular_graph(d, n, seed=None, *, create_using=None):
    r"""Returns a random $d$-regular graph on $n$ nodes.

    A regular graph is a graph where each node has the same number of
    neighbors.  The resulting graph has no self-loops or parallel edges.

    Parameters
    ----------
    d : int
        The degree of each node.
    n : integer
        The number of nodes. The value of $n \times d$ must be even.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    create_using : Graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
        Multigraph and directed types are not supported and raise a ``NetworkXError``.

    Notes
    -----
    The nodes are numbered from $0$ to $n - 1$.  Kim and Vu's paper [2]_
    shows that this algorithm samples in an asymptotically uniform way
    from the space of random graphs when $d = O(n^{1 / 3 - \epsilon})$.

    Raises
    ------
    NetworkXError
        If $n \times d$ is odd or $d$ is greater than or equal to $n$.

    References
    ----------
    .. [1] A. Steger and N. Wormald,
       Generating random regular graphs quickly,
       Probability and Computing 8 (1999), 377-396, 1999.
       https://doi.org/10.1017/S0963548399003867

    .. [2] Jeong Han Kim and Van H. Vu,
       Generating random regular graphs,
       Proceedings of the thirty-fifth ACM symposium on Theory of computing,
       San Diego, CA, USA, pp 213--222, 2003.
       http://portal.acm.org/citation.cfm?id=780542.780576
    """
    create_using = check_create_using(create_using, directed=False, multigraph=False)
    # d stubs per node means n*d stub endpoints; they must pair up evenly.
    if (n * d) % 2 != 0:
        raise nx.NetworkXError("n * d must be even")

    if not 0 <= d < n:
        raise nx.NetworkXError("the 0 <= d < n inequality must be satisfied")

    G = nx.empty_graph(n, create_using=create_using)

    # Degree zero: the empty graph is already 0-regular.
    if d == 0:
        return G

    def _suitable(edges, potential_edges):
        # Report whether at least one pair of leftover stubs could still
        # form a new non-loop, non-duplicate edge.  False means this
        # pairing attempt is stuck and must be restarted.
        if not potential_edges:
            return True
        # Iterating the same dict twice visits keys in the same order
        # (no intervening modifications), so breaking at s1 == s2 means
        # each unordered pair is examined exactly once.
        for s1 in potential_edges:
            for s2 in potential_edges:
                if s1 == s2:
                    break
                lo, hi = (s2, s1) if s1 > s2 else (s1, s2)
                if (lo, hi) not in edges:
                    return True
        return False

    def _try_creation():
        # One run of the Steger-Wormald pairing model: repeatedly shuffle
        # the remaining stubs, pair them up, and keep the valid pairs.
        edges = set()
        stubs = list(range(n)) * d

        while stubs:
            potential_edges = defaultdict(lambda: 0)
            seed.shuffle(stubs)
            it = iter(stubs)
            for s1, s2 in zip(it, it):
                if s1 > s2:
                    s1, s2 = s2, s1
                if s1 != s2 and ((s1, s2) not in edges):
                    edges.add((s1, s2))
                else:
                    # Pair was a loop or duplicate; both stubs go back
                    # into the pool for the next round.
                    potential_edges[s1] += 1
                    potential_edges[s2] += 1

            if not _suitable(edges, potential_edges):
                return None  # failed to find suitable edge set

            stubs = [
                node
                for node, count in potential_edges.items()
                for _ in range(count)
            ]
        return edges

    # Even though a suitable edge set exists, a single pairing run is not
    # guaranteed to find one; retry until it succeeds.
    pairing = _try_creation()
    while pairing is None:
        pairing = _try_creation()
    G.add_edges_from(pairing)

    return G
|
| 641 |
+
|
| 642 |
+
|
| 643 |
+
def _random_subset(seq, m, rng):
|
| 644 |
+
"""Return m unique elements from seq.
|
| 645 |
+
|
| 646 |
+
This differs from random.sample which can return repeated
|
| 647 |
+
elements if seq holds repeated elements.
|
| 648 |
+
|
| 649 |
+
Note: rng is a random.Random or numpy.random.RandomState instance.
|
| 650 |
+
"""
|
| 651 |
+
targets = set()
|
| 652 |
+
while len(targets) < m:
|
| 653 |
+
x = rng.choice(seq)
|
| 654 |
+
targets.add(x)
|
| 655 |
+
return targets
|
| 656 |
+
|
| 657 |
+
|
| 658 |
+
@py_random_state(2)
@nx._dispatchable(graphs=None, returns_graph=True)
def barabasi_albert_graph(n, m, seed=None, initial_graph=None, *, create_using=None):
    """Returns a random graph using Barabási–Albert preferential attachment

    A graph of $n$ nodes is grown by attaching new nodes each with $m$
    edges that are preferentially attached to existing nodes with high degree.

    Parameters
    ----------
    n : int
        Number of nodes
    m : int
        Number of edges to attach from a new node to existing nodes
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    initial_graph : Graph or None (default)
        Initial network for Barabási–Albert algorithm.
        It should be a connected graph for most use cases.
        A copy of `initial_graph` is used.
        If None, starts from a star graph on (m+1) nodes.
    create_using : Graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
        Multigraph and directed types are not supported and raise a ``NetworkXError``.

    Returns
    -------
    G : Graph

    Raises
    ------
    NetworkXError
        If `m` does not satisfy ``1 <= m < n``, or
        the initial graph number of nodes m0 does not satisfy ``m <= m0 <= n``.

    References
    ----------
    .. [1] A. L. Barabási and R. Albert "Emergence of scaling in
       random networks", Science 286, pp 509-512, 1999.
    """
    create_using = check_create_using(create_using, directed=False, multigraph=False)
    if m < 1 or m >= n:
        raise nx.NetworkXError(
            f"Barabási–Albert network must have m >= 1 and m < n, m = {m}, n = {n}"
        )

    if initial_graph is None:
        # Default seed network: star graph on (m + 1) nodes.
        G = star_graph(m, create_using)
    else:
        if len(initial_graph) < m or len(initial_graph) > n:
            raise nx.NetworkXError(
                f"Barabási–Albert initial graph needs between m={m} and n={n} nodes"
            )
        G = initial_graph.copy()

    # Each existing node appears once per adjacent edge, so uniform
    # sampling from this list realizes preferential attachment.
    repeated_nodes = [node for node, deg in G.degree() for _ in range(deg)]
    # Grow the graph one node at a time until it has n nodes.
    for source in range(len(G), n):
        # Choose m unique attachment targets, degree-proportionally.
        targets = _random_subset(repeated_nodes, m, seed)
        G.add_edges_from(zip([source] * m, targets))
        # Each new edge contributes one occurrence per endpoint.
        repeated_nodes.extend(targets)
        repeated_nodes.extend([source] * m)
    return G
|
| 732 |
+
|
| 733 |
+
|
| 734 |
+
@py_random_state(4)
@nx._dispatchable(graphs=None, returns_graph=True)
def dual_barabasi_albert_graph(
    n, m1, m2, p, seed=None, initial_graph=None, *, create_using=None
):
    """Returns a random graph using dual Barabási–Albert preferential attachment

    A graph of $n$ nodes is grown by attaching new nodes each with either $m_1$
    edges (with probability $p$) or $m_2$ edges (with probability $1-p$) that
    are preferentially attached to existing nodes with high degree.

    Parameters
    ----------
    n : int
        Number of nodes
    m1 : int
        Number of edges to link each new node to existing nodes with probability $p$
    m2 : int
        Number of edges to link each new node to existing nodes with probability $1-p$
    p : float
        The probability of attaching $m_1$ edges (as opposed to $m_2$ edges)
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    initial_graph : Graph or None (default)
        Initial network for Barabási–Albert algorithm.
        A copy of `initial_graph` is used.
        It should be connected for most use cases.
        If None, starts from a star graph on max(m1, m2) + 1 nodes.
    create_using : Graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
        Multigraph and directed types are not supported and raise a ``NetworkXError``.

    Returns
    -------
    G : Graph

    Raises
    ------
    NetworkXError
        If `m1` and `m2` do not satisfy ``1 <= m1,m2 < n``, or
        `p` does not satisfy ``0 <= p <= 1``, or
        the initial graph number of nodes m0 does not satisfy m1, m2 <= m0 <= n.

    References
    ----------
    .. [1] N. Moshiri "The dual-Barabasi-Albert model", arXiv:1810.10538.
    """
    create_using = check_create_using(create_using, directed=False, multigraph=False)
    if m1 < 1 or m1 >= n:
        raise nx.NetworkXError(
            f"Dual Barabási–Albert must have m1 >= 1 and m1 < n, m1 = {m1}, n = {n}"
        )
    if m2 < 1 or m2 >= n:
        raise nx.NetworkXError(
            f"Dual Barabási–Albert must have m2 >= 1 and m2 < n, m2 = {m2}, n = {n}"
        )
    if p < 0 or p > 1:
        raise nx.NetworkXError(
            f"Dual Barabási–Albert network must have 0 <= p <= 1, p = {p}"
        )

    # With p == 0 or p == 1, the model degenerates to plain Barabási–Albert.
    if p == 1:
        return barabasi_albert_graph(n, m1, seed, create_using=create_using)
    elif p == 0:
        return barabasi_albert_graph(n, m2, seed, create_using=create_using)

    if initial_graph is None:
        # Default initial graph : star graph on max(m1, m2) + 1 nodes
        G = star_graph(max(m1, m2), create_using)
    else:
        if len(initial_graph) < max(m1, m2) or len(initial_graph) > n:
            raise nx.NetworkXError(
                f"Barabási–Albert initial graph must have between "
                f"max(m1, m2) = {max(m1, m2)} and n = {n} nodes"
            )
        G = initial_graph.copy()

    # NOTE: the original code assigned ``targets = list(G)`` here; the value
    # was always overwritten (or never used) inside the loop, so the dead
    # assignment has been removed.
    # Each existing node appears once per adjacent edge, so uniform sampling
    # from this list realizes preferential attachment.
    repeated_nodes = [node for node, deg in G.degree() for _ in range(deg)]
    # Grow the graph one node at a time until it has n nodes.
    source = len(G)
    while source < n:
        # Flip the biased coin to pick m1 or m2 edges for this node.
        m = m1 if seed.random() < p else m2
        # Choose m unique attachment targets, degree-proportionally.
        targets = _random_subset(repeated_nodes, m, seed)
        G.add_edges_from(zip([source] * m, targets))
        # Each new edge contributes one occurrence per endpoint.
        repeated_nodes.extend(targets)
        repeated_nodes.extend([source] * m)

        source += 1
    return G
|
| 837 |
+
|
| 838 |
+
|
| 839 |
+
@py_random_state(4)
@nx._dispatchable(graphs=None, returns_graph=True)
def extended_barabasi_albert_graph(n, m, p, q, seed=None, *, create_using=None):
    """Returns an extended Barabási–Albert model graph.

    An extended Barabási–Albert model graph is a random graph constructed
    using preferential attachment. The extended model allows new edges,
    rewired edges or new nodes. Based on the probabilities $p$ and $q$
    with $p + q < 1$, the growing behavior of the graph is determined as:

    1) With $p$ probability, $m$ new edges are added to the graph,
    starting from randomly chosen existing nodes and attached preferentially at the
    other end.

    2) With $q$ probability, $m$ existing edges are rewired
    by randomly choosing an edge and rewiring one end to a preferentially chosen node.

    3) With $(1 - p - q)$ probability, $m$ new nodes are added to the graph
    with edges attached preferentially.

    When $p = q = 0$, the model behaves just like the Barabási–Albert model.

    Parameters
    ----------
    n : int
        Number of nodes
    m : int
        Number of edges with which a new node attaches to existing nodes
    p : float
        Probability value for adding an edge between existing nodes. p + q < 1
    q : float
        Probability value of rewiring of existing edges. p + q < 1
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    create_using : Graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
        Multigraph and directed types are not supported and raise a ``NetworkXError``.

    Returns
    -------
    G : Graph

    Raises
    ------
    NetworkXError
        If `m` does not satisfy ``1 <= m < n``, or ``p + q >= 1``

    References
    ----------
    .. [1] Albert, R., & Barabási, A. L. (2000)
       Topology of evolving networks: local events and universality
       Physical review letters, 85(24), 5234.
    """
    create_using = check_create_using(create_using, directed=False, multigraph=False)
    if m < 1 or m >= n:
        msg = f"Extended Barabasi-Albert network needs m>=1 and m<n, m={m}, n={n}"
        raise nx.NetworkXError(msg)
    if p + q >= 1:
        msg = f"Extended Barabasi-Albert network needs p + q <= 1, p={p}, q={q}"
        raise nx.NetworkXError(msg)

    # Add m initial nodes (m0 in barabasi-speak)
    G = empty_graph(m, create_using)

    # List of nodes to represent the preferential attachment random selection.
    # At the creation of the graph, all nodes are added to the list
    # so that even nodes that are not connected have a chance to get selected,
    # for rewiring and adding of edges.
    # With each new edge, nodes at the ends of the edge are added to the list.
    attachment_preference = []
    attachment_preference.extend(range(m))

    # Start adding the other n-m nodes. The first node is m.
    new_node = m
    while new_node < n:
        # One uniform draw decides which of the three growth events
        # (add edges / rewire edges / add node) happens this iteration.
        a_probability = seed.random()

        # Total number of edges of a Clique of all the nodes
        clique_degree = len(G) - 1
        clique_size = (len(G) * clique_degree) / 2

        # Adding m new edges, if there is room to add them
        if a_probability < p and G.size() <= clique_size - m:
            # Select the nodes where an edge can be added
            # (i.e. not yet connected to every other node)
            eligible_nodes = [nd for nd, deg in G.degree() if deg < clique_degree]
            for i in range(m):
                # Choosing a random source node from eligible_nodes
                src_node = seed.choice(eligible_nodes)

                # Picking a possible node that is not 'src_node' or
                # neighbor with 'src_node', with preferential attachment
                prohibited_nodes = list(G[src_node])
                prohibited_nodes.append(src_node)
                # This will raise an exception if the sequence is empty
                dest_node = seed.choice(
                    [nd for nd in attachment_preference if nd not in prohibited_nodes]
                )
                # Adding the new edge
                G.add_edge(src_node, dest_node)

                # Appending both nodes to add to their preferential attachment
                attachment_preference.append(src_node)
                attachment_preference.append(dest_node)

                # Adjusting the eligible nodes. Degree may be saturated.
                if G.degree(src_node) == clique_degree:
                    eligible_nodes.remove(src_node)
                if G.degree(dest_node) == clique_degree and dest_node in eligible_nodes:
                    eligible_nodes.remove(dest_node)

        # Rewiring m edges, if there are enough edges
        elif p <= a_probability < (p + q) and m <= G.size() < clique_size:
            # Selecting nodes that have at least 1 edge but that are not
            # fully connected to ALL other nodes (center of star).
            # These nodes are the pivot nodes of the edges to rewire
            eligible_nodes = [nd for nd, deg in G.degree() if 0 < deg < clique_degree]
            for i in range(m):
                # Choosing a random source node
                node = seed.choice(eligible_nodes)

                # The available nodes do have a neighbor at least.
                nbr_nodes = list(G[node])

                # Choosing the other end that will get detached
                src_node = seed.choice(nbr_nodes)

                # Picking a target node that is not 'node' or
                # neighbor with 'node', with preferential attachment
                nbr_nodes.append(node)
                dest_node = seed.choice(
                    [nd for nd in attachment_preference if nd not in nbr_nodes]
                )
                # Rewire: detach (node, src_node), attach (node, dest_node)
                G.remove_edge(node, src_node)
                G.add_edge(node, dest_node)

                # Adjusting the preferential attachment list:
                # src_node lost an edge occurrence, dest_node gained one
                attachment_preference.remove(src_node)
                attachment_preference.append(dest_node)

                # Adjusting the eligible nodes.
                # nodes may be saturated or isolated.
                if G.degree(src_node) == 0 and src_node in eligible_nodes:
                    eligible_nodes.remove(src_node)
                if dest_node in eligible_nodes:
                    if G.degree(dest_node) == clique_degree:
                        eligible_nodes.remove(dest_node)
                else:
                    if G.degree(dest_node) == 1:
                        eligible_nodes.append(dest_node)

        # Adding new node with m edges
        else:
            # Select the edges' nodes by preferential attachment
            targets = _random_subset(attachment_preference, m, seed)
            G.add_edges_from(zip([new_node] * m, targets))

            # Add one node to the list for each new edge just created.
            attachment_preference.extend(targets)
            # The new node has m edges to it, plus itself: m + 1
            attachment_preference.extend([new_node] * (m + 1))
            # new_node only advances when a node was actually added;
            # the edge-add / rewire branches repeat with the same new_node.
            new_node += 1
    return G
|
| 1003 |
+
|
| 1004 |
+
|
| 1005 |
+
@py_random_state(3)
@nx._dispatchable(graphs=None, returns_graph=True)
def powerlaw_cluster_graph(n, m, p, seed=None, *, create_using=None):
    """Holme and Kim algorithm for growing graphs with powerlaw
    degree distribution and approximate average clustering.

    Parameters
    ----------
    n : int
        the number of nodes
    m : int
        the number of random edges to add for each new node
    p : float,
        Probability of adding a triangle after adding a random edge
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    create_using : Graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
        Multigraph and directed types are not supported and raise a ``NetworkXError``.

    Notes
    -----
    The average clustering has a hard time getting above a certain
    cutoff that depends on `m`. This cutoff is often quite low. The
    transitivity (fraction of triangles to possible triangles) seems to
    decrease with network size.

    It is essentially the Barabási–Albert (BA) growth model with an
    extra step that each random edge is followed by a chance of
    making an edge to one of its neighbors too (and thus a triangle).

    This algorithm improves on BA in the sense that it enables a
    higher average clustering to be attained if desired.

    It seems possible to have a disconnected graph with this algorithm
    since the initial `m` nodes may not be all linked to a new node
    on the first iteration like the BA model.

    Raises
    ------
    NetworkXError
        If `m` does not satisfy ``1 <= m <= n`` or `p` does not
        satisfy ``0 <= p <= 1``.

    References
    ----------
    .. [1] P. Holme and B. J. Kim,
       "Growing scale-free networks with tunable clustering",
       Phys. Rev. E, 65, 026107, 2002.
    """
    create_using = check_create_using(create_using, directed=False, multigraph=False)
    if m < 1 or n < m:
        # The guard rejects m outside [1, n]; the message states that same
        # range (the previous message incorrectly claimed "m>1 and m<n",
        # excluding the valid boundary values m == 1 and m == n).
        raise nx.NetworkXError(f"NetworkXError must have 1<=m<=n, m={m},n={n}")

    if p > 1 or p < 0:
        raise nx.NetworkXError(f"NetworkXError p must be in [0,1], p={p}")

    G = empty_graph(m, create_using)  # add m initial nodes (m0 in barabasi-speak)
    repeated_nodes = list(G)  # list of existing nodes to sample from
    # with nodes repeated once for each adjacent edge
    source = m  # next node is m
    while source < n:  # Now add the other n-1 nodes
        possible_targets = _random_subset(repeated_nodes, m, seed)
        # do one preferential attachment for new node
        target = possible_targets.pop()
        G.add_edge(source, target)
        repeated_nodes.append(target)  # add one node to list for each new link
        count = 1
        while count < m:  # add m-1 more new links
            if seed.random() < p:  # clustering step: add triangle
                # neighbors of `target` not already linked to `source`
                neighborhood = [
                    nbr
                    for nbr in G.neighbors(target)
                    if not G.has_edge(source, nbr) and nbr != source
                ]
                if neighborhood:  # if there is a neighbor without a link
                    nbr = seed.choice(neighborhood)
                    G.add_edge(source, nbr)  # add triangle
                    repeated_nodes.append(nbr)
                    count = count + 1
                    continue  # go to top of while loop
            # else do preferential attachment step if above fails
            target = possible_targets.pop()
            G.add_edge(source, target)
            repeated_nodes.append(target)
            count = count + 1

        repeated_nodes.extend([source] * m)  # add source node to list m times
        source += 1
    return G
|
| 1096 |
+
|
| 1097 |
+
|
| 1098 |
+
@py_random_state(3)
@nx._dispatchable(graphs=None, returns_graph=True)
def random_lobster(n, p1, p2, seed=None, *, create_using=None):
    """Returns a random lobster graph.

    A lobster is a tree that reduces to a caterpillar when pruning all
    leaf nodes. A caterpillar is a tree that reduces to a path graph
    when pruning all leaf nodes; setting `p2` to zero produces a caterpillar.

    This implementation iterates on the probabilities `p1` and `p2` to add
    edges at levels 1 and 2, respectively. Graphs are therefore constructed
    iteratively with uniform randomness at each level rather than being selected
    uniformly at random from the set of all possible lobsters.

    Parameters
    ----------
    n : int
        The expected number of nodes in the backbone
    p1 : float
        Probability of adding an edge to the backbone
    p2 : float
        Probability of adding an edge one level beyond backbone
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    create_using : Graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
        Multigraph and directed types are not supported and raise a ``NetworkXError``.

    Raises
    ------
    NetworkXError
        If `p1` or `p2` parameters are >= 1 because the while loops would never finish.
    """
    create_using = check_create_using(create_using, directed=False, multigraph=False)
    # Negative probabilities are folded to their absolute value rather than
    # rejected.
    p1, p2 = abs(p1), abs(p2)
    if any(p >= 1 for p in [p1, p2]):
        raise nx.NetworkXError("Probability values for `p1` and `p2` must both be < 1.")

    # a necessary ingredient in any self-respecting graph library
    # Backbone length: 2 * U[0,1) * n rounded to nearest int, so its
    # expectation is approximately n.
    llen = int(2 * seed.random() * n + 0.5)
    L = path_graph(llen, create_using)
    # build caterpillar: add edges to path graph with probability p1
    current_node = llen - 1  # highest node label used so far
    # NOTE(review): the loop variable shadows parameter `n`, which is not
    # needed past this point.
    for n in range(llen):
        while seed.random() < p1:  # add fuzzy caterpillar parts
            current_node += 1
            L.add_edge(n, current_node)
            cat_node = current_node
            while seed.random() < p2:  # add crunchy lobster bits
                current_node += 1
                L.add_edge(cat_node, current_node)
    return L  # voila, un lobster!
|
| 1151 |
+
|
| 1152 |
+
|
| 1153 |
+
@py_random_state(1)
@nx._dispatchable(graphs=None, returns_graph=True)
def random_shell_graph(constructor, seed=None, *, create_using=None):
    """Returns a random shell graph for the constructor given.

    Parameters
    ----------
    constructor : list of three-tuples
        Represents the parameters for a shell, starting at the center
        shell.  Each element of the list must be of the form `(n, m,
        d)`, where `n` is the number of nodes in the shell, `m` is
        the number of edges in the shell, and `d` is the ratio of
        inter-shell (next) edges to intra-shell edges. If `d` is zero,
        there will be no intra-shell edges, and if `d` is one there
        will be all possible intra-shell edges.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    create_using : Graph constructor, optional (default=nx.Graph)
        Graph type to create. Graph instances are not supported.
        Multigraph and directed types are not supported and raise a ``NetworkXError``.

    Examples
    --------
    >>> constructor = [(10, 20, 0.8), (20, 40, 0.8)]
    >>> G = nx.random_shell_graph(constructor)

    """
    create_using = check_create_using(create_using, directed=False, multigraph=False)
    G = empty_graph(0, create_using)

    glist = []  # the per-shell gnm graphs, in order
    intra_edges = []  # edge budget to add between shell i and shell i+1
    nnodes = 0  # running node count; used to relabel each shell's nodes
    # create gnm graphs for each shell
    # NOTE(review): the names look swapped relative to the docstring --
    # `inter_edges` is used *within* a shell and `intra_edges` *between*
    # shells -- confirm before renaming (behavior is unaffected).
    for n, m, d in constructor:
        inter_edges = int(m * d)
        intra_edges.append(m - inter_edges)
        g = nx.convert_node_labels_to_integers(
            gnm_random_graph(n, inter_edges, seed=seed, create_using=G.__class__),
            first_label=nnodes,
        )
        glist.append(g)
        nnodes += n
        G = nx.operators.union(G, g)

    # connect the shells randomly
    for gi in range(len(glist) - 1):
        nlist1 = list(glist[gi])
        nlist2 = list(glist[gi + 1])
        total_edges = intra_edges[gi]
        edge_count = 0
        # Rejection-sample node pairs until enough distinct new edges are
        # placed between the two shells.
        while edge_count < total_edges:
            u = seed.choice(nlist1)
            v = seed.choice(nlist2)
            if u == v or G.has_edge(u, v):
                continue
            else:
                G.add_edge(u, v)
                edge_count = edge_count + 1
    return G
|
| 1214 |
+
|
| 1215 |
+
|
| 1216 |
+
@py_random_state(2)
@nx._dispatchable(graphs=None, returns_graph=True)
def random_powerlaw_tree(n, gamma=3, seed=None, tries=100, *, create_using=None):
    """Returns a tree with a power law degree distribution.

    Parameters
    ----------
    n : int
        The number of nodes.
    gamma : float
        Exponent of the power law.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    tries : int
        Number of attempts to adjust the sequence to make it a tree.
    create_using : Graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
        Multigraph and directed types are not supported and raise a ``NetworkXError``.

    Raises
    ------
    NetworkXError
        If no valid sequence is found within the maximum number of
        attempts.

    Notes
    -----
    A trial power law degree sequence is drawn, then entries are swapped
    with fresh power-law draws until the sequence is realizable as a tree
    (i.e. the implied number of edges is one less than the number of nodes).
    """
    create_using = check_create_using(create_using, directed=False, multigraph=False)
    # random_powerlaw_tree_sequence raises NetworkXError once `tries`
    # attempts are exhausted.
    degree_seq = random_powerlaw_tree_sequence(n, gamma=gamma, seed=seed, tries=tries)
    return degree_sequence_tree(degree_seq, create_using)
|
| 1255 |
+
|
| 1256 |
+
|
| 1257 |
+
@py_random_state(2)
@nx._dispatchable(graphs=None)
def random_powerlaw_tree_sequence(n, gamma=3, seed=None, tries=100):
    """Returns a degree sequence for a tree with a power law distribution.

    Parameters
    ----------
    n : int,
        The number of nodes.
    gamma : float
        Exponent of the power law.
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    tries : int
        Number of attempts to adjust the sequence to make it a tree.

    Raises
    ------
    NetworkXError
        If no valid sequence is found within the maximum number of
        attempts.

    Notes
    -----
    A trial power law degree sequence is chosen and then elements are
    swapped with new elements from a power law distribution until
    the sequence makes a tree (by checking, for example, that the number of
    edges is one smaller than the number of nodes).

    """
    # get trial sequence
    z = nx.utils.powerlaw_sequence(n, exponent=gamma, seed=seed)
    # round to integer values in the range [0,n]
    zseq = [min(n, max(round(s), 0)) for s in z]

    # another sequence to swap values from
    z = nx.utils.powerlaw_sequence(tries, exponent=gamma, seed=seed)
    # round to integer values in the range [0,n]
    swap = [min(n, max(round(s), 0)) for s in z]

    # NOTE(review): `swap` is popped from the end while it is being iterated,
    # so roughly tries/2 swap attempts are performed rather than `tries` --
    # confirm whether this is intentional before changing it (seeded outputs
    # depend on the exact sequence of draws).
    for deg in swap:
        # If this degree sequence can be the degree sequence of a tree, return
        # it. It can be a tree if the number of edges is one fewer than the
        # number of nodes, or in other words, `n - sum(zseq) / 2 == 1`. We
        # use an equivalent condition below that avoids floating point
        # operations.
        if 2 * n - sum(zseq) == 2:
            return zseq
        index = seed.randint(0, n - 1)
        zseq[index] = swap.pop()

    raise nx.NetworkXError(
        f"Exceeded max ({tries}) attempts for a valid tree sequence."
    )
|
| 1312 |
+
|
| 1313 |
+
|
| 1314 |
+
@py_random_state(3)
@nx._dispatchable(graphs=None, returns_graph=True)
def random_kernel_graph(
    n, kernel_integral, kernel_root=None, seed=None, *, create_using=None
):
    r"""Returns an random graph based on the specified kernel.

    The algorithm chooses each of the $[n(n-1)]/2$ possible edges with
    probability specified by a kernel $\kappa(x,y)$ [1]_.  The kernel
    $\kappa(x,y)$ must be a symmetric (in $x,y$), non-negative,
    bounded function.

    Parameters
    ----------
    n : int
        The number of nodes
    kernel_integral : function
        Function that returns the definite integral of the kernel $\kappa(x,y)$,
        $F(y,a,b) := \int_a^b \kappa(x,y)dx$
    kernel_root: function (optional)
        Function that returns the root $b$ of the equation $F(y,a,b) = r$.
        If None, the root is found using :func:`scipy.optimize.brentq`
        (this requires SciPy).
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.
    create_using : Graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
        Multigraph and directed types are not supported and raise a ``NetworkXError``.

    Notes
    -----
    The kernel is specified through its definite integral which must be
    provided as one of the arguments. If the integral and root of the
    kernel integral can be found in $O(1)$ time then this algorithm runs in
    time $O(n+m)$ where m is the expected number of edges [2]_.

    The nodes are set to integers from $0$ to $n-1$.

    Examples
    --------
    Generate an Erdős–Rényi random graph $G(n,c/n)$, with kernel
    $\kappa(x,y)=c$ where $c$ is the mean expected degree.

    >>> def integral(u, w, z):
    ...     return c * (z - w)
    >>> def root(u, w, r):
    ...     return r / c + w
    >>> c = 1
    >>> graph = nx.random_kernel_graph(1000, integral, root)

    See Also
    --------
    gnp_random_graph
    expected_degree_graph

    References
    ----------
    .. [1] Bollobás, Béla,  Janson, S. and Riordan, O.
       "The phase transition in inhomogeneous random graphs",
       *Random Structures Algorithms*, 31, 3--122, 2007.

    .. [2] Hagberg A, Lemons N (2015),
       "Fast Generation of Sparse Random Kernel Graphs".
       PLoS ONE 10(9): e0135177, 2015. doi:10.1371/journal.pone.0135177
    """
    create_using = check_create_using(create_using, directed=False, multigraph=False)
    if kernel_root is None:
        # Fall back to a numeric root-finder over the interval [a, 1];
        # SciPy is imported lazily so it is only required on this path.
        import scipy as sp

        def kernel_root(y, a, r):
            def my_function(b):
                return kernel_integral(y, a, b) - r

            return sp.optimize.brentq(my_function, a, 1)

    graph = nx.empty_graph(create_using=create_using)
    graph.add_nodes_from(range(n))
    # (i, j) index the candidate edge (i-1, j-1); node positions are the
    # normalized values i/n, j/n in (0, 1].
    (i, j) = (1, 1)
    while i < n:
        # Exponential waiting "distance" drawn via inverse transform.
        r = -math.log(1 - seed.random())  # (1-seed.random()) in (0, 1]
        if kernel_integral(i / n, j / n, 1) <= r:
            # No more edges for row i: advance to the next node.
            i, j = i + 1, i + 1
        else:
            # Jump directly to the next neighbor of i implied by r.
            j = math.ceil(n * kernel_root(i / n, j / n, r))
            graph.add_edge(i - 1, j - 1)
    return graph
|
minigpt2/lib/python3.10/site-packages/networkx/generators/small.py
ADDED
|
@@ -0,0 +1,993 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Various small and named graphs, together with some compact generators.
|
| 3 |
+
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
__all__ = [
|
| 7 |
+
"LCF_graph",
|
| 8 |
+
"bull_graph",
|
| 9 |
+
"chvatal_graph",
|
| 10 |
+
"cubical_graph",
|
| 11 |
+
"desargues_graph",
|
| 12 |
+
"diamond_graph",
|
| 13 |
+
"dodecahedral_graph",
|
| 14 |
+
"frucht_graph",
|
| 15 |
+
"heawood_graph",
|
| 16 |
+
"hoffman_singleton_graph",
|
| 17 |
+
"house_graph",
|
| 18 |
+
"house_x_graph",
|
| 19 |
+
"icosahedral_graph",
|
| 20 |
+
"krackhardt_kite_graph",
|
| 21 |
+
"moebius_kantor_graph",
|
| 22 |
+
"octahedral_graph",
|
| 23 |
+
"pappus_graph",
|
| 24 |
+
"petersen_graph",
|
| 25 |
+
"sedgewick_maze_graph",
|
| 26 |
+
"tetrahedral_graph",
|
| 27 |
+
"truncated_cube_graph",
|
| 28 |
+
"truncated_tetrahedron_graph",
|
| 29 |
+
"tutte_graph",
|
| 30 |
+
]
|
| 31 |
+
|
| 32 |
+
from functools import wraps
|
| 33 |
+
|
| 34 |
+
import networkx as nx
|
| 35 |
+
from networkx.exception import NetworkXError
|
| 36 |
+
from networkx.generators.classic import (
|
| 37 |
+
complete_graph,
|
| 38 |
+
cycle_graph,
|
| 39 |
+
empty_graph,
|
| 40 |
+
path_graph,
|
| 41 |
+
)
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def _raise_on_directed(func):
    """
    A decorator which inspects the `create_using` argument and raises a
    NetworkX exception when `create_using` is a DiGraph (class or instance) for
    graph generators that do not support directed outputs.
    """

    @wraps(func)
    def inner(*args, **kwargs):
        create_using = kwargs.get("create_using")
        if create_using is not None:
            # Instantiate an empty graph of the requested type to test it.
            probe = nx.empty_graph(create_using=create_using)
            if probe.is_directed():
                raise NetworkXError("Directed Graph not supported")
        return func(*args, **kwargs)

    return inner
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def LCF_graph(n, shift_list, repeats, create_using=None):
    """
    Return the cubic graph specified in LCF notation.

    LCF (Lederberg-Coxeter-Fruchte) notation [1]_ is a compressed
    notation used in the generation of various cubic Hamiltonian
    graphs of high symmetry. See, for example, `dodecahedral_graph`,
    `desargues_graph`, `heawood_graph` and `pappus_graph`.

    Nodes are drawn from ``range(n)``. Each node ``n_i`` is connected with
    node ``n_i + shift % n`` where ``shift`` is given by cycling through
    the input `shift_list` `repeat` s times.

    Parameters
    ----------
    n : int
        The starting graph is the `n`-cycle with nodes ``0, ..., n-1``.
        The null graph is returned if `n` < 1.

    shift_list : list
        A list of integer shifts mod `n`, ``[s1, s2, .., sk]``

    repeats : int
        Integer specifying the number of times that shifts in `shift_list`
        are successively applied to each current node in the n-cycle
        to generate an edge between ``n_current`` and ``n_current + shift mod n``.

    Returns
    -------
    G : Graph
        A graph instance created from the specified LCF notation.

    Examples
    --------
    The utility graph $K_{3,3}$

    >>> G = nx.LCF_graph(6, [3, -3], 3)
    >>> G.edges()
    EdgeView([(0, 1), (0, 5), (0, 3), (1, 2), (1, 4), (2, 3), (2, 5), (3, 4), (4, 5)])

    The Heawood graph:

    >>> G = nx.LCF_graph(14, [5, -5], 7)
    >>> nx.is_isomorphic(G, nx.heawood_graph())
    True

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/LCF_notation

    """
    if n <= 0:
        return empty_graph(0, create_using)

    # Start from the n-cycle; the shifts only add chords to it.
    G = cycle_graph(n, create_using)
    if G.is_directed():
        raise NetworkXError("Directed Graph not supported")
    G.name = "LCF_graph"
    nodes = sorted(G)

    # Apply the shift list `repeats` times; each application may add an
    # edge that already exists, which is harmless in a simple graph.
    num_shifts = len(shift_list)
    for idx in range(repeats * num_shifts):
        shift = shift_list[idx % num_shifts]  # cycle through shift_list
        src = nodes[idx % n]  # cycle repeatedly through nodes
        dst = nodes[(idx + shift) % n]
        G.add_edge(src, dst)
    return G
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
# -------------------------------------------------------------------------------
|
| 139 |
+
# Various small and named graphs
|
| 140 |
+
# -------------------------------------------------------------------------------
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
@_raise_on_directed
@nx._dispatchable(graphs=None, returns_graph=True)
def bull_graph(create_using=None):
    """
    Returns the Bull Graph

    The Bull Graph has 5 nodes and 5 edges. It is a planar undirected
    graph in the form of a triangle with two disjoint pendant edges [1]_
    The name comes from the triangle and pendant edges representing
    respectively the body and legs of a bull.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        A bull graph with 5 nodes

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Bull_graph.

    """
    # Triangle 0-1-2 with pendant nodes 3 (off 1) and 4 (off 2).
    adjacency = {
        0: [1, 2],
        1: [0, 2, 3],
        2: [0, 1, 4],
        3: [1],
        4: [2],
    }
    G = nx.from_dict_of_lists(adjacency, create_using=create_using)
    G.name = "Bull Graph"
    return G
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
@_raise_on_directed
@nx._dispatchable(graphs=None, returns_graph=True)
def chvatal_graph(create_using=None):
    """
    Returns the Chvátal Graph

    The Chvátal Graph is an undirected graph with 12 nodes and 24 edges [1]_.
    It has 370 distinct (directed) Hamiltonian cycles, giving a unique generalized
    LCF notation of order 4, two of order 6 , and 43 of order 1 [2]_.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        The Chvátal graph with 12 nodes and 24 edges

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Chv%C3%A1tal_graph
    .. [2] https://mathworld.wolfram.com/ChvatalGraph.html

    """
    # Adjacency lists each edge once (from its lower-numbered endpoint).
    adjacency = {
        0: [1, 4, 6, 9],
        1: [2, 5, 7],
        2: [3, 6, 8],
        3: [4, 7, 9],
        4: [5, 8],
        5: [10, 11],
        6: [10, 11],
        7: [8, 11],
        8: [10],
        9: [10, 11],
    }
    G = nx.from_dict_of_lists(adjacency, create_using=create_using)
    G.name = "Chvatal Graph"
    return G
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
@_raise_on_directed
@nx._dispatchable(graphs=None, returns_graph=True)
def cubical_graph(create_using=None):
    """
    Returns the 3-regular Platonic Cubical Graph

    The skeleton of the cube (the nodes and edges) form a graph, with 8
    nodes, and 12 edges. It is a special case of the hypercube graph.
    It is one of 5 Platonic graphs, each a skeleton of its
    Platonic solid [1]_.
    Such graphs arise in parallel processing in computers.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        A cubical graph with 8 nodes and 12 edges

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Cube#Cubical_graph

    """
    # Each node lists all three of its neighbors (3-regular).
    adjacency = {
        0: [1, 3, 4],
        1: [0, 2, 7],
        2: [1, 3, 6],
        3: [0, 2, 5],
        4: [0, 5, 7],
        5: [3, 4, 6],
        6: [2, 5, 7],
        7: [1, 4, 6],
    }
    G = nx.from_dict_of_lists(adjacency, create_using=create_using)
    G.name = "Platonic Cubical Graph"
    return G
|
| 264 |
+
|
| 265 |
+
|
| 266 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def desargues_graph(create_using=None):
    """Return the Desargues graph.

    The Desargues graph is a non-planar, distance-transitive cubic graph
    with 20 nodes and 30 edges [1]_. It is symmetric and can be written in
    LCF notation as ``[5,-5,9,-9]^5`` [2]_.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        Desargues graph with 20 nodes and 30 edges.

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Desargues_graph
    .. [2] https://mathworld.wolfram.com/DesarguesGraph.html
    """
    # LCF notation [5, -5, 9, -9] repeated 5 times over 20 nodes.
    shifts = [5, -5, 9, -9]
    G = LCF_graph(20, shifts, 5, create_using)
    G.name = "Desargues Graph"
    return G
|
| 294 |
+
|
| 295 |
+
|
| 296 |
+
@_raise_on_directed
@nx._dispatchable(graphs=None, returns_graph=True)
def diamond_graph(create_using=None):
    """Return the Diamond graph.

    The Diamond graph is a planar undirected graph with 4 nodes and 5 edges,
    also known as the double triangle graph or kite graph [1]_.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        Diamond graph with 4 nodes and 5 edges.

    References
    ----------
    .. [1] https://mathworld.wolfram.com/DiamondGraph.html
    """
    # Two triangles sharing the edge (1, 2).
    adjacency = {0: [1, 2], 1: [0, 2, 3], 2: [0, 1, 3], 3: [1, 2]}
    G = nx.from_dict_of_lists(adjacency, create_using=create_using)
    G.name = "Diamond Graph"
    return G
|
| 324 |
+
|
| 325 |
+
|
| 326 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def dodecahedral_graph(create_using=None):
    """Return the Platonic Dodecahedral graph.

    The dodecahedral graph has 20 nodes and 30 edges; it is the skeleton of
    the dodecahedron and one of the 5 Platonic graphs [1]_. In LCF notation
    it is ``[10, 7, 4, -4, -7, 10, -4, 7, -7, 4]^2`` [2]_.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        Dodecahedral graph with 20 nodes and 30 edges.

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Regular_dodecahedron#Dodecahedral_graph
    .. [2] https://mathworld.wolfram.com/DodecahedralGraph.html
    """
    # LCF shift pattern repeated twice over 20 nodes.
    shifts = [10, 7, 4, -4, -7, 10, -4, 7, -7, 4]
    G = LCF_graph(20, shifts, 2, create_using)
    G.name = "Dodecahedral Graph"
    return G
|
| 355 |
+
|
| 356 |
+
|
| 357 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def frucht_graph(create_using=None):
    """Return the Frucht graph.

    The Frucht graph is the smallest cubical graph whose automorphism group
    consists only of the identity element [1]_. It has 12 nodes and 18 edges
    and no nontrivial symmetries; it is planar and Hamiltonian [2]_.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        Frucht graph with 12 nodes and 18 edges.

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Frucht_graph
    .. [2] https://mathworld.wolfram.com/FruchtGraph.html
    """
    # Start from the 7-cycle on nodes 0..6, then attach nodes 7..11.
    G = cycle_graph(7, create_using)
    extra_edges = [
        (0, 7),
        (1, 7),
        (2, 8),
        (3, 9),
        (4, 9),
        (5, 10),
        (6, 10),
        (7, 11),
        (8, 11),
        (8, 9),
        (10, 11),
    ]
    G.add_edges_from(extra_edges)
    G.name = "Frucht Graph"
    return G
|
| 402 |
+
|
| 403 |
+
|
| 404 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def heawood_graph(create_using=None):
    """Return the Heawood graph, a (3,6) cage.

    The Heawood graph is an undirected graph with 14 nodes and 21 edges,
    named after Percy John Heawood [1]_. It is cubic symmetric, nonplanar,
    Hamiltonian, and representable in LCF notation as ``[5,-5]^7`` [2]_.
    It is the unique (3,6)-cage: the cubic graph of girth 6 with the
    minimal number of vertices [3]_.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        Heawood graph with 14 nodes and 21 edges.

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Heawood_graph
    .. [2] https://mathworld.wolfram.com/HeawoodGraph.html
    .. [3] https://www.win.tue.nl/~aeb/graphs/Heawood.html
    """
    # LCF notation [5, -5] repeated 7 times over 14 nodes.
    G = LCF_graph(14, [5, -5], 7, create_using)
    G.name = "Heawood Graph"
    return G
|
| 436 |
+
|
| 437 |
+
|
| 438 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def hoffman_singleton_graph():
    """Return the Hoffman-Singleton graph.

    The Hoffman–Singleton graph is a symmetrical undirected graph with 50
    nodes and 175 edges. All indices lie in ``Z % 5``: the integers mod 5
    [1]_. It is the only regular graph of vertex degree 7, diameter 2, and
    girth 5. It is the unique (7,5)-cage graph and Moore graph, and contains
    many copies of the Petersen graph [2]_.

    Returns
    -------
    G : networkx Graph
        Hoffman–Singleton graph with 50 nodes and 175 edges.

    Notes
    -----
    Constructed from pentagon and pentagram as follows: Take five pentagons
    $P_h$ and five pentagrams $Q_i$. Join vertex $j$ of $P_h$ to vertex
    $h·i+j$ of $Q_i$ [3]_.

    References
    ----------
    .. [1] https://blogs.ams.org/visualinsight/2016/02/01/hoffman-singleton-graph/
    .. [2] https://mathworld.wolfram.com/Hoffman-SingletonGraph.html
    .. [3] https://en.wikipedia.org/wiki/Hoffman%E2%80%93Singleton_graph
    """
    G = nx.Graph()
    for h in range(5):
        for j in range(5):
            # Pentagon P_h: vertex j joined to its two cyclic neighbours.
            G.add_edge(("pentagon", h, j), ("pentagon", h, (j - 1) % 5))
            G.add_edge(("pentagon", h, j), ("pentagon", h, (j + 1) % 5))
            # Pentagram Q_h: vertex j joined to the vertices two steps away.
            G.add_edge(("pentagram", h, j), ("pentagram", h, (j - 2) % 5))
            G.add_edge(("pentagram", h, j), ("pentagram", h, (j + 2) % 5))
            # Join vertex j of pentagon P_h to vertex h*k + j of pentagram Q_k.
            for k in range(5):
                G.add_edge(("pentagon", h, j), ("pentagram", k, (h * k + j) % 5))
    # Relabel tuple nodes to plain integers 0..49.
    G = nx.convert_node_labels_to_integers(G)
    G.name = "Hoffman-Singleton Graph"
    return G
|
| 479 |
+
|
| 480 |
+
|
| 481 |
+
@_raise_on_directed
@nx._dispatchable(graphs=None, returns_graph=True)
def house_graph(create_using=None):
    """Return the House graph (square with triangle on top).

    The house graph is a simple undirected graph with 5 nodes and
    6 edges [1]_.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        House graph in the form of a square with a triangle on top.

    References
    ----------
    .. [1] https://mathworld.wolfram.com/HouseGraph.html
    """
    # Nodes 1-4 form the square base; node 0 is the roof apex.
    adjacency = {0: [1, 2], 1: [0, 3], 2: [0, 3, 4], 3: [1, 2, 4], 4: [2, 3]}
    G = nx.from_dict_of_lists(adjacency, create_using=create_using)
    G.name = "House Graph"
    return G
|
| 510 |
+
|
| 511 |
+
|
| 512 |
+
@_raise_on_directed
@nx._dispatchable(graphs=None, returns_graph=True)
def house_x_graph(create_using=None):
    """Return the House graph with a cross inside the house square.

    The House X-graph is the House graph plus the two edges connecting
    diagonally opposite vertices of the square base. It is also one of the
    two graphs obtained by removing two edges from the pentatope graph [1]_.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        House graph with diagonal vertices connected.

    References
    ----------
    .. [1] https://mathworld.wolfram.com/HouseGraph.html
    """
    # Start from the plain house graph and add the two base diagonals.
    G = house_graph(create_using)
    diagonals = [(0, 3), (1, 2)]
    G.add_edges_from(diagonals)
    G.name = "House-with-X-inside Graph"
    return G
|
| 540 |
+
|
| 541 |
+
|
| 542 |
+
@_raise_on_directed
@nx._dispatchable(graphs=None, returns_graph=True)
def icosahedral_graph(create_using=None):
    """Return the Platonic Icosahedral graph.

    The icosahedral graph has 12 nodes and 30 edges. It is a Platonic graph
    whose nodes have the connectivity of the icosahedron. It is undirected,
    regular and Hamiltonian [1]_.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        Icosahedral graph with 12 nodes and 30 edges.

    References
    ----------
    .. [1] https://mathworld.wolfram.com/IcosahedralGraph.html
    """
    # Each edge listed once under its smaller endpoint; from_dict_of_lists
    # adds the symmetric counterpart.
    adjacency = {
        0: [1, 5, 7, 8, 11],
        1: [2, 5, 6, 8],
        2: [3, 6, 8, 9],
        3: [4, 6, 9, 10],
        4: [5, 6, 10, 11],
        5: [6, 11],
        7: [8, 9, 10, 11],
        8: [9],
        9: [10],
        10: [11],
    }
    G = nx.from_dict_of_lists(adjacency, create_using=create_using)
    G.name = "Platonic Icosahedral Graph"
    return G
|
| 583 |
+
|
| 584 |
+
|
| 585 |
+
@_raise_on_directed
@nx._dispatchable(graphs=None, returns_graph=True)
def krackhardt_kite_graph(create_using=None):
    """Return the Krackhardt Kite Social Network.

    A 10-actor social network introduced by David Krackhardt to illustrate
    different centrality measures [1]_.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        Krackhardt Kite graph with 10 nodes and 18 edges.

    Notes
    -----
    The traditional labeling is:
    Andre=1, Beverley=2, Carol=3, Diane=4,
    Ed=5, Fernando=6, Garth=7, Heather=8, Ike=9, Jane=10.

    References
    ----------
    .. [1] Krackhardt, David. "Assessing the Political Landscape: Structure,
       Cognition, and Power in Organizations". Administrative Science Quarterly.
       35 (2): 342–369. doi:10.2307/2393394. JSTOR 2393394. June 1990.
    """
    # Full adjacency of the kite: densely connected "kite" body (0-6),
    # trailing "tail" 7-8-9.
    adjacency = {
        0: [1, 2, 3, 5],
        1: [0, 3, 4, 6],
        2: [0, 3, 5],
        3: [0, 1, 2, 4, 5, 6],
        4: [1, 3, 6],
        5: [0, 2, 3, 6, 7],
        6: [1, 3, 4, 5, 7],
        7: [5, 6, 8],
        8: [7, 9],
        9: [8],
    }
    G = nx.from_dict_of_lists(adjacency, create_using=create_using)
    G.name = "Krackhardt Kite Social Network"
    return G
|
| 634 |
+
|
| 635 |
+
|
| 636 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def moebius_kantor_graph(create_using=None):
    """Return the Moebius-Kantor graph.

    The Möbius-Kantor graph is the cubic symmetric graph on 16 nodes.
    Its LCF notation is ``[5,-5]^8``, and it is isomorphic to the
    generalized Petersen graph [1]_.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        Moebius-Kantor graph.

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/M%C3%B6bius%E2%80%93Kantor_graph
    """
    # LCF notation [5, -5] repeated 8 times over 16 nodes.
    G = LCF_graph(16, [5, -5], 8, create_using)
    G.name = "Moebius-Kantor Graph"
    return G
|
| 663 |
+
|
| 664 |
+
|
| 665 |
+
@_raise_on_directed
@nx._dispatchable(graphs=None, returns_graph=True)
def octahedral_graph(create_using=None):
    """Return the Platonic Octahedral graph.

    The octahedral graph is the 6-node 12-edge Platonic graph having the
    connectivity of the octahedron [1]_. If 6 couples go to a party, and
    each person shakes hands with every person except his or her partner,
    then this graph describes the set of handshakes that take place; for
    this reason it is also called the cocktail party graph [2]_.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        Octahedral graph.

    References
    ----------
    .. [1] https://mathworld.wolfram.com/OctahedralGraph.html
    .. [2] https://en.wikipedia.org/wiki/Tur%C3%A1n_graph#Special_cases
    """
    # K6 minus a perfect matching: the pairs (0,5), (1,4), (2,3) are
    # the only non-adjacent node pairs.
    adjacency = {0: [1, 2, 3, 4], 1: [2, 3, 5], 2: [4, 5], 3: [4, 5], 4: [5]}
    G = nx.from_dict_of_lists(adjacency, create_using=create_using)
    G.name = "Platonic Octahedral Graph"
    return G
|
| 699 |
+
|
| 700 |
+
|
| 701 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def pappus_graph():
    """Return the Pappus graph.

    The Pappus graph is a cubic symmetric distance-regular graph with 18
    nodes and 27 edges. It is Hamiltonian and can be represented in LCF
    notation as ``[5,7,-7,7,-7,-5]^3`` [1]_.

    Returns
    -------
    G : networkx Graph
        Pappus graph.

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Pappus_graph
    """
    # LCF shift pattern repeated 3 times over 18 nodes.
    shifts = [5, 7, -7, 7, -7, -5]
    G = LCF_graph(18, shifts, 3)
    G.name = "Pappus Graph"
    return G
|
| 722 |
+
|
| 723 |
+
|
| 724 |
+
@_raise_on_directed
@nx._dispatchable(graphs=None, returns_graph=True)
def petersen_graph(create_using=None):
    """Return the Petersen graph.

    The Petersen graph is a cubic, undirected graph with 10 nodes and 15
    edges [1]_. Julius Petersen constructed the graph as the smallest
    counterexample against the claim that a connected bridgeless cubic
    graph has an edge colouring with three colours [2]_.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        Petersen graph.

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Petersen_graph
    .. [2] https://www.win.tue.nl/~aeb/drg/graphs/Petersen.html
    """
    # Outer 5-cycle (0-4), inner 5-node "pentagram" (5-9), plus spokes.
    adjacency = {
        0: [1, 4, 5],
        1: [0, 2, 6],
        2: [1, 3, 7],
        3: [2, 4, 8],
        4: [3, 0, 9],
        5: [0, 7, 8],
        6: [1, 8, 9],
        7: [2, 5, 9],
        8: [3, 5, 6],
        9: [4, 6, 7],
    }
    G = nx.from_dict_of_lists(adjacency, create_using=create_using)
    G.name = "Petersen Graph"
    return G
|
| 767 |
+
|
| 768 |
+
|
| 769 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def sedgewick_maze_graph(create_using=None):
    """Return a small maze with a cycle.

    This is the maze used in Sedgewick, 3rd Edition, Part 5, Graph
    Algorithms, Chapter 18, e.g. Figure 18.2 and following [1]_.
    Nodes are numbered 0,..,7.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        Small maze with a cycle.

    References
    ----------
    .. [1] Figure 18.2, Chapter 18, Graph Algorithms (3rd Ed), Sedgewick
    """
    G = empty_graph(0, create_using)
    G.add_nodes_from(range(8))
    # Edges from Sedgewick's Figure 18.2, grouped by lower endpoint.
    G.add_edges_from([(0, 2), (0, 7), (0, 5)])
    G.add_edges_from([(1, 7), (2, 6)])
    G.add_edges_from([(3, 4), (3, 5)])
    G.add_edges_from([(4, 5), (4, 7), (4, 6)])
    G.name = "Sedgewick Maze"
    return G
|
| 800 |
+
|
| 801 |
+
|
| 802 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def tetrahedral_graph(create_using=None):
    """Return the 3-regular Platonic Tetrahedral graph.

    The tetrahedral graph has 4 nodes and 6 edges. It is a special case of
    the complete graph, K4, and wheel graph, W4. It is one of the 5
    Platonic graphs [1]_.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        Tetrahedral graph.

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Tetrahedron#Tetrahedral_graph
    """
    # The tetrahedron's skeleton is exactly the complete graph on 4 nodes.
    G = complete_graph(4, create_using)
    G.name = "Platonic Tetrahedral Graph"
    return G
|
| 829 |
+
|
| 830 |
+
|
| 831 |
+
@_raise_on_directed
@nx._dispatchable(graphs=None, returns_graph=True)
def truncated_cube_graph(create_using=None):
    """Return the skeleton of the truncated cube.

    The truncated cube is an Archimedean solid with 14 regular faces
    (6 octagonal and 8 triangular), 36 edges and 24 nodes [1]_. It is
    created by truncating (cutting off) the tips of the cube one third of
    the way into each edge [2]_.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        Skeleton of the truncated cube.

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Truncated_cube
    .. [2] https://www.coolmath.com/reference/polyhedra-truncated-cube
    """
    # Each edge listed once; from_dict_of_lists adds the symmetric edge.
    adjacency = {
        0: [1, 2, 4],
        1: [11, 14],
        2: [3, 4],
        3: [6, 8],
        4: [5],
        5: [16, 18],
        6: [7, 8],
        7: [10, 12],
        8: [9],
        9: [17, 20],
        10: [11, 12],
        11: [14],
        12: [13],
        13: [21, 22],
        14: [15],
        15: [19, 23],
        16: [17, 18],
        17: [20],
        18: [19],
        19: [23],
        20: [21],
        21: [22],
        22: [23],
    }
    G = nx.from_dict_of_lists(adjacency, create_using=create_using)
    G.name = "Truncated Cube Graph"
    return G
|
| 888 |
+
|
| 889 |
+
|
| 890 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def truncated_tetrahedron_graph(create_using=None):
    """Return the skeleton of the truncated Platonic tetrahedron.

    The truncated tetrahedron is an Archimedean solid with 4 regular
    hexagonal faces, 4 equilateral triangle faces, 12 nodes and 18 edges.
    It can be constructed by truncating all 4 vertices of a regular
    tetrahedron at one third of the original edge length [1]_.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        Skeleton of the truncated tetrahedron.

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Truncated_tetrahedron
    """
    # Start from a Hamiltonian path on 12 nodes and add the chords that
    # complete the truncated-tetrahedron skeleton.
    G = path_graph(12, create_using)
    chords = [(0, 2), (0, 9), (1, 6), (3, 11), (4, 11), (5, 7), (8, 10)]
    G.add_edges_from(chords)
    G.name = "Truncated Tetrahedron Graph"
    return G
|
| 918 |
+
|
| 919 |
+
|
| 920 |
+
@_raise_on_directed
@nx._dispatchable(graphs=None, returns_graph=True)
def tutte_graph(create_using=None):
    """Return the Tutte graph.

    The Tutte graph is a cubic polyhedral, non-Hamiltonian graph with 46
    nodes and 69 edges. It is a counterexample to Tait's conjecture that
    every 3-regular polyhedron has a Hamiltonian cycle. It can be realized
    geometrically from a tetrahedron by multiply truncating three of its
    vertices [1]_.

    Parameters
    ----------
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Returns
    -------
    G : networkx Graph
        Tutte graph.

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Tutte_graph
    """
    # Each edge listed once under its smaller endpoint; from_dict_of_lists
    # adds the symmetric counterpart.
    adjacency = {
        0: [1, 2, 3],
        1: [4, 26],
        2: [10, 11],
        3: [18, 19],
        4: [5, 33],
        5: [6, 29],
        6: [7, 27],
        7: [8, 14],
        8: [9, 38],
        9: [10, 37],
        10: [39],
        11: [12, 39],
        12: [13, 35],
        13: [14, 15],
        14: [34],
        15: [16, 22],
        16: [17, 44],
        17: [18, 43],
        18: [45],
        19: [20, 45],
        20: [21, 41],
        21: [22, 23],
        22: [40],
        23: [24, 27],
        24: [25, 32],
        25: [26, 31],
        26: [33],
        27: [28],
        28: [29, 32],
        29: [30],
        30: [31, 33],
        31: [32],
        34: [35, 38],
        35: [36],
        36: [37, 39],
        37: [38],
        40: [41, 44],
        41: [42],
        42: [43, 45],
        43: [44],
    }
    G = nx.from_dict_of_lists(adjacency, create_using=create_using)
    G.name = "Tutte's Graph"
    return G
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__init__.py
ADDED
|
File without changes
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_atlas.cpython-310.pyc
ADDED
|
Binary file (3.57 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_classic.cpython-310.pyc
ADDED
|
Binary file (21.9 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_cographs.cpython-310.pyc
ADDED
|
Binary file (642 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_duplication.cpython-310.pyc
ADDED
|
Binary file (4.96 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_internet_as_graphs.cpython-310.pyc
ADDED
|
Binary file (4.81 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_intersection.cpython-310.pyc
ADDED
|
Binary file (1.55 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_interval_graph.cpython-310.pyc
ADDED
|
Binary file (4.17 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_lattice.cpython-310.pyc
ADDED
|
Binary file (10.5 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_random_graphs.cpython-310.pyc
ADDED
|
Binary file (17.1 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_small.cpython-310.pyc
ADDED
|
Binary file (7.41 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_stochastic.cpython-310.pyc
ADDED
|
Binary file (2.85 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/__pycache__/test_time_series.cpython-310.pyc
ADDED
|
Binary file (2.63 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/test_atlas.py
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from itertools import groupby
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx import graph_atlas, graph_atlas_g
|
| 7 |
+
from networkx.generators.atlas import NUM_GRAPHS
|
| 8 |
+
from networkx.utils import edges_equal, nodes_equal, pairwise
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class TestAtlasGraph:
    """Unit tests for the :func:`~networkx.graph_atlas` function."""

    def test_index_too_small(self):
        # A negative atlas index is rejected.
        with pytest.raises(ValueError):
            graph_atlas(-1)

    def test_index_too_large(self):
        # An index one past the last atlas entry is rejected.
        with pytest.raises(ValueError):
            graph_atlas(NUM_GRAPHS)

    def test_graph(self):
        # Atlas entry 6 is the 3-node graph with edges (0,1) and (0,2).
        graph = graph_atlas(6)
        assert nodes_equal(graph.nodes(), range(3))
        assert edges_equal(graph.edges(), [(0, 1), (0, 2)])
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class TestAtlasGraphG:
    """Unit tests for the :func:`~networkx.graph_atlas_g` function."""

    @classmethod
    def setup_class(cls):
        # Load the full atlas once for all tests in this class.
        cls.GAG = graph_atlas_g()

    def test_sizes(self):
        # First entry is the empty graph.
        first = self.GAG[0]
        assert first.number_of_nodes() == 0
        assert first.number_of_edges() == 0

        # Entry 7 is the triangle: 3 nodes, 3 edges.
        triangle = self.GAG[7]
        assert triangle.number_of_nodes() == 3
        assert triangle.number_of_edges() == 3

    def test_names(self):
        # Each graph is named "G<i>" where i is its atlas index.
        for index, graph in enumerate(self.GAG):
            assert int(graph.name[1:]) == index

    def test_nondecreasing_nodes(self):
        # Node counts never decrease, and grow by at most one at a time.
        for prev_n, next_n in pairwise(map(len, self.GAG)):
            assert next_n <= prev_n + 1

    def test_nondecreasing_edges(self):
        # For a fixed number of nodes, edge counts grow by at most one.
        for _, same_order in groupby(self.GAG, key=nx.number_of_nodes):
            for prev_m, next_m in pairwise(map(nx.number_of_edges, same_order)):
                assert next_m <= prev_m + 1

    def test_nondecreasing_degree_sequence(self):
        # Check for lexicographically nondecreasing degree sequences
        # (for fixed number of nodes and edges).
        #
        # There are three exceptions to this rule in the order given in
        # the "Atlas of Graphs" book, so we need to manually exclude
        # those.
        exceptions = [("G55", "G56"), ("G1007", "G1008"), ("G1012", "G1013")]
        for _, same_order in groupby(self.GAG, key=nx.number_of_nodes):
            for _, same_size in groupby(same_order, key=nx.number_of_edges):
                for G1, G2 in pairwise(same_size):
                    if (G1.name, G2.name) in exceptions:
                        continue
                    d1 = sorted(d for v, d in G1.degree())
                    d2 = sorted(d for v, d in G2.degree())
                    assert d1 <= d2
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/test_classic.py
ADDED
|
@@ -0,0 +1,640 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
====================
|
| 3 |
+
Generators - Classic
|
| 4 |
+
====================
|
| 5 |
+
|
| 6 |
+
Unit tests for various classic graph generators in generators/classic.py
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import itertools
|
| 10 |
+
import typing
|
| 11 |
+
|
| 12 |
+
import pytest
|
| 13 |
+
|
| 14 |
+
import networkx as nx
|
| 15 |
+
from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic
|
| 16 |
+
from networkx.utils import edges_equal, nodes_equal
|
| 17 |
+
|
| 18 |
+
is_isomorphic = graph_could_be_isomorphic
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class TestGeneratorClassic:
    def test_balanced_tree(self):
        # balanced_tree(r, h) is a tree on (r**(h+1) - 1) / (r - 1) nodes.
        for branching, height in [(2, 2), (3, 3), (6, 2)]:
            tree = nx.balanced_tree(branching, height)
            order = tree.order()
            assert order == (branching ** (height + 1) - 1) / (branching - 1)
            assert nx.is_connected(tree)
            assert tree.size() == order - 1
            hist = nx.degree_histogram(tree)
            assert hist[0] == 0  # no isolated nodes
            assert hist[1] == branching**height  # leaves have degree 1
            assert hist[branching] == 1  # only the root has degree r
            # every internal non-root node has degree r + 1
            assert hist[branching + 1] == order - branching**height - 1
            assert len(hist) == branching + 2

    def test_balanced_tree_star(self):
        # balanced_tree(r, 1) is the r-star.
        for branching in (2, 5, 10):
            tree = nx.balanced_tree(r=branching, h=1)
            assert is_isomorphic(tree, nx.star_graph(branching))

    def test_balanced_tree_path(self):
        """A branching factor of one yields the path graph."""
        # A tree of height four has five levels.
        assert is_isomorphic(nx.balanced_tree(1, 4), nx.path_graph(5))

    def test_full_rary_tree(self):
        branching = 2
        size = 9
        tree = nx.full_rary_tree(branching, size)
        assert tree.order() == size
        assert nx.is_connected(tree)
        hist = nx.degree_histogram(tree)
        assert hist[0] == 0  # no isolated nodes
        assert hist[1] == 5  # leaves have degree 1
        assert hist[branching] == 1  # only the root has degree r
        assert hist[branching + 1] == 9 - 5 - 1  # internal non-root nodes
        assert len(hist) == branching + 2

    def test_full_rary_tree_balanced(self):
        # A full binary tree on 15 nodes is the balanced tree of height 3.
        assert is_isomorphic(nx.full_rary_tree(2, 15), nx.balanced_tree(2, 3))

    def test_full_rary_tree_path(self):
        # Branching factor one degenerates to a path.
        assert is_isomorphic(nx.full_rary_tree(1, 10), nx.path_graph(10))

    def test_full_rary_tree_empty(self):
        # Branching factor zero gives isolated nodes; zero nodes gives the
        # null graph.
        assert is_isomorphic(nx.full_rary_tree(0, 10), nx.empty_graph(10))
        assert is_isomorphic(nx.full_rary_tree(3, 0), nx.empty_graph(0))

    def test_full_rary_tree_3_20(self):
        # The requested node count is honored even with a partial last level.
        assert nx.full_rary_tree(3, 20).order() == 20
|
| 87 |
+
|
| 88 |
+
def test_barbell_graph(self):
|
| 89 |
+
# number of nodes = 2*m1 + m2 (2 m1-complete graphs + m2-path + 2 edges)
|
| 90 |
+
# number of edges = 2*(nx.number_of_edges(m1-complete graph) + m2 + 1
|
| 91 |
+
m1 = 3
|
| 92 |
+
m2 = 5
|
| 93 |
+
b = nx.barbell_graph(m1, m2)
|
| 94 |
+
assert nx.number_of_nodes(b) == 2 * m1 + m2
|
| 95 |
+
assert nx.number_of_edges(b) == m1 * (m1 - 1) + m2 + 1
|
| 96 |
+
|
| 97 |
+
m1 = 4
|
| 98 |
+
m2 = 10
|
| 99 |
+
b = nx.barbell_graph(m1, m2)
|
| 100 |
+
assert nx.number_of_nodes(b) == 2 * m1 + m2
|
| 101 |
+
assert nx.number_of_edges(b) == m1 * (m1 - 1) + m2 + 1
|
| 102 |
+
|
| 103 |
+
m1 = 3
|
| 104 |
+
m2 = 20
|
| 105 |
+
b = nx.barbell_graph(m1, m2)
|
| 106 |
+
assert nx.number_of_nodes(b) == 2 * m1 + m2
|
| 107 |
+
assert nx.number_of_edges(b) == m1 * (m1 - 1) + m2 + 1
|
| 108 |
+
|
| 109 |
+
# Raise NetworkXError if m1<2
|
| 110 |
+
m1 = 1
|
| 111 |
+
m2 = 20
|
| 112 |
+
pytest.raises(nx.NetworkXError, nx.barbell_graph, m1, m2)
|
| 113 |
+
|
| 114 |
+
# Raise NetworkXError if m2<0
|
| 115 |
+
m1 = 5
|
| 116 |
+
m2 = -2
|
| 117 |
+
pytest.raises(nx.NetworkXError, nx.barbell_graph, m1, m2)
|
| 118 |
+
|
| 119 |
+
# nx.barbell_graph(2,m) = nx.path_graph(m+4)
|
| 120 |
+
m1 = 2
|
| 121 |
+
m2 = 5
|
| 122 |
+
b = nx.barbell_graph(m1, m2)
|
| 123 |
+
assert is_isomorphic(b, nx.path_graph(m2 + 4))
|
| 124 |
+
|
| 125 |
+
m1 = 2
|
| 126 |
+
m2 = 10
|
| 127 |
+
b = nx.barbell_graph(m1, m2)
|
| 128 |
+
assert is_isomorphic(b, nx.path_graph(m2 + 4))
|
| 129 |
+
|
| 130 |
+
m1 = 2
|
| 131 |
+
m2 = 20
|
| 132 |
+
b = nx.barbell_graph(m1, m2)
|
| 133 |
+
assert is_isomorphic(b, nx.path_graph(m2 + 4))
|
| 134 |
+
|
| 135 |
+
pytest.raises(
|
| 136 |
+
nx.NetworkXError, nx.barbell_graph, m1, m2, create_using=nx.DiGraph()
|
| 137 |
+
)
|
| 138 |
+
|
| 139 |
+
mb = nx.barbell_graph(m1, m2, create_using=nx.MultiGraph())
|
| 140 |
+
assert edges_equal(mb.edges(), b.edges())
|
| 141 |
+
|
| 142 |
+
def test_binomial_tree(self):
|
| 143 |
+
graphs = (None, nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)
|
| 144 |
+
for create_using in graphs:
|
| 145 |
+
for n in range(4):
|
| 146 |
+
b = nx.binomial_tree(n, create_using)
|
| 147 |
+
assert nx.number_of_nodes(b) == 2**n
|
| 148 |
+
assert nx.number_of_edges(b) == (2**n - 1)
|
| 149 |
+
|
| 150 |
+
def test_complete_graph(self):
|
| 151 |
+
# complete_graph(m) is a connected graph with
|
| 152 |
+
# m nodes and m*(m+1)/2 edges
|
| 153 |
+
for m in [0, 1, 3, 5]:
|
| 154 |
+
g = nx.complete_graph(m)
|
| 155 |
+
assert nx.number_of_nodes(g) == m
|
| 156 |
+
assert nx.number_of_edges(g) == m * (m - 1) // 2
|
| 157 |
+
|
| 158 |
+
mg = nx.complete_graph(m, create_using=nx.MultiGraph)
|
| 159 |
+
assert edges_equal(mg.edges(), g.edges())
|
| 160 |
+
|
| 161 |
+
g = nx.complete_graph("abc")
|
| 162 |
+
assert nodes_equal(g.nodes(), ["a", "b", "c"])
|
| 163 |
+
assert g.size() == 3
|
| 164 |
+
|
| 165 |
+
# creates a self-loop... should it? <backward compatible says yes>
|
| 166 |
+
g = nx.complete_graph("abcb")
|
| 167 |
+
assert nodes_equal(g.nodes(), ["a", "b", "c"])
|
| 168 |
+
assert g.size() == 4
|
| 169 |
+
|
| 170 |
+
g = nx.complete_graph("abcb", create_using=nx.MultiGraph)
|
| 171 |
+
assert nodes_equal(g.nodes(), ["a", "b", "c"])
|
| 172 |
+
assert g.size() == 6
|
| 173 |
+
|
| 174 |
+
def test_complete_digraph(self):
|
| 175 |
+
# complete_graph(m) is a connected graph with
|
| 176 |
+
# m nodes and m*(m+1)/2 edges
|
| 177 |
+
for m in [0, 1, 3, 5]:
|
| 178 |
+
g = nx.complete_graph(m, create_using=nx.DiGraph)
|
| 179 |
+
assert nx.number_of_nodes(g) == m
|
| 180 |
+
assert nx.number_of_edges(g) == m * (m - 1)
|
| 181 |
+
|
| 182 |
+
g = nx.complete_graph("abc", create_using=nx.DiGraph)
|
| 183 |
+
assert len(g) == 3
|
| 184 |
+
assert g.size() == 6
|
| 185 |
+
assert g.is_directed()
|
| 186 |
+
|
| 187 |
+
def test_circular_ladder_graph(self):
|
| 188 |
+
G = nx.circular_ladder_graph(5)
|
| 189 |
+
pytest.raises(
|
| 190 |
+
nx.NetworkXError, nx.circular_ladder_graph, 5, create_using=nx.DiGraph
|
| 191 |
+
)
|
| 192 |
+
mG = nx.circular_ladder_graph(5, create_using=nx.MultiGraph)
|
| 193 |
+
assert edges_equal(mG.edges(), G.edges())
|
| 194 |
+
|
| 195 |
+
def test_circulant_graph(self):
|
| 196 |
+
# Ci_n(1) is the cycle graph for all n
|
| 197 |
+
Ci6_1 = nx.circulant_graph(6, [1])
|
| 198 |
+
C6 = nx.cycle_graph(6)
|
| 199 |
+
assert edges_equal(Ci6_1.edges(), C6.edges())
|
| 200 |
+
|
| 201 |
+
# Ci_n(1, 2, ..., n div 2) is the complete graph for all n
|
| 202 |
+
Ci7 = nx.circulant_graph(7, [1, 2, 3])
|
| 203 |
+
K7 = nx.complete_graph(7)
|
| 204 |
+
assert edges_equal(Ci7.edges(), K7.edges())
|
| 205 |
+
|
| 206 |
+
# Ci_6(1, 3) is K_3,3 i.e. the utility graph
|
| 207 |
+
Ci6_1_3 = nx.circulant_graph(6, [1, 3])
|
| 208 |
+
K3_3 = nx.complete_bipartite_graph(3, 3)
|
| 209 |
+
assert is_isomorphic(Ci6_1_3, K3_3)
|
| 210 |
+
|
| 211 |
+
def test_cycle_graph(self):
|
| 212 |
+
G = nx.cycle_graph(4)
|
| 213 |
+
assert edges_equal(G.edges(), [(0, 1), (0, 3), (1, 2), (2, 3)])
|
| 214 |
+
mG = nx.cycle_graph(4, create_using=nx.MultiGraph)
|
| 215 |
+
assert edges_equal(mG.edges(), [(0, 1), (0, 3), (1, 2), (2, 3)])
|
| 216 |
+
G = nx.cycle_graph(4, create_using=nx.DiGraph)
|
| 217 |
+
assert not G.has_edge(2, 1)
|
| 218 |
+
assert G.has_edge(1, 2)
|
| 219 |
+
assert G.is_directed()
|
| 220 |
+
|
| 221 |
+
G = nx.cycle_graph("abc")
|
| 222 |
+
assert len(G) == 3
|
| 223 |
+
assert G.size() == 3
|
| 224 |
+
G = nx.cycle_graph("abcb")
|
| 225 |
+
assert len(G) == 3
|
| 226 |
+
assert G.size() == 2
|
| 227 |
+
g = nx.cycle_graph("abc", nx.DiGraph)
|
| 228 |
+
assert len(g) == 3
|
| 229 |
+
assert g.size() == 3
|
| 230 |
+
assert g.is_directed()
|
| 231 |
+
g = nx.cycle_graph("abcb", nx.DiGraph)
|
| 232 |
+
assert len(g) == 3
|
| 233 |
+
assert g.size() == 4
|
| 234 |
+
|
| 235 |
+
def test_dorogovtsev_goltsev_mendes_graph(self):
|
| 236 |
+
G = nx.dorogovtsev_goltsev_mendes_graph(0)
|
| 237 |
+
assert edges_equal(G.edges(), [(0, 1)])
|
| 238 |
+
assert nodes_equal(list(G), [0, 1])
|
| 239 |
+
G = nx.dorogovtsev_goltsev_mendes_graph(1)
|
| 240 |
+
assert edges_equal(G.edges(), [(0, 1), (0, 2), (1, 2)])
|
| 241 |
+
assert nx.average_clustering(G) == 1.0
|
| 242 |
+
assert nx.average_shortest_path_length(G) == 1.0
|
| 243 |
+
assert sorted(nx.triangles(G).values()) == [1, 1, 1]
|
| 244 |
+
assert nx.is_planar(G)
|
| 245 |
+
G = nx.dorogovtsev_goltsev_mendes_graph(2)
|
| 246 |
+
assert nx.number_of_nodes(G) == 6
|
| 247 |
+
assert nx.number_of_edges(G) == 9
|
| 248 |
+
assert nx.average_clustering(G) == 0.75
|
| 249 |
+
assert nx.average_shortest_path_length(G) == 1.4
|
| 250 |
+
assert nx.is_planar(G)
|
| 251 |
+
G = nx.dorogovtsev_goltsev_mendes_graph(10)
|
| 252 |
+
assert nx.number_of_nodes(G) == 29526
|
| 253 |
+
assert nx.number_of_edges(G) == 59049
|
| 254 |
+
assert G.degree(0) == 1024
|
| 255 |
+
assert G.degree(1) == 1024
|
| 256 |
+
assert G.degree(2) == 1024
|
| 257 |
+
|
| 258 |
+
with pytest.raises(nx.NetworkXError, match=r"n must be greater than"):
|
| 259 |
+
nx.dorogovtsev_goltsev_mendes_graph(-1)
|
| 260 |
+
with pytest.raises(nx.NetworkXError, match=r"directed graph not supported"):
|
| 261 |
+
nx.dorogovtsev_goltsev_mendes_graph(7, create_using=nx.DiGraph)
|
| 262 |
+
with pytest.raises(nx.NetworkXError, match=r"multigraph not supported"):
|
| 263 |
+
nx.dorogovtsev_goltsev_mendes_graph(7, create_using=nx.MultiGraph)
|
| 264 |
+
with pytest.raises(nx.NetworkXError):
|
| 265 |
+
nx.dorogovtsev_goltsev_mendes_graph(7, create_using=nx.MultiDiGraph)
|
| 266 |
+
|
| 267 |
+
def test_create_using(self):
|
| 268 |
+
G = nx.empty_graph()
|
| 269 |
+
assert isinstance(G, nx.Graph)
|
| 270 |
+
pytest.raises(TypeError, nx.empty_graph, create_using=0.0)
|
| 271 |
+
pytest.raises(TypeError, nx.empty_graph, create_using="Graph")
|
| 272 |
+
|
| 273 |
+
G = nx.empty_graph(create_using=nx.MultiGraph)
|
| 274 |
+
assert isinstance(G, nx.MultiGraph)
|
| 275 |
+
G = nx.empty_graph(create_using=nx.DiGraph)
|
| 276 |
+
assert isinstance(G, nx.DiGraph)
|
| 277 |
+
|
| 278 |
+
G = nx.empty_graph(create_using=nx.DiGraph, default=nx.MultiGraph)
|
| 279 |
+
assert isinstance(G, nx.DiGraph)
|
| 280 |
+
G = nx.empty_graph(create_using=None, default=nx.MultiGraph)
|
| 281 |
+
assert isinstance(G, nx.MultiGraph)
|
| 282 |
+
G = nx.empty_graph(default=nx.MultiGraph)
|
| 283 |
+
assert isinstance(G, nx.MultiGraph)
|
| 284 |
+
|
| 285 |
+
G = nx.path_graph(5)
|
| 286 |
+
H = nx.empty_graph(create_using=G)
|
| 287 |
+
assert not H.is_multigraph()
|
| 288 |
+
assert not H.is_directed()
|
| 289 |
+
assert len(H) == 0
|
| 290 |
+
assert G is H
|
| 291 |
+
|
| 292 |
+
H = nx.empty_graph(create_using=nx.MultiGraph())
|
| 293 |
+
assert H.is_multigraph()
|
| 294 |
+
assert not H.is_directed()
|
| 295 |
+
assert G is not H
|
| 296 |
+
|
| 297 |
+
# test for subclasses that also use typing.Protocol. See gh-6243
|
| 298 |
+
class Mixin(typing.Protocol):
|
| 299 |
+
pass
|
| 300 |
+
|
| 301 |
+
class MyGraph(Mixin, nx.DiGraph):
|
| 302 |
+
pass
|
| 303 |
+
|
| 304 |
+
G = nx.empty_graph(create_using=MyGraph)
|
| 305 |
+
|
| 306 |
+
def test_empty_graph(self):
|
| 307 |
+
G = nx.empty_graph()
|
| 308 |
+
assert nx.number_of_nodes(G) == 0
|
| 309 |
+
G = nx.empty_graph(42)
|
| 310 |
+
assert nx.number_of_nodes(G) == 42
|
| 311 |
+
assert nx.number_of_edges(G) == 0
|
| 312 |
+
|
| 313 |
+
G = nx.empty_graph("abc")
|
| 314 |
+
assert len(G) == 3
|
| 315 |
+
assert G.size() == 0
|
| 316 |
+
|
| 317 |
+
# create empty digraph
|
| 318 |
+
G = nx.empty_graph(42, create_using=nx.DiGraph(name="duh"))
|
| 319 |
+
assert nx.number_of_nodes(G) == 42
|
| 320 |
+
assert nx.number_of_edges(G) == 0
|
| 321 |
+
assert isinstance(G, nx.DiGraph)
|
| 322 |
+
|
| 323 |
+
# create empty multigraph
|
| 324 |
+
G = nx.empty_graph(42, create_using=nx.MultiGraph(name="duh"))
|
| 325 |
+
assert nx.number_of_nodes(G) == 42
|
| 326 |
+
assert nx.number_of_edges(G) == 0
|
| 327 |
+
assert isinstance(G, nx.MultiGraph)
|
| 328 |
+
|
| 329 |
+
# create empty graph from another
|
| 330 |
+
pete = nx.petersen_graph()
|
| 331 |
+
G = nx.empty_graph(42, create_using=pete)
|
| 332 |
+
assert nx.number_of_nodes(G) == 42
|
| 333 |
+
assert nx.number_of_edges(G) == 0
|
| 334 |
+
assert isinstance(G, nx.Graph)
|
| 335 |
+
|
| 336 |
+
def test_ladder_graph(self):
|
| 337 |
+
for i, G in [
|
| 338 |
+
(0, nx.empty_graph(0)),
|
| 339 |
+
(1, nx.path_graph(2)),
|
| 340 |
+
(2, nx.hypercube_graph(2)),
|
| 341 |
+
(10, nx.grid_graph([2, 10])),
|
| 342 |
+
]:
|
| 343 |
+
assert is_isomorphic(nx.ladder_graph(i), G)
|
| 344 |
+
|
| 345 |
+
pytest.raises(nx.NetworkXError, nx.ladder_graph, 2, create_using=nx.DiGraph)
|
| 346 |
+
|
| 347 |
+
g = nx.ladder_graph(2)
|
| 348 |
+
mg = nx.ladder_graph(2, create_using=nx.MultiGraph)
|
| 349 |
+
assert edges_equal(mg.edges(), g.edges())
|
| 350 |
+
|
| 351 |
+
@pytest.mark.parametrize(("m", "n"), [(3, 5), (4, 10), (3, 20)])
|
| 352 |
+
def test_lollipop_graph_right_sizes(self, m, n):
|
| 353 |
+
G = nx.lollipop_graph(m, n)
|
| 354 |
+
assert nx.number_of_nodes(G) == m + n
|
| 355 |
+
assert nx.number_of_edges(G) == m * (m - 1) / 2 + n
|
| 356 |
+
|
| 357 |
+
@pytest.mark.parametrize(("m", "n"), [("ab", ""), ("abc", "defg")])
|
| 358 |
+
def test_lollipop_graph_size_node_sequence(self, m, n):
|
| 359 |
+
G = nx.lollipop_graph(m, n)
|
| 360 |
+
assert nx.number_of_nodes(G) == len(m) + len(n)
|
| 361 |
+
assert nx.number_of_edges(G) == len(m) * (len(m) - 1) / 2 + len(n)
|
| 362 |
+
|
| 363 |
+
def test_lollipop_graph_exceptions(self):
|
| 364 |
+
# Raise NetworkXError if m<2
|
| 365 |
+
pytest.raises(nx.NetworkXError, nx.lollipop_graph, -1, 2)
|
| 366 |
+
pytest.raises(nx.NetworkXError, nx.lollipop_graph, 1, 20)
|
| 367 |
+
pytest.raises(nx.NetworkXError, nx.lollipop_graph, "", 20)
|
| 368 |
+
pytest.raises(nx.NetworkXError, nx.lollipop_graph, "a", 20)
|
| 369 |
+
|
| 370 |
+
# Raise NetworkXError if n<0
|
| 371 |
+
pytest.raises(nx.NetworkXError, nx.lollipop_graph, 5, -2)
|
| 372 |
+
|
| 373 |
+
# raise NetworkXError if create_using is directed
|
| 374 |
+
with pytest.raises(nx.NetworkXError):
|
| 375 |
+
nx.lollipop_graph(2, 20, create_using=nx.DiGraph)
|
| 376 |
+
with pytest.raises(nx.NetworkXError):
|
| 377 |
+
nx.lollipop_graph(2, 20, create_using=nx.MultiDiGraph)
|
| 378 |
+
|
| 379 |
+
@pytest.mark.parametrize(("m", "n"), [(2, 0), (2, 5), (2, 10), ("ab", 20)])
|
| 380 |
+
def test_lollipop_graph_same_as_path_when_m1_is_2(self, m, n):
|
| 381 |
+
G = nx.lollipop_graph(m, n)
|
| 382 |
+
assert is_isomorphic(G, nx.path_graph(n + 2))
|
| 383 |
+
|
| 384 |
+
def test_lollipop_graph_for_multigraph(self):
|
| 385 |
+
G = nx.lollipop_graph(5, 20)
|
| 386 |
+
MG = nx.lollipop_graph(5, 20, create_using=nx.MultiGraph)
|
| 387 |
+
assert edges_equal(MG.edges(), G.edges())
|
| 388 |
+
|
| 389 |
+
@pytest.mark.parametrize(
|
| 390 |
+
("m", "n"),
|
| 391 |
+
[(4, "abc"), ("abcd", 3), ([1, 2, 3, 4], "abc"), ("abcd", [1, 2, 3])],
|
| 392 |
+
)
|
| 393 |
+
def test_lollipop_graph_mixing_input_types(self, m, n):
|
| 394 |
+
expected = nx.compose(nx.complete_graph(4), nx.path_graph(range(100, 103)))
|
| 395 |
+
expected.add_edge(0, 100) # Connect complete graph and path graph
|
| 396 |
+
assert is_isomorphic(nx.lollipop_graph(m, n), expected)
|
| 397 |
+
|
| 398 |
+
def test_lollipop_graph_non_builtin_ints(self):
|
| 399 |
+
np = pytest.importorskip("numpy")
|
| 400 |
+
G = nx.lollipop_graph(np.int32(4), np.int64(3))
|
| 401 |
+
expected = nx.compose(nx.complete_graph(4), nx.path_graph(range(100, 103)))
|
| 402 |
+
expected.add_edge(0, 100) # Connect complete graph and path graph
|
| 403 |
+
assert is_isomorphic(G, expected)
|
| 404 |
+
|
| 405 |
+
def test_null_graph(self):
|
| 406 |
+
assert nx.number_of_nodes(nx.null_graph()) == 0
|
| 407 |
+
|
| 408 |
+
def test_path_graph(self):
|
| 409 |
+
p = nx.path_graph(0)
|
| 410 |
+
assert is_isomorphic(p, nx.null_graph())
|
| 411 |
+
|
| 412 |
+
p = nx.path_graph(1)
|
| 413 |
+
assert is_isomorphic(p, nx.empty_graph(1))
|
| 414 |
+
|
| 415 |
+
p = nx.path_graph(10)
|
| 416 |
+
assert nx.is_connected(p)
|
| 417 |
+
assert sorted(d for n, d in p.degree()) == [1, 1, 2, 2, 2, 2, 2, 2, 2, 2]
|
| 418 |
+
assert p.order() - 1 == p.size()
|
| 419 |
+
|
| 420 |
+
dp = nx.path_graph(3, create_using=nx.DiGraph)
|
| 421 |
+
assert dp.has_edge(0, 1)
|
| 422 |
+
assert not dp.has_edge(1, 0)
|
| 423 |
+
|
| 424 |
+
mp = nx.path_graph(10, create_using=nx.MultiGraph)
|
| 425 |
+
assert edges_equal(mp.edges(), p.edges())
|
| 426 |
+
|
| 427 |
+
G = nx.path_graph("abc")
|
| 428 |
+
assert len(G) == 3
|
| 429 |
+
assert G.size() == 2
|
| 430 |
+
G = nx.path_graph("abcb")
|
| 431 |
+
assert len(G) == 3
|
| 432 |
+
assert G.size() == 2
|
| 433 |
+
g = nx.path_graph("abc", nx.DiGraph)
|
| 434 |
+
assert len(g) == 3
|
| 435 |
+
assert g.size() == 2
|
| 436 |
+
assert g.is_directed()
|
| 437 |
+
g = nx.path_graph("abcb", nx.DiGraph)
|
| 438 |
+
assert len(g) == 3
|
| 439 |
+
assert g.size() == 3
|
| 440 |
+
|
| 441 |
+
G = nx.path_graph((1, 2, 3, 2, 4))
|
| 442 |
+
assert G.has_edge(2, 4)
|
| 443 |
+
|
| 444 |
+
def test_star_graph(self):
|
| 445 |
+
assert is_isomorphic(nx.star_graph(""), nx.empty_graph(0))
|
| 446 |
+
assert is_isomorphic(nx.star_graph([]), nx.empty_graph(0))
|
| 447 |
+
assert is_isomorphic(nx.star_graph(0), nx.empty_graph(1))
|
| 448 |
+
assert is_isomorphic(nx.star_graph(1), nx.path_graph(2))
|
| 449 |
+
assert is_isomorphic(nx.star_graph(2), nx.path_graph(3))
|
| 450 |
+
assert is_isomorphic(nx.star_graph(5), nx.complete_bipartite_graph(1, 5))
|
| 451 |
+
|
| 452 |
+
s = nx.star_graph(10)
|
| 453 |
+
assert sorted(d for n, d in s.degree()) == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10]
|
| 454 |
+
|
| 455 |
+
pytest.raises(nx.NetworkXError, nx.star_graph, 10, create_using=nx.DiGraph)
|
| 456 |
+
|
| 457 |
+
ms = nx.star_graph(10, create_using=nx.MultiGraph)
|
| 458 |
+
assert edges_equal(ms.edges(), s.edges())
|
| 459 |
+
|
| 460 |
+
G = nx.star_graph("abc")
|
| 461 |
+
assert len(G) == 3
|
| 462 |
+
assert G.size() == 2
|
| 463 |
+
|
| 464 |
+
G = nx.star_graph("abcb")
|
| 465 |
+
assert len(G) == 3
|
| 466 |
+
assert G.size() == 2
|
| 467 |
+
G = nx.star_graph("abcb", create_using=nx.MultiGraph)
|
| 468 |
+
assert len(G) == 3
|
| 469 |
+
assert G.size() == 3
|
| 470 |
+
|
| 471 |
+
G = nx.star_graph("abcdefg")
|
| 472 |
+
assert len(G) == 7
|
| 473 |
+
assert G.size() == 6
|
| 474 |
+
|
| 475 |
+
def test_non_int_integers_for_star_graph(self):
|
| 476 |
+
np = pytest.importorskip("numpy")
|
| 477 |
+
G = nx.star_graph(np.int32(3))
|
| 478 |
+
assert len(G) == 4
|
| 479 |
+
assert G.size() == 3
|
| 480 |
+
|
| 481 |
+
@pytest.mark.parametrize(("m", "n"), [(3, 0), (3, 5), (4, 10), (3, 20)])
|
| 482 |
+
def test_tadpole_graph_right_sizes(self, m, n):
|
| 483 |
+
G = nx.tadpole_graph(m, n)
|
| 484 |
+
assert nx.number_of_nodes(G) == m + n
|
| 485 |
+
assert nx.number_of_edges(G) == m + n - (m == 2)
|
| 486 |
+
|
| 487 |
+
@pytest.mark.parametrize(("m", "n"), [("ab", ""), ("ab", "c"), ("abc", "defg")])
|
| 488 |
+
def test_tadpole_graph_size_node_sequences(self, m, n):
|
| 489 |
+
G = nx.tadpole_graph(m, n)
|
| 490 |
+
assert nx.number_of_nodes(G) == len(m) + len(n)
|
| 491 |
+
assert nx.number_of_edges(G) == len(m) + len(n) - (len(m) == 2)
|
| 492 |
+
|
| 493 |
+
def test_tadpole_graph_exceptions(self):
|
| 494 |
+
# Raise NetworkXError if m<2
|
| 495 |
+
pytest.raises(nx.NetworkXError, nx.tadpole_graph, -1, 3)
|
| 496 |
+
pytest.raises(nx.NetworkXError, nx.tadpole_graph, 0, 3)
|
| 497 |
+
pytest.raises(nx.NetworkXError, nx.tadpole_graph, 1, 3)
|
| 498 |
+
|
| 499 |
+
# Raise NetworkXError if n<0
|
| 500 |
+
pytest.raises(nx.NetworkXError, nx.tadpole_graph, 5, -2)
|
| 501 |
+
|
| 502 |
+
# Raise NetworkXError for digraphs
|
| 503 |
+
with pytest.raises(nx.NetworkXError):
|
| 504 |
+
nx.tadpole_graph(2, 20, create_using=nx.DiGraph)
|
| 505 |
+
with pytest.raises(nx.NetworkXError):
|
| 506 |
+
nx.tadpole_graph(2, 20, create_using=nx.MultiDiGraph)
|
| 507 |
+
|
| 508 |
+
@pytest.mark.parametrize(("m", "n"), [(2, 0), (2, 5), (2, 10), ("ab", 20)])
|
| 509 |
+
def test_tadpole_graph_same_as_path_when_m_is_2(self, m, n):
|
| 510 |
+
G = nx.tadpole_graph(m, n)
|
| 511 |
+
assert is_isomorphic(G, nx.path_graph(n + 2))
|
| 512 |
+
|
| 513 |
+
@pytest.mark.parametrize("m", [4, 7])
|
| 514 |
+
def test_tadpole_graph_same_as_cycle_when_m2_is_0(self, m):
|
| 515 |
+
G = nx.tadpole_graph(m, 0)
|
| 516 |
+
assert is_isomorphic(G, nx.cycle_graph(m))
|
| 517 |
+
|
| 518 |
+
def test_tadpole_graph_for_multigraph(self):
|
| 519 |
+
G = nx.tadpole_graph(5, 20)
|
| 520 |
+
MG = nx.tadpole_graph(5, 20, create_using=nx.MultiGraph)
|
| 521 |
+
assert edges_equal(MG.edges(), G.edges())
|
| 522 |
+
|
| 523 |
+
@pytest.mark.parametrize(
|
| 524 |
+
("m", "n"),
|
| 525 |
+
[(4, "abc"), ("abcd", 3), ([1, 2, 3, 4], "abc"), ("abcd", [1, 2, 3])],
|
| 526 |
+
)
|
| 527 |
+
def test_tadpole_graph_mixing_input_types(self, m, n):
|
| 528 |
+
expected = nx.compose(nx.cycle_graph(4), nx.path_graph(range(100, 103)))
|
| 529 |
+
expected.add_edge(0, 100) # Connect cycle and path
|
| 530 |
+
assert is_isomorphic(nx.tadpole_graph(m, n), expected)
|
| 531 |
+
|
| 532 |
+
def test_tadpole_graph_non_builtin_integers(self):
|
| 533 |
+
np = pytest.importorskip("numpy")
|
| 534 |
+
G = nx.tadpole_graph(np.int32(4), np.int64(3))
|
| 535 |
+
expected = nx.compose(nx.cycle_graph(4), nx.path_graph(range(100, 103)))
|
| 536 |
+
expected.add_edge(0, 100) # Connect cycle and path
|
| 537 |
+
assert is_isomorphic(G, expected)
|
| 538 |
+
|
| 539 |
+
def test_trivial_graph(self):
|
| 540 |
+
assert nx.number_of_nodes(nx.trivial_graph()) == 1
|
| 541 |
+
|
| 542 |
+
def test_turan_graph(self):
|
| 543 |
+
assert nx.number_of_edges(nx.turan_graph(13, 4)) == 63
|
| 544 |
+
assert is_isomorphic(
|
| 545 |
+
nx.turan_graph(13, 4), nx.complete_multipartite_graph(3, 4, 3, 3)
|
| 546 |
+
)
|
| 547 |
+
|
| 548 |
+
def test_wheel_graph(self):
|
| 549 |
+
for n, G in [
|
| 550 |
+
("", nx.null_graph()),
|
| 551 |
+
(0, nx.null_graph()),
|
| 552 |
+
(1, nx.empty_graph(1)),
|
| 553 |
+
(2, nx.path_graph(2)),
|
| 554 |
+
(3, nx.complete_graph(3)),
|
| 555 |
+
(4, nx.complete_graph(4)),
|
| 556 |
+
]:
|
| 557 |
+
g = nx.wheel_graph(n)
|
| 558 |
+
assert is_isomorphic(g, G)
|
| 559 |
+
|
| 560 |
+
g = nx.wheel_graph(10)
|
| 561 |
+
assert sorted(d for n, d in g.degree()) == [3, 3, 3, 3, 3, 3, 3, 3, 3, 9]
|
| 562 |
+
|
| 563 |
+
pytest.raises(nx.NetworkXError, nx.wheel_graph, 10, create_using=nx.DiGraph)
|
| 564 |
+
|
| 565 |
+
mg = nx.wheel_graph(10, create_using=nx.MultiGraph())
|
| 566 |
+
assert edges_equal(mg.edges(), g.edges())
|
| 567 |
+
|
| 568 |
+
G = nx.wheel_graph("abc")
|
| 569 |
+
assert len(G) == 3
|
| 570 |
+
assert G.size() == 3
|
| 571 |
+
|
| 572 |
+
G = nx.wheel_graph("abcb")
|
| 573 |
+
assert len(G) == 3
|
| 574 |
+
assert G.size() == 4
|
| 575 |
+
G = nx.wheel_graph("abcb", nx.MultiGraph)
|
| 576 |
+
assert len(G) == 3
|
| 577 |
+
assert G.size() == 6
|
| 578 |
+
|
| 579 |
+
def test_non_int_integers_for_wheel_graph(self):
|
| 580 |
+
np = pytest.importorskip("numpy")
|
| 581 |
+
G = nx.wheel_graph(np.int32(3))
|
| 582 |
+
assert len(G) == 3
|
| 583 |
+
assert G.size() == 3
|
| 584 |
+
|
| 585 |
+
def test_complete_0_partite_graph(self):
|
| 586 |
+
"""Tests that the complete 0-partite graph is the null graph."""
|
| 587 |
+
G = nx.complete_multipartite_graph()
|
| 588 |
+
H = nx.null_graph()
|
| 589 |
+
assert nodes_equal(G, H)
|
| 590 |
+
assert edges_equal(G.edges(), H.edges())
|
| 591 |
+
|
| 592 |
+
def test_complete_1_partite_graph(self):
|
| 593 |
+
"""Tests that the complete 1-partite graph is the empty graph."""
|
| 594 |
+
G = nx.complete_multipartite_graph(3)
|
| 595 |
+
H = nx.empty_graph(3)
|
| 596 |
+
assert nodes_equal(G, H)
|
| 597 |
+
assert edges_equal(G.edges(), H.edges())
|
| 598 |
+
|
| 599 |
+
def test_complete_2_partite_graph(self):
|
| 600 |
+
"""Tests that the complete 2-partite graph is the complete bipartite
|
| 601 |
+
graph.
|
| 602 |
+
|
| 603 |
+
"""
|
| 604 |
+
G = nx.complete_multipartite_graph(2, 3)
|
| 605 |
+
H = nx.complete_bipartite_graph(2, 3)
|
| 606 |
+
assert nodes_equal(G, H)
|
| 607 |
+
assert edges_equal(G.edges(), H.edges())
|
| 608 |
+
|
| 609 |
+
def test_complete_multipartite_graph(self):
|
| 610 |
+
"""Tests for generating the complete multipartite graph."""
|
| 611 |
+
G = nx.complete_multipartite_graph(2, 3, 4)
|
| 612 |
+
blocks = [(0, 1), (2, 3, 4), (5, 6, 7, 8)]
|
| 613 |
+
# Within each block, no two vertices should be adjacent.
|
| 614 |
+
for block in blocks:
|
| 615 |
+
for u, v in itertools.combinations_with_replacement(block, 2):
|
| 616 |
+
assert v not in G[u]
|
| 617 |
+
assert G.nodes[u] == G.nodes[v]
|
| 618 |
+
# Across blocks, all vertices should be adjacent.
|
| 619 |
+
for block1, block2 in itertools.combinations(blocks, 2):
|
| 620 |
+
for u, v in itertools.product(block1, block2):
|
| 621 |
+
assert v in G[u]
|
| 622 |
+
assert G.nodes[u] != G.nodes[v]
|
| 623 |
+
with pytest.raises(nx.NetworkXError, match="Negative number of nodes"):
|
| 624 |
+
nx.complete_multipartite_graph(2, -3, 4)
|
| 625 |
+
|
| 626 |
+
def test_kneser_graph(self):
|
| 627 |
+
# the petersen graph is a special case of the kneser graph when n=5 and k=2
|
| 628 |
+
assert is_isomorphic(nx.kneser_graph(5, 2), nx.petersen_graph())
|
| 629 |
+
|
| 630 |
+
# when k is 1, the kneser graph returns a complete graph with n vertices
|
| 631 |
+
for i in range(1, 7):
|
| 632 |
+
assert is_isomorphic(nx.kneser_graph(i, 1), nx.complete_graph(i))
|
| 633 |
+
|
| 634 |
+
# the kneser graph of n and n-1 is the empty graph with n vertices
|
| 635 |
+
for j in range(3, 7):
|
| 636 |
+
assert is_isomorphic(nx.kneser_graph(j, j - 1), nx.empty_graph(j))
|
| 637 |
+
|
| 638 |
+
# in general the number of edges of the kneser graph is equal to
|
| 639 |
+
# (n choose k) times (n-k choose k) divided by 2
|
| 640 |
+
assert nx.number_of_edges(nx.kneser_graph(8, 3)) == 280
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/test_cographs.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Unit tests for the :mod:`networkx.generators.cographs` module."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def test_random_cograph():
    """A random cograph on 2**n nodes has diameter at most 2 per component."""
    exponent = 3
    graph = nx.random_cograph(exponent)

    assert len(graph) == 2**exponent

    # Cographs contain no induced P4, so every connected piece is shallow.
    if nx.is_connected(graph):
        assert nx.diameter(graph) <= 2
    else:
        for component in nx.connected_components(graph):
            assert nx.diameter(graph.subgraph(component)) <= 2
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/test_community.py
ADDED
|
@@ -0,0 +1,362 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def test_random_partition_graph():
    """Edge counts of random partition graphs at the probability extremes."""
    # (p_in, p_out, directed, expected edge count, seed)
    cases = [
        (1, 0, False, 9, 42),
        (0, 1, False, 27, None),
        (1, 0, True, 18, None),
        (0, 1, True, 54, None),
    ]
    for p_in, p_out, directed, n_edges, seed in cases:
        graph = nx.random_partition_graph(
            [3, 3, 3], p_in, p_out, seed=seed, directed=directed
        )
        assert graph.graph["partition"] == [{0, 1, 2}, {3, 4, 5}, {6, 7, 8}]
        assert len(graph) == 9
        assert len(list(graph.edges())) == n_edges

    graph = nx.random_partition_graph([1, 2, 3, 4, 5], 0.5, 0.1)
    expected = [{0}, {1, 2}, {3, 4, 5}, {6, 7, 8, 9}, {10, 11, 12, 13, 14}]
    assert graph.graph["partition"] == expected
    assert len(graph) == 15

    # Probabilities outside [0, 1] must be rejected.
    make = nx.random_partition_graph
    for bad_in, bad_out in [(1.1, 0.1), (-0.1, 0.1), (0.1, 1.1), (0.1, -0.1)]:
        pytest.raises(nx.NetworkXError, make, [1, 2, 3], bad_in, bad_out)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def test_planted_partition_graph():
    """Planted partition graphs: sizes and edge counts at the extremes."""
    # (p_in, p_out, directed, expected edge count, seed)
    cases = [
        (1, 0, False, 12, 42),
        (0, 1, False, 54, None),
        (1, 0, True, 24, None),
        (0, 1, True, 108, None),
    ]
    for p_in, p_out, directed, n_edges, seed in cases:
        graph = nx.planted_partition_graph(
            4, 3, p_in, p_out, seed=seed, directed=directed
        )
        assert len(graph.graph["partition"]) == 4
        assert len(graph) == 12
        assert len(list(graph.edges())) == n_edges

    # Intermediate probabilities: only the partition structure is fixed.
    for directed in (False, True):
        graph = nx.planted_partition_graph(10, 4, 0.5, 0.1, seed=42, directed=directed)
        assert len(graph.graph["partition"]) == 10
        assert len(graph) == 40

    # Probabilities outside [0, 1] must be rejected.
    make = nx.planted_partition_graph
    for bad_in, bad_out in [(1.1, 0.1), (-0.1, 0.1), (0.1, 1.1), (0.1, -0.1)]:
        pytest.raises(nx.NetworkXError, make, 3, 3, bad_in, bad_out)
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def test_relaxed_caveman_graph():
    """A relaxed caveman graph keeps all l * k nodes for any rewiring p."""
    for p in (0, 1, 0.5):
        assert len(nx.relaxed_caveman_graph(4, 3, p)) == 12
    assert len(nx.relaxed_caveman_graph(4, 3, 0.5, seed=42)) == 12
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def test_connected_caveman_graph():
    """Connected caveman graphs: size, shape, and argument validation."""
    assert len(nx.connected_caveman_graph(4, 3)) == 12

    # A single clique of five is K5 with one edge rewired away.
    almost_k5 = nx.complete_graph(5)
    almost_k5.remove_edge(3, 4)
    assert nx.is_isomorphic(nx.connected_caveman_graph(1, 5), almost_k5)

    # Cliques need at least two nodes to be connectable into a ring.
    pytest.raises(nx.NetworkXError, nx.connected_caveman_graph, 4, 1)
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
def test_caveman_graph():
    """Caveman graphs: degenerate cases reduce to empty/complete graphs."""
    assert len(nx.caveman_graph(4, 3)) == 12

    # Five cliques of size one: no edges at all.
    assert nx.is_isomorphic(nx.caveman_graph(5, 1), nx.empty_graph(5))

    # One clique of size five: the complete graph.
    assert nx.is_isomorphic(nx.caveman_graph(1, 5), nx.complete_graph(5))
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def test_gaussian_random_partition_graph():
    """Gaussian random partition graphs: node count and graph class."""
    assert len(nx.gaussian_random_partition_graph(100, 10, 10, 0.3, 0.01)) == 100
    assert (
        len(nx.gaussian_random_partition_graph(100, 10, 10, 0.3, 0.01, directed=True))
        == 100
    )

    undirected = nx.gaussian_random_partition_graph(
        100, 10, 10, 0.3, 0.01, directed=False, seed=42
    )
    assert len(undirected) == 100
    assert not isinstance(undirected, nx.DiGraph)

    directed = nx.gaussian_random_partition_graph(
        100, 10, 10, 0.3, 0.01, directed=True, seed=42
    )
    assert len(directed) == 100
    assert isinstance(directed, nx.DiGraph)

    # A mean cluster size larger than n is invalid.
    pytest.raises(
        nx.NetworkXError, nx.gaussian_random_partition_graph, 100, 101, 10, 1, 0
    )
    # Tiny mean/variance can propose cluster sizes below one node.
    assert len(nx.gaussian_random_partition_graph(10, 0.5, 0.5, 0.5, 0.5, seed=1)) == 10
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def test_ring_of_cliques():
    """Ring-of-cliques node/edge counts plus argument validation."""
    for num_cliques in range(2, 20, 3):
        for clique_size in range(2, 20, 3):
            ring = nx.ring_of_cliques(num_cliques, clique_size)
            assert ring.number_of_nodes() == num_cliques * clique_size
            edges = num_cliques * (((clique_size * (clique_size - 1)) // 2) + 1)
            if num_cliques == 2 and clique_size == 1:
                # The connecting edge would duplicate the existing one.
                edges -= 1
            assert ring.number_of_edges() == edges
    with pytest.raises(
        nx.NetworkXError, match="A ring of cliques must have at least two cliques"
    ):
        nx.ring_of_cliques(1, 5)
    with pytest.raises(
        nx.NetworkXError, match="The cliques must have at least two nodes"
    ):
        nx.ring_of_cliques(3, 0)
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
def test_windmill_graph():
    """Windmill graphs: node/edge/degree counts plus argument validation."""
    for n_cliques in range(2, 20, 3):
        for clique_size in range(2, 20, 3):
            windmill = nx.windmill_graph(n_cliques, clique_size)
            assert windmill.number_of_nodes() == (clique_size - 1) * n_cliques + 1
            assert (
                windmill.number_of_edges()
                == n_cliques * clique_size * (clique_size - 1) / 2
            )
            # The hub touches everything; other nodes see only their clique.
            assert windmill.degree(0) == windmill.number_of_nodes() - 1
            for node in range(1, windmill.number_of_nodes()):
                assert windmill.degree(node) == clique_size - 1
    with pytest.raises(
        nx.NetworkXError, match="A windmill graph must have at least two cliques"
    ):
        nx.windmill_graph(1, 3)
    with pytest.raises(
        nx.NetworkXError, match="The cliques must have at least two nodes"
    ):
        nx.windmill_graph(3, 0)
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
def test_stochastic_block_model():
    """Stochastic block model: structure, validation, and keyword options."""
    sizes = [75, 75, 300]
    probs = [[0.25, 0.05, 0.02], [0.05, 0.35, 0.07], [0.02, 0.07, 0.40]]
    model = nx.stochastic_block_model(sizes, probs, seed=0)
    assert len(model.graph["partition"]) == 3
    assert len(model) == 450
    assert model.size() == 22160

    # An explicit node list of matching length yields the same node set.
    other = nx.stochastic_block_model(sizes, probs, range(450), seed=0)
    assert model.nodes == other.nodes

    # Invalid probabilities, shapes, and node lists all raise.
    sbm = nx.stochastic_block_model
    badnodelist = list(range(400))  # too short for the block sizes
    badprobs1 = [[0.25, 0.05, 1.02], [0.05, 0.35, 0.07], [0.02, 0.07, 0.40]]
    badprobs2 = [[0.25, 0.05, 0.02], [0.05, -0.35, 0.07], [0.02, 0.07, 0.40]]
    probs_rect1 = [[0.25, 0.05, 0.02], [0.05, -0.35, 0.07]]
    probs_rect2 = [[0.25, 0.05], [0.05, -0.35], [0.02, 0.07]]
    asymprobs = [[0.25, 0.05, 0.01], [0.05, -0.35, 0.07], [0.02, 0.07, 0.40]]
    pytest.raises(nx.NetworkXException, sbm, sizes, badprobs1)
    pytest.raises(nx.NetworkXException, sbm, sizes, badprobs2)
    pytest.raises(nx.NetworkXException, sbm, sizes, probs_rect1, directed=True)
    pytest.raises(nx.NetworkXException, sbm, sizes, probs_rect2, directed=True)
    pytest.raises(nx.NetworkXException, sbm, sizes, asymprobs, directed=False)
    pytest.raises(nx.NetworkXException, sbm, sizes, probs, badnodelist)
    # Repeated node names in the node list are also rejected.
    pytest.raises(nx.NetworkXException, sbm, sizes, probs, [0] + list(range(449)))

    # Extra keyword arguments do not change the node set.
    other = nx.stochastic_block_model(sizes, probs, seed=0, selfloops=True)
    assert model.nodes == other.nodes
    other = nx.stochastic_block_model(sizes, probs, selfloops=True, directed=True)
    assert model.nodes == other.nodes
    other = nx.stochastic_block_model(sizes, probs, seed=0, sparse=False)
    assert model.nodes == other.nodes
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
def test_generator():
    """LFR benchmark: the generated communities partition the node set."""
    graph = nx.LFR_benchmark_graph(
        250, 3, 1.5, 0.1, average_degree=5, min_community=20, seed=10
    )
    assert len(graph) == 250
    communities = {frozenset(graph.nodes[v]["community"]) for v in graph}
    assert nx.community.is_partition(graph.nodes(), communities)
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
def test_invalid_tau1():
    """A tau2 of exactly one is rejected (it must exceed one)."""
    with pytest.raises(nx.NetworkXError, match="tau2 must be greater than one"):
        nx.LFR_benchmark_graph(100, 2, 1, 0.1, min_degree=2)
|
| 243 |
+
|
| 244 |
+
|
| 245 |
+
def test_invalid_tau2():
    """A tau1 of exactly one is rejected (it must exceed one)."""
    with pytest.raises(nx.NetworkXError, match="tau1 must be greater than one"):
        nx.LFR_benchmark_graph(100, 1, 2, 0.1, min_degree=2)
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
def test_mu_too_large():
    """A mixing parameter above one is rejected."""
    with pytest.raises(nx.NetworkXError, match="mu must be in the interval \\[0, 1\\]"):
        nx.LFR_benchmark_graph(100, 2, 2, 1.1, min_degree=2)
|
| 261 |
+
|
| 262 |
+
|
| 263 |
+
def test_mu_too_small():
    """A negative mixing parameter is rejected."""
    with pytest.raises(nx.NetworkXError, match="mu must be in the interval \\[0, 1\\]"):
        nx.LFR_benchmark_graph(100, 2, 2, -1, min_degree=2)
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
def test_both_degrees_none():
    """Omitting both min_degree and average_degree is rejected."""
    with pytest.raises(
        nx.NetworkXError,
        match="Must assign exactly one of min_degree and average_degree",
    ):
        nx.LFR_benchmark_graph(100, 2, 2, 1)
|
| 282 |
+
|
| 283 |
+
|
| 284 |
+
def test_neither_degrees_none():
    """Supplying both min_degree and average_degree is rejected."""
    with pytest.raises(
        nx.NetworkXError,
        match="Must assign exactly one of min_degree and average_degree",
    ):
        nx.LFR_benchmark_graph(100, 2, 2, 1, min_degree=2, average_degree=5)
|
| 294 |
+
|
| 295 |
+
|
| 296 |
+
def test_max_iters_exceeded():
    """Community-assignment failure surfaces as ExceededMaxIterations."""
    with pytest.raises(
        nx.ExceededMaxIterations,
        match="Could not assign communities; try increasing min_community",
    ):
        nx.LFR_benchmark_graph(10, 2, 2, 0.1, min_degree=2, max_iters=10, seed=1)
|
| 306 |
+
|
| 307 |
+
|
| 308 |
+
def test_max_deg_out_of_range():
    """A max_degree larger than n is rejected."""
    with pytest.raises(
        nx.NetworkXError, match="max_degree must be in the interval \\(0, n\\]"
    ):
        n = 10
        nx.LFR_benchmark_graph(n, 2, 2, 0.1, max_degree=n + 1, max_iters=10, seed=1)
|
| 319 |
+
|
| 320 |
+
|
| 321 |
+
def test_max_community():
    """LFR benchmark with explicit community-size bounds still partitions."""
    graph = nx.LFR_benchmark_graph(
        250,
        3,
        1.5,
        0.1,
        average_degree=5,
        max_degree=100,
        min_community=50,
        max_community=200,
        seed=10,
    )
    assert len(graph) == 250
    communities = {frozenset(graph.nodes[v]["community"]) for v in graph}
    assert nx.community.is_partition(graph.nodes(), communities)
|
| 340 |
+
|
| 341 |
+
|
| 342 |
+
def test_powerlaw_iterations_exceeded():
    """With max_iters=0 the power-law sequence generation fails loudly."""
    with pytest.raises(
        nx.ExceededMaxIterations, match="Could not create power law sequence"
    ):
        nx.LFR_benchmark_graph(100, 2, 2, 1, min_degree=2, max_iters=0)
|
| 351 |
+
|
| 352 |
+
|
| 353 |
+
def test_no_scipy_zeta():
    """The pure-Python Hurwitz zeta fallback approximates zeta(2)."""
    zeta2 = 1.6449340668482264  # pi**2 / 6
    assert abs(zeta2 - nx.generators.community._hurwitz_zeta(2, 1, 0.0001)) < 0.01
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
def test_generate_min_degree_itr():
    """_generate_min_degree raises when it cannot match average_degree."""
    with pytest.raises(
        nx.ExceededMaxIterations, match="Could not match average_degree"
    ):
        nx.generators.community._generate_min_degree(2, 2, 1, 0.01, 0)
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/test_degree_seq.py
ADDED
|
@@ -0,0 +1,230 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class TestConfigurationModel:
    """Unit tests for the :func:`~networkx.configuration_model` function."""

    def test_empty_degree_sequence(self):
        """An empty degree sequence yields the null graph."""
        assert len(nx.configuration_model([])) == 0

    def test_degree_zero(self):
        """An all-zero degree sequence yields an edgeless graph."""
        graph = nx.configuration_model([0, 0, 0])
        assert len(graph) == 3
        assert graph.number_of_edges() == 0

    def test_degree_sequence(self):
        """The generated graph realizes the requested degree sequence."""
        deg_seq = [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1]
        graph = nx.configuration_model(deg_seq, seed=12345678)
        expected = sorted(deg_seq, reverse=True)
        assert sorted((d for n, d in graph.degree()), reverse=True) == expected
        assert (
            sorted((d for n, d in graph.degree(range(len(deg_seq)))), reverse=True)
            == expected
        )

    def test_random_seed(self):
        """Identical seeds reproduce identical graphs."""
        deg_seq = [3] * 12
        for seed in (1000, 10):
            first = nx.configuration_model(deg_seq, seed=seed)
            second = nx.configuration_model(deg_seq, seed=seed)
            assert nx.is_isomorphic(first, second)

    def test_directed_disallowed(self):
        """A directed create_using graph is rejected."""
        with pytest.raises(nx.NetworkXNotImplemented):
            nx.configuration_model([], create_using=nx.DiGraph())

    def test_odd_degree_sum(self):
        """A degree sequence whose sum is odd is rejected."""
        with pytest.raises(nx.NetworkXError):
            nx.configuration_model([1, 2])
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def test_directed_configuration_raise_unequal():
    """In/out degree sequences with different sums are rejected."""
    with pytest.raises(nx.NetworkXError):
        nx.directed_configuration_model(
            [5, 3, 3, 3, 3, 2, 2, 2, 1, 1], [5, 3, 3, 3, 3, 2, 2, 2, 1, 2]
        )
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def test_directed_configuration_model():
    """Empty degree sequences yield the empty directed multigraph."""
    assert len(nx.directed_configuration_model([], [], seed=0)) == 0
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
def test_simple_directed_configuration_model():
    """A minimal two-node in/out degree pair yields two nodes."""
    assert len(nx.directed_configuration_model([1, 1], [1, 1], seed=0)) == 2
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def test_expected_degree_graph_empty():
    """An empty weight sequence yields a graph with no degrees."""
    assert dict(nx.expected_degree_graph([]).degree()) == {}
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def test_expected_degree_graph():
    """A fixed seed reproduces the same expected-degree graph."""
    weights = [3] * 12
    first = nx.expected_degree_graph(weights, seed=1000)
    assert len(first) == 12

    assert nx.is_isomorphic(first, nx.expected_degree_graph(weights, seed=1000))

    assert nx.is_isomorphic(
        nx.expected_degree_graph(weights, seed=10),
        nx.expected_degree_graph(weights, seed=10),
    )
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def test_expected_degree_graph_selfloops():
    """selfloops=False remains deterministic under a fixed seed."""
    weights = [3] * 12
    first = nx.expected_degree_graph(weights, seed=1000, selfloops=False)
    second = nx.expected_degree_graph(weights, seed=1000, selfloops=False)
    assert nx.is_isomorphic(first, second)
    assert len(first) == 12
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def test_expected_degree_graph_skew():
    """A heavily skewed weight sequence is handled deterministically."""
    weights = [10, 2, 2, 2, 2]
    first = nx.expected_degree_graph(weights, seed=1000)
    second = nx.expected_degree_graph(weights, seed=1000)
    assert nx.is_isomorphic(first, second)
    assert len(first) == 5
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def test_havel_hakimi_construction():
    """Havel-Hakimi: valid sequences build, invalid ones raise."""
    assert len(nx.havel_hakimi_graph([])) == 0

    # Non-graphical or non-integer entries are rejected.
    pytest.raises(
        nx.NetworkXError, nx.havel_hakimi_graph, [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1]
    )
    pytest.raises(
        nx.NetworkXError, nx.havel_hakimi_graph, ["A", 3, 3, 3, 3, 2, 2, 2, 1, 1, 1]
    )

    z = [5, 4, 3, 3, 3, 2, 2, 2]
    G = nx.havel_hakimi_graph(z)
    G = nx.configuration_model(z)
    pytest.raises(nx.NetworkXError, nx.havel_hakimi_graph, [6, 5, 4, 4, 2, 1, 1, 1])

    z = [10, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2]
    G = nx.havel_hakimi_graph(z)
    # Directed output is unsupported by this generator.
    pytest.raises(nx.NetworkXError, nx.havel_hakimi_graph, z, create_using=nx.DiGraph())
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
def test_directed_havel_hakimi():
    """Directed Havel-Hakimi across valid and invalid sequence pairs."""
    # Degree sequences realized by random digraphs are reproduced exactly.
    n, rounds = 100, 10
    step = 1.0 / rounds
    for i in range(rounds):
        source = nx.erdos_renyi_graph(n, step * (i + 1), None, True)
        din = [d for _, d in source.in_degree()]
        dout = [d for _, d in source.out_degree()]
        rebuilt = nx.directed_havel_hakimi_graph(din, dout)
        assert sorted(din) == sorted(d for _, d in rebuilt.in_degree())
        assert sorted(dout) == sorted(d for _, d in rebuilt.out_degree())

    # A non-graphical pair raises.
    dout = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1]
    din = [103, 102, 102, 102, 102, 102, 102, 102, 102, 102]
    pytest.raises(nx.exception.NetworkXError, nx.directed_havel_hakimi_graph, din, dout)

    # A valid pair round-trips through the generator.
    dout = [1, 1, 1, 1, 1, 2, 2, 2, 3, 4]
    din = [2, 2, 2, 2, 2, 2, 2, 2, 0, 2]
    rebuilt = nx.directed_havel_hakimi_graph(din, dout)
    assert sorted(dout) == sorted(d for _, d in rebuilt.out_degree())
    assert sorted(din) == sorted(d for _, d in rebuilt.in_degree())

    # Unequal degree sums raise.
    din = [2, 2, 2, 2, 2, 2, 2, 2, 2, 2]
    pytest.raises(nx.exception.NetworkXError, nx.directed_havel_hakimi_graph, din, dout)
    # Negative degrees raise.
    din = [2, 2, 2, 2, 2, 2, 2, 2, 2, 2, -2]
    pytest.raises(nx.exception.NetworkXError, nx.directed_havel_hakimi_graph, din, dout)
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
def test_degree_sequence_tree():
    """Tree-realizable sequences build trees; others raise."""
    z = [1, 1, 1, 1, 1, 2, 2, 2, 3, 4]
    tree = nx.degree_sequence_tree(z)
    assert len(tree) == len(z)
    assert len(list(tree.edges())) == sum(z) / 2

    # Directed output is unsupported.
    pytest.raises(
        nx.NetworkXError, nx.degree_sequence_tree, z, create_using=nx.DiGraph()
    )

    # A sequence whose sum is not 2 * (n - 1) cannot form a tree.
    pytest.raises(
        nx.NetworkXError, nx.degree_sequence_tree, [1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4]
    )
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
def test_random_degree_sequence_graph():
    """The sampled graph realizes the requested degree sequence."""
    degrees = [1, 2, 2, 3]
    graph = nx.random_degree_sequence_graph(degrees, seed=42)
    assert degrees == sorted(d for n, d in graph.degree())
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
def test_random_degree_sequence_graph_raise():
    """A non-graphical degree sequence raises NetworkXUnfeasible."""
    pytest.raises(
        nx.NetworkXUnfeasible,
        nx.random_degree_sequence_graph,
        [1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4],
    )
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
def test_random_degree_sequence_large():
    """The sampler reproduces the degree sequence of a larger random graph.

    The previous version built *generators* for the degree sequences.  The
    first one was fully consumed inside ``random_degree_sequence_graph``
    (via ``nx.is_graphical`` and ``list(...)``), so ``G2`` was built from an
    empty sequence and the final assertion vacuously compared two empty
    lists.  Materializing the sequences as lists makes the check real.
    """
    G1 = nx.fast_gnp_random_graph(100, 0.1, seed=42)
    # Lists, not generators: d1 is both consumed by the generator and
    # compared afterwards, so it must be reusable.
    d1 = [d for n, d in G1.degree()]
    G2 = nx.random_degree_sequence_graph(d1, seed=42)
    d2 = [d for n, d in G2.degree()]
    assert sorted(d1) == sorted(d2)
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/test_directed.py
ADDED
|
@@ -0,0 +1,163 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Generators - Directed Graphs
|
| 2 |
+
----------------------------
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
import networkx as nx
|
| 8 |
+
from networkx.classes import Graph, MultiDiGraph
|
| 9 |
+
from networkx.generators.directed import (
|
| 10 |
+
gn_graph,
|
| 11 |
+
gnc_graph,
|
| 12 |
+
gnr_graph,
|
| 13 |
+
random_k_out_graph,
|
| 14 |
+
random_uniform_k_out_graph,
|
| 15 |
+
scale_free_graph,
|
| 16 |
+
)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class TestGeneratorsDirected:
    """Smoke and keyword-argument tests for the directed graph generators."""

    def test_smoke_test_random_graphs(self):
        """Each generator runs with and without an explicit seed."""
        for seed in (None, 42):
            gn_graph(100, seed=seed)
            gnr_graph(100, 0.5, seed=seed)
            gnc_graph(100, seed=seed)
            scale_free_graph(100, seed=seed)

    def test_create_using_keyword_arguments(self):
        """create_using must be directed; multigraphs match simple graphs."""
        pytest.raises(nx.NetworkXError, gn_graph, 100, create_using=Graph())
        pytest.raises(nx.NetworkXError, gnr_graph, 100, 0.5, create_using=Graph())
        pytest.raises(nx.NetworkXError, gnc_graph, 100, create_using=Graph())
        # Under one seed, the multigraph variant has the same edge set.
        assert sorted(gn_graph(100, seed=1).edges()) == sorted(
            gn_graph(100, create_using=MultiDiGraph(), seed=1).edges()
        )
        assert sorted(gnr_graph(100, 0.5, seed=1).edges()) == sorted(
            gnr_graph(100, 0.5, create_using=MultiDiGraph(), seed=1).edges()
        )
        assert sorted(gnc_graph(100, seed=1).edges()) == sorted(
            gnc_graph(100, create_using=MultiDiGraph(), seed=1).edges()
        )

        # scale_free_graph accepts a full parameter set and an initial graph,
        # but rejects probabilities that are negative or do not sum to one.
        scale_free_graph(
            100,
            alpha=0.3,
            beta=0.4,
            gamma=0.3,
            delta_in=0.3,
            delta_out=0.1,
            initial_graph=nx.cycle_graph(4, create_using=MultiDiGraph),
            seed=1,
        )
        pytest.raises(ValueError, scale_free_graph, 100, 0.5, 0.4, 0.3)
        pytest.raises(ValueError, scale_free_graph, 100, alpha=-0.3)
        pytest.raises(ValueError, scale_free_graph, 100, beta=-0.3)
        pytest.raises(ValueError, scale_free_graph, 100, gamma=-0.3)

    def test_parameters(self):
        """One-node generators all produce the single-node digraph."""
        singleton = nx.DiGraph()
        singleton.add_node(0)

        def kernel(x):
            return x

        assert nx.is_isomorphic(gn_graph(1), singleton)
        assert nx.is_isomorphic(gn_graph(1, kernel=kernel), singleton)
        assert nx.is_isomorphic(gnc_graph(1), singleton)
        assert nx.is_isomorphic(gnr_graph(1, 0.5), singleton)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def test_scale_free_graph_negative_delta():
    """Negative delta parameters are rejected with clear messages."""
    with pytest.raises(ValueError, match="delta_in must be >= 0."):
        scale_free_graph(10, delta_in=-1)
    with pytest.raises(ValueError, match="delta_out must be >= 0."):
        scale_free_graph(10, delta_out=-1)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def test_non_numeric_ordering():
    """Non-integer node labels in the initial graph are accepted."""
    seed_graph = MultiDiGraph([("a", "b"), ("b", "c"), ("c", "a")])
    result = scale_free_graph(3, initial_graph=seed_graph)
    assert len(result) == 3
    assert len(result.edges) == 3
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
@pytest.mark.parametrize("ig", (nx.Graph(), nx.DiGraph([(0, 1)])))
def test_scale_free_graph_initial_graph_kwarg(ig):
    """Initial graphs of the wrong type or shape are rejected."""
    with pytest.raises(nx.NetworkXError):
        scale_free_graph(100, initial_graph=ig)
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
class TestRandomKOutGraph:
    """Unit tests for :func:`~networkx.generators.directed.random_k_out_graph`."""

    def test_regularity(self):
        """Every node has out-degree exactly k, seeded or not."""
        n, k, alpha = 10, 3, 1
        for seed in (None, 42):
            graph = random_k_out_graph(n, k, alpha, seed=seed)
            assert all(deg == k for _, deg in graph.out_degree())

    def test_no_self_loops(self):
        """self_loops=False produces a graph without self-loops."""
        graph = random_k_out_graph(10, 3, 1, self_loops=False)
        assert nx.number_of_selfloops(graph) == 0

    def test_negative_alpha(self):
        """A non-positive alpha is rejected."""
        with pytest.raises(ValueError, match="alpha must be positive"):
            random_k_out_graph(10, 3, -1)
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
class TestUniformRandomKOutGraph:
    """Unit tests for the
    :func:`~networkx.generators.directed.random_uniform_k_out_graph`
    function.

    """

    @staticmethod
    def _is_k_out_regular(G, k):
        # True iff every node has out-degree exactly ``k``.
        return all(deg == k for _, deg in G.out_degree())

    def test_regularity(self):
        """Every node of the generated graph must have out-degree `k`."""
        n, k = 10, 3
        assert self._is_k_out_regular(random_uniform_k_out_graph(n, k), k)
        assert self._is_k_out_regular(random_uniform_k_out_graph(n, k, seed=42), k)

    def test_no_self_loops(self):
        """``self_loops=False`` yields a loop-free, k-out-regular graph."""
        n, k = 10, 3
        G = random_uniform_k_out_graph(n, k, self_loops=False)
        assert nx.number_of_selfloops(G) == 0
        assert self._is_k_out_regular(G, k)

    def test_with_replacement(self):
        """Sampling with replacement yields a multigraph; the extreme
        ``k = n - 1`` case without replacement or self-loops also works."""
        G = random_uniform_k_out_graph(10, 3, with_replacement=True)
        assert G.is_multigraph()
        assert self._is_k_out_regular(G, 3)
        G = random_uniform_k_out_graph(10, 9, with_replacement=False, self_loops=False)
        assert nx.number_of_selfloops(G) == 0
        assert self._is_k_out_regular(G, 9)

    def test_without_replacement(self):
        """Sampling without replacement yields a simple (non-multi) digraph."""
        G = random_uniform_k_out_graph(10, 3, with_replacement=False)
        assert not G.is_multigraph()
        assert self._is_k_out_regular(G, 3)
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/test_duplication.py
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Unit tests for the :mod:`networkx.generators.duplication` module."""
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class TestDuplicationDivergenceGraph:
    """Unit tests for the
    :func:`networkx.generators.duplication.duplication_divergence_graph`
    function.

    """

    def test_final_size(self):
        """The generated graph has exactly ``n`` nodes, seeded or not."""
        for kwargs in ({}, {"seed": 42}):
            G = nx.duplication_divergence_graph(3, p=1, **kwargs)
            assert len(G) == 3

    def test_probability_too_large(self):
        """``p`` greater than 1 is rejected."""
        pytest.raises(nx.NetworkXError, nx.duplication_divergence_graph, 3, p=2)

    def test_probability_too_small(self):
        """``p`` less than 0 is rejected."""
        pytest.raises(nx.NetworkXError, nx.duplication_divergence_graph, 3, p=-1)

    def test_non_extreme_probability_value(self):
        """A fixed seed must reproduce a known degree sequence for 0 < p < 1."""
        G = nx.duplication_divergence_graph(6, p=0.3, seed=42)
        assert len(G) == 6
        expected_degrees = [(0, 2), (1, 3), (2, 2), (3, 3), (4, 1), (5, 1)]
        assert list(G.degree()) == expected_degrees

    def test_minimum_desired_nodes(self):
        """``n`` smaller than 2 is rejected with an explanatory message."""
        with pytest.raises(
            nx.NetworkXError, match=".*n must be greater than or equal to 2"
        ):
            nx.duplication_divergence_graph(1, p=1)

    def test_create_using(self):
        """``create_using`` is honored for undirected types and rejected for directed."""

        class DummyGraph(nx.Graph):
            pass

        class DummyDiGraph(nx.DiGraph):
            pass

        G = nx.duplication_divergence_graph(6, 0.3, seed=42, create_using=DummyGraph)
        assert isinstance(G, DummyGraph)
        with pytest.raises(nx.NetworkXError, match="create_using must not be directed"):
            nx.duplication_divergence_graph(6, 0.3, seed=42, create_using=DummyDiGraph)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class TestPartialDuplicationGraph:
    """Unit tests for the
    :func:`networkx.generators.duplication.partial_duplication_graph`
    function.

    """

    def test_final_size(self):
        """The generated graph has exactly ``N`` nodes when ``N > n``."""
        N = 10
        n = 5
        p = 0.5
        q = 0.5
        G = nx.partial_duplication_graph(N, n, p, q)
        assert len(G) == N
        G = nx.partial_duplication_graph(N, n, p, q, seed=42)
        assert len(G) == N

    def test_initial_clique_size(self):
        """When ``N == n`` the result is just the initial clique of ``n`` nodes."""
        N = 10
        n = 10
        p = 0.5
        q = 0.5
        G = nx.partial_duplication_graph(N, n, p, q)
        assert len(G) == n

    def test_invalid_initial_size(self):
        """An initial clique size ``n`` larger than the total size ``N`` is rejected."""
        # Fix: previously the result was bound to an unused local ``G`` inside
        # the ``raises`` block (dead assignment, flake8 F841) — the exception
        # means the assignment could never execute. Call directly instead.
        with pytest.raises(nx.NetworkXError):
            nx.partial_duplication_graph(5, 10, 0.5, 0.5)

    def test_invalid_probabilities(self):
        """``p`` and ``q`` outside the interval [0, 1] raise NetworkXError."""
        N = 1
        n = 1
        for p, q in [(0.5, 2), (0.5, -1), (2, 0.5), (-1, 0.5)]:
            args = (N, n, p, q)
            pytest.raises(nx.NetworkXError, nx.partial_duplication_graph, *args)

    def test_create_using(self):
        """``create_using`` is honored for undirected types and rejected for directed."""

        class DummyGraph(nx.Graph):
            pass

        class DummyDiGraph(nx.DiGraph):
            pass

        G = nx.partial_duplication_graph(10, 5, 0.5, 0.5, create_using=DummyGraph)
        assert isinstance(G, DummyGraph)
        with pytest.raises(nx.NetworkXError, match="create_using must not be directed"):
            nx.partial_duplication_graph(10, 5, 0.5, 0.5, create_using=DummyDiGraph)
|
minigpt2/lib/python3.10/site-packages/networkx/generators/tests/test_ego.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
ego graph
|
| 3 |
+
---------
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import networkx as nx
|
| 7 |
+
from networkx.utils import edges_equal, nodes_equal
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class TestGeneratorEgo:
    def test_ego(self):
        """Radius-1 ego graphs on star and path graphs, with and without center."""
        star = nx.star_graph(3)
        ego = nx.ego_graph(star, 0)
        assert nx.is_isomorphic(star, ego)
        # Nodes two hops from the hub must be excluded at the default radius.
        star.add_edges_from([(1, 11), (2, 22), (3, 33)])
        ego = nx.ego_graph(star, 0)
        assert nx.is_isomorphic(nx.star_graph(3), ego)
        path = nx.path_graph(3)
        ego = nx.ego_graph(path, 0)
        assert edges_equal(ego.edges(), [(0, 1)])
        ego = nx.ego_graph(path, 0, undirected=True)
        assert edges_equal(ego.edges(), [(0, 1)])
        ego = nx.ego_graph(path, 0, center=False)
        assert edges_equal(ego.edges(), [])

    def test_ego_distance(self):
        """With ``distance=`` the radius is measured in that edge attribute."""
        G = nx.Graph()
        for u, v, w, d in [(0, 1, 2, 1), (1, 2, 2, 2), (2, 3, 2, 1)]:
            G.add_edge(u, v, weight=w, distance=d)
        assert nodes_equal(nx.ego_graph(G, 0, radius=3).nodes(), [0, 1, 2, 3])
        eg = nx.ego_graph(G, 0, radius=3, distance="weight")
        assert nodes_equal(eg.nodes(), [0, 1])
        eg = nx.ego_graph(G, 0, radius=3, distance="weight", undirected=True)
        assert nodes_equal(eg.nodes(), [0, 1])
        eg = nx.ego_graph(G, 0, radius=3, distance="distance")
        assert nodes_equal(eg.nodes(), [0, 1, 2])
|