Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__init__.py +0 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/__init__.cpython-310.pyc +0 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_all_random_functions.cpython-310.pyc +0 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_convert.cpython-310.pyc +0 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_convert_numpy.cpython-310.pyc +0 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_convert_pandas.cpython-310.pyc +0 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_convert_scipy.cpython-310.pyc +0 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_exceptions.cpython-310.pyc +0 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_import.cpython-310.pyc +0 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_lazy_imports.cpython-310.pyc +0 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_relabel.cpython-310.pyc +0 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/test_all_random_functions.py +250 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/test_convert.py +321 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/test_convert_numpy.py +532 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/test_convert_pandas.py +349 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/test_convert_scipy.py +282 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/test_exceptions.py +40 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/test_import.py +11 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/tests/test_lazy_imports.py +97 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/utils/__init__.py +8 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/utils/backends.py +0 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/utils/configs.py +387 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/utils/decorators.py +1237 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/utils/heaps.py +340 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/utils/mapped_queue.py +297 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/utils/misc.py +653 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/utils/random_sequence.py +164 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/utils/rcm.py +159 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/utils/tests/__init__.py +0 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/utils/tests/test__init.py +11 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/utils/tests/test_config.py +231 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/utils/tests/test_unionfind.py +55 -0
- infer_4_37_2/lib/python3.10/site-packages/networkx/utils/union_find.py +106 -0
- janus/lib/python3.10/site-packages/timm/layers/__pycache__/__init__.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/timm/layers/__pycache__/activations.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/timm/layers/__pycache__/attention2d.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/timm/layers/__pycache__/attention_pool.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/timm/layers/__pycache__/config.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/timm/layers/__pycache__/create_norm.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/timm/layers/__pycache__/create_norm_act.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/timm/layers/__pycache__/drop.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/timm/layers/__pycache__/evo_norm.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/timm/layers/__pycache__/gather_excite.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/timm/layers/__pycache__/global_context.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/timm/layers/__pycache__/halo_attn.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/timm/layers/__pycache__/helpers.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/timm/layers/__pycache__/inplace_abn.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/timm/layers/__pycache__/layer_scale.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/timm/layers/__pycache__/linear.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/timm/layers/__pycache__/norm_act.cpython-310.pyc +0 -0
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__init__.py
ADDED
|
File without changes
|
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (172 Bytes). View file
|
|
|
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_all_random_functions.cpython-310.pyc
ADDED
|
Binary file (7.18 kB). View file
|
|
|
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_convert.cpython-310.pyc
ADDED
|
Binary file (10.6 kB). View file
|
|
|
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_convert_numpy.cpython-310.pyc
ADDED
|
Binary file (18.4 kB). View file
|
|
|
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_convert_pandas.cpython-310.pyc
ADDED
|
Binary file (12.5 kB). View file
|
|
|
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_convert_scipy.cpython-310.pyc
ADDED
|
Binary file (10.3 kB). View file
|
|
|
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_exceptions.cpython-310.pyc
ADDED
|
Binary file (1.34 kB). View file
|
|
|
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_import.cpython-310.pyc
ADDED
|
Binary file (617 Bytes). View file
|
|
|
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_lazy_imports.cpython-310.pyc
ADDED
|
Binary file (2.14 kB). View file
|
|
|
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/__pycache__/test_relabel.cpython-310.pyc
ADDED
|
Binary file (15 kB). View file
|
|
|
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/test_all_random_functions.py
ADDED
|
@@ -0,0 +1,250 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
np = pytest.importorskip("numpy")
|
| 4 |
+
import random
|
| 5 |
+
|
| 6 |
+
import networkx as nx
|
| 7 |
+
from networkx.algorithms import approximation as approx
|
| 8 |
+
from networkx.algorithms import threshold
|
| 9 |
+
|
| 10 |
+
progress = 0
|
| 11 |
+
|
| 12 |
+
# store the random numbers after setting a global seed
|
| 13 |
+
np.random.seed(42)
|
| 14 |
+
np_rv = np.random.rand()
|
| 15 |
+
random.seed(42)
|
| 16 |
+
py_rv = random.random()
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def t(f, *args, **kwds):
|
| 20 |
+
"""call one function and check if global RNG changed"""
|
| 21 |
+
global progress
|
| 22 |
+
progress += 1
|
| 23 |
+
print(progress, ",", end="")
|
| 24 |
+
|
| 25 |
+
f(*args, **kwds)
|
| 26 |
+
|
| 27 |
+
after_np_rv = np.random.rand()
|
| 28 |
+
# if np_rv != after_np_rv:
|
| 29 |
+
# print(np_rv, after_np_rv, "don't match np!")
|
| 30 |
+
assert np_rv == after_np_rv
|
| 31 |
+
np.random.seed(42)
|
| 32 |
+
|
| 33 |
+
after_py_rv = random.random()
|
| 34 |
+
# if py_rv != after_py_rv:
|
| 35 |
+
# print(py_rv, after_py_rv, "don't match py!")
|
| 36 |
+
assert py_rv == after_py_rv
|
| 37 |
+
random.seed(42)
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def run_all_random_functions(seed):
|
| 41 |
+
n = 20
|
| 42 |
+
m = 10
|
| 43 |
+
k = l = 2
|
| 44 |
+
s = v = 10
|
| 45 |
+
p = q = p1 = p2 = p_in = p_out = 0.4
|
| 46 |
+
alpha = radius = theta = 0.75
|
| 47 |
+
sizes = (20, 20, 10)
|
| 48 |
+
colors = [1, 2, 3]
|
| 49 |
+
G = nx.barbell_graph(12, 20)
|
| 50 |
+
H = nx.cycle_graph(3)
|
| 51 |
+
H.add_weighted_edges_from((u, v, 0.2) for u, v in H.edges)
|
| 52 |
+
deg_sequence = [3, 2, 1, 3, 2, 1, 3, 2, 1, 2, 1, 2, 1]
|
| 53 |
+
in_degree_sequence = w = sequence = aseq = bseq = deg_sequence
|
| 54 |
+
|
| 55 |
+
# print("starting...")
|
| 56 |
+
t(nx.maximal_independent_set, G, seed=seed)
|
| 57 |
+
t(nx.rich_club_coefficient, G, seed=seed, normalized=False)
|
| 58 |
+
t(nx.random_reference, G, seed=seed)
|
| 59 |
+
t(nx.lattice_reference, G, seed=seed)
|
| 60 |
+
t(nx.sigma, G, 1, 2, seed=seed)
|
| 61 |
+
t(nx.omega, G, 1, 2, seed=seed)
|
| 62 |
+
# print("out of smallworld.py")
|
| 63 |
+
t(nx.double_edge_swap, G, seed=seed)
|
| 64 |
+
# print("starting connected_double_edge_swap")
|
| 65 |
+
t(nx.connected_double_edge_swap, nx.complete_graph(9), seed=seed)
|
| 66 |
+
# print("ending connected_double_edge_swap")
|
| 67 |
+
t(nx.random_layout, G, seed=seed)
|
| 68 |
+
t(nx.fruchterman_reingold_layout, G, seed=seed)
|
| 69 |
+
t(nx.algebraic_connectivity, G, seed=seed)
|
| 70 |
+
t(nx.fiedler_vector, G, seed=seed)
|
| 71 |
+
t(nx.spectral_ordering, G, seed=seed)
|
| 72 |
+
# print('starting average_clustering')
|
| 73 |
+
t(approx.average_clustering, G, seed=seed)
|
| 74 |
+
t(approx.simulated_annealing_tsp, H, "greedy", source=1, seed=seed)
|
| 75 |
+
t(approx.threshold_accepting_tsp, H, "greedy", source=1, seed=seed)
|
| 76 |
+
t(
|
| 77 |
+
approx.traveling_salesman_problem,
|
| 78 |
+
H,
|
| 79 |
+
method=lambda G, weight: approx.simulated_annealing_tsp(
|
| 80 |
+
G, "greedy", weight, seed=seed
|
| 81 |
+
),
|
| 82 |
+
)
|
| 83 |
+
t(
|
| 84 |
+
approx.traveling_salesman_problem,
|
| 85 |
+
H,
|
| 86 |
+
method=lambda G, weight: approx.threshold_accepting_tsp(
|
| 87 |
+
G, "greedy", weight, seed=seed
|
| 88 |
+
),
|
| 89 |
+
)
|
| 90 |
+
t(nx.betweenness_centrality, G, seed=seed)
|
| 91 |
+
t(nx.edge_betweenness_centrality, G, seed=seed)
|
| 92 |
+
t(nx.approximate_current_flow_betweenness_centrality, G, seed=seed)
|
| 93 |
+
# print("kernighan")
|
| 94 |
+
t(nx.algorithms.community.kernighan_lin_bisection, G, seed=seed)
|
| 95 |
+
# nx.algorithms.community.asyn_lpa_communities(G, seed=seed)
|
| 96 |
+
t(nx.algorithms.tree.greedy_branching, G, seed=seed)
|
| 97 |
+
# print('done with graph argument functions')
|
| 98 |
+
|
| 99 |
+
t(nx.spectral_graph_forge, G, alpha, seed=seed)
|
| 100 |
+
t(nx.algorithms.community.asyn_fluidc, G, k, max_iter=1, seed=seed)
|
| 101 |
+
t(
|
| 102 |
+
nx.algorithms.connectivity.edge_augmentation.greedy_k_edge_augmentation,
|
| 103 |
+
G,
|
| 104 |
+
k,
|
| 105 |
+
seed=seed,
|
| 106 |
+
)
|
| 107 |
+
t(nx.algorithms.coloring.strategy_random_sequential, G, colors, seed=seed)
|
| 108 |
+
|
| 109 |
+
cs = ["d", "i", "i", "d", "d", "i"]
|
| 110 |
+
t(threshold.swap_d, cs, seed=seed)
|
| 111 |
+
t(nx.configuration_model, deg_sequence, seed=seed)
|
| 112 |
+
t(
|
| 113 |
+
nx.directed_configuration_model,
|
| 114 |
+
in_degree_sequence,
|
| 115 |
+
in_degree_sequence,
|
| 116 |
+
seed=seed,
|
| 117 |
+
)
|
| 118 |
+
t(nx.expected_degree_graph, w, seed=seed)
|
| 119 |
+
t(nx.random_degree_sequence_graph, sequence, seed=seed)
|
| 120 |
+
joint_degrees = {
|
| 121 |
+
1: {4: 1},
|
| 122 |
+
2: {2: 2, 3: 2, 4: 2},
|
| 123 |
+
3: {2: 2, 4: 1},
|
| 124 |
+
4: {1: 1, 2: 2, 3: 1},
|
| 125 |
+
}
|
| 126 |
+
t(nx.joint_degree_graph, joint_degrees, seed=seed)
|
| 127 |
+
joint_degree_sequence = [
|
| 128 |
+
(1, 0),
|
| 129 |
+
(1, 0),
|
| 130 |
+
(1, 0),
|
| 131 |
+
(2, 0),
|
| 132 |
+
(1, 0),
|
| 133 |
+
(2, 1),
|
| 134 |
+
(0, 1),
|
| 135 |
+
(0, 1),
|
| 136 |
+
]
|
| 137 |
+
t(nx.random_clustered_graph, joint_degree_sequence, seed=seed)
|
| 138 |
+
constructor = [(3, 3, 0.5), (10, 10, 0.7)]
|
| 139 |
+
t(nx.random_shell_graph, constructor, seed=seed)
|
| 140 |
+
t(nx.random_triad, G.to_directed(), seed=seed)
|
| 141 |
+
mapping = {1: 0.4, 2: 0.3, 3: 0.3}
|
| 142 |
+
t(nx.utils.random_weighted_sample, mapping, k, seed=seed)
|
| 143 |
+
t(nx.utils.weighted_choice, mapping, seed=seed)
|
| 144 |
+
t(nx.algorithms.bipartite.configuration_model, aseq, bseq, seed=seed)
|
| 145 |
+
t(nx.algorithms.bipartite.preferential_attachment_graph, aseq, p, seed=seed)
|
| 146 |
+
|
| 147 |
+
def kernel_integral(u, w, z):
|
| 148 |
+
return z - w
|
| 149 |
+
|
| 150 |
+
t(nx.random_kernel_graph, n, kernel_integral, seed=seed)
|
| 151 |
+
|
| 152 |
+
sizes = [75, 75, 300]
|
| 153 |
+
probs = [[0.25, 0.05, 0.02], [0.05, 0.35, 0.07], [0.02, 0.07, 0.40]]
|
| 154 |
+
t(nx.stochastic_block_model, sizes, probs, seed=seed)
|
| 155 |
+
t(nx.random_partition_graph, sizes, p_in, p_out, seed=seed)
|
| 156 |
+
|
| 157 |
+
# print("starting generator functions")
|
| 158 |
+
t(threshold.random_threshold_sequence, n, p, seed=seed)
|
| 159 |
+
t(nx.tournament.random_tournament, n, seed=seed)
|
| 160 |
+
t(nx.relaxed_caveman_graph, l, k, p, seed=seed)
|
| 161 |
+
t(nx.planted_partition_graph, l, k, p_in, p_out, seed=seed)
|
| 162 |
+
t(nx.gaussian_random_partition_graph, n, s, v, p_in, p_out, seed=seed)
|
| 163 |
+
t(nx.gn_graph, n, seed=seed)
|
| 164 |
+
t(nx.gnr_graph, n, p, seed=seed)
|
| 165 |
+
t(nx.gnc_graph, n, seed=seed)
|
| 166 |
+
t(nx.scale_free_graph, n, seed=seed)
|
| 167 |
+
t(nx.directed.random_uniform_k_out_graph, n, k, seed=seed)
|
| 168 |
+
t(nx.random_k_out_graph, n, k, alpha, seed=seed)
|
| 169 |
+
N = 1000
|
| 170 |
+
t(nx.partial_duplication_graph, N, n, p, q, seed=seed)
|
| 171 |
+
t(nx.duplication_divergence_graph, n, p, seed=seed)
|
| 172 |
+
t(nx.random_geometric_graph, n, radius, seed=seed)
|
| 173 |
+
t(nx.soft_random_geometric_graph, n, radius, seed=seed)
|
| 174 |
+
t(nx.geographical_threshold_graph, n, theta, seed=seed)
|
| 175 |
+
t(nx.waxman_graph, n, seed=seed)
|
| 176 |
+
t(nx.navigable_small_world_graph, n, seed=seed)
|
| 177 |
+
t(nx.thresholded_random_geometric_graph, n, radius, theta, seed=seed)
|
| 178 |
+
t(nx.uniform_random_intersection_graph, n, m, p, seed=seed)
|
| 179 |
+
t(nx.k_random_intersection_graph, n, m, k, seed=seed)
|
| 180 |
+
|
| 181 |
+
t(nx.general_random_intersection_graph, n, 2, [0.1, 0.5], seed=seed)
|
| 182 |
+
t(nx.fast_gnp_random_graph, n, p, seed=seed)
|
| 183 |
+
t(nx.gnp_random_graph, n, p, seed=seed)
|
| 184 |
+
t(nx.dense_gnm_random_graph, n, m, seed=seed)
|
| 185 |
+
t(nx.gnm_random_graph, n, m, seed=seed)
|
| 186 |
+
t(nx.newman_watts_strogatz_graph, n, k, p, seed=seed)
|
| 187 |
+
t(nx.watts_strogatz_graph, n, k, p, seed=seed)
|
| 188 |
+
t(nx.connected_watts_strogatz_graph, n, k, p, seed=seed)
|
| 189 |
+
t(nx.random_regular_graph, 3, n, seed=seed)
|
| 190 |
+
t(nx.barabasi_albert_graph, n, m, seed=seed)
|
| 191 |
+
t(nx.extended_barabasi_albert_graph, n, m, p, q, seed=seed)
|
| 192 |
+
t(nx.powerlaw_cluster_graph, n, m, p, seed=seed)
|
| 193 |
+
t(nx.random_lobster, n, p1, p2, seed=seed)
|
| 194 |
+
t(nx.random_powerlaw_tree, n, seed=seed, tries=5000)
|
| 195 |
+
t(nx.random_powerlaw_tree_sequence, 10, seed=seed, tries=5000)
|
| 196 |
+
t(nx.random_labeled_tree, n, seed=seed)
|
| 197 |
+
t(nx.utils.powerlaw_sequence, n, seed=seed)
|
| 198 |
+
t(nx.utils.zipf_rv, 2.3, seed=seed)
|
| 199 |
+
cdist = [0.2, 0.4, 0.5, 0.7, 0.9, 1.0]
|
| 200 |
+
t(nx.utils.discrete_sequence, n, cdistribution=cdist, seed=seed)
|
| 201 |
+
t(nx.algorithms.bipartite.random_graph, n, m, p, seed=seed)
|
| 202 |
+
t(nx.algorithms.bipartite.gnmk_random_graph, n, m, k, seed=seed)
|
| 203 |
+
LFR = nx.generators.LFR_benchmark_graph
|
| 204 |
+
t(
|
| 205 |
+
LFR,
|
| 206 |
+
25,
|
| 207 |
+
3,
|
| 208 |
+
1.5,
|
| 209 |
+
0.1,
|
| 210 |
+
average_degree=3,
|
| 211 |
+
min_community=10,
|
| 212 |
+
seed=seed,
|
| 213 |
+
max_community=20,
|
| 214 |
+
)
|
| 215 |
+
t(nx.random_internet_as_graph, n, seed=seed)
|
| 216 |
+
# print("done")
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
# choose to test an integer seed, or whether a single RNG can be everywhere
|
| 220 |
+
# np_rng = np.random.RandomState(14)
|
| 221 |
+
# seed = np_rng
|
| 222 |
+
# seed = 14
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
@pytest.mark.slow
|
| 226 |
+
# print("NetworkX Version:", nx.__version__)
|
| 227 |
+
def test_rng_interface():
|
| 228 |
+
global progress
|
| 229 |
+
|
| 230 |
+
# try different kinds of seeds
|
| 231 |
+
for seed in [14, np.random.RandomState(14)]:
|
| 232 |
+
np.random.seed(42)
|
| 233 |
+
random.seed(42)
|
| 234 |
+
run_all_random_functions(seed)
|
| 235 |
+
progress = 0
|
| 236 |
+
|
| 237 |
+
# check that both global RNGs are unaffected
|
| 238 |
+
after_np_rv = np.random.rand()
|
| 239 |
+
# if np_rv != after_np_rv:
|
| 240 |
+
# print(np_rv, after_np_rv, "don't match np!")
|
| 241 |
+
assert np_rv == after_np_rv
|
| 242 |
+
after_py_rv = random.random()
|
| 243 |
+
# if py_rv != after_py_rv:
|
| 244 |
+
# print(py_rv, after_py_rv, "don't match py!")
|
| 245 |
+
assert py_rv == after_py_rv
|
| 246 |
+
|
| 247 |
+
|
| 248 |
+
# print("\nDone testing seed:", seed)
|
| 249 |
+
|
| 250 |
+
# test_rng_interface()
|
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/test_convert.py
ADDED
|
@@ -0,0 +1,321 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.convert import (
|
| 5 |
+
from_dict_of_dicts,
|
| 6 |
+
from_dict_of_lists,
|
| 7 |
+
to_dict_of_dicts,
|
| 8 |
+
to_dict_of_lists,
|
| 9 |
+
to_networkx_graph,
|
| 10 |
+
)
|
| 11 |
+
from networkx.generators.classic import barbell_graph, cycle_graph
|
| 12 |
+
from networkx.utils import edges_equal, graphs_equal, nodes_equal
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class TestConvert:
|
| 16 |
+
def edgelists_equal(self, e1, e2):
|
| 17 |
+
return sorted(sorted(e) for e in e1) == sorted(sorted(e) for e in e2)
|
| 18 |
+
|
| 19 |
+
def test_simple_graphs(self):
|
| 20 |
+
for dest, source in [
|
| 21 |
+
(to_dict_of_dicts, from_dict_of_dicts),
|
| 22 |
+
(to_dict_of_lists, from_dict_of_lists),
|
| 23 |
+
]:
|
| 24 |
+
G = barbell_graph(10, 3)
|
| 25 |
+
G.graph = {}
|
| 26 |
+
dod = dest(G)
|
| 27 |
+
|
| 28 |
+
# Dict of [dicts, lists]
|
| 29 |
+
GG = source(dod)
|
| 30 |
+
assert graphs_equal(G, GG)
|
| 31 |
+
GW = to_networkx_graph(dod)
|
| 32 |
+
assert graphs_equal(G, GW)
|
| 33 |
+
GI = nx.Graph(dod)
|
| 34 |
+
assert graphs_equal(G, GI)
|
| 35 |
+
|
| 36 |
+
# With nodelist keyword
|
| 37 |
+
P4 = nx.path_graph(4)
|
| 38 |
+
P3 = nx.path_graph(3)
|
| 39 |
+
P4.graph = {}
|
| 40 |
+
P3.graph = {}
|
| 41 |
+
dod = dest(P4, nodelist=[0, 1, 2])
|
| 42 |
+
Gdod = nx.Graph(dod)
|
| 43 |
+
assert graphs_equal(Gdod, P3)
|
| 44 |
+
|
| 45 |
+
def test_exceptions(self):
|
| 46 |
+
# NX graph
|
| 47 |
+
class G:
|
| 48 |
+
adj = None
|
| 49 |
+
|
| 50 |
+
pytest.raises(nx.NetworkXError, to_networkx_graph, G)
|
| 51 |
+
|
| 52 |
+
# pygraphviz agraph
|
| 53 |
+
class G:
|
| 54 |
+
is_strict = None
|
| 55 |
+
|
| 56 |
+
pytest.raises(nx.NetworkXError, to_networkx_graph, G)
|
| 57 |
+
|
| 58 |
+
# Dict of [dicts, lists]
|
| 59 |
+
G = {"a": 0}
|
| 60 |
+
pytest.raises(TypeError, to_networkx_graph, G)
|
| 61 |
+
|
| 62 |
+
# list or generator of edges
|
| 63 |
+
class G:
|
| 64 |
+
next = None
|
| 65 |
+
|
| 66 |
+
pytest.raises(nx.NetworkXError, to_networkx_graph, G)
|
| 67 |
+
|
| 68 |
+
# no match
|
| 69 |
+
pytest.raises(nx.NetworkXError, to_networkx_graph, "a")
|
| 70 |
+
|
| 71 |
+
def test_digraphs(self):
|
| 72 |
+
for dest, source in [
|
| 73 |
+
(to_dict_of_dicts, from_dict_of_dicts),
|
| 74 |
+
(to_dict_of_lists, from_dict_of_lists),
|
| 75 |
+
]:
|
| 76 |
+
G = cycle_graph(10)
|
| 77 |
+
|
| 78 |
+
# Dict of [dicts, lists]
|
| 79 |
+
dod = dest(G)
|
| 80 |
+
GG = source(dod)
|
| 81 |
+
assert nodes_equal(sorted(G.nodes()), sorted(GG.nodes()))
|
| 82 |
+
assert edges_equal(sorted(G.edges()), sorted(GG.edges()))
|
| 83 |
+
GW = to_networkx_graph(dod)
|
| 84 |
+
assert nodes_equal(sorted(G.nodes()), sorted(GW.nodes()))
|
| 85 |
+
assert edges_equal(sorted(G.edges()), sorted(GW.edges()))
|
| 86 |
+
GI = nx.Graph(dod)
|
| 87 |
+
assert nodes_equal(sorted(G.nodes()), sorted(GI.nodes()))
|
| 88 |
+
assert edges_equal(sorted(G.edges()), sorted(GI.edges()))
|
| 89 |
+
|
| 90 |
+
G = cycle_graph(10, create_using=nx.DiGraph)
|
| 91 |
+
dod = dest(G)
|
| 92 |
+
GG = source(dod, create_using=nx.DiGraph)
|
| 93 |
+
assert sorted(G.nodes()) == sorted(GG.nodes())
|
| 94 |
+
assert sorted(G.edges()) == sorted(GG.edges())
|
| 95 |
+
GW = to_networkx_graph(dod, create_using=nx.DiGraph)
|
| 96 |
+
assert sorted(G.nodes()) == sorted(GW.nodes())
|
| 97 |
+
assert sorted(G.edges()) == sorted(GW.edges())
|
| 98 |
+
GI = nx.DiGraph(dod)
|
| 99 |
+
assert sorted(G.nodes()) == sorted(GI.nodes())
|
| 100 |
+
assert sorted(G.edges()) == sorted(GI.edges())
|
| 101 |
+
|
| 102 |
+
def test_graph(self):
|
| 103 |
+
g = nx.cycle_graph(10)
|
| 104 |
+
G = nx.Graph()
|
| 105 |
+
G.add_nodes_from(g)
|
| 106 |
+
G.add_weighted_edges_from((u, v, u) for u, v in g.edges())
|
| 107 |
+
|
| 108 |
+
# Dict of dicts
|
| 109 |
+
dod = to_dict_of_dicts(G)
|
| 110 |
+
GG = from_dict_of_dicts(dod, create_using=nx.Graph)
|
| 111 |
+
assert nodes_equal(sorted(G.nodes()), sorted(GG.nodes()))
|
| 112 |
+
assert edges_equal(sorted(G.edges()), sorted(GG.edges()))
|
| 113 |
+
GW = to_networkx_graph(dod, create_using=nx.Graph)
|
| 114 |
+
assert nodes_equal(sorted(G.nodes()), sorted(GW.nodes()))
|
| 115 |
+
assert edges_equal(sorted(G.edges()), sorted(GW.edges()))
|
| 116 |
+
GI = nx.Graph(dod)
|
| 117 |
+
assert sorted(G.nodes()) == sorted(GI.nodes())
|
| 118 |
+
assert sorted(G.edges()) == sorted(GI.edges())
|
| 119 |
+
|
| 120 |
+
# Dict of lists
|
| 121 |
+
dol = to_dict_of_lists(G)
|
| 122 |
+
GG = from_dict_of_lists(dol, create_using=nx.Graph)
|
| 123 |
+
# dict of lists throws away edge data so set it to none
|
| 124 |
+
enone = [(u, v, {}) for (u, v, d) in G.edges(data=True)]
|
| 125 |
+
assert nodes_equal(sorted(G.nodes()), sorted(GG.nodes()))
|
| 126 |
+
assert edges_equal(enone, sorted(GG.edges(data=True)))
|
| 127 |
+
GW = to_networkx_graph(dol, create_using=nx.Graph)
|
| 128 |
+
assert nodes_equal(sorted(G.nodes()), sorted(GW.nodes()))
|
| 129 |
+
assert edges_equal(enone, sorted(GW.edges(data=True)))
|
| 130 |
+
GI = nx.Graph(dol)
|
| 131 |
+
assert nodes_equal(sorted(G.nodes()), sorted(GI.nodes()))
|
| 132 |
+
assert edges_equal(enone, sorted(GI.edges(data=True)))
|
| 133 |
+
|
| 134 |
+
def test_with_multiedges_self_loops(self):
|
| 135 |
+
G = cycle_graph(10)
|
| 136 |
+
XG = nx.Graph()
|
| 137 |
+
XG.add_nodes_from(G)
|
| 138 |
+
XG.add_weighted_edges_from((u, v, u) for u, v in G.edges())
|
| 139 |
+
XGM = nx.MultiGraph()
|
| 140 |
+
XGM.add_nodes_from(G)
|
| 141 |
+
XGM.add_weighted_edges_from((u, v, u) for u, v in G.edges())
|
| 142 |
+
XGM.add_edge(0, 1, weight=2) # multiedge
|
| 143 |
+
XGS = nx.Graph()
|
| 144 |
+
XGS.add_nodes_from(G)
|
| 145 |
+
XGS.add_weighted_edges_from((u, v, u) for u, v in G.edges())
|
| 146 |
+
XGS.add_edge(0, 0, weight=100) # self loop
|
| 147 |
+
|
| 148 |
+
# Dict of dicts
|
| 149 |
+
# with self loops, OK
|
| 150 |
+
dod = to_dict_of_dicts(XGS)
|
| 151 |
+
GG = from_dict_of_dicts(dod, create_using=nx.Graph)
|
| 152 |
+
assert nodes_equal(XGS.nodes(), GG.nodes())
|
| 153 |
+
assert edges_equal(XGS.edges(), GG.edges())
|
| 154 |
+
GW = to_networkx_graph(dod, create_using=nx.Graph)
|
| 155 |
+
assert nodes_equal(XGS.nodes(), GW.nodes())
|
| 156 |
+
assert edges_equal(XGS.edges(), GW.edges())
|
| 157 |
+
GI = nx.Graph(dod)
|
| 158 |
+
assert nodes_equal(XGS.nodes(), GI.nodes())
|
| 159 |
+
assert edges_equal(XGS.edges(), GI.edges())
|
| 160 |
+
|
| 161 |
+
# Dict of lists
|
| 162 |
+
# with self loops, OK
|
| 163 |
+
dol = to_dict_of_lists(XGS)
|
| 164 |
+
GG = from_dict_of_lists(dol, create_using=nx.Graph)
|
| 165 |
+
# dict of lists throws away edge data so set it to none
|
| 166 |
+
enone = [(u, v, {}) for (u, v, d) in XGS.edges(data=True)]
|
| 167 |
+
assert nodes_equal(sorted(XGS.nodes()), sorted(GG.nodes()))
|
| 168 |
+
assert edges_equal(enone, sorted(GG.edges(data=True)))
|
| 169 |
+
GW = to_networkx_graph(dol, create_using=nx.Graph)
|
| 170 |
+
assert nodes_equal(sorted(XGS.nodes()), sorted(GW.nodes()))
|
| 171 |
+
assert edges_equal(enone, sorted(GW.edges(data=True)))
|
| 172 |
+
GI = nx.Graph(dol)
|
| 173 |
+
assert nodes_equal(sorted(XGS.nodes()), sorted(GI.nodes()))
|
| 174 |
+
assert edges_equal(enone, sorted(GI.edges(data=True)))
|
| 175 |
+
|
| 176 |
+
# Dict of dicts
|
| 177 |
+
# with multiedges, OK
|
| 178 |
+
dod = to_dict_of_dicts(XGM)
|
| 179 |
+
GG = from_dict_of_dicts(dod, create_using=nx.MultiGraph, multigraph_input=True)
|
| 180 |
+
assert nodes_equal(sorted(XGM.nodes()), sorted(GG.nodes()))
|
| 181 |
+
assert edges_equal(sorted(XGM.edges()), sorted(GG.edges()))
|
| 182 |
+
GW = to_networkx_graph(dod, create_using=nx.MultiGraph, multigraph_input=True)
|
| 183 |
+
assert nodes_equal(sorted(XGM.nodes()), sorted(GW.nodes()))
|
| 184 |
+
assert edges_equal(sorted(XGM.edges()), sorted(GW.edges()))
|
| 185 |
+
GI = nx.MultiGraph(dod)
|
| 186 |
+
assert nodes_equal(sorted(XGM.nodes()), sorted(GI.nodes()))
|
| 187 |
+
assert sorted(XGM.edges()) == sorted(GI.edges())
|
| 188 |
+
GE = from_dict_of_dicts(dod, create_using=nx.MultiGraph, multigraph_input=False)
|
| 189 |
+
assert nodes_equal(sorted(XGM.nodes()), sorted(GE.nodes()))
|
| 190 |
+
assert sorted(XGM.edges()) != sorted(GE.edges())
|
| 191 |
+
GI = nx.MultiGraph(XGM)
|
| 192 |
+
assert nodes_equal(sorted(XGM.nodes()), sorted(GI.nodes()))
|
| 193 |
+
assert edges_equal(sorted(XGM.edges()), sorted(GI.edges()))
|
| 194 |
+
GM = nx.MultiGraph(G)
|
| 195 |
+
assert nodes_equal(sorted(GM.nodes()), sorted(G.nodes()))
|
| 196 |
+
assert edges_equal(sorted(GM.edges()), sorted(G.edges()))
|
| 197 |
+
|
| 198 |
+
# Dict of lists
|
| 199 |
+
# with multiedges, OK, but better write as DiGraph else you'll
|
| 200 |
+
# get double edges
|
| 201 |
+
dol = to_dict_of_lists(G)
|
| 202 |
+
GG = from_dict_of_lists(dol, create_using=nx.MultiGraph)
|
| 203 |
+
assert nodes_equal(sorted(G.nodes()), sorted(GG.nodes()))
|
| 204 |
+
assert edges_equal(sorted(G.edges()), sorted(GG.edges()))
|
| 205 |
+
GW = to_networkx_graph(dol, create_using=nx.MultiGraph)
|
| 206 |
+
assert nodes_equal(sorted(G.nodes()), sorted(GW.nodes()))
|
| 207 |
+
assert edges_equal(sorted(G.edges()), sorted(GW.edges()))
|
| 208 |
+
GI = nx.MultiGraph(dol)
|
| 209 |
+
assert nodes_equal(sorted(G.nodes()), sorted(GI.nodes()))
|
| 210 |
+
assert edges_equal(sorted(G.edges()), sorted(GI.edges()))
|
| 211 |
+
|
| 212 |
+
def test_edgelists(self):
|
| 213 |
+
P = nx.path_graph(4)
|
| 214 |
+
e = [(0, 1), (1, 2), (2, 3)]
|
| 215 |
+
G = nx.Graph(e)
|
| 216 |
+
assert nodes_equal(sorted(G.nodes()), sorted(P.nodes()))
|
| 217 |
+
assert edges_equal(sorted(G.edges()), sorted(P.edges()))
|
| 218 |
+
assert edges_equal(sorted(G.edges(data=True)), sorted(P.edges(data=True)))
|
| 219 |
+
|
| 220 |
+
e = [(0, 1, {}), (1, 2, {}), (2, 3, {})]
|
| 221 |
+
G = nx.Graph(e)
|
| 222 |
+
assert nodes_equal(sorted(G.nodes()), sorted(P.nodes()))
|
| 223 |
+
assert edges_equal(sorted(G.edges()), sorted(P.edges()))
|
| 224 |
+
assert edges_equal(sorted(G.edges(data=True)), sorted(P.edges(data=True)))
|
| 225 |
+
|
| 226 |
+
e = ((n, n + 1) for n in range(3))
|
| 227 |
+
G = nx.Graph(e)
|
| 228 |
+
assert nodes_equal(sorted(G.nodes()), sorted(P.nodes()))
|
| 229 |
+
assert edges_equal(sorted(G.edges()), sorted(P.edges()))
|
| 230 |
+
assert edges_equal(sorted(G.edges(data=True)), sorted(P.edges(data=True)))
|
| 231 |
+
|
| 232 |
+
def test_directed_to_undirected(self):
|
| 233 |
+
edges1 = [(0, 1), (1, 2), (2, 0)]
|
| 234 |
+
edges2 = [(0, 1), (1, 2), (0, 2)]
|
| 235 |
+
assert self.edgelists_equal(nx.Graph(nx.DiGraph(edges1)).edges(), edges1)
|
| 236 |
+
assert self.edgelists_equal(nx.Graph(nx.DiGraph(edges2)).edges(), edges1)
|
| 237 |
+
assert self.edgelists_equal(nx.MultiGraph(nx.DiGraph(edges1)).edges(), edges1)
|
| 238 |
+
assert self.edgelists_equal(nx.MultiGraph(nx.DiGraph(edges2)).edges(), edges1)
|
| 239 |
+
|
| 240 |
+
assert self.edgelists_equal(
|
| 241 |
+
nx.MultiGraph(nx.MultiDiGraph(edges1)).edges(), edges1
|
| 242 |
+
)
|
| 243 |
+
assert self.edgelists_equal(
|
| 244 |
+
nx.MultiGraph(nx.MultiDiGraph(edges2)).edges(), edges1
|
| 245 |
+
)
|
| 246 |
+
|
| 247 |
+
assert self.edgelists_equal(nx.Graph(nx.MultiDiGraph(edges1)).edges(), edges1)
|
| 248 |
+
assert self.edgelists_equal(nx.Graph(nx.MultiDiGraph(edges2)).edges(), edges1)
|
| 249 |
+
|
| 250 |
+
def test_attribute_dict_integrity(self):
    # we must not replace dict-like graph data structures with dicts
    source = nx.Graph()
    source.add_nodes_from("abc")
    # Conversion via to_networkx_graph and via the DiGraph constructor must
    # both preserve the node ordering of the source graph.
    converted = to_networkx_graph(source, create_using=nx.Graph)
    assert list(converted.nodes) == list(source.nodes)
    converted = nx.DiGraph(source)
    assert list(converted.nodes) == list(source.nodes)
def test_to_edgelist(self):
    """to_edgelist with an explicit nodelist round-trips a self-loop."""
    graph = nx.Graph([(1, 1)])
    edge_list = nx.to_edgelist(graph, nodelist=list(graph))
    assert edges_equal(graph.edges(data=True), edge_list)
def test_custom_node_attr_dict_safekeeping(self):
    """A subclass's node_attr_dict_factory must be honored on conversion."""

    class custom_dict(dict):
        pass

    class Custom(nx.Graph):
        node_attr_dict_factory = custom_dict

    source = nx.Graph()
    source.add_node(1, weight=1)

    converted = Custom(source)
    # The source keeps plain dicts; the converted graph uses the factory.
    assert isinstance(source._node[1], dict)
    assert isinstance(converted._node[1], custom_dict)

    # this raise exception
    # h._node.update((n, dd.copy()) for n, dd in g.nodes.items())
    # assert isinstance(h._node[1], custom_dict)
@pytest.mark.parametrize(
    "edgelist",
    (
        # Graph with no edge data
        [(0, 1), (1, 2)],
        # Graph with edge data
        [(0, 1, {"weight": 1.0}), (1, 2, {"weight": 2.0})],
    ),
)
def test_to_dict_of_dicts_with_edgedata_param(edgelist):
    """A non-None edge_data becomes the innermost dict value.

    Any edge data already present on the graph is overwritten.
    """
    G = nx.Graph(edgelist)
    wanted = {0: {1: 10}, 1: {0: 10, 2: 10}, 2: {1: 10}}
    assert nx.to_dict_of_dicts(G, edge_data=10) == wanted
def test_to_dict_of_dicts_with_edgedata_and_nodelist():
    """nodelist restricts the output dict; edge_data fills the inner values."""
    P5 = nx.path_graph(5)
    wanted = {2: {3: 10}, 3: {2: 10, 4: 10}, 4: {3: 10}}
    assert nx.to_dict_of_dicts(P5, nodelist=[2, 3, 4], edge_data=10) == wanted
def test_to_dict_of_dicts_with_edgedata_multigraph():
    """Multi edge data overwritten when edge_data != None"""
    G = nx.MultiGraph()
    # Two parallel edges; both collapse to a single edge_data value.
    for key in ("a", "b"):
        G.add_edge(0, 1, key=key)
    assert nx.to_dict_of_dicts(G, edge_data=10) == {0: {1: 10}, 1: {0: 10}}
def test_to_networkx_graph_non_edgelist():
    """A flat list of scalars is not an edge list and must be rejected."""
    with pytest.raises(nx.NetworkXError, match="Input is not a valid edge list"):
        nx.to_networkx_graph([1, 2, 3])
|
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/test_convert_numpy.py
ADDED
|
@@ -0,0 +1,532 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import itertools
|
| 2 |
+
|
| 3 |
+
import pytest
|
| 4 |
+
|
| 5 |
+
np = pytest.importorskip("numpy")
|
| 6 |
+
npt = pytest.importorskip("numpy.testing")
|
| 7 |
+
|
| 8 |
+
import networkx as nx
|
| 9 |
+
from networkx.generators.classic import barbell_graph, cycle_graph, path_graph
|
| 10 |
+
from networkx.utils import graphs_equal
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class TestConvertNumpyArray:
    """Round-trip and option tests for to_numpy_array / from_numpy_array."""

    def setup_method(self):
        # Fixture graphs: unweighted undirected, unweighted directed, and
        # weighted variants of a 4-cycle.
        self.G1 = barbell_graph(10, 3)
        self.G2 = cycle_graph(10, create_using=nx.DiGraph)
        self.G3 = self.create_weighted(nx.Graph())
        self.G4 = self.create_weighted(nx.DiGraph())

    def create_weighted(self, G):
        """Populate G with a 4-cycle whose edge weights are 10 + source node."""
        cycle = cycle_graph(4)
        G.add_nodes_from(cycle)
        G.add_weighted_edges_from((u, v, 10 + u) for u, v in cycle.edges())
        return G

    def assert_equal(self, G1, G2):
        """Node sets and edge sets of the two graphs must coincide."""
        assert sorted(G1.nodes()) == sorted(G2.nodes())
        assert sorted(G1.edges()) == sorted(G2.edges())

    def identity_conversion(self, G, A, create_using):
        """Check three equivalent routes back from an array representation."""
        assert A.sum() > 0
        rebuilt = nx.from_numpy_array(A, create_using=create_using)
        self.assert_equal(G, rebuilt)
        via_dispatch = nx.to_networkx_graph(A, create_using=create_using)
        self.assert_equal(G, via_dispatch)
        via_ctor = nx.empty_graph(0, create_using).__class__(A)
        self.assert_equal(G, via_ctor)

    def test_shape(self):
        "Conversion from non-square array."
        A = np.array([[1, 2, 3], [4, 5, 6]])
        pytest.raises(nx.NetworkXError, nx.from_numpy_array, A)

    def test_identity_graph_array(self):
        "Conversion from graph to array to graph."
        self.identity_conversion(self.G1, nx.to_numpy_array(self.G1), nx.Graph())

    def test_identity_digraph_array(self):
        """Conversion from digraph to array to digraph."""
        self.identity_conversion(self.G2, nx.to_numpy_array(self.G2), nx.DiGraph())

    def test_identity_weighted_graph_array(self):
        """Conversion from weighted graph to array to weighted graph."""
        self.identity_conversion(self.G3, nx.to_numpy_array(self.G3), nx.Graph())

    def test_identity_weighted_digraph_array(self):
        """Conversion from weighted digraph to array to weighted digraph."""
        self.identity_conversion(self.G4, nx.to_numpy_array(self.G4), nx.DiGraph())

    def test_nodelist(self):
        """Conversion from graph to array to graph with nodelist."""
        P4 = path_graph(4)
        P3 = path_graph(3)
        nodelist = list(P3)
        A = nx.to_numpy_array(P4, nodelist=nodelist)
        self.assert_equal(nx.Graph(A), P3)

        # Make nodelist ambiguous by containing duplicates.
        nodelist += [nodelist[0]]
        pytest.raises(nx.NetworkXError, nx.to_numpy_array, P3, nodelist=nodelist)

        # Make nodelist invalid by including nonexistent nodes
        nodelist = [-1, 0, 1]
        with pytest.raises(
            nx.NetworkXError,
            match=f"Nodes {nodelist - P3.nodes} in nodelist is not in G",
        ):
            nx.to_numpy_array(P3, nodelist=nodelist)

    def test_weight_keyword(self):
        """The weight keyword selects which edge attribute fills the array."""
        WP4 = nx.Graph()
        WP4.add_edges_from(
            (n, n + 1, {"weight": 0.5, "other": 0.3}) for n in range(3)
        )
        unweighted = nx.to_numpy_array(path_graph(4))
        np.testing.assert_equal(unweighted, nx.to_numpy_array(WP4, weight=None))
        np.testing.assert_equal(0.5 * unweighted, nx.to_numpy_array(WP4))
        np.testing.assert_equal(
            0.3 * unweighted, nx.to_numpy_array(WP4, weight="other")
        )

    def test_from_numpy_array_type(self):
        # The array dtype determines the Python type of the weight attribute.
        A = np.array([[1]])
        assert type(nx.from_numpy_array(A)[0][0]["weight"]) == int
        for dtype, weight_type in (
            (float, float),
            (str, str),
            (bool, bool),
            (complex, complex),
        ):
            G = nx.from_numpy_array(np.array([[1]]).astype(dtype))
            assert type(G[0][0]["weight"]) == weight_type

        # object dtype has no sensible weight conversion.
        A = np.array([[1]]).astype(object)
        pytest.raises(TypeError, nx.from_numpy_array, A)

        # Only 2-dimensional arrays are adjacency matrices.
        A = np.array([[[1, 1, 1], [1, 1, 1]], [[1, 1, 1], [1, 1, 1]]])
        with pytest.raises(
            nx.NetworkXError, match=f"Input array must be 2D, not {A.ndim}"
        ):
            g = nx.from_numpy_array(A)

    def test_from_numpy_array_dtype(self):
        """A structured dtype populates one edge attribute per named field."""
        dt = [("weight", float), ("cost", int)]
        A = np.array([[(1.0, 2)]], dtype=dt)
        edge = nx.from_numpy_array(A)[0][0]
        assert type(edge["weight"]) == float
        assert type(edge["cost"]) == int
        assert edge["cost"] == 2
        assert edge["weight"] == 1.0

    def test_from_numpy_array_parallel_edges(self):
        """Tests that the :func:`networkx.from_numpy_array` function
        interprets integer weights as the number of parallel edges when
        creating a multigraph.

        """
        A = np.array([[1, 1], [1, 2]])
        # For a simple graph, each integer entry is a single edge's weight,
        # regardless of the parallel_edges flag.
        expected = nx.DiGraph()
        expected.add_weighted_edges_from(
            [(u, v, 1) for (u, v) in [(0, 0), (0, 1), (1, 0)]]
        )
        expected.add_edge(1, 1, weight=2)
        for flag in (True, False):
            actual = nx.from_numpy_array(
                A, parallel_edges=flag, create_using=nx.DiGraph
            )
            assert graphs_equal(actual, expected)
        # For a multigraph with parallel_edges=True, each integer entry is a
        # count of parallel edges.
        edges = [(0, 0), (0, 1), (1, 0), (1, 1), (1, 1)]
        expected = nx.MultiDiGraph()
        expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges])
        actual = nx.from_numpy_array(
            A, parallel_edges=True, create_using=nx.MultiDiGraph
        )
        assert graphs_equal(actual, expected)
        # With parallel_edges=False the entries revert to being weights; the
        # sole self-loop (edge 0) on vertex 1 should have weight 2.
        expected = nx.MultiDiGraph()
        expected.add_edges_from(set(edges), weight=1)
        expected[1][1][0]["weight"] = 2
        actual = nx.from_numpy_array(
            A, parallel_edges=False, create_using=nx.MultiDiGraph
        )
        assert graphs_equal(actual, expected)

    @pytest.mark.parametrize(
        "dt",
        (
            None,  # default
            int,  # integer dtype
            np.dtype(
                [("weight", "f8"), ("color", "i1")]
            ),  # Structured dtype with named fields
        ),
    )
    def test_from_numpy_array_no_edge_attr(self, dt):
        """edge_attr=None suppresses all edge attributes."""
        A = np.array([[0, 1], [1, 0]], dtype=dt)
        G = nx.from_numpy_array(A, edge_attr=None)
        assert "weight" not in G.edges[0, 1]
        assert len(G.edges[0, 1]) == 0

    def test_from_numpy_array_multiedge_no_edge_attr(self):
        """edge_attr=None also applies to every parallel edge."""
        A = np.array([[0, 2], [2, 0]])
        G = nx.from_numpy_array(A, create_using=nx.MultiDiGraph, edge_attr=None)
        assert all("weight" not in data for data in G[0][1].values())
        assert len(G[0][1][0]) == 0

    def test_from_numpy_array_custom_edge_attr(self):
        """A custom edge_attr name replaces the default "weight"."""
        A = np.array([[0, 2], [3, 0]])
        G = nx.from_numpy_array(A, edge_attr="cost")
        assert "weight" not in G.edges[0, 1]
        assert G.edges[0, 1]["cost"] == 3

    def test_symmetric(self):
        """Tests that a symmetric array has edges added only once to an
        undirected multigraph when using :func:`networkx.from_numpy_array`.

        """
        G = nx.from_numpy_array(
            np.array([[0, 1], [1, 0]]), create_using=nx.MultiGraph
        )
        expected = nx.MultiGraph()
        expected.add_edge(0, 1, weight=1)
        assert graphs_equal(G, expected)

    def test_dtype_int_graph(self):
        """Test that setting dtype int actually gives an integer array.

        For more information, see GitHub pull request #1363.

        """
        A = nx.to_numpy_array(nx.complete_graph(3), dtype=int)
        assert A.dtype == int

    def test_dtype_int_multigraph(self):
        """Test that setting dtype int actually gives an integer array.

        For more information, see GitHub pull request #1363.

        """
        A = nx.to_numpy_array(nx.MultiGraph(nx.complete_graph(3)), dtype=int)
        assert A.dtype == int
@pytest.fixture
def multigraph_test_graph():
    """A two-node multigraph with parallel edges weighted 7 and 70."""
    G = nx.MultiGraph()
    G.add_edges_from([(1, 2, {"weight": 7}), (1, 2, {"weight": 70})])
    return G
@pytest.mark.parametrize(("operator", "expected"), ((sum, 77), (min, 7), (max, 70)))
def test_numpy_multigraph(multigraph_test_graph, operator, expected):
    """multigraph_weight reduces parallel-edge weights with the operator."""
    result = nx.to_numpy_array(multigraph_test_graph, multigraph_weight=operator)
    assert result[1, 0] == expected
def test_to_numpy_array_multigraph_nodelist(multigraph_test_graph):
    """nodelist selects a submatrix; parallel weights are summed by default."""
    G = multigraph_test_graph
    G.add_edge(0, 1, weight=3)  # an edge outside the requested nodelist
    submatrix = nx.to_numpy_array(G, nodelist=[1, 2])
    assert submatrix.shape == (2, 2)
    assert submatrix[1, 0] == 77
@pytest.mark.parametrize(
    "G, expected",
    [
        (nx.Graph(), np.array([[0, 1 + 2j], [1 + 2j, 0]], dtype=complex)),
        (nx.DiGraph(), np.array([[0, 1 + 2j], [0, 0]], dtype=complex)),
    ],
)
def test_to_numpy_array_complex_weights(G, expected):
    """Complex edge weights survive conversion when dtype=complex."""
    G.add_edge(0, 1, weight=1 + 2j)
    npt.assert_array_equal(nx.to_numpy_array(G, dtype=complex), expected)
def test_to_numpy_array_arbitrary_weights():
    """dtype=object supports weights outside the int64 range.

    Fix: the original defined the named constant ``w`` but then repeated the
    literal ``922337203685477580102`` in ``add_edge`` — use ``w`` everywhere
    so the value cannot drift out of sync.
    """
    G = nx.DiGraph()
    w = 922337203685477580102  # Out of range for int64
    G.add_edge(0, 1, weight=w)  # val not representable by int64
    A = nx.to_numpy_array(G, dtype=object)
    expected = np.array([[0, w], [0, 0]], dtype=object)
    npt.assert_array_equal(A, expected)

    # Undirected
    A = nx.to_numpy_array(G.to_undirected(), dtype=object)
    expected = np.array([[0, w], [w, 0]], dtype=object)
    npt.assert_array_equal(A, expected)
@pytest.mark.parametrize(
    "func, expected",
    ((min, -1), (max, 10), (sum, 11), (np.mean, 11 / 3), (np.median, 2)),
)
def test_to_numpy_array_multiweight_reduction(func, expected):
    """Test various functions for reducing multiedge weights."""
    G = nx.MultiDiGraph()
    for w in (-1, 2, 10.0):
        G.add_edge(0, 1, weight=w)
    A = nx.to_numpy_array(G, multigraph_weight=func, dtype=float)
    assert np.allclose(A, [[0, expected], [0, 0]])

    # The undirected view reduces the same weights, symmetrically.
    A = nx.to_numpy_array(G.to_undirected(), multigraph_weight=func, dtype=float)
    assert np.allclose(A, [[0, expected], [expected, 0]])
@pytest.mark.parametrize(
    ("G, expected"),
    [
        (nx.Graph(), [[(0, 0), (10, 5)], [(10, 5), (0, 0)]]),
        (nx.DiGraph(), [[(0, 0), (10, 5)], [(0, 0), (0, 0)]]),
    ],
)
def test_to_numpy_array_structured_dtype_attrs_from_fields(G, expected):
    """When `dtype` is structured (i.e. has names) and `weight` is None, use
    the named fields of the dtype to look up edge attributes."""
    G.add_edge(0, 1, weight=10, cost=5.0)
    dtype = np.dtype([("weight", int), ("cost", int)])
    result = nx.to_numpy_array(G, dtype=dtype, weight=None)
    npt.assert_array_equal(result, np.asarray(expected, dtype=dtype))
def test_to_numpy_array_structured_dtype_single_attr_default():
    """A single-field structured dtype yields 0/1 adjacency in that field."""
    single_field = np.dtype([("weight", float)])  # A single named field
    A = nx.to_numpy_array(nx.path_graph(3), dtype=single_field, weight=None)
    adjacency = np.array([[0, 1, 0], [1, 0, 1], [0, 1, 0]], dtype=float)
    npt.assert_array_equal(A["weight"], adjacency)
@pytest.mark.parametrize(
    ("field_name", "expected_attr_val"),
    [
        ("weight", 1),
        ("cost", 3),
    ],
)
def test_to_numpy_array_structured_dtype_single_attr(field_name, expected_attr_val):
    """Each named field pulls the matching edge attribute from the graph."""
    G = nx.Graph()
    G.add_edge(0, 1, cost=3)
    A = nx.to_numpy_array(G, dtype=np.dtype([(field_name, float)]), weight=None)
    wanted = np.array(
        [[0, expected_attr_val], [expected_attr_val, 0]], dtype=float
    )
    npt.assert_array_equal(A[field_name], wanted)
@pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph))
@pytest.mark.parametrize(
    "edge",
    [
        (0, 1),  # No edge attributes
        (0, 1, {"weight": 10}),  # One edge attr
        (0, 1, {"weight": 5, "flow": -4}),  # Multiple but not all edge attrs
        (0, 1, {"weight": 2.0, "cost": 10, "flow": -45}),  # All attrs
    ],
)
def test_to_numpy_array_structured_dtype_multiple_fields(graph_type, edge):
    """Each structured field matches the equivalent single-attribute array."""
    G = graph_type([edge])
    dtype = np.dtype([("weight", float), ("cost", float), ("flow", float)])
    A = nx.to_numpy_array(G, dtype=dtype, weight=None)
    for field in dtype.names:
        per_attr = nx.to_numpy_array(G, dtype=float, weight=field)
        npt.assert_array_equal(A[field], per_attr)
@pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph()))
def test_to_numpy_array_structured_dtype_scalar_nonedge(G):
    """A scalar `nonedge` applies to every field of a structured dtype."""
    G.add_edge(0, 1, weight=10)
    dtype = np.dtype([("weight", float), ("cost", float)])
    A = nx.to_numpy_array(G, dtype=dtype, weight=None, nonedge=np.nan)
    for field in dtype.names:
        per_attr = nx.to_numpy_array(G, dtype=float, weight=field, nonedge=np.nan)
        npt.assert_array_equal(A[field], per_attr)
@pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph()))
def test_to_numpy_array_structured_dtype_nonedge_ary(G):
    """Similar to the scalar case, except has a different non-edge value for
    each named field."""
    G.add_edge(0, 1, weight=10)
    dtype = np.dtype([("weight", float), ("cost", float)])
    nonedges = np.array([(0, np.inf)], dtype=dtype)
    A = nx.to_numpy_array(G, dtype=dtype, weight=None, nonedge=nonedges)
    for field in dtype.names:
        per_attr = nx.to_numpy_array(
            G, dtype=float, weight=field, nonedge=nonedges[field]
        )
        npt.assert_array_equal(A[field], per_attr)
def test_to_numpy_array_structured_dtype_with_weight_raises():
    """Using both a structured dtype (with named fields) and specifying a `weight`
    parameter is ambiguous."""
    G = nx.path_graph(3)
    dtype = np.dtype([("weight", int), ("cost", int)])
    exception_msg = "Specifying `weight` not supported for structured dtypes"
    # Both the implicit default (weight="weight") and an explicit weight fail.
    for kwargs in ({}, {"weight": "cost"}):
        with pytest.raises(ValueError, match=exception_msg):
            nx.to_numpy_array(G, dtype=dtype, **kwargs)
@pytest.mark.parametrize("graph_type", (nx.MultiGraph, nx.MultiDiGraph))
def test_to_numpy_array_structured_multigraph_raises(graph_type):
    """Structured dtypes are rejected for multigraphs."""
    G = nx.path_graph(3, create_using=graph_type)
    structured = np.dtype([("weight", int), ("cost", int)])
    with pytest.raises(nx.NetworkXError, match="Structured arrays are not supported"):
        nx.to_numpy_array(G, dtype=structured, weight=None)
def test_from_numpy_array_nodelist_bad_size():
    """An exception is raised when `len(nodelist) != A.shape[0]`."""
    n = 5  # Number of nodes
    A = np.diag(np.ones(n - 1), k=1)  # Adj. matrix for P_n
    expected = nx.path_graph(n)

    assert graphs_equal(nx.from_numpy_array(A, edge_attr=None), expected)
    assert graphs_equal(
        nx.from_numpy_array(A, edge_attr=None, nodelist=list(range(n))), expected
    )

    # Both too many and too few node labels are rejected.
    for bad_len in (n + 1, n - 1):
        with pytest.raises(ValueError, match="nodelist must have the same length as A"):
            nx.from_numpy_array(A, nodelist=list(range(bad_len)))
@pytest.mark.parametrize(
    "nodes",
    (
        [4, 3, 2, 1, 0],
        [9, 7, 1, 2, 8],
        ["a", "b", "c", "d", "e"],
        [(0, 0), (1, 1), (2, 3), (0, 2), (3, 1)],
        ["A", 2, 7, "spam", (1, 3)],
    ),
)
def test_from_numpy_array_nodelist(nodes):
    """nodelist relabels the integer nodes of the reconstructed path graph."""
    A = np.diag(np.ones(4), k=1)
    relabeled = nx.relabel_nodes(
        nx.path_graph(5), mapping=dict(enumerate(nodes)), copy=True
    )
    # Without edge attributes
    G = nx.from_numpy_array(A, edge_attr=None, nodelist=nodes)
    assert graphs_equal(G, relabeled)

    # With edge attributes
    nx.set_edge_attributes(relabeled, 1.0, name="weight")
    G = nx.from_numpy_array(A, nodelist=nodes)
    assert graphs_equal(G, relabeled)
@pytest.mark.parametrize(
    "nodes",
    (
        [4, 3, 2, 1, 0],
        [9, 7, 1, 2, 8],
        ["a", "b", "c", "d", "e"],
        [(0, 0), (1, 1), (2, 3), (0, 2), (3, 1)],
        ["A", 2, 7, "spam", (1, 3)],
    ),
)
def test_from_numpy_array_nodelist_directed(nodes):
    """Same as the undirected nodelist test, but for a directed path."""
    A = np.diag(np.ones(4), k=1)
    directed_path = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4)])
    relabeled = nx.relabel_nodes(
        directed_path, mapping=dict(enumerate(nodes)), copy=True
    )
    # Without edge attributes
    G = nx.from_numpy_array(A, create_using=nx.DiGraph, edge_attr=None, nodelist=nodes)
    assert graphs_equal(G, relabeled)

    # With edge attributes
    nx.set_edge_attributes(relabeled, 1.0, name="weight")
    G = nx.from_numpy_array(A, create_using=nx.DiGraph, nodelist=nodes)
    assert graphs_equal(G, relabeled)
@pytest.mark.parametrize(
    "nodes",
    (
        [4, 3, 2, 1, 0],
        [9, 7, 1, 2, 8],
        ["a", "b", "c", "d", "e"],
        [(0, 0), (1, 1), (2, 3), (0, 2), (3, 1)],
        ["A", 2, 7, "spam", (1, 3)],
    ),
)
def test_from_numpy_array_nodelist_multigraph(nodes):
    """Parallel-edge counts plus nodelist relabeling rebuild the multigraph."""
    A = np.array(
        [
            [0, 1, 0, 0, 0],
            [1, 0, 2, 0, 0],
            [0, 2, 0, 3, 0],
            [0, 0, 3, 0, 4],
            [0, 0, 0, 4, 0],
        ]
    )

    # Path edge (i, i+1) appears i+1 times in parallel.
    H = nx.MultiGraph()
    for count, edge in enumerate(((0, 1), (1, 2), (2, 3), (3, 4)), start=1):
        H.add_edges_from(itertools.repeat(edge, count))
    expected = nx.relabel_nodes(H, mapping=dict(enumerate(nodes)), copy=True)

    G = nx.from_numpy_array(
        A,
        parallel_edges=True,
        create_using=nx.MultiGraph,
        edge_attr=None,
        nodelist=nodes,
    )
    assert graphs_equal(G, expected)
@pytest.mark.parametrize(
    "nodes",
    (
        [4, 3, 2, 1, 0],
        [9, 7, 1, 2, 8],
        ["a", "b", "c", "d", "e"],
        [(0, 0), (1, 1), (2, 3), (0, 2), (3, 1)],
        ["A", 2, 7, "spam", (1, 3)],
    ),
)
@pytest.mark.parametrize("graph", (nx.complete_graph, nx.cycle_graph, nx.wheel_graph))
def test_from_numpy_array_nodelist_rountrip(graph, nodes):
    # NOTE(review): "rountrip" is a typo for "roundtrip"; kept so the
    # collected test ids do not change.
    G = graph(5)
    relabeled = nx.relabel_nodes(G, mapping=dict(enumerate(nodes)), copy=True)
    H = nx.from_numpy_array(nx.to_numpy_array(G), edge_attr=None, nodelist=nodes)
    assert graphs_equal(H, relabeled)

    # With an isolated node
    G = graph(4)
    G.add_node("foo")
    relabeled = nx.relabel_nodes(G, mapping=dict(zip(G.nodes, nodes)), copy=True)
    H = nx.from_numpy_array(nx.to_numpy_array(G), edge_attr=None, nodelist=nodes)
    assert graphs_equal(H, relabeled)
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/test_convert_pandas.py
ADDED
|
@@ -0,0 +1,349 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.utils import edges_equal, graphs_equal, nodes_equal
|
| 5 |
+
|
| 6 |
+
np = pytest.importorskip("numpy")
|
| 7 |
+
pd = pytest.importorskip("pandas")
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class TestConvertPandas:
|
| 11 |
+
def setup_method(self):
    """Build a small edgelist DataFrame, plus a variant with a parallel edge."""
    self.rng = np.random.RandomState(seed=5)
    attr_values = self.rng.randint(1, 11, size=(3, 2))
    frame = pd.DataFrame(attr_values, columns=["weight", "cost"])
    frame[0] = ["A", "B", "C"]  # Column label 0 (int)
    frame["b"] = ["D", "A", "E"]  # Column label 'b' (str)
    self.df = frame

    # One extra A-D row makes the edge (A, D) a parallel edge.
    extra = pd.DataFrame([[4, 16, "A", "D"]], columns=["weight", "cost", 0, "b"])
    self.mdf = pd.concat([frame, extra])
def test_exceptions(self):
|
| 25 |
+
G = pd.DataFrame(["a"]) # adj
|
| 26 |
+
pytest.raises(nx.NetworkXError, nx.to_networkx_graph, G)
|
| 27 |
+
G = pd.DataFrame(["a", 0.0]) # elist
|
| 28 |
+
pytest.raises(nx.NetworkXError, nx.to_networkx_graph, G)
|
| 29 |
+
df = pd.DataFrame([[1, 1], [1, 0]], dtype=int, index=[1, 2], columns=["a", "b"])
|
| 30 |
+
pytest.raises(nx.NetworkXError, nx.from_pandas_adjacency, df)
|
| 31 |
+
|
| 32 |
+
def test_from_edgelist_all_attr(self):
|
| 33 |
+
Gtrue = nx.Graph(
|
| 34 |
+
[
|
| 35 |
+
("E", "C", {"cost": 9, "weight": 10}),
|
| 36 |
+
("B", "A", {"cost": 1, "weight": 7}),
|
| 37 |
+
("A", "D", {"cost": 7, "weight": 4}),
|
| 38 |
+
]
|
| 39 |
+
)
|
| 40 |
+
G = nx.from_pandas_edgelist(self.df, 0, "b", True)
|
| 41 |
+
assert graphs_equal(G, Gtrue)
|
| 42 |
+
# MultiGraph
|
| 43 |
+
MGtrue = nx.MultiGraph(Gtrue)
|
| 44 |
+
MGtrue.add_edge("A", "D", cost=16, weight=4)
|
| 45 |
+
MG = nx.from_pandas_edgelist(self.mdf, 0, "b", True, nx.MultiGraph())
|
| 46 |
+
assert graphs_equal(MG, MGtrue)
|
| 47 |
+
|
| 48 |
+
def test_from_edgelist_multi_attr(self):
|
| 49 |
+
Gtrue = nx.Graph(
|
| 50 |
+
[
|
| 51 |
+
("E", "C", {"cost": 9, "weight": 10}),
|
| 52 |
+
("B", "A", {"cost": 1, "weight": 7}),
|
| 53 |
+
("A", "D", {"cost": 7, "weight": 4}),
|
| 54 |
+
]
|
| 55 |
+
)
|
| 56 |
+
G = nx.from_pandas_edgelist(self.df, 0, "b", ["weight", "cost"])
|
| 57 |
+
assert graphs_equal(G, Gtrue)
|
| 58 |
+
|
| 59 |
+
def test_from_edgelist_multi_attr_incl_target(self):
|
| 60 |
+
Gtrue = nx.Graph(
|
| 61 |
+
[
|
| 62 |
+
("E", "C", {0: "C", "b": "E", "weight": 10}),
|
| 63 |
+
("B", "A", {0: "B", "b": "A", "weight": 7}),
|
| 64 |
+
("A", "D", {0: "A", "b": "D", "weight": 4}),
|
| 65 |
+
]
|
| 66 |
+
)
|
| 67 |
+
G = nx.from_pandas_edgelist(self.df, 0, "b", [0, "b", "weight"])
|
| 68 |
+
assert graphs_equal(G, Gtrue)
|
| 69 |
+
|
| 70 |
+
def test_from_edgelist_multidigraph_and_edge_attr(self):
|
| 71 |
+
# example from issue #2374
|
| 72 |
+
edges = [
|
| 73 |
+
("X1", "X4", {"Co": "zA", "Mi": 0, "St": "X1"}),
|
| 74 |
+
("X1", "X4", {"Co": "zB", "Mi": 54, "St": "X2"}),
|
| 75 |
+
("X1", "X4", {"Co": "zB", "Mi": 49, "St": "X3"}),
|
| 76 |
+
("X1", "X4", {"Co": "zB", "Mi": 44, "St": "X4"}),
|
| 77 |
+
("Y1", "Y3", {"Co": "zC", "Mi": 0, "St": "Y1"}),
|
| 78 |
+
("Y1", "Y3", {"Co": "zC", "Mi": 34, "St": "Y2"}),
|
| 79 |
+
("Y1", "Y3", {"Co": "zC", "Mi": 29, "St": "X2"}),
|
| 80 |
+
("Y1", "Y3", {"Co": "zC", "Mi": 24, "St": "Y3"}),
|
| 81 |
+
("Z1", "Z3", {"Co": "zD", "Mi": 0, "St": "Z1"}),
|
| 82 |
+
("Z1", "Z3", {"Co": "zD", "Mi": 14, "St": "X3"}),
|
| 83 |
+
]
|
| 84 |
+
Gtrue = nx.MultiDiGraph(edges)
|
| 85 |
+
data = {
|
| 86 |
+
"O": ["X1", "X1", "X1", "X1", "Y1", "Y1", "Y1", "Y1", "Z1", "Z1"],
|
| 87 |
+
"D": ["X4", "X4", "X4", "X4", "Y3", "Y3", "Y3", "Y3", "Z3", "Z3"],
|
| 88 |
+
"St": ["X1", "X2", "X3", "X4", "Y1", "Y2", "X2", "Y3", "Z1", "X3"],
|
| 89 |
+
"Co": ["zA", "zB", "zB", "zB", "zC", "zC", "zC", "zC", "zD", "zD"],
|
| 90 |
+
"Mi": [0, 54, 49, 44, 0, 34, 29, 24, 0, 14],
|
| 91 |
+
}
|
| 92 |
+
df = pd.DataFrame.from_dict(data)
|
| 93 |
+
G1 = nx.from_pandas_edgelist(
|
| 94 |
+
df, source="O", target="D", edge_attr=True, create_using=nx.MultiDiGraph
|
| 95 |
+
)
|
| 96 |
+
G2 = nx.from_pandas_edgelist(
|
| 97 |
+
df,
|
| 98 |
+
source="O",
|
| 99 |
+
target="D",
|
| 100 |
+
edge_attr=["St", "Co", "Mi"],
|
| 101 |
+
create_using=nx.MultiDiGraph,
|
| 102 |
+
)
|
| 103 |
+
assert graphs_equal(G1, Gtrue)
|
| 104 |
+
assert graphs_equal(G2, Gtrue)
|
| 105 |
+
|
| 106 |
+
def test_from_edgelist_one_attr(self):
|
| 107 |
+
Gtrue = nx.Graph(
|
| 108 |
+
[
|
| 109 |
+
("E", "C", {"weight": 10}),
|
| 110 |
+
("B", "A", {"weight": 7}),
|
| 111 |
+
("A", "D", {"weight": 4}),
|
| 112 |
+
]
|
| 113 |
+
)
|
| 114 |
+
G = nx.from_pandas_edgelist(self.df, 0, "b", "weight")
|
| 115 |
+
assert graphs_equal(G, Gtrue)
|
| 116 |
+
|
| 117 |
+
def test_from_edgelist_int_attr_name(self):
|
| 118 |
+
# note: this also tests that edge_attr can be `source`
|
| 119 |
+
Gtrue = nx.Graph(
|
| 120 |
+
[("E", "C", {0: "C"}), ("B", "A", {0: "B"}), ("A", "D", {0: "A"})]
|
| 121 |
+
)
|
| 122 |
+
G = nx.from_pandas_edgelist(self.df, 0, "b", 0)
|
| 123 |
+
assert graphs_equal(G, Gtrue)
|
| 124 |
+
|
| 125 |
+
def test_from_edgelist_invalid_attr(self):
|
| 126 |
+
pytest.raises(
|
| 127 |
+
nx.NetworkXError, nx.from_pandas_edgelist, self.df, 0, "b", "misspell"
|
| 128 |
+
)
|
| 129 |
+
pytest.raises(nx.NetworkXError, nx.from_pandas_edgelist, self.df, 0, "b", 1)
|
| 130 |
+
# see Issue #3562
|
| 131 |
+
edgeframe = pd.DataFrame([[0, 1], [1, 2], [2, 0]], columns=["s", "t"])
|
| 132 |
+
pytest.raises(
|
| 133 |
+
nx.NetworkXError, nx.from_pandas_edgelist, edgeframe, "s", "t", True
|
| 134 |
+
)
|
| 135 |
+
pytest.raises(
|
| 136 |
+
nx.NetworkXError, nx.from_pandas_edgelist, edgeframe, "s", "t", "weight"
|
| 137 |
+
)
|
| 138 |
+
pytest.raises(
|
| 139 |
+
nx.NetworkXError,
|
| 140 |
+
nx.from_pandas_edgelist,
|
| 141 |
+
edgeframe,
|
| 142 |
+
"s",
|
| 143 |
+
"t",
|
| 144 |
+
["weight", "size"],
|
| 145 |
+
)
|
| 146 |
+
|
| 147 |
+
def test_from_edgelist_no_attr(self):
|
| 148 |
+
Gtrue = nx.Graph([("E", "C", {}), ("B", "A", {}), ("A", "D", {})])
|
| 149 |
+
G = nx.from_pandas_edgelist(self.df, 0, "b")
|
| 150 |
+
assert graphs_equal(G, Gtrue)
|
| 151 |
+
|
| 152 |
+
def test_from_edgelist(self):
|
| 153 |
+
# Pandas DataFrame
|
| 154 |
+
G = nx.cycle_graph(10)
|
| 155 |
+
G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges))
|
| 156 |
+
|
| 157 |
+
edgelist = nx.to_edgelist(G)
|
| 158 |
+
source = [s for s, t, d in edgelist]
|
| 159 |
+
target = [t for s, t, d in edgelist]
|
| 160 |
+
weight = [d["weight"] for s, t, d in edgelist]
|
| 161 |
+
edges = pd.DataFrame({"source": source, "target": target, "weight": weight})
|
| 162 |
+
|
| 163 |
+
GG = nx.from_pandas_edgelist(edges, edge_attr="weight")
|
| 164 |
+
assert nodes_equal(G.nodes(), GG.nodes())
|
| 165 |
+
assert edges_equal(G.edges(), GG.edges())
|
| 166 |
+
GW = nx.to_networkx_graph(edges, create_using=nx.Graph)
|
| 167 |
+
assert nodes_equal(G.nodes(), GW.nodes())
|
| 168 |
+
assert edges_equal(G.edges(), GW.edges())
|
| 169 |
+
|
| 170 |
+
def test_to_edgelist_default_source_or_target_col_exists(self):
|
| 171 |
+
G = nx.path_graph(10)
|
| 172 |
+
G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges))
|
| 173 |
+
nx.set_edge_attributes(G, 0, name="source")
|
| 174 |
+
pytest.raises(nx.NetworkXError, nx.to_pandas_edgelist, G)
|
| 175 |
+
|
| 176 |
+
# drop source column to test an exception raised for the target column
|
| 177 |
+
for u, v, d in G.edges(data=True):
|
| 178 |
+
d.pop("source", None)
|
| 179 |
+
|
| 180 |
+
nx.set_edge_attributes(G, 0, name="target")
|
| 181 |
+
pytest.raises(nx.NetworkXError, nx.to_pandas_edgelist, G)
|
| 182 |
+
|
| 183 |
+
def test_to_edgelist_custom_source_or_target_col_exists(self):
|
| 184 |
+
G = nx.path_graph(10)
|
| 185 |
+
G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges))
|
| 186 |
+
nx.set_edge_attributes(G, 0, name="source_col_name")
|
| 187 |
+
pytest.raises(
|
| 188 |
+
nx.NetworkXError, nx.to_pandas_edgelist, G, source="source_col_name"
|
| 189 |
+
)
|
| 190 |
+
|
| 191 |
+
# drop source column to test an exception raised for the target column
|
| 192 |
+
for u, v, d in G.edges(data=True):
|
| 193 |
+
d.pop("source_col_name", None)
|
| 194 |
+
|
| 195 |
+
nx.set_edge_attributes(G, 0, name="target_col_name")
|
| 196 |
+
pytest.raises(
|
| 197 |
+
nx.NetworkXError, nx.to_pandas_edgelist, G, target="target_col_name"
|
| 198 |
+
)
|
| 199 |
+
|
| 200 |
+
def test_to_edgelist_edge_key_col_exists(self):
|
| 201 |
+
G = nx.path_graph(10, create_using=nx.MultiGraph)
|
| 202 |
+
G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges()))
|
| 203 |
+
nx.set_edge_attributes(G, 0, name="edge_key_name")
|
| 204 |
+
pytest.raises(
|
| 205 |
+
nx.NetworkXError, nx.to_pandas_edgelist, G, edge_key="edge_key_name"
|
| 206 |
+
)
|
| 207 |
+
|
| 208 |
+
def test_from_adjacency(self):
|
| 209 |
+
nodelist = [1, 2]
|
| 210 |
+
dftrue = pd.DataFrame(
|
| 211 |
+
[[1, 1], [1, 0]], dtype=int, index=nodelist, columns=nodelist
|
| 212 |
+
)
|
| 213 |
+
G = nx.Graph([(1, 1), (1, 2)])
|
| 214 |
+
df = nx.to_pandas_adjacency(G, dtype=int)
|
| 215 |
+
pd.testing.assert_frame_equal(df, dftrue)
|
| 216 |
+
|
| 217 |
+
@pytest.mark.parametrize("graph", [nx.Graph, nx.MultiGraph])
|
| 218 |
+
def test_roundtrip(self, graph):
|
| 219 |
+
# edgelist
|
| 220 |
+
Gtrue = graph([(1, 1), (1, 2)])
|
| 221 |
+
df = nx.to_pandas_edgelist(Gtrue)
|
| 222 |
+
G = nx.from_pandas_edgelist(df, create_using=graph)
|
| 223 |
+
assert graphs_equal(Gtrue, G)
|
| 224 |
+
# adjacency
|
| 225 |
+
adj = {1: {1: {"weight": 1}, 2: {"weight": 1}}, 2: {1: {"weight": 1}}}
|
| 226 |
+
Gtrue = graph(adj)
|
| 227 |
+
df = nx.to_pandas_adjacency(Gtrue, dtype=int)
|
| 228 |
+
G = nx.from_pandas_adjacency(df, create_using=graph)
|
| 229 |
+
assert graphs_equal(Gtrue, G)
|
| 230 |
+
|
| 231 |
+
def test_from_adjacency_named(self):
|
| 232 |
+
# example from issue #3105
|
| 233 |
+
data = {
|
| 234 |
+
"A": {"A": 0, "B": 0, "C": 0},
|
| 235 |
+
"B": {"A": 1, "B": 0, "C": 0},
|
| 236 |
+
"C": {"A": 0, "B": 1, "C": 0},
|
| 237 |
+
}
|
| 238 |
+
dftrue = pd.DataFrame(data, dtype=np.intp)
|
| 239 |
+
df = dftrue[["A", "C", "B"]]
|
| 240 |
+
G = nx.from_pandas_adjacency(df, create_using=nx.DiGraph())
|
| 241 |
+
df = nx.to_pandas_adjacency(G, dtype=np.intp)
|
| 242 |
+
pd.testing.assert_frame_equal(df, dftrue)
|
| 243 |
+
|
| 244 |
+
@pytest.mark.parametrize("edge_attr", [["attr2", "attr3"], True])
|
| 245 |
+
def test_edgekey_with_multigraph(self, edge_attr):
|
| 246 |
+
df = pd.DataFrame(
|
| 247 |
+
{
|
| 248 |
+
"source": {"A": "N1", "B": "N2", "C": "N1", "D": "N1"},
|
| 249 |
+
"target": {"A": "N2", "B": "N3", "C": "N1", "D": "N2"},
|
| 250 |
+
"attr1": {"A": "F1", "B": "F2", "C": "F3", "D": "F4"},
|
| 251 |
+
"attr2": {"A": 1, "B": 0, "C": 0, "D": 0},
|
| 252 |
+
"attr3": {"A": 0, "B": 1, "C": 0, "D": 1},
|
| 253 |
+
}
|
| 254 |
+
)
|
| 255 |
+
Gtrue = nx.MultiGraph(
|
| 256 |
+
[
|
| 257 |
+
("N1", "N2", "F1", {"attr2": 1, "attr3": 0}),
|
| 258 |
+
("N2", "N3", "F2", {"attr2": 0, "attr3": 1}),
|
| 259 |
+
("N1", "N1", "F3", {"attr2": 0, "attr3": 0}),
|
| 260 |
+
("N1", "N2", "F4", {"attr2": 0, "attr3": 1}),
|
| 261 |
+
]
|
| 262 |
+
)
|
| 263 |
+
# example from issue #4065
|
| 264 |
+
G = nx.from_pandas_edgelist(
|
| 265 |
+
df,
|
| 266 |
+
source="source",
|
| 267 |
+
target="target",
|
| 268 |
+
edge_attr=edge_attr,
|
| 269 |
+
edge_key="attr1",
|
| 270 |
+
create_using=nx.MultiGraph(),
|
| 271 |
+
)
|
| 272 |
+
assert graphs_equal(G, Gtrue)
|
| 273 |
+
|
| 274 |
+
df_roundtrip = nx.to_pandas_edgelist(G, edge_key="attr1")
|
| 275 |
+
df_roundtrip = df_roundtrip.sort_values("attr1")
|
| 276 |
+
df_roundtrip.index = ["A", "B", "C", "D"]
|
| 277 |
+
pd.testing.assert_frame_equal(
|
| 278 |
+
df, df_roundtrip[["source", "target", "attr1", "attr2", "attr3"]]
|
| 279 |
+
)
|
| 280 |
+
|
| 281 |
+
def test_edgekey_with_normal_graph_no_action(self):
|
| 282 |
+
Gtrue = nx.Graph(
|
| 283 |
+
[
|
| 284 |
+
("E", "C", {"cost": 9, "weight": 10}),
|
| 285 |
+
("B", "A", {"cost": 1, "weight": 7}),
|
| 286 |
+
("A", "D", {"cost": 7, "weight": 4}),
|
| 287 |
+
]
|
| 288 |
+
)
|
| 289 |
+
G = nx.from_pandas_edgelist(self.df, 0, "b", True, edge_key="weight")
|
| 290 |
+
assert graphs_equal(G, Gtrue)
|
| 291 |
+
|
| 292 |
+
def test_nonexisting_edgekey_raises(self):
|
| 293 |
+
with pytest.raises(nx.exception.NetworkXError):
|
| 294 |
+
nx.from_pandas_edgelist(
|
| 295 |
+
self.df,
|
| 296 |
+
source="source",
|
| 297 |
+
target="target",
|
| 298 |
+
edge_key="Not_real",
|
| 299 |
+
edge_attr=True,
|
| 300 |
+
create_using=nx.MultiGraph(),
|
| 301 |
+
)
|
| 302 |
+
|
| 303 |
+
def test_multigraph_with_edgekey_no_edgeattrs(self):
|
| 304 |
+
Gtrue = nx.MultiGraph()
|
| 305 |
+
Gtrue.add_edge(0, 1, key=0)
|
| 306 |
+
Gtrue.add_edge(0, 1, key=3)
|
| 307 |
+
df = nx.to_pandas_edgelist(Gtrue, edge_key="key")
|
| 308 |
+
expected = pd.DataFrame({"source": [0, 0], "target": [1, 1], "key": [0, 3]})
|
| 309 |
+
pd.testing.assert_frame_equal(expected, df)
|
| 310 |
+
G = nx.from_pandas_edgelist(df, edge_key="key", create_using=nx.MultiGraph)
|
| 311 |
+
assert graphs_equal(Gtrue, G)
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
def test_to_pandas_adjacency_with_nodelist():
    """nodelist restricts the adjacency frame to the listed nodes."""
    G = nx.complete_graph(5)
    nodes = [1, 4]
    expected = pd.DataFrame(
        [[0, 1], [1, 0]], dtype=int, index=nodes, columns=nodes
    )
    built = nx.to_pandas_adjacency(G, nodes, dtype=int)
    pd.testing.assert_frame_equal(expected, built)
|
| 323 |
+
|
| 324 |
+
|
| 325 |
+
def test_to_pandas_edgelist_with_nodelist():
    """nodelist drops edges whose endpoints are outside the listed nodes."""
    G = nx.Graph()
    G.add_edges_from([(0, 1), (1, 2), (1, 3)], weight=2.0)
    G.add_edge(0, 5, weight=100)
    frame = nx.to_pandas_edgelist(G, nodelist=[1, 2])
    # Node 0 and the weight-100 edge touch excluded nodes only.
    assert 0 not in frame["source"].to_numpy()
    assert 100 not in frame["weight"].to_numpy()
|
| 332 |
+
|
| 333 |
+
|
| 334 |
+
def test_from_pandas_adjacency_with_index_collisions():
    """See gh-7407"""
    # Node labels deliberately chosen so positional/label lookups could clash.
    labels = [1010001, 2, 1, 1010002]
    frame = pd.DataFrame(
        [
            [0, 1, 0, 0],
            [0, 0, 1, 0],
            [0, 0, 0, 1],
            [0, 0, 0, 0],
        ],
        index=labels,
        columns=labels,
    )
    G = nx.from_pandas_adjacency(frame, create_using=nx.DiGraph)
    expected = nx.DiGraph([(1010001, 2), (2, 1), (1, 1010002)])
    assert nodes_equal(G.nodes, expected.nodes)
    assert edges_equal(G.edges, expected.edges)
|
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/test_convert_scipy.py
ADDED
|
@@ -0,0 +1,282 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
np = pytest.importorskip("numpy")
|
| 4 |
+
sp = pytest.importorskip("scipy")
|
| 5 |
+
|
| 6 |
+
import networkx as nx
|
| 7 |
+
from networkx.generators.classic import barbell_graph, cycle_graph, path_graph
|
| 8 |
+
from networkx.utils import graphs_equal
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class TestConvertScipy:
    """Conversions between scipy sparse arrays and graphs."""

    def setup_method(self):
        self.G1 = barbell_graph(10, 3)
        self.G2 = cycle_graph(10, create_using=nx.DiGraph)

        self.G3 = self.create_weighted(nx.Graph())
        self.G4 = self.create_weighted(nx.DiGraph())

    def test_exceptions(self):
        """An object with only a `format` attribute is not a valid input."""
        class G:
            format = None

        with pytest.raises(nx.NetworkXError):
            nx.to_networkx_graph(G)

    def create_weighted(self, G):
        """Add a weighted 4-cycle (weight = source node + 10) to G; return G."""
        base = cycle_graph(4)
        G.add_weighted_edges_from((u, v, u + 10) for u, v in base.edges())
        return G

    def identity_conversion(self, G, A, create_using):
        """Assert G survives conversion from A through several code paths."""
        assert nx.is_isomorphic(
            G, nx.from_scipy_sparse_array(A, create_using=create_using)
        )
        assert nx.is_isomorphic(G, nx.to_networkx_graph(A, create_using=create_using))
        # The graph constructor accepts every sparse format plus dense forms.
        graph_class = nx.empty_graph(0, create_using).__class__
        for data in (A, A.tocsr(), A.tocoo(), A.tocsc(), A.todense(), A.toarray()):
            assert nx.is_isomorphic(G, graph_class(data))

    def test_shape(self):
        "Conversion from non-square sparse array."
        A = sp.sparse.lil_array([[1, 2, 3], [4, 5, 6]])
        with pytest.raises(nx.NetworkXError):
            nx.from_scipy_sparse_array(A)

    def test_identity_graph_matrix(self):
        "Conversion from graph to sparse matrix to graph."
        self.identity_conversion(self.G1, nx.to_scipy_sparse_array(self.G1), nx.Graph())

    def test_identity_digraph_matrix(self):
        "Conversion from digraph to sparse matrix to digraph."
        self.identity_conversion(self.G2, nx.to_scipy_sparse_array(self.G2), nx.DiGraph())

    def test_identity_weighted_graph_matrix(self):
        """Conversion from weighted graph to sparse matrix to weighted graph."""
        self.identity_conversion(self.G3, nx.to_scipy_sparse_array(self.G3), nx.Graph())

    def test_identity_weighted_digraph_matrix(self):
        """Conversion from weighted digraph to sparse matrix to weighted digraph."""
        self.identity_conversion(self.G4, nx.to_scipy_sparse_array(self.G4), nx.DiGraph())

    def test_nodelist(self):
        """Conversion from graph to sparse matrix to graph with nodelist."""
        P4 = path_graph(4)
        P3 = path_graph(3)
        nodelist = list(P3.nodes())
        A = nx.to_scipy_sparse_array(P4, nodelist=nodelist)
        assert nx.is_isomorphic(nx.Graph(A), P3)

        # Empty, duplicated, and unknown-node nodelists must all raise.
        for bad_nodelist in ([], nodelist + [0], [-1, 0, 1, 2]):
            with pytest.raises(nx.NetworkXError):
                nx.to_scipy_sparse_array(P3, nodelist=bad_nodelist)

    def test_weight_keyword(self):
        """weight=None, default, and a custom attribute scale the matrix."""
        WP4 = nx.Graph()
        WP4.add_edges_from((n, n + 1, {"weight": 0.5, "other": 0.3}) for n in range(3))
        A = nx.to_scipy_sparse_array(path_graph(4))
        np.testing.assert_equal(
            A.todense(), nx.to_scipy_sparse_array(WP4, weight=None).todense()
        )
        np.testing.assert_equal(
            0.5 * A.todense(), nx.to_scipy_sparse_array(WP4).todense()
        )
        np.testing.assert_equal(
            0.3 * A.todense(), nx.to_scipy_sparse_array(WP4, weight="other").todense()
        )

    def test_format_keyword(self):
        """Every supported output format yields the same dense matrix."""
        WP4 = nx.Graph()
        WP4.add_edges_from((n, n + 1, {"weight": 0.5, "other": 0.3}) for n in range(3))
        P4 = path_graph(4)
        reference = nx.to_scipy_sparse_array(WP4, weight=None).todense()
        for fmt in ("csr", "csc", "coo", "bsr", "lil", "dia", "dok"):
            A = nx.to_scipy_sparse_array(P4, format=fmt)
            np.testing.assert_equal(A.todense(), reference)

    def test_format_keyword_raise(self):
        """An unknown format name raises NetworkXError."""
        with pytest.raises(nx.NetworkXError):
            nx.to_scipy_sparse_array(path_graph(4), format="any_other")

    def test_null_raise(self):
        with pytest.raises(nx.NetworkXError):
            nx.to_scipy_sparse_array(nx.Graph())

    def test_empty(self):
        """A single isolated node converts to a 1x1 zero matrix."""
        G = nx.Graph()
        G.add_node(1)
        M = nx.to_scipy_sparse_array(G)
        np.testing.assert_equal(M.toarray(), np.array([[0]]))

    def test_ordering(self):
        """nodelist order determines row/column order."""
        G = nx.DiGraph()
        G.add_edge(1, 2)
        G.add_edge(2, 3)
        G.add_edge(3, 1)
        M = nx.to_scipy_sparse_array(G, nodelist=[3, 2, 1])
        np.testing.assert_equal(
            M.toarray(), np.array([[0, 0, 1], [1, 0, 0], [0, 1, 0]])
        )

    def test_selfloop_graph(self):
        G = nx.Graph([(1, 1)])
        np.testing.assert_equal(nx.to_scipy_sparse_array(G).toarray(), np.array([[1]]))

        G.add_edges_from([(2, 3), (3, 4)])
        M = nx.to_scipy_sparse_array(G, nodelist=[2, 3, 4])
        np.testing.assert_equal(
            M.toarray(), np.array([[0, 1, 0], [1, 0, 1], [0, 1, 0]])
        )

    def test_selfloop_digraph(self):
        G = nx.DiGraph([(1, 1)])
        np.testing.assert_equal(nx.to_scipy_sparse_array(G).toarray(), np.array([[1]]))

        G.add_edges_from([(2, 3), (3, 4)])
        M = nx.to_scipy_sparse_array(G, nodelist=[2, 3, 4])
        np.testing.assert_equal(
            M.toarray(), np.array([[0, 1, 0], [0, 0, 1], [0, 0, 0]])
        )

    def test_from_scipy_sparse_array_parallel_edges(self):
        """Tests that the :func:`networkx.from_scipy_sparse_array` function
        interprets integer weights as the number of parallel edges when
        creating a multigraph.

        """
        A = sp.sparse.csr_array([[1, 1], [1, 2]])
        # First, with a simple graph, each integer entry in the adjacency
        # matrix is interpreted as the weight of a single edge in the graph,
        # regardless of the parallel_edges flag.
        expected = nx.DiGraph()
        expected.add_weighted_edges_from(
            [(u, v, 1) for (u, v) in [(0, 0), (0, 1), (1, 0)]]
        )
        expected.add_edge(1, 1, weight=2)
        for parallel in (True, False):
            actual = nx.from_scipy_sparse_array(
                A, parallel_edges=parallel, create_using=nx.DiGraph
            )
            assert graphs_equal(actual, expected)
        # Now each integer entry in the adjacency matrix is interpreted as the
        # number of parallel edges in the graph if the appropriate keyword
        # argument is specified.
        edges = [(0, 0), (0, 1), (1, 0), (1, 1), (1, 1)]
        expected = nx.MultiDiGraph()
        expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges])
        actual = nx.from_scipy_sparse_array(
            A, parallel_edges=True, create_using=nx.MultiDiGraph
        )
        assert graphs_equal(actual, expected)
        expected = nx.MultiDiGraph()
        expected.add_edges_from(set(edges), weight=1)
        # The sole self-loop (edge 0) on vertex 1 should have weight 2.
        expected[1][1][0]["weight"] = 2
        actual = nx.from_scipy_sparse_array(
            A, parallel_edges=False, create_using=nx.MultiDiGraph
        )
        assert graphs_equal(actual, expected)

    def test_symmetric(self):
        """Tests that a symmetric matrix has edges added only once to an
        undirected multigraph when using
        :func:`networkx.from_scipy_sparse_array`.

        """
        A = sp.sparse.csr_array([[0, 1], [1, 0]])
        G = nx.from_scipy_sparse_array(A, create_using=nx.MultiGraph)
        expected = nx.MultiGraph()
        expected.add_edge(0, 1, weight=1)
        assert graphs_equal(G, expected)
|
| 264 |
+
|
| 265 |
+
|
| 266 |
+
@pytest.mark.parametrize("sparse_format", ("csr", "csc", "dok"))
def test_from_scipy_sparse_array_formats(sparse_format):
    """Test all formats supported by _generate_weighted_edges."""
    # trinode complete graph with non-uniform edge weights
    weights = {(0, 1): 3, (0, 2): 2, (1, 2): 1}
    expected = nx.Graph()
    for (u, v), w in weights.items():
        expected.add_edge(u, v, weight=w)
    A = sp.sparse.coo_array([[0, 3, 2], [3, 0, 1], [2, 1, 0]]).asformat(sparse_format)
    assert graphs_equal(expected, nx.from_scipy_sparse_array(A))
|
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/test_exceptions.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
# smoke tests for exceptions
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def test_raises_networkxexception():
    """Smoke test: NetworkXException can be raised and caught."""
    def _boom():
        raise nx.NetworkXException

    pytest.raises(nx.NetworkXException, _boom)
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def test_raises_networkxerr():
    """Smoke test: NetworkXError can be raised and caught."""
    def _boom():
        raise nx.NetworkXError

    pytest.raises(nx.NetworkXError, _boom)
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def test_raises_networkx_pointless_concept():
    """Smoke test: NetworkXPointlessConcept can be raised and caught."""
    def _boom():
        raise nx.NetworkXPointlessConcept

    pytest.raises(nx.NetworkXPointlessConcept, _boom)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def test_raises_networkxalgorithmerr():
    """Smoke test: NetworkXAlgorithmError can be raised and caught."""
    def _boom():
        raise nx.NetworkXAlgorithmError

    pytest.raises(nx.NetworkXAlgorithmError, _boom)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def test_raises_networkx_unfeasible():
    """Smoke test: NetworkXUnfeasible can be raised and caught."""
    def _boom():
        raise nx.NetworkXUnfeasible

    pytest.raises(nx.NetworkXUnfeasible, _boom)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def test_raises_networkx_no_path():
    """Smoke test: NetworkXNoPath can be raised and caught."""
    def _boom():
        raise nx.NetworkXNoPath

    pytest.raises(nx.NetworkXNoPath, _boom)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def test_raises_networkx_unbounded():
    """Smoke test: NetworkXUnbounded can be raised and caught."""
    def _boom():
        raise nx.NetworkXUnbounded

    pytest.raises(nx.NetworkXUnbounded, _boom)
|
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/test_import.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def test_namespace_alias():
    """`from networkx import nx` must fail: no self-alias is exported."""
    def _import_alias():
        from networkx import nx  # noqa: F401

    pytest.raises(ImportError, _import_alias)
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def test_namespace_nesting():
    """`from networkx import networkx` must fail: no nested module exists."""
    def _import_nested():
        from networkx import networkx  # noqa: F401

    pytest.raises(ImportError, _import_nested)
|
infer_4_37_2/lib/python3.10/site-packages/networkx/tests/test_lazy_imports.py
ADDED
|
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import importlib
|
| 2 |
+
import sys
|
| 3 |
+
import types
|
| 4 |
+
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
import networkx.lazy_imports as lazy
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def test_lazy_import_basics():
    """Lazy-loaded real modules work; missing ones raise on attribute access.

    The error for a missing module is deferred until an attribute is
    accessed, and must recur on every subsequent access.
    """
    math = lazy._lazy_import("math")
    anything_not_real = lazy._lazy_import("anything_not_real")

    # Accessing attributes of a real lazy module resolves and works.
    assert math.sin(math.pi) == pytest.approx(0, 1e-6)
    # Attribute access on the missing module raises...
    with pytest.raises(ModuleNotFoundError):
        anything_not_real.pi
    assert isinstance(anything_not_real, lazy.DelayedImportErrorModule)
    # ...and keeps raising on a second access (no state change).
    with pytest.raises(ModuleNotFoundError):
        anything_not_real.pi
|
| 22 |
+
assert isinstance(anything_not_real, lazy.DelayedImportErrorModule)
|
| 23 |
+
# see if it changes for second access
|
| 24 |
+
try:
|
| 25 |
+
anything_not_real.pi
|
| 26 |
+
assert False # Should not get here
|
| 27 |
+
except ModuleNotFoundError:
|
| 28 |
+
pass
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def test_lazy_import_impact_on_sys_modules():
    """Lazy importing a real module registers it in ``sys.modules``;
    a missing module yields a DelayedImportErrorModule and no registration.
    """
    math = lazy._lazy_import("math")
    anything_not_real = lazy._lazy_import("anything_not_real")

    assert type(math) == types.ModuleType
    assert "math" in sys.modules
    assert type(anything_not_real) == lazy.DelayedImportErrorModule
    assert "anything_not_real" not in sys.modules

    # only do this if numpy is installed (return value intentionally unused)
    pytest.importorskip("numpy")
    np = lazy._lazy_import("numpy")
    assert type(np) == types.ModuleType
    assert "numpy" in sys.modules

    np.pi  # trigger load of numpy

    # Triggering the real import must not change the module's type or entry.
    assert type(np) == types.ModuleType
    assert "numpy" in sys.modules
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def test_lazy_import_nonbuiltins():
    """Exercise lazy imports of third-party modules, installed or not."""
    sp = lazy._lazy_import("scipy")
    np = lazy._lazy_import("numpy")

    if isinstance(sp, lazy.DelayedImportErrorModule):
        # scipy missing: attribute access must raise ModuleNotFoundError.
        raised = False
        try:
            sp.special.erf
        except ModuleNotFoundError:
            raised = True
        assert raised
    elif isinstance(np, lazy.DelayedImportErrorModule):
        # numpy missing: same expectation for its attributes.
        raised = False
        try:
            np.sin(np.pi)
        except ModuleNotFoundError:
            raised = True
        assert raised
    else:
        # Both installed: the lazily imported modules work together normally.
        assert sp.special.erf(np.pi) == pytest.approx(1, 1e-4)
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def test_lazy_attach():
    """``lazy.attach`` should define ``__getattr__``, ``__lazy_dir__``, ``__all__``."""
    name = "mymod"
    submods = ["mysubmodule", "anothersubmodule"]
    myall = {"not_real_submod": ["some_var_or_func"]}

    # Simulate a module namespace that calls attach() at import time.
    namespace = {
        "attach": lazy.attach,
        "name": name,
        "submods": submods,
        "myall": myall,
    }
    code = "__getattr__, __lazy_dir__, __all__ = attach(name, submods, myall)"
    exec(code, {}, namespace)

    # None marks entries whose exact value we don't pin, only their presence.
    expected = dict(
        attach=lazy.attach,
        name=name,
        submods=submods,
        myall=myall,
        __getattr__=None,
        __lazy_dir__=None,
        __all__=None,
    )
    assert namespace.keys() == expected.keys()
    for key, val in expected.items():
        if val is not None:
            assert namespace[key] == val
|
infer_4_37_2/lib/python3.10/site-packages/networkx/utils/__init__.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from networkx.utils.misc import *
|
| 2 |
+
from networkx.utils.decorators import *
|
| 3 |
+
from networkx.utils.random_sequence import *
|
| 4 |
+
from networkx.utils.union_find import *
|
| 5 |
+
from networkx.utils.rcm import *
|
| 6 |
+
from networkx.utils.heaps import *
|
| 7 |
+
from networkx.utils.configs import *
|
| 8 |
+
from networkx.utils.backends import *
|
infer_4_37_2/lib/python3.10/site-packages/networkx/utils/backends.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
infer_4_37_2/lib/python3.10/site-packages/networkx/utils/configs.py
ADDED
|
@@ -0,0 +1,387 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import os
|
| 3 |
+
import typing
|
| 4 |
+
import warnings
|
| 5 |
+
from dataclasses import dataclass
|
| 6 |
+
|
| 7 |
+
__all__ = ["Config"]
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@dataclass(init=False, eq=False, slots=True, kw_only=True, match_args=False)
class Config:
    """The base class for NetworkX configuration.

    There are two ways to use this to create configurations. The recommended way
    is to subclass ``Config`` with docs and annotations.

    >>> class MyConfig(Config):
    ...     '''Breakfast!'''
    ...
    ...     eggs: int
    ...     spam: int
    ...
    ...     def _on_setattr(self, key, value):
    ...         assert isinstance(value, int) and value >= 0
    ...         return value
    >>> cfg = MyConfig(eggs=1, spam=5)

    Another way is to simply pass the initial configuration as keyword arguments to
    the ``Config`` instance:

    >>> cfg1 = Config(eggs=1, spam=5)
    >>> cfg1
    Config(eggs=1, spam=5)

    Once defined, config items may be modified, but can't be added or deleted by default.
    ``Config`` is a ``Mapping``, and can get and set configs via attributes or brackets:

    >>> cfg.eggs = 2
    >>> cfg.eggs
    2
    >>> cfg["spam"] = 42
    >>> cfg["spam"]
    42

    For convenience, it can also set configs within a context with the "with" statement:

    >>> with cfg(spam=3):
    ...     print("spam (in context):", cfg.spam)
    spam (in context): 3
    >>> print("spam (after context):", cfg.spam)
    spam (after context): 42

    Subclasses may also define ``_on_setattr`` (as done in the example above)
    to ensure the value being assigned is valid:

    >>> cfg.spam = -1
    Traceback (most recent call last):
        ...
    AssertionError

    If a more flexible configuration object is needed that allows adding and deleting
    configurations, then pass ``strict=False`` when defining the subclass:

    >>> class FlexibleConfig(Config, strict=False):
    ...     default_greeting: str = "Hello"
    >>> flexcfg = FlexibleConfig()
    >>> flexcfg.name = "Mr. Anderson"
    >>> flexcfg
    FlexibleConfig(default_greeting='Hello', name='Mr. Anderson')
    """

    def __init_subclass__(cls, strict=True):
        # Record strictness on the subclass; read later in __new__ and attribute hooks.
        cls._strict = strict

    def __new__(cls, **kwargs):
        orig_class = cls
        if cls is Config:
            # Enable the "simple" case of accepting config definition as keywords
            cls = type(
                cls.__name__,
                (cls,),
                {"__annotations__": {key: typing.Any for key in kwargs}},
            )
        # Re-apply the dataclass decorator so annotations declared on the subclass
        # (or synthesized above) become dataclass fields. slots/repr follow _strict.
        cls = dataclass(
            eq=False,
            repr=cls._strict,
            slots=cls._strict,
            kw_only=True,
            match_args=False,
        )(cls)
        if not cls._strict:
            # Non-strict configs have no slots, so repr must walk __dict__.
            cls.__repr__ = _flexible_repr
        cls._orig_class = orig_class  # Save original class so we can pickle
        cls._prev = None  # Stage previous configs to enable use as context manager
        cls._context_stack = []  # Stack of previous configs when used as context
        instance = object.__new__(cls)
        instance.__init__(**kwargs)
        return instance

    def _on_setattr(self, key, value):
        """Process config value and check whether it is valid. Useful for subclasses."""
        return value

    def _on_delattr(self, key):
        """Callback for when a config item is being deleted. Useful for subclasses."""

    # Control behavior of attributes
    def __dir__(self):
        return self.__dataclass_fields__.keys()

    def __setattr__(self, key, value):
        # Strict configs reject unknown names; all values pass through _on_setattr.
        if self._strict and key not in self.__dataclass_fields__:
            raise AttributeError(f"Invalid config name: {key!r}")
        value = self._on_setattr(key, value)
        object.__setattr__(self, key, value)
        # Any direct mutation invalidates a staged context-manager snapshot.
        self.__class__._prev = None

    def __delattr__(self, key):
        if self._strict:
            raise TypeError(
                f"Configuration items can't be deleted (can't delete {key!r})."
            )
        self._on_delattr(key)
        object.__delattr__(self, key)
        self.__class__._prev = None

    # Be a `collection.abc.Collection`
    def __contains__(self, key):
        return (
            key in self.__dataclass_fields__ if self._strict else key in self.__dict__
        )

    def __iter__(self):
        return iter(self.__dataclass_fields__ if self._strict else self.__dict__)

    def __len__(self):
        return len(self.__dataclass_fields__ if self._strict else self.__dict__)

    def __reversed__(self):
        return reversed(self.__dataclass_fields__ if self._strict else self.__dict__)

    # Add dunder methods for `collections.abc.Mapping`
    def __getitem__(self, key):
        try:
            return getattr(self, key)
        except AttributeError as err:
            raise KeyError(*err.args) from None

    def __setitem__(self, key, value):
        try:
            self.__setattr__(key, value)
        except AttributeError as err:
            raise KeyError(*err.args) from None

    def __delitem__(self, key):
        try:
            self.__delattr__(key)
        except AttributeError as err:
            raise KeyError(*err.args) from None

    _ipython_key_completions_ = __dir__  # config["<TAB>

    # Go ahead and make it a `collections.abc.Mapping`
    def get(self, key, default=None):
        return getattr(self, key, default)

    def items(self):
        return collections.abc.ItemsView(self)

    def keys(self):
        return collections.abc.KeysView(self)

    def values(self):
        return collections.abc.ValuesView(self)

    # dataclass can define __eq__ for us, but do it here so it works after pickling
    def __eq__(self, other):
        if not isinstance(other, Config):
            return NotImplemented
        return self._orig_class == other._orig_class and self.items() == other.items()

    # Make pickle work
    def __reduce__(self):
        # Pickle via the *original* class (pre-dataclass rewrap) and current items.
        return self._deserialize, (self._orig_class, dict(self))

    @staticmethod
    def _deserialize(cls, kwargs):
        return cls(**kwargs)

    # Allow to be used as context manager
    def __call__(self, **kwargs):
        # Validate first so a bad value leaves the config untouched.
        kwargs = {key: self._on_setattr(key, val) for key, val in kwargs.items()}
        prev = dict(self)
        for key, val in kwargs.items():
            setattr(self, key, val)
        # Stage the snapshot for __enter__. NOTE(review): stored on the class, so
        # this is not safe under concurrent use from multiple threads — confirm.
        self.__class__._prev = prev
        return self

    def __enter__(self):
        if self.__class__._prev is None:
            raise RuntimeError(
                "Config being used as a context manager without config items being set. "
                "Set config items via keyword arguments when calling the config object. "
                "For example, using config as a context manager should be like:\n\n"
                '    >>> with cfg(breakfast="spam"):\n'
                "    ...     ...  # Do stuff\n"
            )
        # Move the staged snapshot onto the stack; supports nested `with` blocks.
        self.__class__._context_stack.append(self.__class__._prev)
        self.__class__._prev = None
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Restore the snapshot taken in __call__ (values revalidated via setattr).
        prev = self.__class__._context_stack.pop()
        for key, val in prev.items():
            setattr(self, key, val)
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
def _flexible_repr(self):
    """repr for non-strict ``Config`` subclasses: render the instance ``__dict__``."""
    body = ", ".join(f"{key}={val!r}" for key, val in self.__dict__.items())
    return f"{self.__class__.__qualname__}({body})"
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
# Register, b/c `Mapping.__subclasshook__` returns `NotImplemented`
|
| 227 |
+
collections.abc.Mapping.register(Config)
|
| 228 |
+
|
| 229 |
+
|
| 230 |
+
class BackendPriorities(Config, strict=False):
    """Configuration to control automatic conversion to and calling of backends.

    Priority is given to backends listed earlier.

    Parameters
    ----------
    algos : list of backend names
        This controls "algorithms" such as ``nx.pagerank`` that don't return a graph.
    generators : list of backend names
        This controls "generators" such as ``nx.from_pandas_edgelist`` that return a graph.
    kwargs : variadic keyword arguments of function name to list of backend names
        This allows each function to be configured separately and will override the config
        in ``algos`` or ``generators`` if present. The dispatchable function name may be
        gotten from the ``.name`` attribute such as ``nx.pagerank.name`` (it's typically
        the same as the name of the function).
    """

    # Always-present keys; per-function keys may be added because strict=False.
    algos: list[str]
    generators: list[str]

    def _on_setattr(self, key, value):
        # Imported here (not module level) to avoid a circular import with backends.
        from .backends import _registered_algorithms, backend_info

        if key in {"algos", "generators"}:
            pass
        elif key not in _registered_algorithms:
            # Any other key must name a known dispatchable function.
            raise AttributeError(
                f"Invalid config name: {key!r}. Expected 'algos', 'generators', or a name "
                "of a dispatchable function (e.g. `.name` attribute of the function)."
            )
        # Every value must be a list of installed-backend names.
        if not (isinstance(value, list) and all(isinstance(x, str) for x in value)):
            raise TypeError(
                f"{key!r} config must be a list of backend names; got {value!r}"
            )
        if missing := {x for x in value if x not in backend_info}:
            missing = ", ".join(map(repr, sorted(missing)))
            raise ValueError(f"Unknown backend when setting {key!r}: {missing}")
        return value

    def _on_delattr(self, key):
        # Per-function overrides may be deleted; the two core keys may not.
        if key in {"algos", "generators"}:
            raise TypeError(f"{key!r} configuration item can't be deleted.")
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
class NetworkXConfig(Config):
    """Configuration for NetworkX that controls behaviors such as how to use backends.

    Attribute and bracket notation are supported for getting and setting configurations::

        >>> nx.config.backend_priority == nx.config["backend_priority"]
        True

    Parameters
    ----------
    backend_priority : list of backend names or dict or BackendPriorities
        Enable automatic conversion of graphs to backend graphs for functions
        implemented by the backend. Priority is given to backends listed earlier.
        This is a nested configuration with keys ``algos``, ``generators``, and,
        optionally, function names. Setting this value to a list of backend names
        will set ``nx.config.backend_priority.algos``. For more information, see
        ``help(nx.config.backend_priority)``. Default is empty list.

    backends : Config mapping of backend names to backend Config
        The keys of the Config mapping are names of all installed NetworkX backends,
        and the values are their configurations as Config mappings.

    cache_converted_graphs : bool
        If True, then save converted graphs to the cache of the input graph. Graph
        conversion may occur when automatically using a backend from `backend_priority`
        or when using the `backend=` keyword argument to a function call. Caching can
        improve performance by avoiding repeated conversions, but it uses more memory.
        Care should be taken to not manually mutate a graph that has cached graphs; for
        example, ``G[u][v][k] = val`` changes the graph, but does not clear the cache.
        Using methods such as ``G.add_edge(u, v, weight=val)`` will clear the cache to
        keep it consistent. ``G.__networkx_cache__.clear()`` manually clears the cache.
        Default is True.

    fallback_to_nx : bool
        If True, then "fall back" and run with the default "networkx" implementation
        for dispatchable functions not implemented by backends of input graphs. When a
        backend graph is passed to a dispatchable function, the default behavior is to
        use the implementation from that backend if possible and raise if not. Enabling
        ``fallback_to_nx`` makes the networkx implementation the fallback to use instead
        of raising, and will convert the backend graph to a networkx-compatible graph.
        Default is False.

    warnings_to_ignore : set of strings
        Control which warnings from NetworkX are not emitted. Valid elements:

        - `"cache"`: when a cached value is used from ``G.__networkx_cache__``.

    Notes
    -----
    Environment variables may be used to control some default configurations:

    - ``NETWORKX_BACKEND_PRIORITY``: set ``backend_priority.algos`` from comma-separated names.
    - ``NETWORKX_CACHE_CONVERTED_GRAPHS``: set ``cache_converted_graphs`` to True if nonempty.
    - ``NETWORKX_FALLBACK_TO_NX``: set ``fallback_to_nx`` to True if nonempty.
    - ``NETWORKX_WARNINGS_TO_IGNORE``: set `warnings_to_ignore` from comma-separated names.

    and can be used for finer control of ``backend_priority`` such as:

    - ``NETWORKX_BACKEND_PRIORITY_ALGOS``: same as ``NETWORKX_BACKEND_PRIORITY`` to set ``backend_priority.algos``.

    This is a global configuration. Use with caution when using from multiple threads.
    """

    backend_priority: BackendPriorities
    backends: Config
    cache_converted_graphs: bool
    fallback_to_nx: bool
    warnings_to_ignore: set[str]

    def _on_setattr(self, key, value):
        """Validate (and normalize) each top-level config value before storing it."""
        # Imported here (not module level) to avoid a circular import with backends.
        from .backends import backend_info

        if key == "backend_priority":
            if isinstance(value, list):
                # A bare list is shorthand for setting `backend_priority.algos`.
                getattr(self, key).algos = value
                value = getattr(self, key)
            elif isinstance(value, dict):
                kwargs = value
                value = BackendPriorities(algos=[], generators=[])
                # Use a distinct loop variable: the original code rebound `key`
                # here, shadowing the config name being validated.
                for subkey, val in kwargs.items():
                    setattr(value, subkey, val)
            elif not isinstance(value, BackendPriorities):
                raise TypeError(
                    f"{key!r} config must be a dict of lists of backend names; got {value!r}"
                )
        elif key == "backends":
            # Must be a Config mapping backend-name -> backend Config.
            if not (
                isinstance(value, Config)
                and all(isinstance(key, str) for key in value)
                and all(isinstance(val, Config) for val in value.values())
            ):
                raise TypeError(
                    f"{key!r} config must be a Config of backend configs; got {value!r}"
                )
            if missing := {x for x in value if x not in backend_info}:
                missing = ", ".join(map(repr, sorted(missing)))
                raise ValueError(f"Unknown backend when setting {key!r}: {missing}")
        elif key in {"cache_converted_graphs", "fallback_to_nx"}:
            if not isinstance(value, bool):
                raise TypeError(f"{key!r} config must be True or False; got {value!r}")
        elif key == "warnings_to_ignore":
            if not (isinstance(value, set) and all(isinstance(x, str) for x in value)):
                raise TypeError(
                    f"{key!r} config must be a set of warning names; got {value!r}"
                )
            known_warnings = {"cache"}
            if missing := {x for x in value if x not in known_warnings}:
                missing = ", ".join(map(repr, sorted(missing)))
                raise ValueError(
                    f"Unknown warning when setting {key!r}: {missing}. Valid entries: "
                    + ", ".join(sorted(known_warnings))
                )
        return value
|
infer_4_37_2/lib/python3.10/site-packages/networkx/utils/decorators.py
ADDED
|
@@ -0,0 +1,1237 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import bz2
|
| 2 |
+
import collections
|
| 3 |
+
import gzip
|
| 4 |
+
import inspect
|
| 5 |
+
import itertools
|
| 6 |
+
import re
|
| 7 |
+
import warnings
|
| 8 |
+
from collections import defaultdict
|
| 9 |
+
from contextlib import contextmanager
|
| 10 |
+
from functools import wraps
|
| 11 |
+
from inspect import Parameter, signature
|
| 12 |
+
from os.path import splitext
|
| 13 |
+
from pathlib import Path
|
| 14 |
+
|
| 15 |
+
import networkx as nx
|
| 16 |
+
from networkx.utils import create_py_random_state, create_random_state
|
| 17 |
+
|
| 18 |
+
__all__ = [
|
| 19 |
+
"not_implemented_for",
|
| 20 |
+
"open_file",
|
| 21 |
+
"nodes_or_number",
|
| 22 |
+
"np_random_state",
|
| 23 |
+
"py_random_state",
|
| 24 |
+
"argmap",
|
| 25 |
+
]
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def not_implemented_for(*graph_types):
    """Decorator to mark algorithms as not implemented

    Parameters
    ----------
    graph_types : container of strings
        Entries must be one of "directed", "undirected", "multigraph", or "graph".

    Returns
    -------
    _require : function
        The decorated function.

    Raises
    ------
    NetworkXNotImplemented
        If any of the packages cannot be imported

    Notes
    -----
    Multiple types are joined logically with "and".
    For "or" use multiple @not_implemented_for() lines.

    Examples
    --------
    Decorate functions like this::

        @not_implemented_for("directed")
        def sp_function(G):
            pass


        # rule out MultiDiGraph
        @not_implemented_for("directed", "multigraph")
        def sp_np_function(G):
            pass


        # rule out all except DiGraph
        @not_implemented_for("undirected")
        @not_implemented_for("multigraph")
        def sp_np_function(G):
            pass
    """
    # Contradictory combinations would reject every graph; fail fast instead.
    if ("directed" in graph_types) and ("undirected" in graph_types):
        raise ValueError("Function not implemented on directed AND undirected graphs?")
    if ("multigraph" in graph_types) and ("graph" in graph_types):
        raise ValueError("Function not implemented on graph AND multigraphs?")
    if not set(graph_types) < {"directed", "undirected", "multigraph", "graph"}:
        raise KeyError(
            "use one or more of directed, undirected, multigraph, graph.  "
            f"You used {graph_types}"
        )

    # 3-way logic: True if "directed" input, False if "undirected" input, else None
    # (relies on `or` binding looser than `and`: reads as
    #  A or (B and None) -> True / None / False depending on which names appear)
    dval = ("directed" in graph_types) or "undirected" not in graph_types and None
    mval = ("multigraph" in graph_types) or "graph" not in graph_types and None
    errmsg = f"not implemented for {' '.join(graph_types)} type"

    def _not_implemented_for(g):
        # Raise only when the graph matches every requested (non-None) property.
        if (mval is None or mval == g.is_multigraph()) and (
            dval is None or dval == g.is_directed()
        ):
            raise nx.NetworkXNotImplemented(errmsg)

        return g

    # argmap applies the check to positional argument 0 (the graph) at call time.
    return argmap(_not_implemented_for, 0)
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
# To handle new extensions, define a function accepting a `path` and `mode`.
|
| 99 |
+
# Then add the extension to _dispatch_dict.
|
| 100 |
+
fopeners = {
|
| 101 |
+
".gz": gzip.open,
|
| 102 |
+
".gzip": gzip.open,
|
| 103 |
+
".bz2": bz2.BZ2File,
|
| 104 |
+
}
|
| 105 |
+
_dispatch_dict = defaultdict(lambda: open, **fopeners)
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def open_file(path_arg, mode="r"):
    """Decorator to ensure clean opening and closing of files.

    Parameters
    ----------
    path_arg : string or int
        Name or index of the argument that is a path.

    mode : str
        String for opening mode.

    Returns
    -------
    _open_file : function
        Function which cleanly executes the io.

    Examples
    --------
    Decorate functions like this::

        @open_file(0, "r")
        def read_function(pathname):
            pass

        @open_file(1, "w")
        def write_function(G, pathname):
            pass

        @open_file("pathname", "w")
        def write_function(G, pathname="graph.dot"):
            pass

        @open_file("path", "w+")
        def another_function(arg, **kwargs):
            path = kwargs["path"]
            pass

    Notes
    -----
    This decorator covers path arguments given as strings or `pathlib.Path`
    objects; an already-open file object is passed through untouched. It does
    not cover a default of ``None`` -- a function allowing ``None`` must handle
    that case itself, e.g.::

        @open_file("path")
        def some_function(arg1, arg2, path=None):
            if path is None:
                fobj = tempfile.NamedTemporaryFile(delete=False)
            else:
                # The decorator has given us a file object and it will
                # close it for us, if it should.
                fobj = path

            try:
                fobj.write("blah")
            finally:
                if path is None:
                    fobj.close()

    Avoid ``with`` on the received file object: the decorator closes the
    handle it opened once the wrapped call returns, and ``with`` would
    undesirably close a handle the caller may still own. Use a try block
    as shown above instead.
    """

    def _open_file(path):
        # Three kinds of input: a pathlib.Path, a string path, or an
        # already-open file handle (or None).
        if isinstance(path, Path):
            ext = path.suffix
            path = str(path)
        elif isinstance(path, str):
            ext = splitext(path)[1]
        else:
            # A handle (or None): pass it through with a no-op closer --
            # we did not open it, so we must not close it.
            return path, lambda: None

        # Pick the opener by extension (gzip/bz2-aware; plain `open`
        # otherwise) and pair the handle with its closer for argmap's
        # finally block.
        fobj = _dispatch_dict[ext](path, mode=mode)
        return fobj, fobj.close

    return argmap(_open_file, path_arg, try_finally=True)
| 203 |
+
|
| 204 |
+
def nodes_or_number(which_args):
    """Decorator to allow number of nodes or container of nodes.

    With this decorator, the specified argument can be either a number or a container
    of nodes. If it is a number, the nodes used are `range(n)`.
    This allows `nx.complete_graph(50)` in place of `nx.complete_graph(list(range(50)))`.
    And it also allows `nx.complete_graph(any_list_of_nodes)`.

    Parameters
    ----------
    which_args : string or int or sequence of strings or ints
        If string, the name of the argument to be treated.
        If int, the index of the argument to be treated.
        If more than one node argument is allowed, can be a list of locations.

    Returns
    -------
    _nodes_or_numbers : function
        Function which replaces int args with ranges.

    Examples
    --------
    Decorate functions like this::

        @nodes_or_number("nodes")
        def empty_graph(nodes):
            # nodes is converted to a list of nodes

        @nodes_or_number(0)
        def empty_graph(nodes):
            # nodes is converted to a list of nodes

        @nodes_or_number(["m1", "m2"])
        def grid_2d_graph(m1, m2, periodic=False):
            # m1 and m2 are each converted to a list of nodes

        @nodes_or_number([0, 1])
        def grid_2d_graph(m1, m2, periodic=False):
            # m1 and m2 are each converted to a list of nodes

        @nodes_or_number(1)
        def full_rary_tree(r, n)
            # presumably r is a number. It is not handled by this decorator.
            # n is converted to a list of nodes
    """

    def _nodes_or_number(n):
        # Map the argument to a (number_or_original, nodes) pair.
        try:
            nodes = list(range(n))
        except TypeError:
            # Not integer-like: treat it as an iterable of nodes.
            return (n, tuple(n))
        if n < 0:
            raise nx.NetworkXError(f"Negative number of nodes not valid: {n}")
        return (n, nodes)

    # A single string names one argument. Without this check, iterating the
    # string would wrongly treat each *character* as a separate argument
    # location, breaking the documented `@nodes_or_number("nodes")` usage.
    if isinstance(which_args, str):
        iter_wa = (which_args,)
    else:
        try:
            iter_wa = iter(which_args)
        except TypeError:
            iter_wa = (which_args,)

    return argmap(_nodes_or_number, *iter_wa)
| 267 |
+
|
| 268 |
+
def np_random_state(random_state_argument):
    """Decorator to generate a numpy RandomState or Generator instance.

    The argument named or indexed by `random_state_argument` is processed
    with :func:`nx.utils.create_random_state`. Accepted values are a seed
    (integer), a `numpy.random.RandomState` or `numpy.random.Generator`
    instance, or `None` / the `numpy.random` module (the latter two use
    numpy's global random number generator).

    The value seen by the decorated function is always a
    `numpy.random.RandomState` or `numpy.random.Generator`.

    Parameters
    ----------
    random_state_argument : string or int
        The name or index of the argument to be converted
        to a `numpy.random.RandomState` instance.

    Returns
    -------
    _random_state : function
        Function whose random_state keyword argument is a RandomState instance.

    Examples
    --------
    Decorate functions like this::

        @np_random_state("seed")
        def random_float(seed=None):
            return seed.rand()


        @np_random_state(0)
        def random_float(rng=None):
            return rng.rand()


        @np_random_state(1)
        def random_array(dims, random_state=1):
            return random_state.rand(*dims)

    See Also
    --------
    py_random_state
    """
    return argmap(create_random_state, random_state_argument)
| 314 |
+
|
| 315 |
+
def py_random_state(random_state_argument):
    """Decorator to generate a random.Random instance (or equiv).

    The argument named or indexed by `random_state_argument` is processed
    with :func:`nx.utils.create_py_random_state`. The accepted input values
    are::

        If int, return a random.Random instance set with seed=int.
        If random.Random instance, return it.
        If None or the `random` package, return the global random number
          generator used by `random`.
        If np.random package, or the default numpy RandomState instance,
          return the default numpy random number generator wrapped in a
          `PythonRandomViaNumpyBits` class.
        If np.random.Generator instance, return it wrapped in a
          `PythonRandomViaNumpyBits` class.

        # Legacy options
        If np.random.RandomState instance, return it wrapped in a
          `PythonRandomInterface` class.
        If a `PythonRandomInterface` instance, return it

    Parameters
    ----------
    random_state_argument : string or int
        The name of the argument or the index of the argument in args that is
        to be converted to the random.Random instance or numpy.random.RandomState
        instance that mimics basic methods of random.Random.

    Returns
    -------
    _random_state : function
        Function whose random_state_argument is converted to a Random instance.

    Examples
    --------
    Decorate functions like this::

        @py_random_state("random_state")
        def random_float(random_state=None):
            return random_state.rand()


        @py_random_state(0)
        def random_float(rng=None):
            return rng.rand()


        @py_random_state(1)
        def random_array(dims, seed=12345):
            return seed.rand(*dims)

    See Also
    --------
    np_random_state
    """
    return argmap(create_py_random_state, random_state_argument)
| 374 |
+
|
| 375 |
+
class argmap:
|
| 376 |
+
"""A decorator to apply a map to arguments before calling the function
|
| 377 |
+
|
| 378 |
+
This class provides a decorator that maps (transforms) arguments of the function
|
| 379 |
+
before the function is called. Thus for example, we have similar code
|
| 380 |
+
in many functions to determine whether an argument is the number of nodes
|
| 381 |
+
to be created, or a list of nodes to be handled. The decorator provides
|
| 382 |
+
the code to accept either -- transforming the indicated argument into a
|
| 383 |
+
list of nodes before the actual function is called.
|
| 384 |
+
|
| 385 |
+
This decorator class allows us to process single or multiple arguments.
|
| 386 |
+
The arguments to be processed can be specified by string, naming the argument,
|
| 387 |
+
or by index, specifying the item in the args list.
|
| 388 |
+
|
| 389 |
+
Parameters
|
| 390 |
+
----------
|
| 391 |
+
func : callable
|
| 392 |
+
The function to apply to arguments
|
| 393 |
+
|
| 394 |
+
*args : iterable of (int, str or tuple)
|
| 395 |
+
A list of parameters, specified either as strings (their names), ints
|
| 396 |
+
(numerical indices) or tuples, which may contain ints, strings, and
|
| 397 |
+
(recursively) tuples. Each indicates which parameters the decorator
|
| 398 |
+
should map. Tuples indicate that the map function takes (and returns)
|
| 399 |
+
multiple parameters in the same order and nested structure as indicated
|
| 400 |
+
here.
|
| 401 |
+
|
| 402 |
+
try_finally : bool (default: False)
|
| 403 |
+
When True, wrap the function call in a try-finally block with code
|
| 404 |
+
for the finally block created by `func`. This is used when the map
|
| 405 |
+
function constructs an object (like a file handle) that requires
|
| 406 |
+
post-processing (like closing).
|
| 407 |
+
|
| 408 |
+
Note: try_finally decorators cannot be used to decorate generator
|
| 409 |
+
functions.
|
| 410 |
+
|
| 411 |
+
Examples
|
| 412 |
+
--------
|
| 413 |
+
Most of these examples use `@argmap(...)` to apply the decorator to
|
| 414 |
+
the function defined on the next line.
|
| 415 |
+
In the NetworkX codebase however, `argmap` is used within a function to
|
| 416 |
+
construct a decorator. That is, the decorator defines a mapping function
|
| 417 |
+
and then uses `argmap` to build and return a decorated function.
|
| 418 |
+
A simple example is a decorator that specifies which currency to report money.
|
| 419 |
+
The decorator (named `convert_to`) would be used like::
|
| 420 |
+
|
| 421 |
+
@convert_to("US_Dollars", "income")
|
| 422 |
+
def show_me_the_money(name, income):
|
| 423 |
+
print(f"{name} : {income}")
|
| 424 |
+
|
| 425 |
+
And the code to create the decorator might be::
|
| 426 |
+
|
| 427 |
+
def convert_to(currency, which_arg):
|
| 428 |
+
def _convert(amount):
|
| 429 |
+
if amount.currency != currency:
|
| 430 |
+
amount = amount.to_currency(currency)
|
| 431 |
+
return amount
|
| 432 |
+
|
| 433 |
+
return argmap(_convert, which_arg)
|
| 434 |
+
|
| 435 |
+
Despite this common idiom for argmap, most of the following examples
|
| 436 |
+
use the `@argmap(...)` idiom to save space.
|
| 437 |
+
|
| 438 |
+
Here's an example use of argmap to sum the elements of two of the functions
|
| 439 |
+
arguments. The decorated function::
|
| 440 |
+
|
| 441 |
+
@argmap(sum, "xlist", "zlist")
|
| 442 |
+
def foo(xlist, y, zlist):
|
| 443 |
+
return xlist - y + zlist
|
| 444 |
+
|
| 445 |
+
is syntactic sugar for::
|
| 446 |
+
|
| 447 |
+
def foo(xlist, y, zlist):
|
| 448 |
+
x = sum(xlist)
|
| 449 |
+
z = sum(zlist)
|
| 450 |
+
return x - y + z
|
| 451 |
+
|
| 452 |
+
and is equivalent to (using argument indexes)::
|
| 453 |
+
|
| 454 |
+
@argmap(sum, "xlist", 2)
|
| 455 |
+
def foo(xlist, y, zlist):
|
| 456 |
+
return xlist - y + zlist
|
| 457 |
+
|
| 458 |
+
or::
|
| 459 |
+
|
| 460 |
+
@argmap(sum, "zlist", 0)
|
| 461 |
+
def foo(xlist, y, zlist):
|
| 462 |
+
return xlist - y + zlist
|
| 463 |
+
|
| 464 |
+
Transforming functions can be applied to multiple arguments, such as::
|
| 465 |
+
|
| 466 |
+
def swap(x, y):
|
| 467 |
+
return y, x
|
| 468 |
+
|
| 469 |
+
# the 2-tuple tells argmap that the map `swap` has 2 inputs/outputs.
|
| 470 |
+
@argmap(swap, ("a", "b")):
|
| 471 |
+
def foo(a, b, c):
|
| 472 |
+
return a / b * c
|
| 473 |
+
|
| 474 |
+
is equivalent to::
|
| 475 |
+
|
| 476 |
+
def foo(a, b, c):
|
| 477 |
+
a, b = swap(a, b)
|
| 478 |
+
return a / b * c
|
| 479 |
+
|
| 480 |
+
More generally, the applied arguments can be nested tuples of strings or ints.
|
| 481 |
+
The syntax `@argmap(some_func, ("a", ("b", "c")))` would expect `some_func` to
|
| 482 |
+
accept 2 inputs with the second expected to be a 2-tuple. It should then return
|
| 483 |
+
2 outputs with the second a 2-tuple. The returns values would replace input "a"
|
| 484 |
+
"b" and "c" respectively. Similarly for `@argmap(some_func, (0, ("b", 2)))`.
|
| 485 |
+
|
| 486 |
+
Also, note that an index larger than the number of named parameters is allowed
|
| 487 |
+
for variadic functions. For example::
|
| 488 |
+
|
| 489 |
+
def double(a):
|
| 490 |
+
return 2 * a
|
| 491 |
+
|
| 492 |
+
|
| 493 |
+
@argmap(double, 3)
|
| 494 |
+
def overflow(a, *args):
|
| 495 |
+
return a, args
|
| 496 |
+
|
| 497 |
+
|
| 498 |
+
print(overflow(1, 2, 3, 4, 5, 6)) # output is 1, (2, 3, 8, 5, 6)
|
| 499 |
+
|
| 500 |
+
**Try Finally**
|
| 501 |
+
|
| 502 |
+
Additionally, this `argmap` class can be used to create a decorator that
|
| 503 |
+
initiates a try...finally block. The decorator must be written to return
|
| 504 |
+
both the transformed argument and a closing function.
|
| 505 |
+
This feature was included to enable the `open_file` decorator which might
|
| 506 |
+
need to close the file or not depending on whether it had to open that file.
|
| 507 |
+
This feature uses the keyword-only `try_finally` argument to `@argmap`.
|
| 508 |
+
|
| 509 |
+
For example this map opens a file and then makes sure it is closed::
|
| 510 |
+
|
| 511 |
+
def open_file(fn):
|
| 512 |
+
f = open(fn)
|
| 513 |
+
return f, lambda: f.close()
|
| 514 |
+
|
| 515 |
+
The decorator applies that to the function `foo`::
|
| 516 |
+
|
| 517 |
+
@argmap(open_file, "file", try_finally=True)
|
| 518 |
+
def foo(file):
|
| 519 |
+
print(file.read())
|
| 520 |
+
|
| 521 |
+
is syntactic sugar for::
|
| 522 |
+
|
| 523 |
+
def foo(file):
|
| 524 |
+
file, close_file = open_file(file)
|
| 525 |
+
try:
|
| 526 |
+
print(file.read())
|
| 527 |
+
finally:
|
| 528 |
+
close_file()
|
| 529 |
+
|
| 530 |
+
and is equivalent to (using indexes)::
|
| 531 |
+
|
| 532 |
+
@argmap(open_file, 0, try_finally=True)
|
| 533 |
+
def foo(file):
|
| 534 |
+
print(file.read())
|
| 535 |
+
|
| 536 |
+
Here's an example of the try_finally feature used to create a decorator::
|
| 537 |
+
|
| 538 |
+
def my_closing_decorator(which_arg):
|
| 539 |
+
def _opener(path):
|
| 540 |
+
if path is None:
|
| 541 |
+
path = open(path)
|
| 542 |
+
fclose = path.close
|
| 543 |
+
else:
|
| 544 |
+
# assume `path` handles the closing
|
| 545 |
+
fclose = lambda: None
|
| 546 |
+
return path, fclose
|
| 547 |
+
|
| 548 |
+
return argmap(_opener, which_arg, try_finally=True)
|
| 549 |
+
|
| 550 |
+
which can then be used as::
|
| 551 |
+
|
| 552 |
+
@my_closing_decorator("file")
|
| 553 |
+
def fancy_reader(file=None):
|
| 554 |
+
# this code doesn't need to worry about closing the file
|
| 555 |
+
print(file.read())
|
| 556 |
+
|
| 557 |
+
Decorators with try_finally = True cannot be used with generator functions,
|
| 558 |
+
because the `finally` block is evaluated before the generator is exhausted::
|
| 559 |
+
|
| 560 |
+
@argmap(open_file, "file", try_finally=True)
|
| 561 |
+
def file_to_lines(file):
|
| 562 |
+
for line in file.readlines():
|
| 563 |
+
yield line
|
| 564 |
+
|
| 565 |
+
is equivalent to::
|
| 566 |
+
|
| 567 |
+
def file_to_lines_wrapped(file):
|
| 568 |
+
for line in file.readlines():
|
| 569 |
+
yield line
|
| 570 |
+
|
| 571 |
+
|
| 572 |
+
def file_to_lines_wrapper(file):
|
| 573 |
+
try:
|
| 574 |
+
file = open_file(file)
|
| 575 |
+
return file_to_lines_wrapped(file)
|
| 576 |
+
finally:
|
| 577 |
+
file.close()
|
| 578 |
+
|
| 579 |
+
which behaves similarly to::
|
| 580 |
+
|
| 581 |
+
def file_to_lines_whoops(file):
|
| 582 |
+
file = open_file(file)
|
| 583 |
+
file.close()
|
| 584 |
+
for line in file.readlines():
|
| 585 |
+
yield line
|
| 586 |
+
|
| 587 |
+
because the `finally` block of `file_to_lines_wrapper` is executed before
|
| 588 |
+
the caller has a chance to exhaust the iterator.
|
| 589 |
+
|
| 590 |
+
Notes
|
| 591 |
+
-----
|
| 592 |
+
An object of this class is callable and intended to be used when
|
| 593 |
+
defining a decorator. Generally, a decorator takes a function as input
|
| 594 |
+
and constructs a function as output. Specifically, an `argmap` object
|
| 595 |
+
returns the input function decorated/wrapped so that specified arguments
|
| 596 |
+
are mapped (transformed) to new values before the decorated function is called.
|
| 597 |
+
|
| 598 |
+
As an overview, the argmap object returns a new function with all the
|
| 599 |
+
dunder values of the original function (like `__doc__`, `__name__`, etc).
|
| 600 |
+
Code for this decorated function is built based on the original function's
|
| 601 |
+
signature. It starts by mapping the input arguments to potentially new
|
| 602 |
+
values. Then it calls the decorated function with these new values in place
|
| 603 |
+
of the indicated arguments that have been mapped. The return value of the
|
| 604 |
+
original function is then returned. This new function is the function that
|
| 605 |
+
is actually called by the user.
|
| 606 |
+
|
| 607 |
+
Three additional features are provided.
|
| 608 |
+
1) The code is lazily compiled. That is, the new function is returned
|
| 609 |
+
as an object without the code compiled, but with all information
|
| 610 |
+
needed so it can be compiled upon it's first invocation. This saves
|
| 611 |
+
time on import at the cost of additional time on the first call of
|
| 612 |
+
the function. Subsequent calls are then just as fast as normal.
|
| 613 |
+
|
| 614 |
+
2) If the "try_finally" keyword-only argument is True, a try block
|
| 615 |
+
follows each mapped argument, matched on the other side of the wrapped
|
| 616 |
+
call, by a finally block closing that mapping. We expect func to return
|
| 617 |
+
a 2-tuple: the mapped value and a function to be called in the finally
|
| 618 |
+
clause. This feature was included so the `open_file` decorator could
|
| 619 |
+
provide a file handle to the decorated function and close the file handle
|
| 620 |
+
after the function call. It even keeps track of whether to close the file
|
| 621 |
+
handle or not based on whether it had to open the file or the input was
|
| 622 |
+
already open. So, the decorated function does not need to include any
|
| 623 |
+
code to open or close files.
|
| 624 |
+
|
| 625 |
+
3) The maps applied can process multiple arguments. For example,
|
| 626 |
+
you could swap two arguments using a mapping, or transform
|
| 627 |
+
them to their sum and their difference. This was included to allow
|
| 628 |
+
a decorator in the `quality.py` module that checks that an input
|
| 629 |
+
`partition` is a valid partition of the nodes of the input graph `G`.
|
| 630 |
+
In this example, the map has inputs `(G, partition)`. After checking
|
| 631 |
+
for a valid partition, the map either raises an exception or leaves
|
| 632 |
+
the inputs unchanged. Thus many functions that make this check can
|
| 633 |
+
use the decorator rather than copy the checking code into each function.
|
| 634 |
+
More complicated nested argument structures are described below.
|
| 635 |
+
|
| 636 |
+
The remaining notes describe the code structure and methods for this
|
| 637 |
+
class in broad terms to aid in understanding how to use it.
|
| 638 |
+
|
| 639 |
+
Instantiating an `argmap` object simply stores the mapping function and
|
| 640 |
+
the input identifiers of which arguments to map. The resulting decorator
|
| 641 |
+
is ready to use this map to decorate any function. Calling that object
|
| 642 |
+
(`argmap.__call__`, but usually done via `@my_decorator`) a lazily
|
| 643 |
+
compiled thin wrapper of the decorated function is constructed,
|
| 644 |
+
wrapped with the necessary function dunder attributes like `__doc__`
|
| 645 |
+
and `__name__`. That thinly wrapped function is returned as the
|
| 646 |
+
decorated function. When that decorated function is called, the thin
|
| 647 |
+
wrapper of code calls `argmap._lazy_compile` which compiles the decorated
|
| 648 |
+
function (using `argmap.compile`) and replaces the code of the thin
|
| 649 |
+
wrapper with the newly compiled code. This saves the compilation step
|
| 650 |
+
every import of networkx, at the cost of compiling upon the first call
|
| 651 |
+
to the decorated function.
|
| 652 |
+
|
| 653 |
+
When the decorated function is compiled, the code is recursively assembled
|
| 654 |
+
using the `argmap.assemble` method. The recursive nature is needed in
|
| 655 |
+
case of nested decorators. The result of the assembly is a number of
|
| 656 |
+
useful objects.
|
| 657 |
+
|
| 658 |
+
sig : the function signature of the original decorated function as
|
| 659 |
+
constructed by :func:`argmap.signature`. This is constructed
|
| 660 |
+
using `inspect.signature` but enhanced with attribute
|
| 661 |
+
strings `sig_def` and `sig_call`, and other information
|
| 662 |
+
specific to mapping arguments of this function.
|
| 663 |
+
This information is used to construct a string of code defining
|
| 664 |
+
the new decorated function.
|
| 665 |
+
|
| 666 |
+
wrapped_name : a unique internally used name constructed by argmap
|
| 667 |
+
for the decorated function.
|
| 668 |
+
|
| 669 |
+
functions : a dict of the functions used inside the code of this
|
| 670 |
+
decorated function, to be used as `globals` in `exec`.
|
| 671 |
+
This dict is recursively updated to allow for nested decorating.
|
| 672 |
+
|
| 673 |
+
mapblock : code (as a list of strings) to map the incoming argument
|
| 674 |
+
values to their mapped values.
|
| 675 |
+
|
| 676 |
+
finallys : code (as a list of strings) to provide the possibly nested
|
| 677 |
+
set of finally clauses if needed.
|
| 678 |
+
|
| 679 |
+
mutable_args : a bool indicating whether the `sig.args` tuple should be
|
| 680 |
+
converted to a list so mutation can occur.
|
| 681 |
+
|
| 682 |
+
After this recursive assembly process, the `argmap.compile` method
|
| 683 |
+
constructs code (as strings) to convert the tuple `sig.args` to a list
|
| 684 |
+
if needed. It joins the defining code with appropriate indents and
|
| 685 |
+
compiles the result. Finally, this code is evaluated and the original
|
| 686 |
+
wrapper's implementation is replaced with the compiled version (see
|
| 687 |
+
`argmap._lazy_compile` for more details).
|
| 688 |
+
|
| 689 |
+
Other `argmap` methods include `_name` and `_count` which allow internally
|
| 690 |
+
generated names to be unique within a python session.
|
| 691 |
+
The methods `_flatten` and `_indent` process the nested lists of strings
|
| 692 |
+
into properly indented python code ready to be compiled.
|
| 693 |
+
|
| 694 |
+
More complicated nested tuples of arguments also allowed though
|
| 695 |
+
usually not used. For the simple 2 argument case, the argmap
|
| 696 |
+
input ("a", "b") implies the mapping function will take 2 arguments
|
| 697 |
+
and return a 2-tuple of mapped values. A more complicated example
|
| 698 |
+
with argmap input `("a", ("b", "c"))` requires the mapping function
|
| 699 |
+
take 2 inputs, with the second being a 2-tuple. It then must output
|
| 700 |
+
the 3 mapped values in the same nested structure `(newa, (newb, newc))`.
|
| 701 |
+
This level of generality is not often needed, but was convenient
|
| 702 |
+
to implement when handling the multiple arguments.
|
| 703 |
+
|
| 704 |
+
See Also
|
| 705 |
+
--------
|
| 706 |
+
not_implemented_for
|
| 707 |
+
open_file
|
| 708 |
+
nodes_or_number
|
| 709 |
+
py_random_state
|
| 710 |
+
networkx.algorithms.community.quality.require_partition
|
| 711 |
+
|
| 712 |
+
"""
|
| 713 |
+
|
| 714 |
+
def __init__(self, func, *args, try_finally=False):
    """Store the map function and the locations of the arguments it maps.

    Parameters
    ----------
    func : callable
        The mapping applied to the indicated arguments before the
        decorated function is called.
    *args : int, str or tuple
        Which arguments to map: by index, by name, or as (nested) tuples
        of these for maps taking multiple arguments.
    try_finally : bool (default: False)
        If True, `func` returns a ``(value, cleanup)`` pair and the call
        is wrapped in a try/finally running ``cleanup``.
    """
    self._func = func
    self._args = args
    self._finally = try_finally
| 719 |
+
@staticmethod
def _lazy_compile(func):
    """Compile the source of a wrapped function

    Assemble and compile the decorated function, and intrusively replace its
    code with the compiled version's. The thinly wrapped function becomes
    the decorated function.

    Parameters
    ----------
    func : callable
        A function returned by argmap.__call__ which is in the process
        of being called for the first time.

    Returns
    -------
    func : callable
        The same function, with a new __code__ object.

    Notes
    -----
    It was observed in NetworkX issue #4732 [1] that the import time of
    NetworkX was significantly bloated by the use of decorators: over half
    of the import time was being spent decorating functions. This was
    somewhat improved by a change made to the `decorator` library, at the
    cost of a relatively heavy-weight call to `inspect.Signature.bind`
    for each call to the decorated function.

    The workaround we arrived at is to do minimal work at the time of
    decoration. When the decorated function is called for the first time,
    we compile a function with the same function signature as the wrapped
    function. The resulting decorated function is faster than one made by
    the `decorator` library, so that the overhead of the first call is
    'paid off' after a small number of calls.

    References
    ----------

    [1] https://github.com/networkx/networkx/issues/4732

    """
    # Build the fully-compiled implementation from the argmap instance and
    # the original function that __call__ stashed on the wrapper.
    real_func = func.__argmap__.compile(func.__wrapped__)
    # Replace the thin wrapper's code in place, and pull in the globals and
    # attributes the compiled code relies on, so every subsequent call runs
    # the compiled version directly with no further indirection.
    func.__code__ = real_func.__code__
    func.__globals__.update(real_func.__globals__)
    func.__dict__.update(real_func.__dict__)
    return func
| 766 |
+
def __call__(self, f):
    """Construct a lazily decorated wrapper of f.

    The decorated function will be compiled when it is called for the first time,
    and it will replace its own __code__ object so subsequent calls are fast.

    Parameters
    ----------
    f : callable
        A function to be decorated.

    Returns
    -------
    func : callable
        The decorated function.

    See Also
    --------
    argmap._lazy_compile
    """

    # Thin placeholder: on first invocation it triggers compilation of the
    # real implementation, which then replaces this wrapper's __code__.
    def func(*args, __wrapper=None, **kwargs):
        return argmap._lazy_compile(__wrapper)(*args, **kwargs)

    # standard function-wrapping stuff
    func.__name__ = f.__name__
    func.__doc__ = f.__doc__
    func.__defaults__ = f.__defaults__
    func.__kwdefaults__.update(f.__kwdefaults__ or {})
    func.__module__ = f.__module__
    func.__qualname__ = f.__qualname__
    func.__dict__.update(f.__dict__)
    func.__wrapped__ = f

    # now that we've wrapped f, we may have picked up some __dict__ or
    # __kwdefaults__ items that were set by a previous argmap. Thus, we set
    # these values after those update() calls.

    # If we attempt to access func from within itself, that happens through
    # a closure -- which trips an error when we replace func.__code__. The
    # standard workaround for functions which can't see themselves is to use
    # a Y-combinator, as we do here.
    func.__kwdefaults__["_argmap__wrapper"] = func

    # this self-reference is here because functools.wraps preserves
    # everything in __dict__, and we don't want to mistake a non-argmap
    # wrapper for an argmap wrapper
    func.__self__ = func

    # this is used to variously call self.assemble and self.compile
    func.__argmap__ = self

    # Propagate generator-ness through stacked argmap decorators so the
    # try_finally restriction below can be checked at decoration time.
    if hasattr(f, "__argmap__"):
        func.__is_generator = f.__is_generator
    else:
        func.__is_generator = inspect.isgeneratorfunction(f)

    # try_finally would run the finally block before the generator is
    # exhausted, so it cannot safely decorate generator functions.
    if self._finally and func.__is_generator:
        raise nx.NetworkXError("argmap cannot decorate generators with try_finally")

    return func
| 828 |
+
# Session-wide counter backing `_count`; name-mangled to `_argmap__count`.
__count = 0
|
| 830 |
+
@classmethod
def _count(cls):
    """Maintain a globally-unique identifier for function names and "file" names

    Note that this counter is a class method reporting a class variable
    so the count is unique within a Python session. It could differ from
    session to session for a specific decorator depending on the order
    that the decorators are created. But that doesn't disrupt `argmap`.

    This is used in two places: to construct unique variable names
    in the `_name` method and to construct unique fictitious filenames
    in the `_compile` method.

    Returns
    -------
    count : int
        An integer unique to this Python session (simply counts from zero)
    """
    # Increment-then-return so each call yields a fresh value.
    cls.__count += 1
    return cls.__count
| 851 |
+
_bad_chars = re.compile("[^a-zA-Z0-9_]")
|
| 852 |
+
|
| 853 |
+
@classmethod
|
| 854 |
+
def _name(cls, f):
|
| 855 |
+
"""Mangle the name of a function to be unique but somewhat human-readable
|
| 856 |
+
|
| 857 |
+
The names are unique within a Python session and set using `_count`.
|
| 858 |
+
|
| 859 |
+
Parameters
|
| 860 |
+
----------
|
| 861 |
+
f : str or object
|
| 862 |
+
|
| 863 |
+
Returns
|
| 864 |
+
-------
|
| 865 |
+
name : str
|
| 866 |
+
The mangled version of `f.__name__` (if `f.__name__` exists) or `f`
|
| 867 |
+
|
| 868 |
+
"""
|
| 869 |
+
f = f.__name__ if hasattr(f, "__name__") else f
|
| 870 |
+
fname = re.sub(cls._bad_chars, "_", f)
|
| 871 |
+
return f"argmap_{fname}_{cls._count()}"
|
| 872 |
+
|
| 873 |
+
    def compile(self, f):
        """Compile the decorated function.

        Called once for a given decorated function -- collects the code from all
        argmap decorators in the stack, and compiles the decorated function.

        Much of the work done here uses the `assemble` method to allow recursive
        treatment of multiple argmap decorators on a single decorated function.
        That flattens the argmap decorators, collects the source code to construct
        a single decorated function, then compiles/executes/returns that function.

        The source code for the decorated function is stored as an attribute
        `_code` on the function object itself.

        Note that Python's `compile` function requires a filename, but this
        code is constructed without a file, so a fictitious filename is used
        to describe where the function comes from. The name is something like:
        "argmap compilation 4".

        Parameters
        ----------
        f : callable
            The function to be decorated

        Returns
        -------
        func : callable
            The decorated function

        """
        sig, wrapped_name, functions, mapblock, finallys, mutable_args = self.assemble(
            f
        )

        # the trailing "#" tells `_indent` to dedent after the call line
        call = f"{sig.call_sig.format(wrapped_name)}#"
        # *args arrives as a tuple; convert it to a list when some decorator
        # needs to assign to positional arguments by index
        mut_args = f"{sig.args} = list({sig.args})" if mutable_args else ""
        body = argmap._indent(sig.def_sig, mut_args, mapblock, call, finallys)
        code = "\n".join(body)

        locl = {}
        # functions maps id(g) -> (mangled_name, g); its values become the
        # globals dict visible to the generated code
        globl = dict(functions.values())
        filename = f"{self.__class__} compilation {self._count()}"
        compiled = compile(code, filename, "exec")
        exec(compiled, globl, locl)
        func = locl[sig.name]
        # keep the generated source around for debugging/inspection
        func._code = code
        return func
|
| 920 |
+
|
| 921 |
+
    def assemble(self, f):
        """Collects components of the source for the decorated function wrapping f.

        If `f` has multiple argmap decorators, we recursively assemble the stack of
        decorators into a single flattened function.

        This method is part of the `compile` method's process yet separated
        from that method to allow recursive processing. The outputs are
        strings, dictionaries and lists that collect needed info to
        flatten any nested argmap-decoration.

        Parameters
        ----------
        f : callable
            The function to be decorated. If f is argmapped, we assemble it.

        Returns
        -------
        sig : argmap.Signature
            The function signature as an `argmap.Signature` object.
        wrapped_name : str
            The mangled name used to represent the wrapped function in the code
            being assembled.
        functions : dict
            A dictionary mapping id(g) -> (mangled_name(g), g) for functions g
            referred to in the code being assembled. These need to be present
            in the ``globals`` scope of ``exec`` when defining the decorated
            function.
        mapblock : list of lists and/or strings
            Code that implements mapping of parameters including any try blocks
            if needed. This code will precede the decorated function call.
        finallys : list of lists and/or strings
            Code that implements the finally blocks to post-process the
            arguments (usually close any files if needed) after the
            decorated function is called.
        mutable_args : bool
            True if the decorator needs to modify positional arguments
            via their indices. The compile method then turns the argument
            tuple into a list so that the arguments can be modified.
        """

        # first, we check if f is already argmapped -- if that's the case,
        # build up the function recursively.
        # > mapblock is generally a list of function calls of the sort
        #     arg = func(arg)
        # in addition to some try-blocks if needed.
        # > finallys is a recursive list of finally blocks of the sort
        #         finally:
        #             close_func_1()
        #     finally:
        #         close_func_2()
        # > functions is a dict of functions used in the scope of our decorated
        # function. It will be used to construct globals used in compilation.
        # We make functions[id(f)] = name_of_f, f to ensure that a given
        # function is stored and named exactly once even if called by
        # nested decorators.
        #
        # The `f.__self__ is f` check distinguishes a genuine argmap wrapper
        # (which sets that self-reference) from an unrelated wrapper that
        # merely copied an `__argmap__` attribute via functools.wraps.
        if hasattr(f, "__argmap__") and f.__self__ is f:
            (
                sig,
                wrapped_name,
                functions,
                mapblock,
                finallys,
                mutable_args,
            ) = f.__argmap__.assemble(f.__wrapped__)
            functions = dict(functions)  # shallow-copy just in case
        else:
            sig = self.signature(f)
            wrapped_name = self._name(f)
            mapblock, finallys = [], []
            functions = {id(f): (wrapped_name, f)}
            mutable_args = False

        # register this decorator's mapping function exactly once, keyed by id
        if id(self._func) in functions:
            fname, _ = functions[id(self._func)]
        else:
            fname, _ = functions[id(self._func)] = self._name(self._func), self._func

        # this is a bit complicated -- we can call functions with a variety of
        # nested arguments, so long as their input and output are tuples with
        # the same nested structure. e.g. ("a", "b") maps arguments a and b.
        # A more complicated nesting like (0, (3, 4)) maps arguments 0, 3, 4
        # expecting the mapping to output new values in the same nested shape.
        # The ability to argmap multiple arguments was necessary for
        # the decorator `nx.algorithms.community.quality.require_partition`, and
        # while we're not taking full advantage of the ability to handle
        # multiply-nested tuples, it was convenient to implement this in
        # generality because the recursive call to `get_name` is necessary in
        # any case.
        applied = set()

        def get_name(arg, first=True):
            # Translate one argument spec (name, index, or nested tuple of
            # those) into the source-code expression that accesses it.
            nonlocal mutable_args
            if isinstance(arg, tuple):
                name = ", ".join(get_name(x, False) for x in arg)
                return name if first else f"({name})"
            if arg in applied:
                raise nx.NetworkXError(f"argument {arg} is specified multiple times")
            applied.add(arg)
            if arg in sig.names:
                # a declared parameter: refer to it by name
                return sig.names[arg]
            elif isinstance(arg, str):
                # not a declared parameter, so it must live in **kwargs
                if sig.kwargs is None:
                    raise nx.NetworkXError(
                        f"name {arg} is not a named parameter and this function doesn't have kwargs"
                    )
                return f"{sig.kwargs}[{arg!r}]"
            else:
                # a positional index beyond the declared parameters: index
                # into *args, which must then be made mutable by `compile`
                if sig.args is None:
                    raise nx.NetworkXError(
                        f"index {arg} not a parameter index and this function doesn't have args"
                    )
                mutable_args = True
                return f"{sig.args}[{arg - sig.n_positional}]"

        if self._finally:
            # here's where we handle try_finally decorators. Such a decorator
            # returns a mapped argument and a function to be called in a
            # finally block. This feature was required by the open_file
            # decorator. The below generates the code
            #
            # name, final = func(name)  #<--append to mapblock
            # try:                      #<--append to mapblock
            #     ... more argmapping and try blocks
            #     return WRAPPED_FUNCTION(...)
            #     ... more finally blocks
            # finally:                  #<--prepend to finallys
            #     final()               #<--prepend to finallys
            #
            for a in self._args:
                name = get_name(a)
                final = self._name(name)
                mapblock.append(f"{name}, {final} = {fname}({name})")
                mapblock.append("try:")
                finallys = ["finally:", f"{final}()#", "#", finallys]
        else:
            mapblock.extend(
                f"{name} = {fname}({name})" for name in map(get_name, self._args)
            )

        return sig, wrapped_name, functions, mapblock, finallys, mutable_args
|
| 1062 |
+
|
| 1063 |
+
    @classmethod
    def signature(cls, f):
        r"""Construct a Signature object describing `f`

        Compute a Signature so that we can write a function wrapping f with
        the same signature and call-type.

        Parameters
        ----------
        f : callable
            A function to be decorated

        Returns
        -------
        sig : argmap.Signature
            The Signature of f

        Notes
        -----
        The Signature is a namedtuple with names:

            name : a unique version of the name of the decorated function
            signature : the inspect.signature of the decorated function
            def_sig : a string used as code to define the new function
            call_sig : a string used as code to call the decorated function
            names : a dict keyed by argument name and index to the argument's name
            n_positional : the number of positional arguments in the signature
            args : the name of the VAR_POSITIONAL argument if any, i.e. \*theseargs
            kwargs : the name of the VAR_KEYWORDS argument if any, i.e. \*\*kwargs

        These named attributes of the signature are used in `assemble` and `compile`
        to construct a string of source code for the decorated function.

        """
        sig = inspect.signature(f, follow_wrapped=False)
        def_sig = []
        call_sig = []
        names = {}

        kind = None
        args = None
        kwargs = None
        npos = 0
        for i, param in enumerate(sig.parameters.values()):
            # parameters can be position-only, keyword-or-position, keyword-only
            # in any combination, but only in the order as above. we do edge
            # detection to add the appropriate punctuation
            prev = kind
            kind = param.kind
            if prev == param.POSITIONAL_ONLY != kind:
                # the last token was position-only, but this one isn't
                def_sig.append("/")
            # chained comparison: true iff kind == KEYWORD_ONLY while prev is
            # neither VAR_POSITIONAL nor KEYWORD_ONLY, i.e. this is the first
            # keyword-only parameter and no bare/starred "*" preceded it
            if (
                param.VAR_POSITIONAL
                != prev
                != param.KEYWORD_ONLY
                == kind
                != param.VAR_POSITIONAL
            ):
                # param is the first keyword-only arg and isn't starred
                def_sig.append("*")

            # star arguments as appropriate
            if kind == param.VAR_POSITIONAL:
                name = "*" + param.name
                args = param.name
                count = 0
            elif kind == param.VAR_KEYWORD:
                name = "**" + param.name
                kwargs = param.name
                count = 0
            else:
                # an ordinary parameter is reachable both by name and index
                names[i] = names[param.name] = param.name
                name = param.name
                count = 1

            # assign to keyword-only args in the function call
            if kind == param.KEYWORD_ONLY:
                call_sig.append(f"{name} = {name}")
            else:
                npos += count
                call_sig.append(name)

            def_sig.append(name)

        fname = cls._name(f)
        def_sig = f'def {fname}({", ".join(def_sig)}):'

        # "{}" is filled in later with the mangled name of the wrapped function
        call_sig = f"return {{}}({', '.join(call_sig)})"

        return cls.Signature(fname, sig, def_sig, call_sig, names, npos, args, kwargs)
|
| 1154 |
+
|
| 1155 |
+
    # Lightweight record produced by `signature` and consumed by `assemble`
    # and `compile`; see the Notes section of `signature` for field meanings.
    Signature = collections.namedtuple(
        "Signature",
        [
            "name",
            "signature",
            "def_sig",
            "call_sig",
            "names",
            "n_positional",
            "args",
            "kwargs",
        ],
    )
|
| 1168 |
+
|
| 1169 |
+
@staticmethod
|
| 1170 |
+
def _flatten(nestlist, visited):
|
| 1171 |
+
"""flattens a recursive list of lists that doesn't have cyclic references
|
| 1172 |
+
|
| 1173 |
+
Parameters
|
| 1174 |
+
----------
|
| 1175 |
+
nestlist : iterable
|
| 1176 |
+
A recursive list of objects to be flattened into a single iterable
|
| 1177 |
+
|
| 1178 |
+
visited : set
|
| 1179 |
+
A set of object ids which have been walked -- initialize with an
|
| 1180 |
+
empty set
|
| 1181 |
+
|
| 1182 |
+
Yields
|
| 1183 |
+
------
|
| 1184 |
+
Non-list objects contained in nestlist
|
| 1185 |
+
|
| 1186 |
+
"""
|
| 1187 |
+
for thing in nestlist:
|
| 1188 |
+
if isinstance(thing, list):
|
| 1189 |
+
if id(thing) in visited:
|
| 1190 |
+
raise ValueError("A cycle was found in nestlist. Be a tree.")
|
| 1191 |
+
else:
|
| 1192 |
+
visited.add(id(thing))
|
| 1193 |
+
yield from argmap._flatten(thing, visited)
|
| 1194 |
+
else:
|
| 1195 |
+
yield thing
|
| 1196 |
+
|
| 1197 |
+
_tabs = " " * 64
|
| 1198 |
+
|
| 1199 |
+
    @staticmethod
    def _indent(*lines):
        """Indent list of code lines to make executable Python code

        Indents a tree-recursive list of strings, following the rule that one
        space is added to the tab after a line that ends in a colon, and one is
        removed after a line that ends in an hashmark.

        Parameters
        ----------
        *lines : lists and/or strings
            A recursive list of strings to be assembled into properly indented
            code.

        Returns
        -------
        code : str

        Examples
        --------

        argmap._indent(*["try:", "try:", "pass#", "finally:", "pass#", "#",
                         "finally:", "pass#"])

        renders to

        '''try:
         try:
          pass#
         finally:
          pass#
         #
        finally:
         pass#'''
        """
        depth = 0
        for line in argmap._flatten(lines, set()):
            yield f"{argmap._tabs[:depth]}{line}"
            # protocol used by the code generator: a trailing ":" opens a
            # block (indent by one space), a trailing "#" closes one (dedent)
            depth += (line[-1:] == ":") - (line[-1:] == "#")
|
infer_4_37_2/lib/python3.10/site-packages/networkx/utils/heaps.py
ADDED
|
@@ -0,0 +1,340 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Min-heaps.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from heapq import heappop, heappush
|
| 6 |
+
from itertools import count
|
| 7 |
+
|
| 8 |
+
import networkx as nx
|
| 9 |
+
|
| 10 |
+
__all__ = ["MinHeap", "PairingHeap", "BinaryHeap"]
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class MinHeap:
    """Abstract base class for min-heaps.

    A min-heap holds a collection of key-value pairs ordered by value.
    Subclasses implement querying the minimum pair (`min`), deleting it
    (`pop`), looking up a key (`get`) and inserting or decreasing a pair
    (`insert`); this base class provides the shared bookkeeping: length,
    truthiness and key membership, all backed by ``self._dict``.
    """

    class _Item:
        """A key-value pair; subclasses use it to represent heap entries."""

        __slots__ = ("key", "value")

        def __init__(self, key, value):
            self.key = key
            self.value = value

        def __repr__(self):
            return repr((self.key, self.value))

    def __init__(self):
        """Create an empty heap backed by a key -> entry mapping."""
        self._dict = {}

    def min(self):
        """Return the (key, value) pair with the minimum value.

        Raises
        ------
        NetworkXError
            If the heap is empty.
        """
        raise NotImplementedError

    def pop(self):
        """Delete and return the (key, value) pair with the minimum value.

        Raises
        ------
        NetworkXError
            If the heap is empty.
        """
        raise NotImplementedError

    def get(self, key, default=None):
        """Return the value associated with `key`, or `default` if absent.

        Parameters
        ----------
        key : hashable object
            The key to be looked up.

        default : object
            Value to return when `key` is not present. Default value: None.
        """
        raise NotImplementedError

    def insert(self, key, value, allow_increase=False):
        """Insert a new key-value pair or modify an existing pair's value.

        Unless `allow_increase` is true, an attempt to raise an existing
        value has no effect.

        Parameters
        ----------
        key : hashable object
            The key.

        value : object comparable with existing values.
            The value.

        allow_increase : bool
            Whether the value is allowed to increase. Default value: False.

        Returns
        -------
        decreased : bool
            True if a pair was inserted or an existing value was decreased.
        """
        raise NotImplementedError

    def __bool__(self):
        """Return True if the heap holds at least one pair."""
        return bool(self._dict)

    # Python 2 truthiness hook, kept for backward compatibility.
    __nonzero__ = __bool__

    def __len__(self):
        """Return the number of key-value pairs in the heap."""
        return len(self._dict)

    def __contains__(self, key):
        """Return whether `key` is present in the heap.

        Parameters
        ----------
        key : any hashable object.
            The key to be looked up.
        """
        return key in self._dict
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
class PairingHeap(MinHeap):
    """A pairing heap."""

    class _Node(MinHeap._Item):
        """A node in a pairing heap.

        A tree in a pairing heap is stored using the left-child, right-sibling
        representation.
        """

        __slots__ = ("left", "next", "prev", "parent")

        def __init__(self, key, value):
            super().__init__(key, value)
            # The leftmost child.
            self.left = None
            # The next sibling.
            self.next = None
            # The previous sibling.
            self.prev = None
            # The parent.
            self.parent = None

    def __init__(self):
        """Initialize a pairing heap."""
        super().__init__()
        # Root of the tree; None iff the heap is empty.
        self._root = None

    def min(self):
        # The root always holds the minimum pair.
        if self._root is None:
            raise nx.NetworkXError("heap is empty.")
        return (self._root.key, self._root.value)

    def pop(self):
        if self._root is None:
            raise nx.NetworkXError("heap is empty.")
        min_node = self._root
        # Removing the root leaves its children as a forest; merge them
        # into a single tree to form the new root.
        self._root = self._merge_children(self._root)
        del self._dict[min_node.key]
        return (min_node.key, min_node.value)

    def get(self, key, default=None):
        node = self._dict.get(key)
        return node.value if node is not None else default

    def insert(self, key, value, allow_increase=False):
        node = self._dict.get(key)
        root = self._root
        if node is not None:
            if value < node.value:
                # Decrease: detach the node and re-link it with the root so
                # the heap property is restored along its old path.
                node.value = value
                if node is not root and value < node.parent.value:
                    self._cut(node)
                    self._root = self._link(root, node)
                return True
            elif allow_increase and value > node.value:
                node.value = value
                child = self._merge_children(node)
                # Nonstandard step: Link the merged subtree with the root. See
                # below for the standard step.
                if child is not None:
                    self._root = self._link(self._root, child)
                # Standard step: Perform a decrease followed by a pop as if the
                # value were the smallest in the heap. Then insert the new
                # value into the heap.
                # if node is not root:
                #     self._cut(node)
                #     if child is not None:
                #         root = self._link(root, child)
                #     self._root = self._link(root, node)
                # else:
                #     self._root = (self._link(node, child)
                #                   if child is not None else node)
                return False
        else:
            # Insert a new key.
            node = self._Node(key, value)
            self._dict[key] = node
            self._root = self._link(root, node) if root is not None else node
            return True

    def _link(self, root, other):
        """Link two nodes, making the one with the smaller value the parent of
        the other.
        """
        if other.value < root.value:
            root, other = other, root
        # Splice `other` in as the new leftmost child of `root`.
        next = root.left
        other.next = next
        if next is not None:
            next.prev = other
        other.prev = None
        root.left = other
        other.parent = root
        return root

    def _merge_children(self, root):
        """Merge the subtrees of the root using the standard two-pass method.
        The resulting subtree is detached from the root.
        """
        node = root.left
        root.left = None
        if node is not None:
            link = self._link
            # Pass 1: Merge pairs of consecutive subtrees from left to right.
            # At the end of the pass, only the prev pointers of the resulting
            # subtrees have meaningful values. The other pointers will be fixed
            # in pass 2.
            prev = None
            while True:
                next = node.next
                if next is None:
                    node.prev = prev
                    break
                next_next = next.next
                node = link(node, next)
                node.prev = prev
                prev = node
                if next_next is None:
                    break
                node = next_next
            # Pass 2: Successively merge the subtrees produced by pass 1 from
            # right to left with the rightmost one.
            prev = node.prev
            while prev is not None:
                prev_prev = prev.prev
                node = link(prev, node)
                prev = prev_prev
            # Now node can become the new root. Its has no parent nor siblings.
            node.prev = None
            node.next = None
            node.parent = None
        return node

    def _cut(self, node):
        """Cut a node from its parent."""
        prev = node.prev
        next = node.next
        # Unhook `node` from the sibling chain, updating either the previous
        # sibling's `next` pointer or the parent's leftmost-child pointer.
        if prev is not None:
            prev.next = next
        else:
            node.parent.left = next
        node.prev = None
        if next is not None:
            next.prev = prev
        node.next = None
        node.parent = None
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
class BinaryHeap(MinHeap):
    """A binary heap."""

    def __init__(self):
        """Initialize an empty binary heap."""
        super().__init__()
        # Heap entries are (value, tiebreaker, key) triples; the monotonically
        # increasing tiebreaker keeps heap comparisons away from keys, which
        # may not be mutually comparable.
        self._heap = []
        self._count = count()

    def min(self):
        """Return the (key, value) pair with the minimum value."""
        entries = self._dict
        if not entries:
            raise nx.NetworkXError("heap is empty")
        heap = self._heap
        # Discard stale triples (superseded by a later insert for the same
        # key) until the heap top matches the key's current value.
        value, _, key = heap[0]
        while key not in entries or entries[key] != value:
            heappop(heap)
            value, _, key = heap[0]
        return (key, value)

    def pop(self):
        """Delete and return the (key, value) pair with the minimum value."""
        entries = self._dict
        if not entries:
            raise nx.NetworkXError("heap is empty")
        heap = self._heap
        # Pop triples until one is found that is still current.
        while True:
            value, _, key = heappop(heap)
            if key in entries and entries[key] == value:
                break
        del entries[key]
        return (key, value)

    def get(self, key, default=None):
        """Return the value associated with `key`, or `default` if absent."""
        return self._dict.get(key, default)

    def insert(self, key, value, allow_increase=False):
        """Insert `key` with `value`, or change an existing key's value.

        Returns True if a pair was inserted or an existing value decreased.
        """
        entries = self._dict
        if key not in entries:
            entries[key] = value
            heappush(self._heap, (value, next(self._count), key))
            return True
        old_value = entries[key]
        if value < old_value or (allow_increase and value > old_value):
            # There is no way to efficiently locate a pair inside the heap,
            # so push a fresh triple and let min/pop skip the stale ones.
            entries[key] = value
            heappush(self._heap, (value, next(self._count), key))
            return value < old_value
        return False
|
infer_4_37_2/lib/python3.10/site-packages/networkx/utils/mapped_queue.py
ADDED
|
@@ -0,0 +1,297 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Priority queue class with updatable priorities."""
|
| 2 |
+
|
| 3 |
+
import heapq
|
| 4 |
+
|
| 5 |
+
__all__ = ["MappedQueue"]
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class _HeapElement:
|
| 9 |
+
"""This proxy class separates the heap element from its priority.
|
| 10 |
+
|
| 11 |
+
The idea is that using a 2-tuple (priority, element) works
|
| 12 |
+
for sorting, but not for dict lookup because priorities are
|
| 13 |
+
often floating point values so round-off can mess up equality.
|
| 14 |
+
|
| 15 |
+
So, we need inequalities to look at the priority (for sorting)
|
| 16 |
+
and equality (and hash) to look at the element to enable
|
| 17 |
+
updates to the priority.
|
| 18 |
+
|
| 19 |
+
Unfortunately, this class can be tricky to work with if you forget that
|
| 20 |
+
`__lt__` compares the priority while `__eq__` compares the element.
|
| 21 |
+
In `greedy_modularity_communities()` the following code is
|
| 22 |
+
used to check that two _HeapElements differ in either element or priority:
|
| 23 |
+
|
| 24 |
+
if d_oldmax != row_max or d_oldmax.priority != row_max.priority:
|
| 25 |
+
|
| 26 |
+
If the priorities are the same, this implementation uses the element
|
| 27 |
+
as a tiebreaker. This provides compatibility with older systems that
|
| 28 |
+
use tuples to combine priority and elements.
|
| 29 |
+
"""
|
| 30 |
+
|
| 31 |
+
__slots__ = ["priority", "element", "_hash"]
|
| 32 |
+
|
| 33 |
+
def __init__(self, priority, element):
|
| 34 |
+
self.priority = priority
|
| 35 |
+
self.element = element
|
| 36 |
+
self._hash = hash(element)
|
| 37 |
+
|
| 38 |
+
def __lt__(self, other):
|
| 39 |
+
try:
|
| 40 |
+
other_priority = other.priority
|
| 41 |
+
except AttributeError:
|
| 42 |
+
return self.priority < other
|
| 43 |
+
# assume comparing to another _HeapElement
|
| 44 |
+
if self.priority == other_priority:
|
| 45 |
+
try:
|
| 46 |
+
return self.element < other.element
|
| 47 |
+
except TypeError as err:
|
| 48 |
+
raise TypeError(
|
| 49 |
+
"Consider using a tuple, with a priority value that can be compared."
|
| 50 |
+
)
|
| 51 |
+
return self.priority < other_priority
|
| 52 |
+
|
| 53 |
+
def __gt__(self, other):
|
| 54 |
+
try:
|
| 55 |
+
other_priority = other.priority
|
| 56 |
+
except AttributeError:
|
| 57 |
+
return self.priority > other
|
| 58 |
+
# assume comparing to another _HeapElement
|
| 59 |
+
if self.priority == other_priority:
|
| 60 |
+
try:
|
| 61 |
+
return self.element > other.element
|
| 62 |
+
except TypeError as err:
|
| 63 |
+
raise TypeError(
|
| 64 |
+
"Consider using a tuple, with a priority value that can be compared."
|
| 65 |
+
)
|
| 66 |
+
return self.priority > other_priority
|
| 67 |
+
|
| 68 |
+
def __eq__(self, other):
|
| 69 |
+
try:
|
| 70 |
+
return self.element == other.element
|
| 71 |
+
except AttributeError:
|
| 72 |
+
return self.element == other
|
| 73 |
+
|
| 74 |
+
def __hash__(self):
|
| 75 |
+
return self._hash
|
| 76 |
+
|
| 77 |
+
def __getitem__(self, indx):
|
| 78 |
+
return self.priority if indx == 0 else self.element[indx - 1]
|
| 79 |
+
|
| 80 |
+
def __iter__(self):
|
| 81 |
+
yield self.priority
|
| 82 |
+
try:
|
| 83 |
+
yield from self.element
|
| 84 |
+
except TypeError:
|
| 85 |
+
yield self.element
|
| 86 |
+
|
| 87 |
+
def __repr__(self):
|
| 88 |
+
return f"_HeapElement({self.priority}, {self.element})"
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
class MappedQueue:
    """The MappedQueue class implements a min-heap with removal and update-priority.

    The min heap uses heapq as well as custom written _siftup and _siftdown
    methods to allow the heap positions to be tracked by an additional dict
    keyed by element to position. The smallest element can be popped in O(1) time,
    new elements can be pushed in O(log n) time, and any element can be removed
    or updated in O(log n) time. The queue cannot contain duplicate elements
    and an attempt to push an element already in the queue will have no effect.

    MappedQueue complements the heapq package from the python standard
    library. While MappedQueue is designed for maximum compatibility with
    heapq, it adds element removal, lookup, and priority update.

    Parameters
    ----------
    data : dict or iterable

    Examples
    --------

    A `MappedQueue` can be created empty, or optionally, given a dictionary
    of initial elements and priorities. The methods `push`, `pop`,
    `remove`, and `update` operate on the queue.

    >>> colors_nm = {"red": 665, "blue": 470, "green": 550}
    >>> q = MappedQueue(colors_nm)
    >>> q.remove("red")
    >>> q.update("green", "violet", 400)
    >>> q.push("indigo", 425)
    True
    >>> [q.pop().element for i in range(len(q.heap))]
    ['violet', 'indigo', 'blue']

    A `MappedQueue` can also be initialized with a list or other iterable. The priority is assumed
    to be the sort order of the items in the list.

    >>> q = MappedQueue([916, 50, 4609, 493, 237])
    >>> q.remove(493)
    >>> q.update(237, 1117)
    >>> [q.pop() for i in range(len(q.heap))]
    [50, 916, 1117, 4609]

    An exception is raised if the elements are not comparable.

    >>> q = MappedQueue([100, "a"])
    Traceback (most recent call last):
    ...
    TypeError: '<' not supported between instances of 'int' and 'str'

    To avoid the exception, use a dictionary to assign priorities to the elements.

    >>> q = MappedQueue({100: 0, "a": 1})

    References
    ----------
    .. [1] Cormen, T. H., Leiserson, C. E., Rivest, R. L., & Stein, C. (2001).
       Introduction to algorithms second edition.
    .. [2] Knuth, D. E. (1997). The art of computer programming (Vol. 3).
       Pearson Education.
    """

    def __init__(self, data=None):
        """Priority queue class with updatable priorities."""
        if data is None:
            self.heap = []
        elif isinstance(data, dict):
            # dict input: values are priorities, keys are elements.
            self.heap = [_HeapElement(v, k) for k, v in data.items()]
        else:
            # iterable input: items are their own priorities.
            self.heap = list(data)
        # position maps element -> index in self.heap; rebuilt by _heapify.
        self.position = {}
        self._heapify()

    def _heapify(self):
        """Restore heap invariant and recalculate map."""
        heapq.heapify(self.heap)
        self.position = {elt: pos for pos, elt in enumerate(self.heap)}
        # Duplicate elements would collapse to one position entry, breaking
        # the element -> position bijection the sift methods rely on.
        if len(self.heap) != len(self.position):
            raise AssertionError("Heap contains duplicate elements")

    def __len__(self):
        return len(self.heap)

    def push(self, elt, priority=None):
        """Add an element to the queue."""
        if priority is not None:
            elt = _HeapElement(priority, elt)
        # If element is already in queue, do nothing
        if elt in self.position:
            return False
        # Add element to heap and dict
        pos = len(self.heap)
        self.heap.append(elt)
        self.position[elt] = pos
        # Restore invariant by sifting down
        self._siftdown(0, pos)
        return True

    def pop(self):
        """Remove and return the smallest element in the queue."""
        # Remove smallest element
        elt = self.heap[0]
        del self.position[elt]
        # If elt is last item, remove and return
        if len(self.heap) == 1:
            self.heap.pop()
            return elt
        # Replace root with last element
        last = self.heap.pop()
        self.heap[0] = last
        self.position[last] = 0
        # Restore invariant by sifting up
        self._siftup(0)
        # Return smallest element
        return elt

    def update(self, elt, new, priority=None):
        """Replace an element in the queue with a new one."""
        if priority is not None:
            new = _HeapElement(priority, new)
        # Replace
        pos = self.position[elt]
        self.heap[pos] = new
        del self.position[elt]
        self.position[new] = pos
        # Restore invariant by sifting up
        self._siftup(pos)

    def remove(self, elt):
        """Remove an element from the queue."""
        # Find and remove element
        try:
            pos = self.position[elt]
            del self.position[elt]
        except KeyError:
            # Not in queue
            raise
        # If elt is last item, remove and return
        if pos == len(self.heap) - 1:
            self.heap.pop()
            return
        # Replace elt with last element
        last = self.heap.pop()
        self.heap[pos] = last
        self.position[last] = pos
        # Restore invariant by sifting up
        self._siftup(pos)

    def _siftup(self, pos):
        """Move smaller child up until hitting a leaf.

        Built to mimic code for heapq._siftup
        only updating position dict too.
        """
        heap, position = self.heap, self.position
        end_pos = len(heap)
        startpos = pos
        newitem = heap[pos]
        # Shift up the smaller child until hitting a leaf
        child_pos = (pos << 1) + 1  # start with leftmost child position
        while child_pos < end_pos:
            # Set child_pos to index of smaller child.
            child = heap[child_pos]
            right_pos = child_pos + 1
            if right_pos < end_pos:
                right = heap[right_pos]
                if not child < right:
                    child = right
                    child_pos = right_pos
            # Move the smaller child up.
            heap[pos] = child
            position[child] = pos
            pos = child_pos
            child_pos = (pos << 1) + 1
        # pos is a leaf position. Put newitem there, and bubble it up
        # to its final resting place (by sifting its parents down).
        while pos > 0:
            parent_pos = (pos - 1) >> 1
            parent = heap[parent_pos]
            if not newitem < parent:
                break
            heap[pos] = parent
            position[parent] = pos
            pos = parent_pos
        heap[pos] = newitem
        position[newitem] = pos

    def _siftdown(self, start_pos, pos):
        """Restore invariant. keep swapping with parent until smaller.

        Built to mimic code for heapq._siftdown
        only updating position dict too.
        """
        heap, position = self.heap, self.position
        newitem = heap[pos]
        # Follow the path to the root, moving parents down until finding a place
        # newitem fits.
        while pos > start_pos:
            parent_pos = (pos - 1) >> 1
            parent = heap[parent_pos]
            if not newitem < parent:
                break
            heap[pos] = parent
            position[parent] = pos
            pos = parent_pos
        heap[pos] = newitem
        position[newitem] = pos
|
infer_4_37_2/lib/python3.10/site-packages/networkx/utils/misc.py
ADDED
|
@@ -0,0 +1,653 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Miscellaneous Helpers for NetworkX.
|
| 3 |
+
|
| 4 |
+
These are not imported into the base networkx namespace but
|
| 5 |
+
can be accessed, for example, as
|
| 6 |
+
|
| 7 |
+
>>> import networkx
|
| 8 |
+
>>> networkx.utils.make_list_of_ints({1, 2, 3})
|
| 9 |
+
[1, 2, 3]
|
| 10 |
+
>>> networkx.utils.arbitrary_element({5, 1, 7}) # doctest: +SKIP
|
| 11 |
+
1
|
| 12 |
+
"""
|
| 13 |
+
|
| 14 |
+
import random
|
| 15 |
+
import sys
|
| 16 |
+
import uuid
|
| 17 |
+
import warnings
|
| 18 |
+
from collections import defaultdict, deque
|
| 19 |
+
from collections.abc import Iterable, Iterator, Sized
|
| 20 |
+
from itertools import chain, tee
|
| 21 |
+
|
| 22 |
+
import networkx as nx
|
| 23 |
+
|
| 24 |
+
__all__ = [
|
| 25 |
+
"flatten",
|
| 26 |
+
"make_list_of_ints",
|
| 27 |
+
"dict_to_numpy_array",
|
| 28 |
+
"arbitrary_element",
|
| 29 |
+
"pairwise",
|
| 30 |
+
"groups",
|
| 31 |
+
"create_random_state",
|
| 32 |
+
"create_py_random_state",
|
| 33 |
+
"PythonRandomInterface",
|
| 34 |
+
"PythonRandomViaNumpyBits",
|
| 35 |
+
"nodes_equal",
|
| 36 |
+
"edges_equal",
|
| 37 |
+
"graphs_equal",
|
| 38 |
+
"_clear_cache",
|
| 39 |
+
]
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
# some cookbook stuff
|
| 43 |
+
# used in deciding whether something is a bunch of nodes, edges, etc.
|
| 44 |
+
# see G.add_nodes and others in Graph Class in networkx/base.py
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def flatten(obj, result=None):
|
| 48 |
+
"""Return flattened version of (possibly nested) iterable object."""
|
| 49 |
+
if not isinstance(obj, Iterable | Sized) or isinstance(obj, str):
|
| 50 |
+
return obj
|
| 51 |
+
if result is None:
|
| 52 |
+
result = []
|
| 53 |
+
for item in obj:
|
| 54 |
+
if not isinstance(item, Iterable | Sized) or isinstance(item, str):
|
| 55 |
+
result.append(item)
|
| 56 |
+
else:
|
| 57 |
+
flatten(item, result)
|
| 58 |
+
return tuple(result)
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def make_list_of_ints(sequence):
    """Return list of ints from sequence of integral numbers.

    All elements of the sequence must satisfy int(element) == element
    or a ValueError is raised. Sequence is iterated through once.

    If sequence is a list, the non-int values are replaced with ints.
    So, no new list is created
    """
    if isinstance(sequence, list):
        # List input: convert in place, touching only non-int entries.
        for pos, val in enumerate(sequence):
            errmsg = f"sequence is not all integers: {val}"
            if isinstance(val, int):
                continue
            try:
                as_int = int(val)
            except ValueError:
                raise nx.NetworkXError(errmsg) from None
            if as_int != val:
                raise nx.NetworkXError(errmsg)
            sequence[pos] = as_int
        return sequence
    # Non-list input: build a fresh list of ints.
    converted = []
    for val in sequence:
        errmsg = f"sequence is not all integers: {val}"
        try:
            as_int = int(val)
        except ValueError:
            raise nx.NetworkXError(errmsg) from None
        if as_int != val:
            raise nx.NetworkXError(errmsg)
        converted.append(as_int)
    return converted
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def dict_to_numpy_array(d, mapping=None):
    """Convert a dictionary of dictionaries to a numpy array
    with optional mapping."""
    # EAFP dispatch: assume the 2D dict-of-dicts shape first, and fall
    # back to the 1D dict-of-numbers converter when that fails.
    try:
        return _dict_to_numpy_array2(d, mapping)
    except (AttributeError, TypeError):
        # AttributeError is when no mapping was provided and v.keys() fails.
        # TypeError is when a mapping was provided and d[k1][k2] fails.
        return _dict_to_numpy_array1(d, mapping)
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def _dict_to_numpy_array2(d, mapping=None):
|
| 109 |
+
"""Convert a dictionary of dictionaries to a 2d numpy array
|
| 110 |
+
with optional mapping.
|
| 111 |
+
|
| 112 |
+
"""
|
| 113 |
+
import numpy as np
|
| 114 |
+
|
| 115 |
+
if mapping is None:
|
| 116 |
+
s = set(d.keys())
|
| 117 |
+
for k, v in d.items():
|
| 118 |
+
s.update(v.keys())
|
| 119 |
+
mapping = dict(zip(s, range(len(s))))
|
| 120 |
+
n = len(mapping)
|
| 121 |
+
a = np.zeros((n, n))
|
| 122 |
+
for k1, i in mapping.items():
|
| 123 |
+
for k2, j in mapping.items():
|
| 124 |
+
try:
|
| 125 |
+
a[i, j] = d[k1][k2]
|
| 126 |
+
except KeyError:
|
| 127 |
+
pass
|
| 128 |
+
return a
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
def _dict_to_numpy_array1(d, mapping=None):
|
| 132 |
+
"""Convert a dictionary of numbers to a 1d numpy array with optional mapping."""
|
| 133 |
+
import numpy as np
|
| 134 |
+
|
| 135 |
+
if mapping is None:
|
| 136 |
+
s = set(d.keys())
|
| 137 |
+
mapping = dict(zip(s, range(len(s))))
|
| 138 |
+
n = len(mapping)
|
| 139 |
+
a = np.zeros(n)
|
| 140 |
+
for k1, i in mapping.items():
|
| 141 |
+
i = mapping[k1]
|
| 142 |
+
a[i] = d[k1]
|
| 143 |
+
return a
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def arbitrary_element(iterable):
    """Returns an arbitrary element of `iterable` without removing it.

    This is most useful for "peeking" at an arbitrary element of a set,
    but can be used for any list, dictionary, etc., as well.

    Parameters
    ----------
    iterable : `abc.collections.Iterable` instance
        Any object that implements ``__iter__``, e.g. set, dict, list, tuple,
        etc.

    Returns
    -------
    The object that results from ``next(iter(iterable))``

    Raises
    ------
    ValueError
        If `iterable` is an iterator (because the current implementation of
        this function would consume an element from the iterator).

    Examples
    --------
    Arbitrary elements from common Iterable objects:

    >>> nx.utils.arbitrary_element([1, 2, 3])  # list
    1
    >>> nx.utils.arbitrary_element((1, 2, 3))  # tuple
    1
    >>> nx.utils.arbitrary_element({1, 2, 3})  # set
    1
    >>> d = {k: v for k, v in zip([1, 2, 3], [3, 2, 1])}
    >>> nx.utils.arbitrary_element(d)  # dict_keys
    1
    >>> nx.utils.arbitrary_element(d.values())  # dict values
    3

    `str` is also an Iterable:

    >>> nx.utils.arbitrary_element("hello")
    'h'

    :exc:`ValueError` is raised if `iterable` is an iterator:

    >>> iterator = iter([1, 2, 3])  # Iterator, *not* Iterable
    >>> nx.utils.arbitrary_element(iterator)
    Traceback (most recent call last):
        ...
    ValueError: cannot return an arbitrary item from an iterator

    Notes
    -----
    This function does not return a *random* element. If `iterable` is
    ordered, sequential calls will return the same value::

        >>> l = [1, 2, 3]
        >>> nx.utils.arbitrary_element(l)
        1
        >>> nx.utils.arbitrary_element(l)
        1

    """
    # Reject iterators up front: taking next() from one would consume
    # an element, silently mutating the caller's object.
    if isinstance(iterable, Iterator):
        raise ValueError("cannot return an arbitrary item from an iterator")
    # Another possible implementation is ``for x in iterable: return x``.
    # NOTE: an empty iterable propagates StopIteration from next() here.
    return next(iter(iterable))
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
# Recipe from the itertools documentation.
|
| 216 |
+
def pairwise(iterable, cyclic=False):
    """Yield successive overlapping pairs from *iterable*.

    s -> (s0, s1), (s1, s2), (s2, s3), ...

    When ``cyclic`` is exactly ``True``, one extra pair wrapping from the
    last element back to the first is produced.
    """
    left, right = tee(iterable)
    head = next(right, None)
    if cyclic is not True:
        return zip(left, right)
    return zip(left, chain(right, (head,)))
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
def groups(many_to_one):
    """Converts a many-to-one mapping into a one-to-many mapping.

    `many_to_one` must be a dictionary whose keys and values are all
    :term:`hashable`.

    The return value is a dictionary mapping values from `many_to_one`
    to sets of keys from `many_to_one` that have that value.

    Examples
    --------
    >>> from networkx.utils import groups
    >>> many_to_one = {"a": 1, "b": 1, "c": 2, "d": 3, "e": 3}
    >>> groups(many_to_one)  # doctest: +SKIP
    {1: {'a', 'b'}, 2: {'c'}, 3: {'e', 'd'}}
    """
    inverted = {}
    for key, value in many_to_one.items():
        inverted.setdefault(value, set()).add(key)
    return inverted
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
def create_random_state(random_state=None):
|
| 248 |
+
"""Returns a numpy.random.RandomState or numpy.random.Generator instance
|
| 249 |
+
depending on input.
|
| 250 |
+
|
| 251 |
+
Parameters
|
| 252 |
+
----------
|
| 253 |
+
random_state : int or NumPy RandomState or Generator instance, optional (default=None)
|
| 254 |
+
If int, return a numpy.random.RandomState instance set with seed=int.
|
| 255 |
+
if `numpy.random.RandomState` instance, return it.
|
| 256 |
+
if `numpy.random.Generator` instance, return it.
|
| 257 |
+
if None or numpy.random, return the global random number generator used
|
| 258 |
+
by numpy.random.
|
| 259 |
+
"""
|
| 260 |
+
import numpy as np
|
| 261 |
+
|
| 262 |
+
if random_state is None or random_state is np.random:
|
| 263 |
+
return np.random.mtrand._rand
|
| 264 |
+
if isinstance(random_state, np.random.RandomState):
|
| 265 |
+
return random_state
|
| 266 |
+
if isinstance(random_state, int):
|
| 267 |
+
return np.random.RandomState(random_state)
|
| 268 |
+
if isinstance(random_state, np.random.Generator):
|
| 269 |
+
return random_state
|
| 270 |
+
msg = (
|
| 271 |
+
f"{random_state} cannot be used to create a numpy.random.RandomState or\n"
|
| 272 |
+
"numpy.random.Generator instance"
|
| 273 |
+
)
|
| 274 |
+
raise ValueError(msg)
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
class PythonRandomViaNumpyBits(random.Random):
    """Provide the random.random algorithms using a numpy.random bit generator

    The intent is to allow people to contribute code that uses Python's random
    library, but still allow users to provide a single easily controlled random
    bit-stream for all work with NetworkX. This implementation is based on helpful
    comments and code from Robert Kern on NumPy's GitHub Issue #24458.

    This implementation supersedes that of `PythonRandomInterface` which rewrote
    methods to account for subtle differences in API between `random` and
    `numpy.random`. Instead this subclasses `random.Random` and overwrites
    the methods `random`, `getrandbits`, `getstate`, `setstate` and `seed`.
    It makes them use the rng values from an input numpy `RandomState` or `Generator`.
    Those few methods allow the rest of the `random.Random` methods to provide
    the API interface of `random.random` while using randomness generated by
    a numpy generator.
    """

    def __init__(self, rng=None):
        try:
            import numpy as np
        except ImportError:
            msg = "numpy not found, only random.random available."
            warnings.warn(msg, ImportWarning)
        # NOTE(review): if numpy really is missing, the ``np.random``
        # reference below raises NameError right after the warning when
        # rng is None — confirm this fallback is intentional.

        if rng is None:
            # Default to numpy's module-level global RandomState.
            self._rng = np.random.mtrand._rand
        else:
            self._rng = rng

        # ``gauss_next`` lives in the superclass and is nominally public,
        # so initialize it here (super().__init__ is deliberately not
        # called: it would invoke self.seed(), which raises below).
        self.gauss_next = None

    def random(self):
        """Get the next random number in the range 0.0 <= X < 1.0."""
        return self._rng.random()

    def getrandbits(self, k):
        """getrandbits(k) -> x.  Generates an int with k random bits."""
        if k < 0:
            raise ValueError("number of bits must be non-negative")
        numbytes = (k + 7) // 8  # bits / 8 and rounded up
        x = int.from_bytes(self._rng.bytes(numbytes), "big")
        return x >> (numbytes * 8 - k)  # trim excess bits

    def getstate(self):
        # Delegate state capture to the wrapped numpy rng.
        return self._rng.__getstate__()

    def setstate(self, state):
        # Delegate state restoration to the wrapped numpy rng.
        self._rng.__setstate__(state)

    def seed(self, *args, **kwds):
        """Unsupported: seed the wrapped numpy generator directly instead."""
        raise NotImplementedError("seed() not implemented in PythonRandomViaNumpyBits")
|
| 332 |
+
|
| 333 |
+
|
| 334 |
+
##################################################################
|
| 335 |
+
class PythonRandomInterface:
    """PythonRandomInterface is included for backward compatibility
    New code should use PythonRandomViaNumpyBits instead.
    """

    def __init__(self, rng=None):
        try:
            import numpy as np
        except ImportError:
            msg = "numpy not found, only random.random available."
            warnings.warn(msg, ImportWarning)
        # NOTE(review): as in PythonRandomViaNumpyBits, a missing numpy
        # leads to a NameError on ``np.random`` below when rng is None.

        if rng is None:
            # Default to numpy's module-level global RandomState.
            self._rng = np.random.mtrand._rand
        else:
            self._rng = rng

    def random(self):
        return self._rng.random()

    def uniform(self, a, b):
        return a + (b - a) * self._rng.random()

    def randrange(self, a, b=None):
        import numpy as np

        if b is None:
            a, b = 0, a
        if b > 9223372036854775807:  # from np.iinfo(np.int64).max
            # numpy integer sampling overflows beyond int64; fall back to
            # the bit-stream-based wrapper, which handles arbitrary ints.
            tmp_rng = PythonRandomViaNumpyBits(self._rng)
            return tmp_rng.randrange(a, b)

        if isinstance(self._rng, np.random.Generator):
            return self._rng.integers(a, b)
        return self._rng.randint(a, b)

    # NOTE: the numpy implementations of `choice` don't support strings, so
    # this cannot be replaced with self._rng.choice
    def choice(self, seq):
        import numpy as np

        if isinstance(self._rng, np.random.Generator):
            idx = self._rng.integers(0, len(seq))
        else:
            idx = self._rng.randint(0, len(seq))
        return seq[idx]

    def gauss(self, mu, sigma):
        return self._rng.normal(mu, sigma)

    def shuffle(self, seq):
        # Shuffles in place via numpy; returns None like random.shuffle.
        return self._rng.shuffle(seq)

    #    Some methods don't match API for numpy RandomState.
    #    Commented out versions are not used by NetworkX

    def sample(self, seq, k):
        # NOTE(review): unlike random.sample this returns a numpy array,
        # not a list — kept for backward bit-stream compatibility.
        return self._rng.choice(list(seq), size=(k,), replace=False)

    def randint(self, a, b):
        import numpy as np

        if b > 9223372036854775807:  # from np.iinfo(np.int64).max
            tmp_rng = PythonRandomViaNumpyBits(self._rng)
            return tmp_rng.randint(a, b)

        # random.randint is inclusive of b, numpy's samplers are not,
        # hence the b + 1.
        if isinstance(self._rng, np.random.Generator):
            return self._rng.integers(a, b + 1)
        return self._rng.randint(a, b + 1)

    #    exponential as expovariate with 1/argument,
    def expovariate(self, scale):
        # The parameter plays the role of random.expovariate's ``lambd``
        # (rate); numpy's exponential takes a scale (mean), hence 1/scale.
        return self._rng.exponential(1 / scale)

    #    pareto as paretovariate with 1/argument,
    def paretovariate(self, shape):
        return self._rng.pareto(shape)
|
| 412 |
+
|
| 413 |
+
|
| 414 |
+
# weibull as weibullvariate multiplied by beta,
|
| 415 |
+
# def weibullvariate(self, alpha, beta):
|
| 416 |
+
# return self._rng.weibull(alpha) * beta
|
| 417 |
+
#
|
| 418 |
+
# def triangular(self, low, high, mode):
|
| 419 |
+
# return self._rng.triangular(low, mode, high)
|
| 420 |
+
#
|
| 421 |
+
# def choices(self, seq, weights=None, cum_weights=None, k=1):
|
| 422 |
+
# return self._rng.choice(seq
|
| 423 |
+
|
| 424 |
+
|
| 425 |
+
def create_py_random_state(random_state=None):
    """Returns a random.Random instance depending on input.

    Parameters
    ----------
    random_state : int or random number generator or None (default=None)
        - If int, return a `random.Random` instance set with seed=int.
        - If `random.Random` instance, return it.
        - If None or the `random` package, return the global random number
          generator used by `random`.
        - If an `np.random.Generator` instance, or the `np.random` package, or
          the global numpy random number generator, then return it
          wrapped in a `PythonRandomViaNumpyBits` class.
        - If a `PythonRandomViaNumpyBits` instance, return it.
        - If a `PythonRandomInterface` instance, return it.
        - If a `np.random.RandomState` instance and not the global numpy default,
          return it wrapped in `PythonRandomInterface` for backward bit-stream
          matching with legacy code.

    Notes
    -----
    - A diagram intending to illustrate the relationships behind our support
      for numpy random numbers is called
      `NetworkX Numpy Random Numbers <https://excalidraw.com/#room=b5303f2b03d3af7ccc6a,e5ZDIWdWWCTTsg8OqoRvPA>`_.
    - More discussion about this support also appears in
      `gh-6869#comment <https://github.com/networkx/networkx/pull/6869#issuecomment-1944799534>`_.
    - Wrappers of numpy.random number generators allow them to mimic the Python random
      number generation algorithms. For example, Python can create arbitrarily large
      random ints, and the wrappers use Numpy bit-streams with CPython's random module
      to choose arbitrarily large random integers too.
    - We provide two wrapper classes:
      `PythonRandomViaNumpyBits` is usually what you want and is always used for
      `np.Generator` instances. But for users who need to recreate random numbers
      produced in NetworkX 3.2 or earlier, we maintain the `PythonRandomInterface`
      wrapper as well. We use it only if passed a (non-default) `np.RandomState`
      instance pre-initialized from a seed. Otherwise the newer wrapper is used.
    """
    # Pure-Python cases first: these work even without numpy installed.
    if random_state is None or random_state is random:
        return random._inst
    if isinstance(random_state, random.Random):
        return random_state
    if isinstance(random_state, int):
        return random.Random(random_state)

    # numpy is an optional dependency; the numpy-based cases below only
    # apply when it is importable.
    try:
        import numpy as np
    except ImportError:
        pass
    else:
        if isinstance(random_state, PythonRandomInterface | PythonRandomViaNumpyBits):
            return random_state
        if isinstance(random_state, np.random.Generator):
            return PythonRandomViaNumpyBits(random_state)
        if random_state is np.random:
            return PythonRandomViaNumpyBits(np.random.mtrand._rand)

        if isinstance(random_state, np.random.RandomState):
            if random_state is np.random.mtrand._rand:
                return PythonRandomViaNumpyBits(random_state)
            # Only need older interface if specially constructed RandomState used
            return PythonRandomInterface(random_state)

    msg = f"{random_state} cannot be used to generate a random.Random instance"
    raise ValueError(msg)
|
| 489 |
+
|
| 490 |
+
|
| 491 |
+
def nodes_equal(nodes1, nodes2):
    """Check if nodes are equal.

    Equality here means equal as Python objects.
    Node data must match if included.
    The order of nodes is not relevant.

    Parameters
    ----------
    nodes1, nodes2 : iterables of nodes, or (node, datadict) tuples

    Returns
    -------
    bool
        True if nodes are equal, False otherwise.
    """
    members1 = list(nodes1)
    members2 = list(nodes2)
    try:
        # Interpret both inputs as (node, datadict) pairs.  The fallback is
        # deliberately coupled: if either input fails, both are re-read as
        # bare nodes so they are compared under the same interpretation.
        mapping1, mapping2 = dict(members1), dict(members2)
    except (ValueError, TypeError):
        mapping1 = dict.fromkeys(members1)
        mapping2 = dict.fromkeys(members2)
    return mapping1 == mapping2
|
| 516 |
+
|
| 517 |
+
|
| 518 |
+
def edges_equal(edges1, edges2):
    """Check if edges are equal.

    Equality here means equal as Python objects.
    Edge data must match if included.
    The order of the edges is not relevant.

    Parameters
    ----------
    edges1, edges2 : iterables of with u, v nodes as
        edge tuples (u, v), or
        edge tuples with data dicts (u, v, d), or
        edge tuples with keys and data dicts (u, v, k, d)

    Returns
    -------
    bool
        True if edges are equal, False otherwise.
    """
    from collections import defaultdict

    # Materialize so edge counts can be compared exactly.  The previous
    # enumerate-based counting gave count 0 for both an empty iterable and
    # a one-edge iterable, so e.g. edges_equal([], [(1, 2)]) wrongly
    # returned True.
    edges1 = list(edges1)
    edges2 = list(edges2)
    if len(edges1) != len(edges2):
        return False

    # Build undirected adjacency maps u -> v -> list of trailing data
    # (possibly multiple entries per pair for multigraph-style input).
    d1 = defaultdict(dict)
    d2 = defaultdict(dict)
    for e in edges1:
        u, v = e[0], e[1]
        data = [e[2:]]
        if v in d1[u]:
            data = d1[u][v] + data
        d1[u][v] = data
        d1[v][u] = data
    for e in edges2:
        u, v = e[0], e[1]
        data = [e[2:]]
        if v in d2[u]:
            data = d2[u][v] + data
        d2[u][v] = data
        d2[v][u] = data
    # can check one direction because lengths are the same.
    for n, nbrdict in d1.items():
        for nbr, datalist in nbrdict.items():
            if n not in d2:
                return False
            if nbr not in d2[n]:
                return False
            d2datalist = d2[n][nbr]
            # Compare data multisets: each datum must occur equally often.
            for data in datalist:
                if datalist.count(data) != d2datalist.count(data):
                    return False
    return True
|
| 571 |
+
|
| 572 |
+
|
| 573 |
+
def graphs_equal(graph1, graph2):
    """Check if graphs are equal.

    Equality here means equal as Python objects (not isomorphism).
    Node, edge and graph data must match.

    Parameters
    ----------
    graph1, graph2 : graph

    Returns
    -------
    bool
        True if graphs are equal, False otherwise.
    """
    # Adjacency (edges + edge data), node collections (nodes + node data)
    # and graph-level attributes must all agree.
    if graph1.adj != graph2.adj:
        return False
    if graph1.nodes != graph2.nodes:
        return False
    return graph1.graph == graph2.graph
|
| 593 |
+
|
| 594 |
+
|
| 595 |
+
def _clear_cache(G):
|
| 596 |
+
"""Clear the cache of a graph (currently stores converted graphs).
|
| 597 |
+
|
| 598 |
+
Caching is controlled via ``nx.config.cache_converted_graphs`` configuration.
|
| 599 |
+
"""
|
| 600 |
+
if cache := getattr(G, "__networkx_cache__", None):
|
| 601 |
+
cache.clear()
|
| 602 |
+
|
| 603 |
+
|
| 604 |
+
def check_create_using(create_using, *, directed=None, multigraph=None, default=None):
    """Assert that create_using has good properties

    This checks for desired directedness and multi-edge properties.
    It returns `create_using` unless that is `None` when it returns
    the optionally specified default value.

    Parameters
    ----------
    create_using : None, graph class or instance
        The input value of create_using for a function.
    directed : None or bool
        Whether to check `create_using.is_directed() == directed`.
        If None, do not assert directedness.
    multigraph : None or bool
        Whether to check `create_using.is_multigraph() == multigraph`.
        If None, do not assert multi-edge property.
    default : None or graph class
        The graph class to return if create_using is None.

    Returns
    -------
    create_using : graph class or instance
        The provided graph class or instance, or if None, the `default` value.

    Raises
    ------
    NetworkXError
        When `create_using` doesn't match the properties specified by `directed`
        or `multigraph` parameters.
    """
    G = create_using
    if G is None:
        G = nx.Graph if default is None else default

    # A class needs a (unused) positional argument standing in for self;
    # an instance is queried directly.
    if isinstance(G, type):
        G_directed = G.is_directed(None)
        G_multigraph = G.is_multigraph(None)
    else:
        G_directed = G.is_directed()
        G_multigraph = G.is_multigraph()

    if directed is not None:
        if directed and not G_directed:
            raise nx.NetworkXError("create_using must be directed")
        if not directed and G_directed:
            raise nx.NetworkXError("create_using must not be directed")

    if multigraph is not None:
        if multigraph and not G_multigraph:
            raise nx.NetworkXError("create_using must be a multi-graph")
        if not multigraph and G_multigraph:
            raise nx.NetworkXError("create_using must not be a multi-graph")
    return G
|
infer_4_37_2/lib/python3.10/site-packages/networkx/utils/random_sequence.py
ADDED
|
@@ -0,0 +1,164 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Utilities for generating random numbers, random sequences, and
|
| 3 |
+
random selections.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import networkx as nx
|
| 7 |
+
from networkx.utils import py_random_state
|
| 8 |
+
|
| 9 |
+
__all__ = [
|
| 10 |
+
"powerlaw_sequence",
|
| 11 |
+
"zipf_rv",
|
| 12 |
+
"cumulative_distribution",
|
| 13 |
+
"discrete_sequence",
|
| 14 |
+
"random_weighted_sample",
|
| 15 |
+
"weighted_choice",
|
| 16 |
+
]
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
# The same helpers for choosing random sequences from distributions
|
| 20 |
+
# uses Python's random module
|
| 21 |
+
# https://docs.python.org/3/library/random.html
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
@py_random_state(2)
def powerlaw_sequence(n, exponent=2.0, seed=None):
    """Return a sample sequence of length n from a power law distribution."""
    # paretovariate(a) follows a power law with exponent a + 1, hence the
    # shift by one here.
    return [seed.paretovariate(exponent - 1) for _ in range(n)]
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
@py_random_state(2)
def zipf_rv(alpha, xmin=1, seed=None):
    r"""Returns a random value chosen from the Zipf distribution.

    The return value is an integer drawn from the probability distribution

    .. math::

        p(x)=\frac{x^{-\alpha}}{\zeta(\alpha, x_{\min})},

    where $\zeta(\alpha, x_{\min})$ is the Hurwitz zeta function.

    Parameters
    ----------
    alpha : float
        Exponent value of the distribution
    xmin : int
        Minimum value
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    x : int
        Random value from Zipf distribution

    Raises
    ------
    ValueError:
        If xmin < 1 or
        If alpha <= 1

    Notes
    -----
    The rejection algorithm generates random values for the power-law
    distribution in uniformly bounded expected time dependent on
    parameters. See [1]_ for details on its operation.

    Examples
    --------
    >>> nx.utils.zipf_rv(alpha=2, xmin=3, seed=42)
    8

    References
    ----------
    .. [1] Luc Devroye, Non-Uniform Random Variate Generation,
       Springer-Verlag, New York, 1986.
    """
    if xmin < 1:
        raise ValueError("xmin < 1")
    if alpha <= 1:
        raise ValueError("a <= 1.0")
    a1 = alpha - 1.0
    b = 2**a1
    # Devroye's rejection sampler: propose x by inverting the tail of the
    # continuous Pareto, then accept with the discrete correction ratio.
    while True:
        u = 1.0 - seed.random()  # u in (0,1]
        v = seed.random()  # v in [0,1)
        x = int(xmin * u ** -(1.0 / a1))
        t = (1.0 + (1.0 / x)) ** a1
        if v * x * (t - 1.0) / (b - 1.0) <= t / b:
            break
    return x
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def cumulative_distribution(distribution):
    """Returns normalized cumulative distribution from discrete distribution."""

    total = sum(distribution)
    cdf = [0.0]
    # Running partial sums, normalized so the final entry is 1.0; the
    # leading 0.0 makes the result one element longer than the input.
    for weight in distribution:
        cdf.append(cdf[-1] + weight / total)
    return cdf
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
@py_random_state(3)
def discrete_sequence(n, distribution=None, cdistribution=None, seed=None):
    """
    Return sample sequence of length n from a given discrete distribution
    or discrete cumulative distribution.

    One of the following must be specified.

    distribution = histogram of values, will be normalized

    cdistribution = normalized discrete cumulative distribution

    """
    import bisect

    if cdistribution is not None:
        cdf = cdistribution
    elif distribution is not None:
        cdf = cumulative_distribution(distribution)
    else:
        raise nx.NetworkXError(
            "discrete_sequence: distribution or cdistribution missing"
        )

    # Invert the CDF at n uniform variates; subtracting one maps each
    # bisection point back to a zero-based bin index.
    return [bisect.bisect_left(cdf, seed.random()) - 1 for _ in range(n)]
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
@py_random_state(2)
def random_weighted_sample(mapping, k, seed=None):
    """Returns k items without replacement from a weighted sample.

    The input is a dictionary of items with weights as values.
    """
    if k > len(mapping):
        raise ValueError("sample larger than population")
    # Keep drawing weighted picks, discarding repeats, until k distinct
    # items have been collected.
    chosen = set()
    while len(chosen) < k:
        chosen.add(weighted_choice(mapping, seed))
    return list(chosen)
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
@py_random_state(1)
def weighted_choice(mapping, seed=None):
    """Returns a single element from a weighted sample.

    The input is a dictionary of items with weights as values.
    """
    # Roulette-wheel selection: spin a point on [0, total) and walk the
    # items until the cumulative weight passes it.
    remaining = seed.random() * sum(mapping.values())
    for item, weight in mapping.items():
        remaining -= weight
        if remaining < 0:
            return item
|
infer_4_37_2/lib/python3.10/site-packages/networkx/utils/rcm.py
ADDED
|
@@ -0,0 +1,159 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Cuthill-McKee ordering of graph nodes to produce sparse matrices
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from collections import deque
|
| 6 |
+
from operator import itemgetter
|
| 7 |
+
|
| 8 |
+
import networkx as nx
|
| 9 |
+
|
| 10 |
+
from ..utils import arbitrary_element
|
| 11 |
+
|
| 12 |
+
__all__ = ["cuthill_mckee_ordering", "reverse_cuthill_mckee_ordering"]
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def cuthill_mckee_ordering(G, heuristic=None):
    """Generate an ordering (permutation) of the graph nodes to make
    a sparse matrix.

    Uses the Cuthill-McKee heuristic (based on breadth-first search) [1]_.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    heuristic : function, optional
        Function to choose starting node for RCM algorithm. If None
        a node from a pseudo-peripheral pair is used. A user-defined function
        can be supplied that takes a graph object and returns a single node.

    Returns
    -------
    nodes : generator
        Generator of nodes in Cuthill-McKee ordering.

    Examples
    --------
    >>> from networkx.utils import cuthill_mckee_ordering
    >>> G = nx.path_graph(4)
    >>> rcm = list(cuthill_mckee_ordering(G))
    >>> A = nx.adjacency_matrix(G, nodelist=rcm)

    Smallest degree node as heuristic function:

    >>> def smallest_degree(G):
    ...     return min(G, key=G.degree)
    >>> rcm = list(cuthill_mckee_ordering(G, heuristic=smallest_degree))


    See Also
    --------
    reverse_cuthill_mckee_ordering

    Notes
    -----
    The optimal solution to the bandwidth reduction problem is
    NP-complete [2]_.


    References
    ----------
    .. [1] E. Cuthill and J. McKee.
       Reducing the bandwidth of sparse symmetric matrices,
       In Proc. 24th Nat. Conf. ACM, pages 157-172, 1969.
       http://doi.acm.org/10.1145/800195.805928
    .. [2] Steven S. Skiena. 1997. The Algorithm Design Manual.
       Springer-Verlag New York, Inc., New York, NY, USA.
    """
    # Order each connected component independently; concatenating the
    # per-component orderings yields an ordering of the whole graph.
    for c in nx.connected_components(G):
        yield from connected_cuthill_mckee_ordering(G.subgraph(c), heuristic)
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
def reverse_cuthill_mckee_ordering(G, heuristic=None):
    """Generate an ordering (permutation) of the graph nodes to make
    a sparse matrix.

    Uses the reverse Cuthill-McKee heuristic (based on breadth-first search)
    [1]_.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    heuristic : function, optional
        Function to choose starting node for RCM algorithm. If None
        a node from a pseudo-peripheral pair is used. A user-defined function
        can be supplied that takes a graph object and returns a single node.

    Returns
    -------
    nodes : generator
        Generator of nodes in reverse Cuthill-McKee ordering.

    Examples
    --------
    >>> from networkx.utils import reverse_cuthill_mckee_ordering
    >>> G = nx.path_graph(4)
    >>> rcm = list(reverse_cuthill_mckee_ordering(G))
    >>> A = nx.adjacency_matrix(G, nodelist=rcm)

    Smallest degree node as heuristic function:

    >>> def smallest_degree(G):
    ...     return min(G, key=G.degree)
    >>> rcm = list(reverse_cuthill_mckee_ordering(G, heuristic=smallest_degree))


    See Also
    --------
    cuthill_mckee_ordering

    Notes
    -----
    The optimal solution to the bandwidth reduction problem is
    NP-complete [2]_.

    References
    ----------
    .. [1] E. Cuthill and J. McKee.
       Reducing the bandwidth of sparse symmetric matrices,
       In Proc. 24th Nat. Conf. ACM, pages 157-72, 1969.
       http://doi.acm.org/10.1145/800195.805928
    .. [2] Steven S. Skiena. 1997. The Algorithm Design Manual.
       Springer-Verlag New York, Inc., New York, NY, USA.
    """
    # RCM is simply the forward Cuthill-McKee ordering reversed.
    return reversed(list(cuthill_mckee_ordering(G, heuristic=heuristic)))
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def connected_cuthill_mckee_ordering(G, heuristic=None):
    # Cuthill-McKee for a single connected component: breadth-first search
    # from a chosen start node, visiting each node's unseen neighbors in
    # order of increasing degree.
    start = pseudo_peripheral_node(G) if heuristic is None else heuristic(G)
    seen = {start}
    frontier = deque([start])
    while frontier:
        node = frontier.popleft()
        yield node
        by_degree = sorted(G.degree(set(G[node]) - seen), key=itemgetter(1))
        nbrs = [v for v, _deg in by_degree]
        seen.update(nbrs)
        frontier.extend(nbrs)
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def pseudo_peripheral_node(G):
    # Find one node of a "pseudo peripheral pair" to serve as a good BFS
    # root: repeatedly jump to a lowest-degree node at maximum distance
    # until the eccentricity stops growing.
    node = arbitrary_element(G)
    best_ecc = 0
    while True:
        lengths = dict(nx.shortest_path_length(G, node))
        ecc = max(lengths.values())
        if ecc <= best_ecc:
            break
        best_ecc = ecc
        periphery = (n for n, dist in lengths.items() if dist == ecc)
        node, _deg = min(G.degree(periphery), key=itemgetter(1))
    return node
|
infer_4_37_2/lib/python3.10/site-packages/networkx/utils/tests/__init__.py
ADDED
|
File without changes
|
infer_4_37_2/lib/python3.10/site-packages/networkx/utils/tests/test__init.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def test_utils_namespace():
    """Ensure objects are not unintentionally exposed in utils namespace."""
    # Names imported *inside* networkx.utils modules must not leak out as
    # importable attributes of the package itself.
    with pytest.raises(ImportError):
        from networkx.utils import nx
    with pytest.raises(ImportError):
        from networkx.utils import sys
    with pytest.raises(ImportError):
        from networkx.utils import defaultdict, deque
|
infer_4_37_2/lib/python3.10/site-packages/networkx/utils/tests/test_config.py
ADDED
|
@@ -0,0 +1,231 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import pickle
|
| 3 |
+
|
| 4 |
+
import pytest
|
| 5 |
+
|
| 6 |
+
import networkx as nx
|
| 7 |
+
from networkx.utils.configs import BackendPriorities, Config
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
# Define this at module level so we can test pickling
class ExampleConfig(Config):
    """Example configuration."""

    # Required keyword-only entries with no defaults; both must be supplied
    # at construction time.
    x: int
    y: str

    def _on_setattr(self, key, value):
        # Per-key validation hook invoked by Config on every assignment.
        if key == "x" and value <= 0:
            raise ValueError("x must be positive")
        if key == "y" and not isinstance(value, str):
            raise TypeError("y must be a str")
        return value
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class EmptyConfig(Config):
    """Config subclass declaring no entries; used to exercise empty configs."""

    pass
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
@pytest.mark.parametrize("cfg", [EmptyConfig(), Config()])
def test_config_empty(cfg):
    """An empty Config behaves as an empty mapping that rejects new keys."""
    assert dir(cfg) == []
    # Unknown keys can be neither set nor read, via attribute or item access.
    with pytest.raises(AttributeError):
        cfg.x = 1
    with pytest.raises(KeyError):
        cfg["x"] = 1
    with pytest.raises(AttributeError):
        cfg.x
    with pytest.raises(KeyError):
        cfg["x"]
    assert len(cfg) == 0
    assert "x" not in cfg
    assert cfg == cfg
    assert cfg.get("x", 2) == 2
    assert set(cfg.keys()) == set()
    assert set(cfg.values()) == set()
    assert set(cfg.items()) == set()
    # Round-trips through pickle and satisfies the collection ABCs.
    cfg2 = pickle.loads(pickle.dumps(cfg))
    assert cfg == cfg2
    assert isinstance(cfg, collections.abc.Collection)
    assert isinstance(cfg, collections.abc.Mapping)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def test_config_subclass():
    """Exercise construction, mapping protocol, validation and pickling of a
    strict Config subclass with declared entries."""
    # Construction: entries are required, validated, and closed (no extras).
    with pytest.raises(TypeError, match="missing 2 required keyword-only"):
        ExampleConfig()
    with pytest.raises(ValueError, match="x must be positive"):
        ExampleConfig(x=0, y="foo")
    with pytest.raises(TypeError, match="unexpected keyword"):
        ExampleConfig(x=1, y="foo", z="bad config")
    with pytest.raises(TypeError, match="unexpected keyword"):
        EmptyConfig(z="bad config")
    cfg = ExampleConfig(x=1, y="foo")
    # Attribute and item access are interchangeable.
    assert cfg.x == 1
    assert cfg["x"] == 1
    assert cfg["y"] == "foo"
    assert cfg.y == "foo"
    assert "x" in cfg
    assert "y" in cfg
    assert "z" not in cfg
    assert len(cfg) == 2
    assert set(iter(cfg)) == {"x", "y"}
    assert set(cfg.keys()) == {"x", "y"}
    assert set(cfg.values()) == {1, "foo"}
    assert set(cfg.items()) == {("x", 1), ("y", "foo")}
    assert dir(cfg) == ["x", "y"]
    cfg.x = 2
    cfg["y"] = "bar"
    assert cfg["x"] == 2
    assert cfg.y == "bar"
    # Declared entries can be reassigned but never deleted.
    with pytest.raises(TypeError, match="can't be deleted"):
        del cfg.x
    with pytest.raises(TypeError, match="can't be deleted"):
        del cfg["y"]
    assert cfg.x == 2
    assert cfg == cfg
    assert cfg == ExampleConfig(x=2, y="bar")
    assert cfg != ExampleConfig(x=3, y="baz")
    assert cfg != Config(x=2, y="bar")
    # _on_setattr validation also applies after construction.
    with pytest.raises(TypeError, match="y must be a str"):
        cfg["y"] = 5
    with pytest.raises(ValueError, match="x must be positive"):
        cfg.x = -5
    assert cfg.get("x", 10) == 2
    with pytest.raises(AttributeError):
        cfg.z = 5
    with pytest.raises(KeyError):
        cfg["z"] = 5
    with pytest.raises(AttributeError):
        cfg.z
    with pytest.raises(KeyError):
        cfg["z"]
    # Pickling preserves values and the class docstring.
    cfg2 = pickle.loads(pickle.dumps(cfg))
    assert cfg == cfg2
    assert cfg.__doc__ == "Example configuration."
    assert cfg2.__doc__ == "Example configuration."
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def test_config_defaults():
    """Entries with class-level defaults may be omitted at construction."""

    class DefaultConfig(Config):
        x: int = 0
        y: int

    # Omitting x picks up the default; passing it explicitly overrides.
    config = DefaultConfig(y=1)
    assert config.x == 0
    config = DefaultConfig(x=2, y=1)
    assert config.x == 2
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def test_nxconfig():
    """Validate type/value checking on the global ``nx.config`` entries."""
    assert isinstance(nx.config.backend_priority, BackendPriorities)
    assert isinstance(nx.config.backend_priority.algos, list)
    assert isinstance(nx.config.backends, Config)
    # backend_priority must be a list of known backend names.
    with pytest.raises(TypeError, match="must be a list of backend names"):
        nx.config.backend_priority.algos = "nx_loopback"
    with pytest.raises(ValueError, match="Unknown backend when setting"):
        nx.config.backend_priority.algos = ["this_almost_certainly_is_not_a_backend"]
    # backends must be a Config whose keys are registered backends.
    with pytest.raises(TypeError, match="must be a Config of backend configs"):
        nx.config.backends = {}
    with pytest.raises(TypeError, match="must be a Config of backend configs"):
        nx.config.backends = Config(plausible_backend_name={})
    with pytest.raises(ValueError, match="Unknown backend when setting"):
        nx.config.backends = Config(this_almost_certainly_is_not_a_backend=Config())
    with pytest.raises(TypeError, match="must be True or False"):
        nx.config.cache_converted_graphs = "bad value"
    with pytest.raises(TypeError, match="must be a set of "):
        nx.config.warnings_to_ignore = 7
    with pytest.raises(ValueError, match="Unknown warning "):
        nx.config.warnings_to_ignore = {"bad value"}
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def test_not_strict():
    """A non-strict Config allows adding and deleting arbitrary entries
    after construction, while construction itself stays strict."""

    class FlexibleConfig(Config, strict=False):
        x: int

    cfg = FlexibleConfig(x=1)
    assert "_strict" not in cfg
    assert len(cfg) == 1
    assert list(cfg) == ["x"]
    assert list(cfg.keys()) == ["x"]
    assert list(cfg.values()) == [1]
    assert list(cfg.items()) == [("x", 1)]
    assert cfg.x == 1
    assert cfg["x"] == 1
    assert "x" in cfg
    assert hasattr(cfg, "x")
    assert "FlexibleConfig(x=1)" in repr(cfg)
    assert cfg == FlexibleConfig(x=1)
    # Unlike a strict Config, declared entries may be deleted.
    del cfg.x
    assert "FlexibleConfig()" in repr(cfg)
    assert len(cfg) == 0
    assert not hasattr(cfg, "x")
    assert "x" not in cfg
    assert not hasattr(cfg, "y")
    assert "y" not in cfg
    # Undeclared entries may be added...
    cfg.y = 2
    assert len(cfg) == 1
    assert list(cfg) == ["y"]
    assert list(cfg.keys()) == ["y"]
    assert list(cfg.values()) == [2]
    assert list(cfg.items()) == [("y", 2)]
    assert cfg.y == 2
    assert cfg["y"] == 2
    assert hasattr(cfg, "y")
    assert "y" in cfg
    # ...and removed again.
    del cfg["y"]
    assert len(cfg) == 0
    assert list(cfg) == []
    with pytest.raises(AttributeError, match="y"):
        del cfg.y
    with pytest.raises(KeyError, match="y"):
        del cfg["y"]
    with pytest.raises(TypeError, match="missing 1 required keyword-only"):
        FlexibleConfig()
    # Be strict when first creating the config object
    with pytest.raises(TypeError, match="unexpected keyword argument 'y'"):
        FlexibleConfig(x=1, y=2)

    class FlexibleConfigWithDefault(Config, strict=False):
        x: int = 0

    assert FlexibleConfigWithDefault().x == 0
    assert FlexibleConfigWithDefault(x=1)["x"] == 1
|
| 193 |
+
|
| 194 |
+
|
| 195 |
+
def test_context():
    """cfg(...) opens a re-entrant context that restores prior values on exit."""
    cfg = Config(x=1)
    with cfg(x=2) as ctx:
        assert ctx.x == 2
        # Mutations inside the context are visible immediately...
        ctx.x = 3
        assert cfg.x == 3
    # ...and rolled back once the context closes.
    assert cfg.x == 1

    with cfg(x=2) as ctx:
        assert ctx == cfg
        assert cfg.x == 2
        with cfg(x=3) as inner:
            assert inner == cfg
            assert cfg.x == 3
            with pytest.raises(RuntimeError, match="context manager without"):
                with cfg as bad:  # Forgot to call `cfg(...)`
                    pass
            # The failed entry must not disturb the active value.
            assert cfg.x == 3
        assert cfg.x == 2
    assert cfg.x == 1

    ctx = cfg(x=4)  # Not yet as context (not recommended, but possible)
    assert ctx == cfg
    assert cfg.x == 4
    # Cheat by looking at internal data; context stack should only grow with __enter__
    assert cfg._prev is not None
    assert cfg._context_stack == []
    with ctx:
        assert ctx == cfg
        assert cfg.x == 4
    assert cfg.x == 1
    # Cheat again; there was no preceding `cfg(...)` call this time
    assert cfg._prev is None
    with pytest.raises(RuntimeError, match="context manager without"):
        with cfg:
            pass
    assert cfg.x == 1
|
infer_4_37_2/lib/python3.10/site-packages/networkx/utils/tests/test_unionfind.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import networkx as nx
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def test_unionfind():
    # Regression test, fixed by 2cddd5958689bdecdcd89b91ac9aaf6ce0e4f6b8:
    # networkx 2.x UnionFind could merge sets of mixed element types, but
    # under Python 3 that used to raise, e.g.
    #   TypeError: unorderable types: str() > int()
    # The only requirement here is that no exception escapes.
    partition = nx.utils.UnionFind()
    partition.union(0, "a")
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def test_subtree_union():
    # Regression test for https://github.com/networkx/networkx/pull/3224
    # (35db1b551ee65780794a357794f521d8768d5049): to_sets() must report a
    # single set after subtrees are merged into one another.
    disjoint = nx.utils.UnionFind()
    for a, b in [(1, 2), (3, 4), (4, 5), (1, 5)]:
        disjoint.union(a, b)
    assert list(disjoint.to_sets()) == [{1, 2, 3, 4, 5}]
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def test_unionfind_weights():
    # Weights must accumulate correctly when many elements are merged at once.
    dsu = nx.utils.UnionFind()
    for triple in [(1, 4, 7), (2, 5, 8), (3, 6, 9)]:
        dsu.union(*triple)
    dsu.union(*range(1, 10))
    assert dsu.weights[dsu[1]] == 9
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def test_unbalanced_merge_weights():
    # When sets of unequal size merge, the larger set's root must win.
    dsu = nx.utils.UnionFind()
    dsu.union(1, 2, 3)
    dsu.union(4, 5, 6, 7, 8, 9)
    assert dsu.weights[dsu[1]] == 3
    assert dsu.weights[dsu[4]] == 6
    heavy_root = dsu[4]
    dsu.union(1, 4)
    assert dsu[1] == heavy_root
    assert dsu.weights[heavy_root] == 9
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def test_empty_union():
    # union() with no arguments must leave the partition untouched.
    dsu = nx.utils.UnionFind((0, 1))
    dsu.union()
    assert (dsu[0], dsu[1]) == (0, 1)
|
infer_4_37_2/lib/python3.10/site-packages/networkx/utils/union_find.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Union-find data structure.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from networkx.utils import groups
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class UnionFind:
    """Union-find (disjoint-set) data structure.

    Each UnionFind instance X maintains a family of disjoint sets of
    hashable objects, supporting two operations:

    - X[item] returns a name for the set containing the given item.
      Each set is named by an arbitrarily-chosen one of its members; as
      long as the set remains unchanged it will keep the same name. If
      the item is not yet part of a set in X, a new singleton set is
      created for it.

    - X.union(item1, item2, ...) merges the sets containing each item
      into a single larger set. If any item is not yet part of a set
      in X, it is added to X as one of the members of the merged set.

    Union-find data structure. Based on Josiah Carlson's code,
    https://code.activestate.com/recipes/215912/
    with significant additional changes by D. Eppstein.
    http://www.ics.uci.edu/~eppstein/PADS/UnionFind.py

    """

    def __init__(self, elements=None):
        """Create a new empty union-find structure.

        If *elements* is an iterable, this structure will be initialized
        with the discrete partition on the given set of elements.

        """
        # parents maps each known element to its parent in the forest;
        # weights maps each root to the size of its tree.
        self.parents = {}
        self.weights = {}
        for elt in () if elements is None else elements:
            self.parents[elt] = elt
            self.weights[elt] = 1

    def __getitem__(self, object):
        """Find and return the name of the set containing the object."""
        parents = self.parents

        # Previously unknown objects lazily become their own singleton set.
        if object not in parents:
            parents[object] = object
            self.weights[object] = 1
            return object

        # Climb to the root, remembering every node visited on the way.
        visited = []
        node = object
        while parents[node] != node:
            visited.append(node)
            node = parents[node]

        # Path compression: re-point every visited node directly at the root.
        for ancestor in visited:
            parents[ancestor] = node
        return node

    def __iter__(self):
        """Iterate through all items ever found or unioned by this structure."""
        return iter(self.parents)

    def to_sets(self):
        """Iterates over the sets stored in this structure.

        For example::

            >>> partition = UnionFind("xyz")
            >>> sorted(map(sorted, partition.to_sets()))
            [['x'], ['y'], ['z']]
            >>> partition.union("x", "y")
            >>> sorted(map(sorted, partition.to_sets()))
            [['x', 'y'], ['z']]

        """
        # Fully compress every path first so parents maps each element
        # straight to its root, letting groups() bucket by root.
        for elt in self.parents:
            self[elt]  # Evaluated for side-effect only

        yield from groups(self.parents).values()

    def union(self, *objects):
        """Find the sets containing the objects and merge them all."""
        # Collect the distinct roots, heaviest first (union by size).
        by_weight = sorted(
            {self[x] for x in objects},
            key=lambda r: self.weights[r],
            reverse=True,
        )
        if not by_weight:
            return  # Nothing to merge.

        # The heaviest root absorbs every other tree.
        root, *others = by_weight
        for r in others:
            self.weights[root] += self.weights[r]
            self.parents[r] = root
|
janus/lib/python3.10/site-packages/timm/layers/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (5.87 kB). View file
|
|
|
janus/lib/python3.10/site-packages/timm/layers/__pycache__/activations.cpython-310.pyc
ADDED
|
Binary file (7.13 kB). View file
|
|
|
janus/lib/python3.10/site-packages/timm/layers/__pycache__/attention2d.cpython-310.pyc
ADDED
|
Binary file (9.64 kB). View file
|
|
|
janus/lib/python3.10/site-packages/timm/layers/__pycache__/attention_pool.cpython-310.pyc
ADDED
|
Binary file (3.12 kB). View file
|
|
|
janus/lib/python3.10/site-packages/timm/layers/__pycache__/config.cpython-310.pyc
ADDED
|
Binary file (4.36 kB). View file
|
|
|
janus/lib/python3.10/site-packages/timm/layers/__pycache__/create_norm.cpython-310.pyc
ADDED
|
Binary file (1.68 kB). View file
|
|
|
janus/lib/python3.10/site-packages/timm/layers/__pycache__/create_norm_act.cpython-310.pyc
ADDED
|
Binary file (2.8 kB). View file
|
|
|
janus/lib/python3.10/site-packages/timm/layers/__pycache__/drop.cpython-310.pyc
ADDED
|
Binary file (6.08 kB). View file
|
|
|
janus/lib/python3.10/site-packages/timm/layers/__pycache__/evo_norm.cpython-310.pyc
ADDED
|
Binary file (11.6 kB). View file
|
|
|
janus/lib/python3.10/site-packages/timm/layers/__pycache__/gather_excite.cpython-310.pyc
ADDED
|
Binary file (3.04 kB). View file
|
|
|
janus/lib/python3.10/site-packages/timm/layers/__pycache__/global_context.cpython-310.pyc
ADDED
|
Binary file (2.41 kB). View file
|
|
|
janus/lib/python3.10/site-packages/timm/layers/__pycache__/halo_attn.cpython-310.pyc
ADDED
|
Binary file (7.47 kB). View file
|
|
|
janus/lib/python3.10/site-packages/timm/layers/__pycache__/helpers.cpython-310.pyc
ADDED
|
Binary file (1.26 kB). View file
|
|
|
janus/lib/python3.10/site-packages/timm/layers/__pycache__/inplace_abn.cpython-310.pyc
ADDED
|
Binary file (3.14 kB). View file
|
|
|
janus/lib/python3.10/site-packages/timm/layers/__pycache__/layer_scale.cpython-310.pyc
ADDED
|
Binary file (1.62 kB). View file
|
|
|
janus/lib/python3.10/site-packages/timm/layers/__pycache__/linear.cpython-310.pyc
ADDED
|
Binary file (1.06 kB). View file
|
|
|
janus/lib/python3.10/site-packages/timm/layers/__pycache__/norm_act.cpython-310.pyc
ADDED
|
Binary file (11.3 kB). View file
|
|
|