ZTWHHH committed on
Commit
c6f1d5b
·
verified ·
1 Parent(s): 440bd13

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. wemm/lib/python3.10/site-packages/botocore/data/proton/2020-07-20/endpoint-rule-set-1.json.gz +3 -0
  2. wemm/lib/python3.10/site-packages/charset_normalizer/__main__.py +6 -0
  3. wemm/lib/python3.10/site-packages/charset_normalizer/__pycache__/__main__.cpython-310.pyc +0 -0
  4. wemm/lib/python3.10/site-packages/charset_normalizer/__pycache__/legacy.cpython-310.pyc +0 -0
  5. wemm/lib/python3.10/site-packages/charset_normalizer/__pycache__/md.cpython-310.pyc +0 -0
  6. wemm/lib/python3.10/site-packages/charset_normalizer/__pycache__/version.cpython-310.pyc +0 -0
  7. wemm/lib/python3.10/site-packages/charset_normalizer/cd.py +395 -0
  8. wemm/lib/python3.10/site-packages/charset_normalizer/cli/__init__.py +8 -0
  9. wemm/lib/python3.10/site-packages/charset_normalizer/cli/__main__.py +321 -0
  10. wemm/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-310.pyc +0 -0
  11. wemm/lib/python3.10/site-packages/charset_normalizer/models.py +360 -0
  12. wemm/lib/python3.10/site-packages/charset_normalizer/version.py +8 -0
  13. wemm/lib/python3.10/site-packages/idna/__init__.py +45 -0
  14. wemm/lib/python3.10/site-packages/idna/__pycache__/core.cpython-310.pyc +0 -0
  15. wemm/lib/python3.10/site-packages/idna/__pycache__/intranges.cpython-310.pyc +0 -0
  16. wemm/lib/python3.10/site-packages/idna/codec.py +122 -0
  17. wemm/lib/python3.10/site-packages/idna/core.py +437 -0
  18. wemm/lib/python3.10/site-packages/idna/intranges.py +57 -0
  19. wemm/lib/python3.10/site-packages/lightning_utilities/__init__.py +23 -0
  20. wemm/lib/python3.10/site-packages/lightning_utilities/cli/__main__.py +24 -0
  21. wemm/lib/python3.10/site-packages/lightning_utilities/install/__init__.py +5 -0
  22. wemm/lib/python3.10/site-packages/lightning_utilities/install/__pycache__/requirements.cpython-310.pyc +0 -0
  23. wemm/lib/python3.10/site-packages/networkx/__init__.py +53 -0
  24. wemm/lib/python3.10/site-packages/networkx/algorithms/__init__.py +133 -0
  25. wemm/lib/python3.10/site-packages/networkx/algorithms/chordal.py +443 -0
  26. wemm/lib/python3.10/site-packages/networkx/algorithms/cluster.py +609 -0
  27. wemm/lib/python3.10/site-packages/networkx/algorithms/communicability_alg.py +163 -0
  28. wemm/lib/python3.10/site-packages/networkx/algorithms/core.py +649 -0
  29. wemm/lib/python3.10/site-packages/networkx/algorithms/covering.py +142 -0
  30. wemm/lib/python3.10/site-packages/networkx/algorithms/dag.py +1418 -0
  31. wemm/lib/python3.10/site-packages/networkx/algorithms/distance_measures.py +1022 -0
  32. wemm/lib/python3.10/site-packages/networkx/algorithms/efficiency_measures.py +167 -0
  33. wemm/lib/python3.10/site-packages/networkx/algorithms/graph_hashing.py +328 -0
  34. wemm/lib/python3.10/site-packages/networkx/algorithms/graphical.py +483 -0
  35. wemm/lib/python3.10/site-packages/networkx/algorithms/hierarchy.py +57 -0
  36. wemm/lib/python3.10/site-packages/networkx/algorithms/hybrid.py +196 -0
  37. wemm/lib/python3.10/site-packages/networkx/algorithms/isolate.py +107 -0
  38. wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-310.pyc +0 -0
  39. wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2pp.cpython-310.pyc +0 -0
  40. wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2userfunc.cpython-310.pyc +0 -0
  41. wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py +1238 -0
  42. wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/matchhelpers.py +352 -0
  43. wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py +308 -0
  44. wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__init__.py +0 -0
  45. wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_ismags.cpython-310.pyc +0 -0
  46. wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_match_helpers.cpython-310.pyc +0 -0
  47. wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_temporalisomorphvf2.cpython-310.pyc +0 -0
  48. wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp.cpython-310.pyc +0 -0
  49. wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99 +0 -0
  50. wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py +410 -0
wemm/lib/python3.10/site-packages/botocore/data/proton/2020-07-20/endpoint-rule-set-1.json.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9b3ac3f5f2c24aa1b05086605e2084f40b3fff4c78424bb9efcafbde61e1ad52
3
+ size 1288
wemm/lib/python3.10/site-packages/charset_normalizer/__main__.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
"""Entry point so the detector can be run via ``python -m charset_normalizer``."""
from __future__ import annotations

from .cli import cli_detect

if __name__ == "__main__":
    cli_detect()
wemm/lib/python3.10/site-packages/charset_normalizer/__pycache__/__main__.cpython-310.pyc ADDED
Binary file (301 Bytes). View file
 
wemm/lib/python3.10/site-packages/charset_normalizer/__pycache__/legacy.cpython-310.pyc ADDED
Binary file (2.15 kB). View file
 
wemm/lib/python3.10/site-packages/charset_normalizer/__pycache__/md.cpython-310.pyc ADDED
Binary file (16.2 kB). View file
 
wemm/lib/python3.10/site-packages/charset_normalizer/__pycache__/version.cpython-310.pyc ADDED
Binary file (303 Bytes). View file
 
wemm/lib/python3.10/site-packages/charset_normalizer/cd.py ADDED
@@ -0,0 +1,395 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import importlib
4
+ from codecs import IncrementalDecoder
5
+ from collections import Counter
6
+ from functools import lru_cache
7
+ from typing import Counter as TypeCounter
8
+
9
+ from .constant import (
10
+ FREQUENCIES,
11
+ KO_NAMES,
12
+ LANGUAGE_SUPPORTED_COUNT,
13
+ TOO_SMALL_SEQUENCE,
14
+ ZH_NAMES,
15
+ )
16
+ from .md import is_suspiciously_successive_range
17
+ from .models import CoherenceMatches
18
+ from .utils import (
19
+ is_accentuated,
20
+ is_latin,
21
+ is_multi_byte_encoding,
22
+ is_unicode_range_secondary,
23
+ unicode_range,
24
+ )
25
+
26
+
27
def encoding_unicode_range(iana_name: str) -> list[str]:
    """
    Return associated unicode ranges in a single byte code page.
    Only non-secondary ranges covering at least 15% of the decodable
    characters are kept; the result is sorted alphabetically.
    """
    if is_multi_byte_encoding(iana_name):
        raise OSError("Function not supported on multi-byte code page")

    decoder_cls = importlib.import_module(f"encodings.{iana_name}").IncrementalDecoder

    decoder: IncrementalDecoder = decoder_cls(errors="ignore")
    range_hits: dict[str, int] = {}
    decodable_total: int = 0

    # Probe the upper part of the byte table only; the low ASCII area is
    # shared by every single-byte code page and carries no signal.
    for byte_value in range(0x40, 0xFF):
        decoded: str = decoder.decode(bytes([byte_value]))

        if not decoded:
            continue

        detected_range: str | None = unicode_range(decoded)

        if detected_range is None:
            continue

        if is_unicode_range_secondary(detected_range) is False:
            range_hits[detected_range] = range_hits.get(detected_range, 0) + 1
            decodable_total += 1

    # Keep only ranges representing a meaningful share of the code page.
    return sorted(
        rng for rng, hits in range_hits.items() if hits / decodable_total >= 0.15
    )
62
+
63
+
64
def unicode_range_languages(primary_range: str) -> list[str]:
    """
    Return inferred languages used with a unicode range.
    A language qualifies as soon as one of its frequent characters
    belongs to *primary_range*.
    """
    matching: list[str] = []

    for language, characters in FREQUENCIES.items():
        if any(unicode_range(character) == primary_range for character in characters):
            matching.append(language)

    return matching
77
+
78
+
79
@lru_cache()
def encoding_languages(iana_name: str) -> list[str]:
    """
    Single-byte encoding language association. Some code page are heavily linked to particular language(s).
    This function does the correspondence.
    """
    detected_ranges: list[str] = encoding_unicode_range(iana_name)

    # The first non-Latin range (if any) is the discriminating one.
    primary_range: str | None = next(
        (rng for rng in detected_ranges if "Latin" not in rng), None
    )

    if primary_range is None:
        return ["Latin Based"]

    return unicode_range_languages(primary_range)
97
+
98
+
99
@lru_cache()
def mb_encoding_languages(iana_name: str) -> list[str]:
    """
    Multi-byte encoding language association. Some code page are heavily linked to particular language(s).
    This function does the correspondence.
    """
    if iana_name == "cp932" or iana_name.startswith(("shift_", "iso2022_jp", "euc_j")):
        return ["Japanese"]
    if iana_name.startswith("gb") or iana_name in ZH_NAMES:
        return ["Chinese"]
    if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES:
        return ["Korean"]

    return []
118
+
119
+
120
@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT)
def get_target_features(language: str) -> tuple[bool, bool]:
    """
    Determine main aspects from a supported language if it contains accents and if is pure Latin.
    """
    characters = FREQUENCIES[language]

    target_have_accents: bool = any(is_accentuated(c) for c in characters)
    target_pure_latin: bool = all(is_latin(c) for c in characters)

    return target_have_accents, target_pure_latin
135
+
136
+
137
def alphabet_languages(
    characters: list[str], ignore_non_latin: bool = False
) -> list[str]:
    """
    Return associated languages associated to given characters.
    Languages are ordered from the best character coverage to the worst.
    """
    candidates: list[tuple[str, float]] = []

    source_have_accents = any(is_accentuated(character) for character in characters)

    for language, language_characters in FREQUENCIES.items():
        target_have_accents, target_pure_latin = get_target_features(language)

        # Skip non-Latin languages when the caller restricts to Latin scripts.
        if ignore_non_latin and target_pure_latin is False:
            continue

        # An accented source cannot come from an accent-free language.
        if target_have_accents is False and source_have_accents:
            continue

        total_count: int = len(language_characters)
        matched_count: int = sum(1 for c in language_characters if c in characters)

        coverage: float = matched_count / total_count

        if coverage >= 0.2:
            candidates.append((language, coverage))

    candidates.sort(key=lambda entry: entry[1], reverse=True)

    return [language for language, _ in candidates]
170
+
171
+
172
def characters_popularity_compare(
    language: str, ordered_characters: list[str]
) -> float:
    """
    Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language.
    The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit).
    Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.)
    """
    if language not in FREQUENCIES:
        raise ValueError(f"{language} not available")

    approved_count: int = 0
    language_characters = FREQUENCIES[language]
    language_character_set = set(language_characters)

    observed_total: int = len(ordered_characters)
    expected_total: int = len(language_characters)

    large_alphabet: bool = expected_total > 26

    for observed_rank, character in enumerate(ordered_characters):
        if character not in language_character_set:
            continue

        expected_rank: int = language_characters.index(character)
        # Project the observed rank onto the language's frequency table so
        # both positions live on the same scale before being compared.
        projection_ratio: float = expected_total / observed_total
        projected_rank: int = int(observed_rank * projection_ratio)

        if (
            large_alphabet is False
            and abs(projected_rank - expected_rank) > 4
        ):
            continue

        if (
            large_alphabet is True
            and abs(projected_rank - expected_rank)
            < expected_total / 3
        ):
            approved_count += 1
            continue

        # Fallback: compare the neighbourhood (characters ranked before and
        # after) between the observed ordering and the language's table.
        expected_before: list[str] = language_characters[0:expected_rank]
        expected_after: list[str] = language_characters[expected_rank:]
        observed_before: list[str] = ordered_characters[0:observed_rank]
        observed_after: list[str] = ordered_characters[observed_rank:]

        before_match_count: int = len(
            set(observed_before) & set(expected_before)
        )

        after_match_count: int = len(
            set(observed_after) & set(expected_after)
        )

        if len(expected_before) == 0 and before_match_count <= 4:
            approved_count += 1
            continue

        if len(expected_after) == 0 and after_match_count <= 4:
            approved_count += 1
            continue

        if (
            before_match_count / len(expected_before) >= 0.4
            or after_match_count / len(expected_after) >= 0.4
        ):
            approved_count += 1
            continue

    return approved_count / len(ordered_characters)
250
+
251
+
252
def alpha_unicode_split(decoded_sequence: str) -> list[str]:
    """
    Given a decoded text sequence, return a list of str. Unicode range / alphabet separation.
    Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list;
    One containing the latin letters and the other hebrew.
    """
    layers: dict[str, str] = {}

    for character in decoded_sequence:
        if character.isalpha() is False:
            continue

        character_range: str | None = unicode_range(character)

        if character_range is None:
            continue

        # Reuse an existing layer when its range is compatible with this
        # character's range; otherwise open a new layer.
        target_range: str | None = None

        for existing_range in layers:
            if (
                is_suspiciously_successive_range(existing_range, character_range)
                is False
            ):
                target_range = existing_range
                break

        if target_range is None:
            target_range = character_range

        layers[target_range] = layers.get(target_range, "") + character.lower()

    return list(layers.values())
289
+
290
+
291
def merge_coherence_ratios(results: list[CoherenceMatches]) -> CoherenceMatches:
    """
    This function merge results previously given by the function coherence_ratio.
    The return type is the same as coherence_ratio.
    Ratios reported for the same language are averaged (rounded to 4 digits)
    and the merged list is sorted from best to worst ratio.
    """
    per_language: dict[str, list[float]] = {}

    for result in results:
        for language, ratio in result:
            per_language.setdefault(language, []).append(ratio)

    merged = [
        (language, round(sum(ratios) / len(ratios), 4))
        for language, ratios in per_language.items()
    ]

    return sorted(merged, key=lambda entry: entry[1], reverse=True)
317
+
318
+
319
def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches:
    """
    We shall NOT return "English—" in CoherenceMatches because it is an alternative
    of "English". This function only keeps the best match and remove the em-dash in it.
    """
    # Group ratios under the language name stripped of any em-dash suffix.
    grouped: dict[str, list[float]] = {}

    for language, ratio in results:
        grouped.setdefault(language.replace("—", ""), []).append(ratio)

    # Without duplicates the input is already canonical: hand it back as-is.
    if all(len(ratios) == 1 for ratios in grouped.values()):
        return results

    return [(language, max(ratios)) for language, ratios in grouped.items()]
344
+
345
+
346
@lru_cache(maxsize=2048)
def coherence_ratio(
    decoded_sequence: str, threshold: float = 0.1, lg_inclusion: str | None = None
) -> CoherenceMatches:
    """
    Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers.
    A layer = Character extraction by alphabets/ranges.
    """
    results: list[tuple[str, float]] = []
    ignore_non_latin: bool = False
    sufficient_match_count: int = 0

    included_languages = lg_inclusion.split(",") if lg_inclusion is not None else []
    if "Latin Based" in included_languages:
        ignore_non_latin = True
        included_languages.remove("Latin Based")

    for layer in alpha_unicode_split(decoded_sequence):
        layer_frequencies: TypeCounter[str] = Counter(layer)
        most_common = layer_frequencies.most_common()

        layer_character_count: int = sum(occ for _, occ in most_common)

        # Too little material in this alphabet to say anything reliable.
        if layer_character_count <= TOO_SMALL_SEQUENCE:
            continue

        popular_character_ordered: list[str] = [char for char, _ in most_common]

        candidate_languages = included_languages or alphabet_languages(
            popular_character_ordered, ignore_non_latin
        )

        for language in candidate_languages:
            ratio: float = characters_popularity_compare(
                language, popular_character_ordered
            )

            if ratio < threshold:
                continue
            if ratio >= 0.8:
                sufficient_match_count += 1

            results.append((language, round(ratio, 4)))

            # Three strong matches are plenty for this layer.
            if sufficient_match_count >= 3:
                break

    return sorted(
        filter_alt_coherence_matches(results), key=lambda entry: entry[1], reverse=True
    )
wemm/lib/python3.10/site-packages/charset_normalizer/cli/__init__.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
"""Public surface of the charset-normalizer command line interface."""
from __future__ import annotations

from .__main__ import cli_detect, query_yes_no

__all__ = (
    "cli_detect",
    "query_yes_no",
)
wemm/lib/python3.10/site-packages/charset_normalizer/cli/__main__.py ADDED
@@ -0,0 +1,321 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import argparse
4
+ import sys
5
+ from json import dumps
6
+ from os.path import abspath, basename, dirname, join, realpath
7
+ from platform import python_version
8
+ from unicodedata import unidata_version
9
+
10
+ import charset_normalizer.md as md_module
11
+ from charset_normalizer import from_fp
12
+ from charset_normalizer.models import CliDetectionResult
13
+ from charset_normalizer.version import __version__
14
+
15
+
16
def query_yes_no(question: str, default: str | None = "yes") -> bool:
    """Ask a yes/no question via input() and return their answer.

    "question" is a string that is presented to the user.
    "default" is the presumed answer if the user just hits <Enter>.
    It must be "yes" (the default), "no" or None (meaning
    an answer is required of the user).

    The "answer" return value is True for "yes" or False for "no".

    Raises ValueError when "default" is none of the accepted values.

    Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input
    """
    # Fix: the annotation previously declared ``default: str`` while the body
    # explicitly supports ``default is None`` — align the type with behavior.
    valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False}
    if default is None:
        prompt = " [y/n] "
    elif default == "yes":
        prompt = " [Y/n] "
    elif default == "no":
        prompt = " [y/N] "
    else:
        raise ValueError("invalid default answer: '%s'" % default)

    while True:
        sys.stdout.write(question + prompt)
        choice = input().lower()
        if default is not None and choice == "":
            return valid[default]
        elif choice in valid:
            return valid[choice]
        else:
            sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n")
47
+
48
+
49
def cli_detect(argv: list[str] | None = None) -> int:
    """
    CLI assistant using ARGV and ArgumentParser
    :param argv:
    :return: 0 if everything is fine, anything else equal trouble
    """
    parser = argparse.ArgumentParser(
        description="The Real First Universal Charset Detector. "
        "Discover originating encoding used on text file. "
        "Normalize text to unicode."
    )

    parser.add_argument(
        "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed"
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        default=False,
        dest="verbose",
        help="Display complementary information about file if any. "
        "Stdout will contain logs about the detection process.",
    )
    parser.add_argument(
        "-a",
        "--with-alternative",
        action="store_true",
        default=False,
        dest="alternatives",
        help="Output complementary possibilities if any. Top-level JSON WILL be a list.",
    )
    parser.add_argument(
        "-n",
        "--normalize",
        action="store_true",
        default=False,
        dest="normalize",
        help="Permit to normalize input file. If not set, program does not write anything.",
    )
    parser.add_argument(
        "-m",
        "--minimal",
        action="store_true",
        default=False,
        dest="minimal",
        help="Only output the charset detected to STDOUT. Disabling JSON output.",
    )
    parser.add_argument(
        "-r",
        "--replace",
        action="store_true",
        default=False,
        dest="replace",
        help="Replace file when trying to normalize it instead of creating a new one.",
    )
    parser.add_argument(
        "-f",
        "--force",
        action="store_true",
        default=False,
        dest="force",
        help="Replace file without asking if you are sure, use this flag with caution.",
    )
    parser.add_argument(
        "-i",
        "--no-preemptive",
        action="store_true",
        default=False,
        dest="no_preemptive",
        help="Disable looking at a charset declaration to hint the detector.",
    )
    parser.add_argument(
        "-t",
        "--threshold",
        action="store",
        default=0.2,
        type=float,
        dest="threshold",
        help="Define a custom maximum amount of noise allowed in decoded content. 0. <= noise <= 1.",
    )
    parser.add_argument(
        "--version",
        action="version",
        version="Charset-Normalizer {} - Python {} - Unicode {} - SpeedUp {}".format(
            __version__,
            python_version(),
            unidata_version,
            "OFF" if md_module.__file__.lower().endswith(".py") else "ON",
        ),
        help="Show version information and exit.",
    )

    args = parser.parse_args(argv)

    def _close_files() -> None:
        # Release every handle argparse opened on our behalf.
        for handle in args.files or []:
            handle.close()

    # Reject incoherent flag combinations before doing any real work.
    if args.replace and not args.normalize:
        _close_files()
        print("Use --replace in addition of --normalize only.", file=sys.stderr)
        return 1

    if args.force and not args.replace:
        _close_files()
        print("Use --force in addition of --replace only.", file=sys.stderr)
        return 1

    if not 0.0 <= args.threshold <= 1.0:
        _close_files()
        print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr)
        return 1

    x_ = []

    for my_file in args.files:
        matches = from_fp(
            my_file,
            threshold=args.threshold,
            explain=args.verbose,
            preemptive_behaviour=not args.no_preemptive,
        )

        best_guess = matches.best()

        if best_guess is None:
            print(
                'Unable to identify originating encoding for "{}". {}'.format(
                    my_file.name,
                    (
                        "Maybe try increasing maximum amount of chaos."
                        if args.threshold < 1.0
                        else ""
                    ),
                ),
                file=sys.stderr,
            )
            x_.append(
                CliDetectionResult(
                    abspath(my_file.name),
                    None,
                    [],
                    [],
                    "Unknown",
                    [],
                    False,
                    1.0,
                    0.0,
                    None,
                    True,
                )
            )
        else:
            x_.append(
                CliDetectionResult(
                    abspath(my_file.name),
                    best_guess.encoding,
                    best_guess.encoding_aliases,
                    [
                        cp
                        for cp in best_guess.could_be_from_charset
                        if cp != best_guess.encoding
                    ],
                    best_guess.language,
                    best_guess.alphabets,
                    best_guess.bom,
                    best_guess.percent_chaos,
                    best_guess.percent_coherence,
                    None,
                    True,
                )
            )

            if len(matches) > 1 and args.alternatives:
                for el in matches:
                    if el != best_guess:
                        x_.append(
                            CliDetectionResult(
                                abspath(my_file.name),
                                el.encoding,
                                el.encoding_aliases,
                                [
                                    cp
                                    for cp in el.could_be_from_charset
                                    if cp != el.encoding
                                ],
                                el.language,
                                el.alphabets,
                                el.bom,
                                el.percent_chaos,
                                el.percent_coherence,
                                None,
                                False,
                            )
                        )

            if args.normalize:
                if best_guess.encoding.startswith("utf"):
                    # Already unicode: nothing to rewrite.
                    print(
                        '"{}" file does not need to be normalized, as it already came from unicode.'.format(
                            my_file.name
                        ),
                        file=sys.stderr,
                    )
                    if not my_file.closed:
                        my_file.close()
                    continue

                dir_path = dirname(realpath(my_file.name))
                file_name = basename(realpath(my_file.name))

                o_: list[str] = file_name.split(".")

                if not args.replace:
                    # Write alongside the original, tagging the encoding
                    # into the new file name.
                    o_.insert(-1, best_guess.encoding)
                    if not my_file.closed:
                        my_file.close()
                elif not args.force and not query_yes_no(
                    'Are you sure to normalize "{}" by replacing it ?'.format(
                        my_file.name
                    ),
                    "no",
                ):
                    if not my_file.closed:
                        my_file.close()
                    continue

                try:
                    x_[0].unicode_path = join(dir_path, ".".join(o_))

                    with open(x_[0].unicode_path, "wb") as fp:
                        fp.write(best_guess.output())
                except OSError as e:
                    print(str(e), file=sys.stderr)
                    if not my_file.closed:
                        my_file.close()
                    return 2

        if not my_file.closed:
            my_file.close()

    if not args.minimal:
        print(
            dumps(
                [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__,
                ensure_ascii=True,
                indent=4,
            )
        )
    else:
        for my_file in args.files:
            print(
                ", ".join(
                    [
                        el.encoding or "undefined"
                        for el in x_
                        if el.path == abspath(my_file.name)
                    ]
                )
            )

    return 0


if __name__ == "__main__":
    cli_detect()
wemm/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-310.pyc ADDED
Binary file (6.76 kB). View file
 
wemm/lib/python3.10/site-packages/charset_normalizer/models.py ADDED
@@ -0,0 +1,360 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from encodings.aliases import aliases
4
+ from hashlib import sha256
5
+ from json import dumps
6
+ from re import sub
7
+ from typing import Any, Iterator, List, Tuple
8
+
9
+ from .constant import RE_POSSIBLE_ENCODING_INDICATION, TOO_BIG_SEQUENCE
10
+ from .utils import iana_name, is_multi_byte_encoding, unicode_range
11
+
12
+
13
+ class CharsetMatch:
14
+ def __init__(
15
+ self,
16
+ payload: bytes,
17
+ guessed_encoding: str,
18
+ mean_mess_ratio: float,
19
+ has_sig_or_bom: bool,
20
+ languages: CoherenceMatches,
21
+ decoded_payload: str | None = None,
22
+ preemptive_declaration: str | None = None,
23
+ ):
24
+ self._payload: bytes = payload
25
+
26
+ self._encoding: str = guessed_encoding
27
+ self._mean_mess_ratio: float = mean_mess_ratio
28
+ self._languages: CoherenceMatches = languages
29
+ self._has_sig_or_bom: bool = has_sig_or_bom
30
+ self._unicode_ranges: list[str] | None = None
31
+
32
+ self._leaves: list[CharsetMatch] = []
33
+ self._mean_coherence_ratio: float = 0.0
34
+
35
+ self._output_payload: bytes | None = None
36
+ self._output_encoding: str | None = None
37
+
38
+ self._string: str | None = decoded_payload
39
+
40
+ self._preemptive_declaration: str | None = preemptive_declaration
41
+
42
+ def __eq__(self, other: object) -> bool:
43
+ if not isinstance(other, CharsetMatch):
44
+ if isinstance(other, str):
45
+ return iana_name(other) == self.encoding
46
+ return False
47
+ return self.encoding == other.encoding and self.fingerprint == other.fingerprint
48
+
49
+ def __lt__(self, other: object) -> bool:
50
+ """
51
+ Implemented to make sorted available upon CharsetMatches items.
52
+ """
53
+ if not isinstance(other, CharsetMatch):
54
+ raise ValueError
55
+
56
+ chaos_difference: float = abs(self.chaos - other.chaos)
57
+ coherence_difference: float = abs(self.coherence - other.coherence)
58
+
59
+ # Below 1% difference --> Use Coherence
60
+ if chaos_difference < 0.01 and coherence_difference > 0.02:
61
+ return self.coherence > other.coherence
62
+ elif chaos_difference < 0.01 and coherence_difference <= 0.02:
63
+ # When having a difficult decision, use the result that decoded as many multi-byte as possible.
64
+ # preserve RAM usage!
65
+ if len(self._payload) >= TOO_BIG_SEQUENCE:
66
+ return self.chaos < other.chaos
67
+ return self.multi_byte_usage > other.multi_byte_usage
68
+
69
+ return self.chaos < other.chaos
70
+
71
+ @property
72
+ def multi_byte_usage(self) -> float:
73
+ return 1.0 - (len(str(self)) / len(self.raw))
74
+
75
+ def __str__(self) -> str:
76
+ # Lazy Str Loading
77
+ if self._string is None:
78
+ self._string = str(self._payload, self._encoding, "strict")
79
+ return self._string
80
+
81
+ def __repr__(self) -> str:
82
+ return f"<CharsetMatch '{self.encoding}' bytes({self.fingerprint})>"
83
+
84
+ def add_submatch(self, other: CharsetMatch) -> None:
85
+ if not isinstance(other, CharsetMatch) or other == self:
86
+ raise ValueError(
87
+ "Unable to add instance <{}> as a submatch of a CharsetMatch".format(
88
+ other.__class__
89
+ )
90
+ )
91
+
92
+ other._string = None # Unload RAM usage; dirty trick.
93
+ self._leaves.append(other)
94
+
95
+ @property
96
+ def encoding(self) -> str:
97
+ return self._encoding
98
+
99
+ @property
100
+ def encoding_aliases(self) -> list[str]:
101
+ """
102
+ Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855.
103
+ """
104
+ also_known_as: list[str] = []
105
+ for u, p in aliases.items():
106
+ if self.encoding == u:
107
+ also_known_as.append(p)
108
+ elif self.encoding == p:
109
+ also_known_as.append(u)
110
+ return also_known_as
111
+
112
+ @property
113
+ def bom(self) -> bool:
114
+ return self._has_sig_or_bom
115
+
116
+ @property
117
+ def byte_order_mark(self) -> bool:
118
+ return self._has_sig_or_bom
119
+
120
+ @property
121
+ def languages(self) -> list[str]:
122
+ """
123
+ Return the complete list of possible languages found in decoded sequence.
124
+ Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'.
125
+ """
126
+ return [e[0] for e in self._languages]
127
+
128
+ @property
129
+ def language(self) -> str:
130
+ """
131
+ Most probable language found in decoded sequence. If none were detected or inferred, the property will return
132
+ "Unknown".
133
+ """
134
+ if not self._languages:
135
+ # Trying to infer the language based on the given encoding
136
+ # Its either English or we should not pronounce ourselves in certain cases.
137
+ if "ascii" in self.could_be_from_charset:
138
+ return "English"
139
+
140
+ # doing it there to avoid circular import
141
+ from charset_normalizer.cd import encoding_languages, mb_encoding_languages
142
+
143
+ languages = (
144
+ mb_encoding_languages(self.encoding)
145
+ if is_multi_byte_encoding(self.encoding)
146
+ else encoding_languages(self.encoding)
147
+ )
148
+
149
+ if len(languages) == 0 or "Latin Based" in languages:
150
+ return "Unknown"
151
+
152
+ return languages[0]
153
+
154
+ return self._languages[0][0]
155
+
156
+ @property
157
+ def chaos(self) -> float:
158
+ return self._mean_mess_ratio
159
+
160
+ @property
161
+ def coherence(self) -> float:
162
+ if not self._languages:
163
+ return 0.0
164
+ return self._languages[0][1]
165
+
166
+ @property
167
+ def percent_chaos(self) -> float:
168
+ return round(self.chaos * 100, ndigits=3)
169
+
170
+ @property
171
+ def percent_coherence(self) -> float:
172
+ return round(self.coherence * 100, ndigits=3)
173
+
174
+ @property
175
+ def raw(self) -> bytes:
176
+ """
177
+ Original untouched bytes.
178
+ """
179
+ return self._payload
180
+
181
+ @property
182
+ def submatch(self) -> list[CharsetMatch]:
183
+ return self._leaves
184
+
185
+ @property
186
+ def has_submatch(self) -> bool:
187
+ return len(self._leaves) > 0
188
+
189
+ @property
190
+ def alphabets(self) -> list[str]:
191
+ if self._unicode_ranges is not None:
192
+ return self._unicode_ranges
193
+ # list detected ranges
194
+ detected_ranges: list[str | None] = [unicode_range(char) for char in str(self)]
195
+ # filter and sort
196
+ self._unicode_ranges = sorted(list({r for r in detected_ranges if r}))
197
+ return self._unicode_ranges
198
+
199
+ @property
200
+ def could_be_from_charset(self) -> list[str]:
201
+ """
202
+ The complete list of encoding that output the exact SAME str result and therefore could be the originating
203
+ encoding.
204
+ This list does include the encoding available in property 'encoding'.
205
+ """
206
+ return [self._encoding] + [m.encoding for m in self._leaves]
207
+
208
+ def output(self, encoding: str = "utf_8") -> bytes:
209
+ """
210
+ Method to get re-encoded bytes payload using given target encoding. Default to UTF-8.
211
+ Any errors will be simply ignored by the encoder NOT replaced.
212
+ """
213
+ if self._output_encoding is None or self._output_encoding != encoding:
214
+ self._output_encoding = encoding
215
+ decoded_string = str(self)
216
+ if (
217
+ self._preemptive_declaration is not None
218
+ and self._preemptive_declaration.lower()
219
+ not in ["utf-8", "utf8", "utf_8"]
220
+ ):
221
+ patched_header = sub(
222
+ RE_POSSIBLE_ENCODING_INDICATION,
223
+ lambda m: m.string[m.span()[0] : m.span()[1]].replace(
224
+ m.groups()[0],
225
+ iana_name(self._output_encoding).replace("_", "-"), # type: ignore[arg-type]
226
+ ),
227
+ decoded_string[:8192],
228
+ count=1,
229
+ )
230
+
231
+ decoded_string = patched_header + decoded_string[8192:]
232
+
233
+ self._output_payload = decoded_string.encode(encoding, "replace")
234
+
235
+ return self._output_payload # type: ignore
236
+
237
+ @property
238
+ def fingerprint(self) -> str:
239
+ """
240
+ Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one.
241
+ """
242
+ return sha256(self.output()).hexdigest()
243
+
244
+
245
+ class CharsetMatches:
246
+ """
247
+ Container with every CharsetMatch items ordered by default from most probable to the less one.
248
+ Act like a list(iterable) but does not implements all related methods.
249
+ """
250
+
251
+ def __init__(self, results: list[CharsetMatch] | None = None):
252
+ self._results: list[CharsetMatch] = sorted(results) if results else []
253
+
254
+ def __iter__(self) -> Iterator[CharsetMatch]:
255
+ yield from self._results
256
+
257
+ def __getitem__(self, item: int | str) -> CharsetMatch:
258
+ """
259
+ Retrieve a single item either by its position or encoding name (alias may be used here).
260
+ Raise KeyError upon invalid index or encoding not present in results.
261
+ """
262
+ if isinstance(item, int):
263
+ return self._results[item]
264
+ if isinstance(item, str):
265
+ item = iana_name(item, False)
266
+ for result in self._results:
267
+ if item in result.could_be_from_charset:
268
+ return result
269
+ raise KeyError
270
+
271
+ def __len__(self) -> int:
272
+ return len(self._results)
273
+
274
+ def __bool__(self) -> bool:
275
+ return len(self._results) > 0
276
+
277
+ def append(self, item: CharsetMatch) -> None:
278
+ """
279
+ Insert a single match. Will be inserted accordingly to preserve sort.
280
+ Can be inserted as a submatch.
281
+ """
282
+ if not isinstance(item, CharsetMatch):
283
+ raise ValueError(
284
+ "Cannot append instance '{}' to CharsetMatches".format(
285
+ str(item.__class__)
286
+ )
287
+ )
288
+ # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage)
289
+ if len(item.raw) < TOO_BIG_SEQUENCE:
290
+ for match in self._results:
291
+ if match.fingerprint == item.fingerprint and match.chaos == item.chaos:
292
+ match.add_submatch(item)
293
+ return
294
+ self._results.append(item)
295
+ self._results = sorted(self._results)
296
+
297
+ def best(self) -> CharsetMatch | None:
298
+ """
299
+ Simply return the first match. Strict equivalent to matches[0].
300
+ """
301
+ if not self._results:
302
+ return None
303
+ return self._results[0]
304
+
305
+ def first(self) -> CharsetMatch | None:
306
+ """
307
+ Redundant method, call the method best(). Kept for BC reasons.
308
+ """
309
+ return self.best()
310
+
311
+
312
+ CoherenceMatch = Tuple[str, float]
313
+ CoherenceMatches = List[CoherenceMatch]
314
+
315
+
316
+ class CliDetectionResult:
317
+ def __init__(
318
+ self,
319
+ path: str,
320
+ encoding: str | None,
321
+ encoding_aliases: list[str],
322
+ alternative_encodings: list[str],
323
+ language: str,
324
+ alphabets: list[str],
325
+ has_sig_or_bom: bool,
326
+ chaos: float,
327
+ coherence: float,
328
+ unicode_path: str | None,
329
+ is_preferred: bool,
330
+ ):
331
+ self.path: str = path
332
+ self.unicode_path: str | None = unicode_path
333
+ self.encoding: str | None = encoding
334
+ self.encoding_aliases: list[str] = encoding_aliases
335
+ self.alternative_encodings: list[str] = alternative_encodings
336
+ self.language: str = language
337
+ self.alphabets: list[str] = alphabets
338
+ self.has_sig_or_bom: bool = has_sig_or_bom
339
+ self.chaos: float = chaos
340
+ self.coherence: float = coherence
341
+ self.is_preferred: bool = is_preferred
342
+
343
+ @property
344
+ def __dict__(self) -> dict[str, Any]: # type: ignore
345
+ return {
346
+ "path": self.path,
347
+ "encoding": self.encoding,
348
+ "encoding_aliases": self.encoding_aliases,
349
+ "alternative_encodings": self.alternative_encodings,
350
+ "language": self.language,
351
+ "alphabets": self.alphabets,
352
+ "has_sig_or_bom": self.has_sig_or_bom,
353
+ "chaos": self.chaos,
354
+ "coherence": self.coherence,
355
+ "unicode_path": self.unicode_path,
356
+ "is_preferred": self.is_preferred,
357
+ }
358
+
359
+ def to_json(self) -> str:
360
+ return dumps(self.__dict__, ensure_ascii=True, indent=4)
wemm/lib/python3.10/site-packages/charset_normalizer/version.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Expose version
3
+ """
4
+
5
+ from __future__ import annotations
6
+
7
+ __version__ = "3.4.1"
8
+ VERSION = __version__.split(".")
wemm/lib/python3.10/site-packages/idna/__init__.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from .core import (
2
+ IDNABidiError,
3
+ IDNAError,
4
+ InvalidCodepoint,
5
+ InvalidCodepointContext,
6
+ alabel,
7
+ check_bidi,
8
+ check_hyphen_ok,
9
+ check_initial_combiner,
10
+ check_label,
11
+ check_nfc,
12
+ decode,
13
+ encode,
14
+ ulabel,
15
+ uts46_remap,
16
+ valid_contextj,
17
+ valid_contexto,
18
+ valid_label_length,
19
+ valid_string_length,
20
+ )
21
+ from .intranges import intranges_contain
22
+ from .package_data import __version__
23
+
24
+ __all__ = [
25
+ "__version__",
26
+ "IDNABidiError",
27
+ "IDNAError",
28
+ "InvalidCodepoint",
29
+ "InvalidCodepointContext",
30
+ "alabel",
31
+ "check_bidi",
32
+ "check_hyphen_ok",
33
+ "check_initial_combiner",
34
+ "check_label",
35
+ "check_nfc",
36
+ "decode",
37
+ "encode",
38
+ "intranges_contain",
39
+ "ulabel",
40
+ "uts46_remap",
41
+ "valid_contextj",
42
+ "valid_contexto",
43
+ "valid_label_length",
44
+ "valid_string_length",
45
+ ]
wemm/lib/python3.10/site-packages/idna/__pycache__/core.cpython-310.pyc ADDED
Binary file (9.64 kB). View file
 
wemm/lib/python3.10/site-packages/idna/__pycache__/intranges.cpython-310.pyc ADDED
Binary file (1.95 kB). View file
 
wemm/lib/python3.10/site-packages/idna/codec.py ADDED
@@ -0,0 +1,122 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import codecs
2
+ import re
3
+ from typing import Any, Optional, Tuple
4
+
5
+ from .core import IDNAError, alabel, decode, encode, ulabel
6
+
7
+ _unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]")
8
+
9
+
10
+ class Codec(codecs.Codec):
11
+ def encode(self, data: str, errors: str = "strict") -> Tuple[bytes, int]:
12
+ if errors != "strict":
13
+ raise IDNAError('Unsupported error handling "{}"'.format(errors))
14
+
15
+ if not data:
16
+ return b"", 0
17
+
18
+ return encode(data), len(data)
19
+
20
+ def decode(self, data: bytes, errors: str = "strict") -> Tuple[str, int]:
21
+ if errors != "strict":
22
+ raise IDNAError('Unsupported error handling "{}"'.format(errors))
23
+
24
+ if not data:
25
+ return "", 0
26
+
27
+ return decode(data), len(data)
28
+
29
+
30
+ class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
31
+ def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]:
32
+ if errors != "strict":
33
+ raise IDNAError('Unsupported error handling "{}"'.format(errors))
34
+
35
+ if not data:
36
+ return b"", 0
37
+
38
+ labels = _unicode_dots_re.split(data)
39
+ trailing_dot = b""
40
+ if labels:
41
+ if not labels[-1]:
42
+ trailing_dot = b"."
43
+ del labels[-1]
44
+ elif not final:
45
+ # Keep potentially unfinished label until the next call
46
+ del labels[-1]
47
+ if labels:
48
+ trailing_dot = b"."
49
+
50
+ result = []
51
+ size = 0
52
+ for label in labels:
53
+ result.append(alabel(label))
54
+ if size:
55
+ size += 1
56
+ size += len(label)
57
+
58
+ # Join with U+002E
59
+ result_bytes = b".".join(result) + trailing_dot
60
+ size += len(trailing_dot)
61
+ return result_bytes, size
62
+
63
+
64
+ class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
65
+ def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]:
66
+ if errors != "strict":
67
+ raise IDNAError('Unsupported error handling "{}"'.format(errors))
68
+
69
+ if not data:
70
+ return ("", 0)
71
+
72
+ if not isinstance(data, str):
73
+ data = str(data, "ascii")
74
+
75
+ labels = _unicode_dots_re.split(data)
76
+ trailing_dot = ""
77
+ if labels:
78
+ if not labels[-1]:
79
+ trailing_dot = "."
80
+ del labels[-1]
81
+ elif not final:
82
+ # Keep potentially unfinished label until the next call
83
+ del labels[-1]
84
+ if labels:
85
+ trailing_dot = "."
86
+
87
+ result = []
88
+ size = 0
89
+ for label in labels:
90
+ result.append(ulabel(label))
91
+ if size:
92
+ size += 1
93
+ size += len(label)
94
+
95
+ result_str = ".".join(result) + trailing_dot
96
+ size += len(trailing_dot)
97
+ return (result_str, size)
98
+
99
+
100
+ class StreamWriter(Codec, codecs.StreamWriter):
101
+ pass
102
+
103
+
104
+ class StreamReader(Codec, codecs.StreamReader):
105
+ pass
106
+
107
+
108
+ def search_function(name: str) -> Optional[codecs.CodecInfo]:
109
+ if name != "idna2008":
110
+ return None
111
+ return codecs.CodecInfo(
112
+ name=name,
113
+ encode=Codec().encode,
114
+ decode=Codec().decode,
115
+ incrementalencoder=IncrementalEncoder,
116
+ incrementaldecoder=IncrementalDecoder,
117
+ streamwriter=StreamWriter,
118
+ streamreader=StreamReader,
119
+ )
120
+
121
+
122
+ codecs.register(search_function)
wemm/lib/python3.10/site-packages/idna/core.py ADDED
@@ -0,0 +1,437 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import bisect
2
+ import re
3
+ import unicodedata
4
+ from typing import Optional, Union
5
+
6
+ from . import idnadata
7
+ from .intranges import intranges_contain
8
+
9
+ _virama_combining_class = 9
10
+ _alabel_prefix = b"xn--"
11
+ _unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]")
12
+
13
+
14
+ class IDNAError(UnicodeError):
15
+ """Base exception for all IDNA-encoding related problems"""
16
+
17
+ pass
18
+
19
+
20
+ class IDNABidiError(IDNAError):
21
+ """Exception when bidirectional requirements are not satisfied"""
22
+
23
+ pass
24
+
25
+
26
+ class InvalidCodepoint(IDNAError):
27
+ """Exception when a disallowed or unallocated codepoint is used"""
28
+
29
+ pass
30
+
31
+
32
+ class InvalidCodepointContext(IDNAError):
33
+ """Exception when the codepoint is not valid in the context it is used"""
34
+
35
+ pass
36
+
37
+
38
+ def _combining_class(cp: int) -> int:
39
+ v = unicodedata.combining(chr(cp))
40
+ if v == 0:
41
+ if not unicodedata.name(chr(cp)):
42
+ raise ValueError("Unknown character in unicodedata")
43
+ return v
44
+
45
+
46
+ def _is_script(cp: str, script: str) -> bool:
47
+ return intranges_contain(ord(cp), idnadata.scripts[script])
48
+
49
+
50
+ def _punycode(s: str) -> bytes:
51
+ return s.encode("punycode")
52
+
53
+
54
+ def _unot(s: int) -> str:
55
+ return "U+{:04X}".format(s)
56
+
57
+
58
+ def valid_label_length(label: Union[bytes, str]) -> bool:
59
+ if len(label) > 63:
60
+ return False
61
+ return True
62
+
63
+
64
+ def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool:
65
+ if len(label) > (254 if trailing_dot else 253):
66
+ return False
67
+ return True
68
+
69
+
70
+ def check_bidi(label: str, check_ltr: bool = False) -> bool:
71
+ # Bidi rules should only be applied if string contains RTL characters
72
+ bidi_label = False
73
+ for idx, cp in enumerate(label, 1):
74
+ direction = unicodedata.bidirectional(cp)
75
+ if direction == "":
76
+ # String likely comes from a newer version of Unicode
77
+ raise IDNABidiError("Unknown directionality in label {} at position {}".format(repr(label), idx))
78
+ if direction in ["R", "AL", "AN"]:
79
+ bidi_label = True
80
+ if not bidi_label and not check_ltr:
81
+ return True
82
+
83
+ # Bidi rule 1
84
+ direction = unicodedata.bidirectional(label[0])
85
+ if direction in ["R", "AL"]:
86
+ rtl = True
87
+ elif direction == "L":
88
+ rtl = False
89
+ else:
90
+ raise IDNABidiError("First codepoint in label {} must be directionality L, R or AL".format(repr(label)))
91
+
92
+ valid_ending = False
93
+ number_type: Optional[str] = None
94
+ for idx, cp in enumerate(label, 1):
95
+ direction = unicodedata.bidirectional(cp)
96
+
97
+ if rtl:
98
+ # Bidi rule 2
99
+ if direction not in [
100
+ "R",
101
+ "AL",
102
+ "AN",
103
+ "EN",
104
+ "ES",
105
+ "CS",
106
+ "ET",
107
+ "ON",
108
+ "BN",
109
+ "NSM",
110
+ ]:
111
+ raise IDNABidiError("Invalid direction for codepoint at position {} in a right-to-left label".format(idx))
112
+ # Bidi rule 3
113
+ if direction in ["R", "AL", "EN", "AN"]:
114
+ valid_ending = True
115
+ elif direction != "NSM":
116
+ valid_ending = False
117
+ # Bidi rule 4
118
+ if direction in ["AN", "EN"]:
119
+ if not number_type:
120
+ number_type = direction
121
+ else:
122
+ if number_type != direction:
123
+ raise IDNABidiError("Can not mix numeral types in a right-to-left label")
124
+ else:
125
+ # Bidi rule 5
126
+ if direction not in ["L", "EN", "ES", "CS", "ET", "ON", "BN", "NSM"]:
127
+ raise IDNABidiError("Invalid direction for codepoint at position {} in a left-to-right label".format(idx))
128
+ # Bidi rule 6
129
+ if direction in ["L", "EN"]:
130
+ valid_ending = True
131
+ elif direction != "NSM":
132
+ valid_ending = False
133
+
134
+ if not valid_ending:
135
+ raise IDNABidiError("Label ends with illegal codepoint directionality")
136
+
137
+ return True
138
+
139
+
140
+ def check_initial_combiner(label: str) -> bool:
141
+ if unicodedata.category(label[0])[0] == "M":
142
+ raise IDNAError("Label begins with an illegal combining character")
143
+ return True
144
+
145
+
146
+ def check_hyphen_ok(label: str) -> bool:
147
+ if label[2:4] == "--":
148
+ raise IDNAError("Label has disallowed hyphens in 3rd and 4th position")
149
+ if label[0] == "-" or label[-1] == "-":
150
+ raise IDNAError("Label must not start or end with a hyphen")
151
+ return True
152
+
153
+
154
+ def check_nfc(label: str) -> None:
155
+ if unicodedata.normalize("NFC", label) != label:
156
+ raise IDNAError("Label must be in Normalization Form C")
157
+
158
+
159
+ def valid_contextj(label: str, pos: int) -> bool:
160
+ cp_value = ord(label[pos])
161
+
162
+ if cp_value == 0x200C:
163
+ if pos > 0:
164
+ if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
165
+ return True
166
+
167
+ ok = False
168
+ for i in range(pos - 1, -1, -1):
169
+ joining_type = idnadata.joining_types.get(ord(label[i]))
170
+ if joining_type == ord("T"):
171
+ continue
172
+ elif joining_type in [ord("L"), ord("D")]:
173
+ ok = True
174
+ break
175
+ else:
176
+ break
177
+
178
+ if not ok:
179
+ return False
180
+
181
+ ok = False
182
+ for i in range(pos + 1, len(label)):
183
+ joining_type = idnadata.joining_types.get(ord(label[i]))
184
+ if joining_type == ord("T"):
185
+ continue
186
+ elif joining_type in [ord("R"), ord("D")]:
187
+ ok = True
188
+ break
189
+ else:
190
+ break
191
+ return ok
192
+
193
+ if cp_value == 0x200D:
194
+ if pos > 0:
195
+ if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
196
+ return True
197
+ return False
198
+
199
+ else:
200
+ return False
201
+
202
+
203
+ def valid_contexto(label: str, pos: int, exception: bool = False) -> bool:
204
+ cp_value = ord(label[pos])
205
+
206
+ if cp_value == 0x00B7:
207
+ if 0 < pos < len(label) - 1:
208
+ if ord(label[pos - 1]) == 0x006C and ord(label[pos + 1]) == 0x006C:
209
+ return True
210
+ return False
211
+
212
+ elif cp_value == 0x0375:
213
+ if pos < len(label) - 1 and len(label) > 1:
214
+ return _is_script(label[pos + 1], "Greek")
215
+ return False
216
+
217
+ elif cp_value == 0x05F3 or cp_value == 0x05F4:
218
+ if pos > 0:
219
+ return _is_script(label[pos - 1], "Hebrew")
220
+ return False
221
+
222
+ elif cp_value == 0x30FB:
223
+ for cp in label:
224
+ if cp == "\u30fb":
225
+ continue
226
+ if _is_script(cp, "Hiragana") or _is_script(cp, "Katakana") or _is_script(cp, "Han"):
227
+ return True
228
+ return False
229
+
230
+ elif 0x660 <= cp_value <= 0x669:
231
+ for cp in label:
232
+ if 0x6F0 <= ord(cp) <= 0x06F9:
233
+ return False
234
+ return True
235
+
236
+ elif 0x6F0 <= cp_value <= 0x6F9:
237
+ for cp in label:
238
+ if 0x660 <= ord(cp) <= 0x0669:
239
+ return False
240
+ return True
241
+
242
+ return False
243
+
244
+
245
+ def check_label(label: Union[str, bytes, bytearray]) -> None:
246
+ if isinstance(label, (bytes, bytearray)):
247
+ label = label.decode("utf-8")
248
+ if len(label) == 0:
249
+ raise IDNAError("Empty Label")
250
+
251
+ check_nfc(label)
252
+ check_hyphen_ok(label)
253
+ check_initial_combiner(label)
254
+
255
+ for pos, cp in enumerate(label):
256
+ cp_value = ord(cp)
257
+ if intranges_contain(cp_value, idnadata.codepoint_classes["PVALID"]):
258
+ continue
259
+ elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTJ"]):
260
+ try:
261
+ if not valid_contextj(label, pos):
262
+ raise InvalidCodepointContext(
263
+ "Joiner {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label))
264
+ )
265
+ except ValueError:
266
+ raise IDNAError(
267
+ "Unknown codepoint adjacent to joiner {} at position {} in {}".format(
268
+ _unot(cp_value), pos + 1, repr(label)
269
+ )
270
+ )
271
+ elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTO"]):
272
+ if not valid_contexto(label, pos):
273
+ raise InvalidCodepointContext(
274
+ "Codepoint {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label))
275
+ )
276
+ else:
277
+ raise InvalidCodepoint(
278
+ "Codepoint {} at position {} of {} not allowed".format(_unot(cp_value), pos + 1, repr(label))
279
+ )
280
+
281
+ check_bidi(label)
282
+
283
+
284
+ def alabel(label: str) -> bytes:
285
+ try:
286
+ label_bytes = label.encode("ascii")
287
+ ulabel(label_bytes)
288
+ if not valid_label_length(label_bytes):
289
+ raise IDNAError("Label too long")
290
+ return label_bytes
291
+ except UnicodeEncodeError:
292
+ pass
293
+
294
+ check_label(label)
295
+ label_bytes = _alabel_prefix + _punycode(label)
296
+
297
+ if not valid_label_length(label_bytes):
298
+ raise IDNAError("Label too long")
299
+
300
+ return label_bytes
301
+
302
+
303
+ def ulabel(label: Union[str, bytes, bytearray]) -> str:
304
+ if not isinstance(label, (bytes, bytearray)):
305
+ try:
306
+ label_bytes = label.encode("ascii")
307
+ except UnicodeEncodeError:
308
+ check_label(label)
309
+ return label
310
+ else:
311
+ label_bytes = label
312
+
313
+ label_bytes = label_bytes.lower()
314
+ if label_bytes.startswith(_alabel_prefix):
315
+ label_bytes = label_bytes[len(_alabel_prefix) :]
316
+ if not label_bytes:
317
+ raise IDNAError("Malformed A-label, no Punycode eligible content found")
318
+ if label_bytes.decode("ascii")[-1] == "-":
319
+ raise IDNAError("A-label must not end with a hyphen")
320
+ else:
321
+ check_label(label_bytes)
322
+ return label_bytes.decode("ascii")
323
+
324
+ try:
325
+ label = label_bytes.decode("punycode")
326
+ except UnicodeError:
327
+ raise IDNAError("Invalid A-label")
328
+ check_label(label)
329
+ return label
330
+
331
+
332
+ def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str:
333
+ """Re-map the characters in the string according to UTS46 processing."""
334
+ from .uts46data import uts46data
335
+
336
+ output = ""
337
+
338
+ for pos, char in enumerate(domain):
339
+ code_point = ord(char)
340
+ try:
341
+ uts46row = uts46data[code_point if code_point < 256 else bisect.bisect_left(uts46data, (code_point, "Z")) - 1]
342
+ status = uts46row[1]
343
+ replacement: Optional[str] = None
344
+ if len(uts46row) == 3:
345
+ replacement = uts46row[2]
346
+ if (
347
+ status == "V"
348
+ or (status == "D" and not transitional)
349
+ or (status == "3" and not std3_rules and replacement is None)
350
+ ):
351
+ output += char
352
+ elif replacement is not None and (
353
+ status == "M" or (status == "3" and not std3_rules) or (status == "D" and transitional)
354
+ ):
355
+ output += replacement
356
+ elif status != "I":
357
+ raise IndexError()
358
+ except IndexError:
359
+ raise InvalidCodepoint(
360
+ "Codepoint {} not allowed at position {} in {}".format(_unot(code_point), pos + 1, repr(domain))
361
+ )
362
+
363
+ return unicodedata.normalize("NFC", output)
364
+
365
+
366
+ def encode(
367
+ s: Union[str, bytes, bytearray],
368
+ strict: bool = False,
369
+ uts46: bool = False,
370
+ std3_rules: bool = False,
371
+ transitional: bool = False,
372
+ ) -> bytes:
373
+ if not isinstance(s, str):
374
+ try:
375
+ s = str(s, "ascii")
376
+ except UnicodeDecodeError:
377
+ raise IDNAError("should pass a unicode string to the function rather than a byte string.")
378
+ if uts46:
379
+ s = uts46_remap(s, std3_rules, transitional)
380
+ trailing_dot = False
381
+ result = []
382
+ if strict:
383
+ labels = s.split(".")
384
+ else:
385
+ labels = _unicode_dots_re.split(s)
386
+ if not labels or labels == [""]:
387
+ raise IDNAError("Empty domain")
388
+ if labels[-1] == "":
389
+ del labels[-1]
390
+ trailing_dot = True
391
+ for label in labels:
392
+ s = alabel(label)
393
+ if s:
394
+ result.append(s)
395
+ else:
396
+ raise IDNAError("Empty label")
397
+ if trailing_dot:
398
+ result.append(b"")
399
+ s = b".".join(result)
400
+ if not valid_string_length(s, trailing_dot):
401
+ raise IDNAError("Domain too long")
402
+ return s
403
+
404
+
405
+ def decode(
406
+ s: Union[str, bytes, bytearray],
407
+ strict: bool = False,
408
+ uts46: bool = False,
409
+ std3_rules: bool = False,
410
+ ) -> str:
411
+ try:
412
+ if not isinstance(s, str):
413
+ s = str(s, "ascii")
414
+ except UnicodeDecodeError:
415
+ raise IDNAError("Invalid ASCII in A-label")
416
+ if uts46:
417
+ s = uts46_remap(s, std3_rules, False)
418
+ trailing_dot = False
419
+ result = []
420
+ if not strict:
421
+ labels = _unicode_dots_re.split(s)
422
+ else:
423
+ labels = s.split(".")
424
+ if not labels or labels == [""]:
425
+ raise IDNAError("Empty domain")
426
+ if not labels[-1]:
427
+ del labels[-1]
428
+ trailing_dot = True
429
+ for label in labels:
430
+ s = ulabel(label)
431
+ if s:
432
+ result.append(s)
433
+ else:
434
+ raise IDNAError("Empty label")
435
+ if trailing_dot:
436
+ result.append("")
437
+ return ".".join(result)
wemm/lib/python3.10/site-packages/idna/intranges.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Given a list of integers, made up of (hopefully) a small number of long runs
3
+ of consecutive integers, compute a representation of the form
4
+ ((start1, end1), (start2, end2) ...). Then answer the question "was x present
5
+ in the original list?" in time O(log(# runs)).
6
+ """
7
+
8
+ import bisect
9
+ from typing import List, Tuple
10
+
11
+
12
def intranges_from_list(list_: List[int]) -> Tuple[int, ...]:
    """Represent a list of integers as a sequence of ranges:
    ((start_0, end_0), (start_1, end_1), ...), such that the original
    integers are exactly those x such that start_i <= x < end_i for some i.

    Ranges are encoded as single integers (start << 32 | end), not as tuples.
    """
    values = sorted(list_)
    encoded = []
    run_start = 0  # index into `values` where the current run of consecutive ints began
    for idx, value in enumerate(values):
        if idx + 1 < len(values) and value == values[idx + 1] - 1:
            # Still inside a run of consecutive integers; keep scanning.
            continue
        # The run ends here: encode it as (start << 32) | end, end exclusive.
        encoded.append((values[run_start] << 32) | (value + 1))
        run_start = idx + 1
    return tuple(encoded)
32
+
33
+
34
+ def _encode_range(start: int, end: int) -> int:
35
+ return (start << 32) | end
36
+
37
+
38
+ def _decode_range(r: int) -> Tuple[int, int]:
39
+ return (r >> 32), (r & ((1 << 32) - 1))
40
+
41
+
42
def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool:
    """Determine if `int_` falls into one of the ranges in `ranges`."""
    # Encode int_ as a probe range (start=int_, end=0) so bisect can place it
    # among the encoded (start << 32 | end) values.
    probe = int_ << 32
    pos = bisect.bisect_left(ranges, probe)
    # The range immediately before the insertion point may cover int_:
    # start < int_ and int_ < end.
    if pos > 0:
        prev = ranges[pos - 1]
        if (prev >> 32) <= int_ < (prev & ((1 << 32) - 1)):
            return True
    # Or int_ may be exactly the start of the range at the insertion point.
    if pos < len(ranges) and (ranges[pos] >> 32) == int_:
        return True
    return False
wemm/lib/python3.10/site-packages/lightning_utilities/__init__.py ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Root package info."""
2
+
3
+ import os
4
+
5
+ from lightning_utilities.__about__ import * # noqa: F403
6
+ from lightning_utilities.core.apply_func import apply_to_collection
7
+ from lightning_utilities.core.enums import StrEnum
8
+ from lightning_utilities.core.imports import compare_version, module_available
9
+ from lightning_utilities.core.overrides import is_overridden
10
+ from lightning_utilities.core.rank_zero import WarningCache
11
+
12
+ _PACKAGE_ROOT = os.path.dirname(__file__)
13
+ _PROJECT_ROOT = os.path.dirname(_PACKAGE_ROOT)
14
+
15
+
16
+ __all__ = [
17
+ "apply_to_collection",
18
+ "StrEnum",
19
+ "module_available",
20
+ "compare_version",
21
+ "is_overridden",
22
+ "WarningCache",
23
+ ]
wemm/lib/python3.10/site-packages/lightning_utilities/cli/__main__.py ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright The Lightning AI team.
2
+ # Licensed under the Apache License, Version 2.0 (the "License");
3
+ # http://www.apache.org/licenses/LICENSE-2.0
4
+ #
5
+
6
+ import lightning_utilities
7
+ from lightning_utilities.cli.dependencies import prune_pkgs_in_requirements, replace_oldest_ver
8
+
9
+
10
def main() -> None:
    """Console entry point: dispatch CLI sub-commands through ``fire``."""
    # Imported lazily so the package itself has no hard dependency on fire.
    from fire import Fire

    commands = {
        "requirements": {
            "prune-pkgs": prune_pkgs_in_requirements,
            "set-oldest": replace_oldest_ver,
        },
        "version": lambda: print(lightning_utilities.__version__),
    }
    Fire(commands)
21
+
22
+
23
if __name__ == "__main__":
    # Allows invocation as `python -m lightning_utilities.cli`.
    main()
wemm/lib/python3.10/site-packages/lightning_utilities/install/__init__.py ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ """Generic Installation tools."""
2
+
3
+ from lightning_utilities.install.requirements import Requirement, load_requirements
4
+
5
+ __all__ = ["load_requirements", "Requirement"]
wemm/lib/python3.10/site-packages/lightning_utilities/install/__pycache__/requirements.cpython-310.pyc ADDED
Binary file (4.72 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/__init__.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ NetworkX
3
+ ========
4
+
5
+ NetworkX is a Python package for the creation, manipulation, and study of the
6
+ structure, dynamics, and functions of complex networks.
7
+
8
+ See https://networkx.org for complete documentation.
9
+ """
10
+
11
+ __version__ = "3.4.2"
12
+
13
+
14
+ # These are imported in order as listed
15
+ from networkx.lazy_imports import _lazy_import
16
+
17
+ from networkx.exception import *
18
+
19
+ from networkx import utils
20
+ from networkx.utils import _clear_cache, _dispatchable
21
+
22
+ # load_and_call entry_points, set configs
23
+ config = utils.backends._set_configs_from_environment()
24
+ utils.config = utils.configs.config = config # type: ignore[attr-defined]
25
+
26
+ from networkx import classes
27
+ from networkx.classes import filters
28
+ from networkx.classes import *
29
+
30
+ from networkx import convert
31
+ from networkx.convert import *
32
+
33
+ from networkx import convert_matrix
34
+ from networkx.convert_matrix import *
35
+
36
+ from networkx import relabel
37
+ from networkx.relabel import *
38
+
39
+ from networkx import generators
40
+ from networkx.generators import *
41
+
42
+ from networkx import readwrite
43
+ from networkx.readwrite import *
44
+
45
+ # Need to test with SciPy, when available
46
+ from networkx import algorithms
47
+ from networkx.algorithms import *
48
+
49
+ from networkx import linalg
50
+ from networkx.linalg import *
51
+
52
+ from networkx import drawing
53
+ from networkx.drawing import *
wemm/lib/python3.10/site-packages/networkx/algorithms/__init__.py ADDED
@@ -0,0 +1,133 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from networkx.algorithms.assortativity import *
2
+ from networkx.algorithms.asteroidal import *
3
+ from networkx.algorithms.boundary import *
4
+ from networkx.algorithms.broadcasting import *
5
+ from networkx.algorithms.bridges import *
6
+ from networkx.algorithms.chains import *
7
+ from networkx.algorithms.centrality import *
8
+ from networkx.algorithms.chordal import *
9
+ from networkx.algorithms.cluster import *
10
+ from networkx.algorithms.clique import *
11
+ from networkx.algorithms.communicability_alg import *
12
+ from networkx.algorithms.components import *
13
+ from networkx.algorithms.coloring import *
14
+ from networkx.algorithms.core import *
15
+ from networkx.algorithms.covering import *
16
+ from networkx.algorithms.cycles import *
17
+ from networkx.algorithms.cuts import *
18
+ from networkx.algorithms.d_separation import *
19
+ from networkx.algorithms.dag import *
20
+ from networkx.algorithms.distance_measures import *
21
+ from networkx.algorithms.distance_regular import *
22
+ from networkx.algorithms.dominance import *
23
+ from networkx.algorithms.dominating import *
24
+ from networkx.algorithms.efficiency_measures import *
25
+ from networkx.algorithms.euler import *
26
+ from networkx.algorithms.graphical import *
27
+ from networkx.algorithms.hierarchy import *
28
+ from networkx.algorithms.hybrid import *
29
+ from networkx.algorithms.link_analysis import *
30
+ from networkx.algorithms.link_prediction import *
31
+ from networkx.algorithms.lowest_common_ancestors import *
32
+ from networkx.algorithms.isolate import *
33
+ from networkx.algorithms.matching import *
34
+ from networkx.algorithms.minors import *
35
+ from networkx.algorithms.mis import *
36
+ from networkx.algorithms.moral import *
37
+ from networkx.algorithms.non_randomness import *
38
+ from networkx.algorithms.operators import *
39
+ from networkx.algorithms.planarity import *
40
+ from networkx.algorithms.planar_drawing import *
41
+ from networkx.algorithms.polynomials import *
42
+ from networkx.algorithms.reciprocity import *
43
+ from networkx.algorithms.regular import *
44
+ from networkx.algorithms.richclub import *
45
+ from networkx.algorithms.shortest_paths import *
46
+ from networkx.algorithms.similarity import *
47
+ from networkx.algorithms.graph_hashing import *
48
+ from networkx.algorithms.simple_paths import *
49
+ from networkx.algorithms.smallworld import *
50
+ from networkx.algorithms.smetric import *
51
+ from networkx.algorithms.structuralholes import *
52
+ from networkx.algorithms.sparsifiers import *
53
+ from networkx.algorithms.summarization import *
54
+ from networkx.algorithms.swap import *
55
+ from networkx.algorithms.time_dependent import *
56
+ from networkx.algorithms.traversal import *
57
+ from networkx.algorithms.triads import *
58
+ from networkx.algorithms.vitality import *
59
+ from networkx.algorithms.voronoi import *
60
+ from networkx.algorithms.walks import *
61
+ from networkx.algorithms.wiener import *
62
+
63
+ # Make certain subpackages available to the user as direct imports from
64
+ # the `networkx` namespace.
65
+ from networkx.algorithms import approximation
66
+ from networkx.algorithms import assortativity
67
+ from networkx.algorithms import bipartite
68
+ from networkx.algorithms import node_classification
69
+ from networkx.algorithms import centrality
70
+ from networkx.algorithms import chordal
71
+ from networkx.algorithms import cluster
72
+ from networkx.algorithms import clique
73
+ from networkx.algorithms import components
74
+ from networkx.algorithms import connectivity
75
+ from networkx.algorithms import community
76
+ from networkx.algorithms import coloring
77
+ from networkx.algorithms import flow
78
+ from networkx.algorithms import isomorphism
79
+ from networkx.algorithms import link_analysis
80
+ from networkx.algorithms import lowest_common_ancestors
81
+ from networkx.algorithms import operators
82
+ from networkx.algorithms import shortest_paths
83
+ from networkx.algorithms import tournament
84
+ from networkx.algorithms import traversal
85
+ from networkx.algorithms import tree
86
+
87
+ # Make certain functions from some of the previous subpackages available
88
+ # to the user as direct imports from the `networkx` namespace.
89
+ from networkx.algorithms.bipartite import complete_bipartite_graph
90
+ from networkx.algorithms.bipartite import is_bipartite
91
+ from networkx.algorithms.bipartite import projected_graph
92
+ from networkx.algorithms.connectivity import all_pairs_node_connectivity
93
+ from networkx.algorithms.connectivity import all_node_cuts
94
+ from networkx.algorithms.connectivity import average_node_connectivity
95
+ from networkx.algorithms.connectivity import edge_connectivity
96
+ from networkx.algorithms.connectivity import edge_disjoint_paths
97
+ from networkx.algorithms.connectivity import k_components
98
+ from networkx.algorithms.connectivity import k_edge_components
99
+ from networkx.algorithms.connectivity import k_edge_subgraphs
100
+ from networkx.algorithms.connectivity import k_edge_augmentation
101
+ from networkx.algorithms.connectivity import is_k_edge_connected
102
+ from networkx.algorithms.connectivity import minimum_edge_cut
103
+ from networkx.algorithms.connectivity import minimum_node_cut
104
+ from networkx.algorithms.connectivity import node_connectivity
105
+ from networkx.algorithms.connectivity import node_disjoint_paths
106
+ from networkx.algorithms.connectivity import stoer_wagner
107
+ from networkx.algorithms.flow import capacity_scaling
108
+ from networkx.algorithms.flow import cost_of_flow
109
+ from networkx.algorithms.flow import gomory_hu_tree
110
+ from networkx.algorithms.flow import max_flow_min_cost
111
+ from networkx.algorithms.flow import maximum_flow
112
+ from networkx.algorithms.flow import maximum_flow_value
113
+ from networkx.algorithms.flow import min_cost_flow
114
+ from networkx.algorithms.flow import min_cost_flow_cost
115
+ from networkx.algorithms.flow import minimum_cut
116
+ from networkx.algorithms.flow import minimum_cut_value
117
+ from networkx.algorithms.flow import network_simplex
118
+ from networkx.algorithms.isomorphism import could_be_isomorphic
119
+ from networkx.algorithms.isomorphism import fast_could_be_isomorphic
120
+ from networkx.algorithms.isomorphism import faster_could_be_isomorphic
121
+ from networkx.algorithms.isomorphism import is_isomorphic
122
+ from networkx.algorithms.isomorphism.vf2pp import *
123
+ from networkx.algorithms.tree.branchings import maximum_branching
124
+ from networkx.algorithms.tree.branchings import maximum_spanning_arborescence
125
+ from networkx.algorithms.tree.branchings import minimum_branching
126
+ from networkx.algorithms.tree.branchings import minimum_spanning_arborescence
127
+ from networkx.algorithms.tree.branchings import ArborescenceIterator
128
+ from networkx.algorithms.tree.coding import *
129
+ from networkx.algorithms.tree.decomposition import *
130
+ from networkx.algorithms.tree.mst import *
131
+ from networkx.algorithms.tree.operations import *
132
+ from networkx.algorithms.tree.recognition import *
133
+ from networkx.algorithms.tournament import is_tournament
wemm/lib/python3.10/site-packages/networkx/algorithms/chordal.py ADDED
@@ -0,0 +1,443 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Algorithms for chordal graphs.
3
+
4
+ A graph is chordal if every cycle of length at least 4 has a chord
5
+ (an edge joining two nodes not adjacent in the cycle).
6
+ https://en.wikipedia.org/wiki/Chordal_graph
7
+ """
8
+
9
+ import sys
10
+
11
+ import networkx as nx
12
+ from networkx.algorithms.components import connected_components
13
+ from networkx.utils import arbitrary_element, not_implemented_for
14
+
15
+ __all__ = [
16
+ "is_chordal",
17
+ "find_induced_nodes",
18
+ "chordal_graph_cliques",
19
+ "chordal_graph_treewidth",
20
+ "NetworkXTreewidthBoundExceeded",
21
+ "complete_to_chordal_graph",
22
+ ]
23
+
24
+
25
class NetworkXTreewidthBoundExceeded(nx.NetworkXException):
    """Raised when a caller-supplied treewidth bound has been exceeded."""
28
+
29
+
30
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def is_chordal(G):
    """Return True if `G` is a chordal graph, False otherwise.

    A graph is chordal if every cycle of length at least 4 has a chord,
    i.e. an edge joining two nodes that are not adjacent in the cycle.

    Parameters
    ----------
    G : graph
        An undirected NetworkX graph.

    Returns
    -------
    chordal : bool
        True if G is a chordal graph and False otherwise.

    Raises
    ------
    NetworkXNotImplemented
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.

    Examples
    --------
    >>> e = [
    ...     (1, 2),
    ...     (1, 3),
    ...     (2, 3),
    ...     (2, 4),
    ...     (3, 4),
    ...     (3, 5),
    ...     (3, 6),
    ...     (4, 5),
    ...     (4, 6),
    ...     (5, 6),
    ... ]
    >>> G = nx.Graph(e)
    >>> nx.is_chordal(G)
    True

    Notes
    -----
    Runs a maximum cardinality search over the nodes and reports False as
    soon as some node's separator fails to form a clique. Based on the
    algorithms in [1]_.

    Self loops are ignored.

    References
    ----------
    .. [1] R. E. Tarjan and M. Yannakakis, Simple linear-time algorithms
       to test chordality of graphs, test acyclicity of hypergraphs, and
       selectively reduce acyclic hypergraphs, SIAM J. Comput., 13 (1984),
       pp. 566–579.
    """
    # A graph on at most three nodes cannot contain a cycle of length >= 4,
    # so it is trivially chordal.
    if len(G.nodes) <= 3:
        return True
    return not _find_chordality_breaker(G)
90
+
91
+
92
@nx._dispatchable
def find_induced_nodes(G, s, t, treewidth_bound=sys.maxsize):
    """Returns the set of induced nodes in the path from s to t.

    Parameters
    ----------
    G : graph
        A chordal NetworkX graph
    s : node
        Source node to look for induced nodes
    t : node
        Destination node to look for induced nodes
    treewidth_bound: float
        Maximum treewidth acceptable for the graph H. The search
        for induced nodes will end as soon as the treewidth_bound is exceeded.

    Returns
    -------
    induced_nodes : Set of nodes
        The set of induced nodes in the path from s to t in G

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        If the input graph is an instance of one of these classes, a
        :exc:`NetworkXError` is raised.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> G = nx.Graph()
    >>> G = nx.generators.classic.path_graph(10)
    >>> induced_nodes = nx.find_induced_nodes(G, 1, 9, 2)
    >>> sorted(induced_nodes)
    [1, 2, 3, 4, 5, 6, 7, 8, 9]

    Notes
    -----
    G must be a chordal graph and (s,t) an edge that is not in G.

    If a treewidth_bound is provided, the search for induced nodes will end
    as soon as the treewidth_bound is exceeded.

    The algorithm is inspired by Algorithm 4 in [1]_.
    A formal definition of induced node can also be found on that reference.

    Self Loops are ignored

    References
    ----------
    .. [1] Learning Bounded Treewidth Bayesian Networks.
       Gal Elidan, Stephen Gould; JMLR, 9(Dec):2699--2731, 2008.
       http://jmlr.csail.mit.edu/papers/volume9/elidan08a/elidan08a.pdf
    """
    if not is_chordal(G):
        raise nx.NetworkXError("Input graph is not chordal.")

    # Work on a copy of G with the extra edge (s, t): since G was chordal,
    # every chordality breaker found below is caused by that edge and
    # exposes nodes induced by the s-t path.
    H = nx.Graph(G)
    H.add_edge(s, t)
    induced_nodes = set()
    triplet = _find_chordality_breaker(H, s, treewidth_bound)
    # Repeatedly connect s to each breaker triplet until H becomes chordal
    # again; every node collected on the way is an induced node.
    while triplet:
        (u, v, w) = triplet
        induced_nodes.update(triplet)
        for n in triplet:
            if n != s:
                H.add_edge(s, n)
        triplet = _find_chordality_breaker(H, s, treewidth_bound)
    if induced_nodes:
        # Add t and the second node in the induced path from s to t.
        induced_nodes.add(t)
        for u in G[s]:
            # The second node on the path is the neighbor of s adjacent to
            # exactly two of the already-collected induced nodes.
            if len(induced_nodes & set(G[u])) == 2:
                induced_nodes.add(u)
                break
    return induced_nodes
170
+
171
+
172
@nx._dispatchable
def chordal_graph_cliques(G):
    """Returns all maximal cliques of a chordal graph.

    The algorithm breaks the graph in connected components and performs a
    maximum cardinality search in each component to get the cliques.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    Yields
    ------
    frozenset of nodes
        Maximal cliques, each of which is a frozenset of
        nodes in `G`. The order of cliques is arbitrary.

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> e = [
    ...     (1, 2),
    ...     (1, 3),
    ...     (2, 3),
    ...     (2, 4),
    ...     (3, 4),
    ...     (3, 5),
    ...     (3, 6),
    ...     (4, 5),
    ...     (4, 6),
    ...     (5, 6),
    ...     (7, 8),
    ... ]
    >>> G = nx.Graph(e)
    >>> G.add_node(9)
    >>> cliques = [c for c in chordal_graph_cliques(G)]
    >>> cliques[0]
    frozenset({1, 2, 3})
    """
    # Process each connected component independently.
    for C in (G.subgraph(c).copy() for c in connected_components(G)):
        if C.number_of_nodes() == 1:
            # A self loop would make the graph non-chordal by convention here.
            if nx.number_of_selfloops(C) > 0:
                raise nx.NetworkXError("Input graph is not chordal.")
            yield frozenset(C.nodes())
        else:
            # Maximum cardinality search over the component, growing a
            # candidate clique until it can no longer be extended.
            unnumbered = set(C.nodes())
            v = arbitrary_element(C)
            unnumbered.remove(v)
            numbered = {v}
            clique_wanna_be = {v}
            while unnumbered:
                v = _max_cardinality_node(C, unnumbered, numbered)
                unnumbered.remove(v)
                numbered.add(v)
                new_clique_wanna_be = set(C.neighbors(v)) & numbered
                sg = C.subgraph(clique_wanna_be)
                if _is_complete_graph(sg):
                    new_clique_wanna_be.add(v)
                    # If the new candidate does not contain the old one, the
                    # old candidate was maximal: report it.
                    if not new_clique_wanna_be >= clique_wanna_be:
                        yield frozenset(clique_wanna_be)
                    clique_wanna_be = new_clique_wanna_be
                else:
                    raise nx.NetworkXError("Input graph is not chordal.")
            # The final candidate is always a maximal clique.
            yield frozenset(clique_wanna_be)
243
+
244
+
245
@nx._dispatchable
def chordal_graph_treewidth(G):
    """Returns the treewidth of the chordal graph G.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    Returns
    -------
    treewidth : int
        The size of the largest clique in the graph minus one.

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> e = [
    ...     (1, 2),
    ...     (1, 3),
    ...     (2, 3),
    ...     (2, 4),
    ...     (3, 4),
    ...     (3, 5),
    ...     (3, 6),
    ...     (4, 5),
    ...     (4, 6),
    ...     (5, 6),
    ...     (7, 8),
    ... ]
    >>> G = nx.Graph(e)
    >>> G.add_node(9)
    >>> nx.chordal_graph_treewidth(G)
    3

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Tree_decomposition#Treewidth
    """
    if not is_chordal(G):
        raise nx.NetworkXError("Input graph is not chordal.")

    # For a chordal graph the treewidth is exactly the size of its largest
    # maximal clique minus one.
    largest_clique = max(
        (len(clique) for clique in nx.chordal_graph_cliques(G)), default=-1
    )
    return largest_clique - 1
297
+
298
+
299
def _is_complete_graph(G):
    """Return True if every pair of distinct nodes of G is adjacent."""
    if nx.number_of_selfloops(G) > 0:
        raise nx.NetworkXError("Self loop found in _is_complete_graph()")
    n = G.number_of_nodes()
    if n < 2:
        # The empty graph and a single node are vacuously complete.
        return True
    # A simple graph on n nodes is complete iff it has n*(n-1)/2 edges.
    return G.number_of_edges() == (n * (n - 1)) / 2
309
+
310
+
311
+ def _find_missing_edge(G):
312
+ """Given a non-complete graph G, returns a missing edge."""
313
+ nodes = set(G)
314
+ for u in G:
315
+ missing = nodes - set(list(G[u].keys()) + [u])
316
+ if missing:
317
+ return (u, missing.pop())
318
+
319
+
320
+ def _max_cardinality_node(G, choices, wanna_connect):
321
+ """Returns a the node in choices that has more connections in G
322
+ to nodes in wanna_connect.
323
+ """
324
+ max_number = -1
325
+ for x in choices:
326
+ number = len([y for y in G[x] if y in wanna_connect])
327
+ if number > max_number:
328
+ max_number = number
329
+ max_cardinality_node = x
330
+ return max_cardinality_node
331
+
332
+
333
def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize):
    """Given a graph G, starts a max cardinality search
    (starting from s if s is given and from an arbitrary node otherwise)
    trying to find a non-chordal cycle.

    If it does find one, it returns (u,v,w) where u,v,w are the three
    nodes that together with s are involved in the cycle.

    It ignores any self loops.

    Raises
    ------
    nx.NetworkXPointlessConcept
        If G has no nodes.
    nx.NetworkXTreewidthBoundExceeded
        If the running treewidth estimate exceeds `treewidth_bound`.
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("Graph has no nodes.")
    unnumbered = set(G)
    if s is None:
        s = arbitrary_element(G)
    unnumbered.remove(s)
    numbered = {s}
    current_treewidth = -1
    while unnumbered:  # and current_treewidth <= treewidth_bound:
        # Maximum cardinality search: visit the unnumbered node with the
        # most already-numbered neighbors next.
        v = _max_cardinality_node(G, unnumbered, numbered)
        unnumbered.remove(v)
        numbered.add(v)
        # In a chordal graph, v's already-numbered neighbors must form a clique.
        clique_wanna_be = set(G[v]) & numbered
        sg = G.subgraph(clique_wanna_be)
        if _is_complete_graph(sg):
            # The graph seems to be chordal by now. We update the treewidth
            current_treewidth = max(current_treewidth, len(clique_wanna_be))
            if current_treewidth > treewidth_bound:
                raise nx.NetworkXTreewidthBoundExceeded(
                    f"treewidth_bound exceeded: {current_treewidth}"
                )
        else:
            # sg is not a clique,
            # look for an edge that is not included in sg
            (u, w) = _find_missing_edge(sg)
            return (u, v, w)
    return ()
370
+
371
+
372
@not_implemented_for("directed")
@nx._dispatchable(returns_graph=True)
def complete_to_chordal_graph(G):
    """Return a copy of G completed to a chordal graph

    Adds edges to a copy of G to create a chordal graph. A graph G=(V,E) is
    called chordal if for each cycle with length bigger than 3, there exist
    two non-adjacent nodes connected by an edge (called a chord).

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    Returns
    -------
    H : NetworkX graph
        The chordal enhancement of G
    alpha : Dictionary
        The elimination ordering of nodes of G

    Notes
    -----
    There are different approaches to calculate the chordal
    enhancement of a graph. The algorithm used here is called
    MCS-M and gives at least minimal (local) triangulation of graph. Note
    that this triangulation is not necessarily a global minimum.

    https://en.wikipedia.org/wiki/Chordal_graph

    References
    ----------
    .. [1] Berry, Anne & Blair, Jean & Heggernes, Pinar & Peyton, Barry. (2004)
       Maximum Cardinality Search for Computing Minimal Triangulations of
       Graphs. Algorithmica. 39. 287-298. 10.1007/s00453-004-1084-3.

    Examples
    --------
    >>> from networkx.algorithms.chordal import complete_to_chordal_graph
    >>> G = nx.wheel_graph(10)
    >>> H, alpha = complete_to_chordal_graph(G)
    """
    H = G.copy()
    # alpha holds the elimination order; 0 marks a node that was never
    # numbered (only possible when G is already chordal, see below).
    alpha = {node: 0 for node in H}
    if nx.is_chordal(H):
        # Nothing to add: return the copy and the all-zero ordering.
        return H, alpha
    chords = set()
    # MCS-M weight of each node (roughly: reachable numbered neighbors).
    weight = {node: 0 for node in H.nodes()}
    unnumbered_nodes = list(H.nodes())
    # Number the nodes from len(H) down to 1 (reverse elimination order).
    for i in range(len(H.nodes()), 0, -1):
        # get the node in unnumbered_nodes with the maximum weight
        z = max(unnumbered_nodes, key=lambda node: weight[node])
        unnumbered_nodes.remove(z)
        alpha[z] = i
        update_nodes = []
        for y in unnumbered_nodes:
            if G.has_edge(y, z):
                update_nodes.append(y)
            else:
                # y_weight will be bigger than node weights between y and z
                y_weight = weight[y]
                lower_nodes = [
                    node for node in unnumbered_nodes if weight[node] < y_weight
                ]
                # If y can reach z through strictly lighter nodes, MCS-M
                # records the fill-in edge (z, y) as a chord.
                if nx.has_path(H.subgraph(lower_nodes + [z, y]), y, z):
                    update_nodes.append(y)
                    chords.add((z, y))
        # during calculation of paths the weights should not be updated
        for node in update_nodes:
            weight[node] += 1
    H.add_edges_from(chords)
    return H, alpha
wemm/lib/python3.10/site-packages/networkx/algorithms/cluster.py ADDED
@@ -0,0 +1,609 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Algorithms to characterize the number of triangles in a graph."""
2
+
3
+ from collections import Counter
4
+ from itertools import chain, combinations
5
+
6
+ import networkx as nx
7
+ from networkx.utils import not_implemented_for
8
+
9
+ __all__ = [
10
+ "triangles",
11
+ "average_clustering",
12
+ "clustering",
13
+ "transitivity",
14
+ "square_clustering",
15
+ "generalized_degree",
16
+ ]
17
+
18
+
19
@not_implemented_for("directed")
@nx._dispatchable
def triangles(G, nodes=None):
    """Compute the number of triangles.

    Finds the number of triangles that include a node as one vertex.

    Parameters
    ----------
    G : graph
        A networkx graph

    nodes : node, iterable of nodes, or None (default=None)
        If a singleton node, return the number of triangles for that node.
        If an iterable, compute the number of triangles for each of those nodes.
        If `None` (the default) compute the number of triangles for all nodes in `G`.

    Returns
    -------
    out : dict or int
        If `nodes` is a container of nodes, returns number of triangles keyed by node (dict).
        If `nodes` is a specific node, returns number of triangles for the node (int).

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.triangles(G, 0))
    6
    >>> print(nx.triangles(G))
    {0: 6, 1: 6, 2: 6, 3: 6, 4: 6}
    >>> print(list(nx.triangles(G, [0, 1]).values()))
    [6, 6]

    Notes
    -----
    Self loops are ignored.

    """
    if nodes is not None:
        # If `nodes` represents a single node, return only its number of triangles
        if nodes in G:
            # _triangles_and_degree_iter double-counts, hence the // 2.
            return next(_triangles_and_degree_iter(G, nodes))[2] // 2

        # if `nodes` is a container of nodes, then return a
        # dictionary mapping node to number of triangles.
        return {v: t // 2 for v, d, t, _ in _triangles_and_degree_iter(G, nodes)}

    # if nodes is None, then compute triangles for the complete graph

    # dict used to avoid visiting the same nodes twice
    # this allows calculating/counting each triangle only once
    later_nbrs = {}

    # iterate over the nodes in a graph
    for node, neighbors in G.adjacency():
        # keep only neighbors not yet visited (and drop self loops), so each
        # edge — and therefore each triangle — is considered exactly once
        later_nbrs[node] = {n for n in neighbors if n not in later_nbrs and n != node}

    # instantiate Counter for each node to include isolated nodes
    # add 1 to the count if a nodes neighbor's neighbor is also a neighbor
    triangle_counts = Counter(dict.fromkeys(G, 0))
    for node1, neighbors in later_nbrs.items():
        for node2 in neighbors:
            # Every common "later" neighbor closes a triangle with (node1, node2);
            # credit all three vertices of each such triangle.
            third_nodes = neighbors & later_nbrs[node2]
            m = len(third_nodes)
            triangle_counts[node1] += m
            triangle_counts[node2] += m
            triangle_counts.update(third_nodes)

    return dict(triangle_counts)
88
+
89
+
90
@not_implemented_for("multigraph")
def _triangles_and_degree_iter(G, nodes=None):
    """Yield (node, degree, 2 * #triangles, generalized-degree Counter).

    Triangles are counted twice here (once per orientation), so callers
    typically divide by 2. See degree(), triangles() and
    generalized_degree() for definitions and details.
    """
    # Iterate over every node, or only over the requested bunch.
    if nodes is None:
        node_nbr_pairs = G.adj.items()
    else:
        node_nbr_pairs = ((n, G[n]) for n in G.nbunch_iter(nodes))

    for node, nbrdict in node_nbr_pairs:
        nbrs = set(nbrdict)
        nbrs.discard(node)  # self loops are ignored
        # Multiplicity histogram: for each neighbor w, how many triangles
        # does the edge (node, w) sit in?
        gen_degree = Counter(len(nbrs & (set(G[w]) - {w})) for w in nbrs)
        twice_triangles = sum(mult * cnt for mult, cnt in gen_degree.items())
        yield (node, len(nbrs), twice_triangles, gen_degree)
109
+
110
+
111
@not_implemented_for("multigraph")
def _weighted_triangles_and_degree_iter(G, nodes=None, weight="weight"):
    """Yield (node, degree, weighted_triangles) triples.

    Helper for weighted clustering. A triangle contributes the geometric
    mean (cube root of the product) of its three edge weights, each
    normalized by the maximum edge weight in the graph. Every triangle is
    counted twice (once per orientation), so callers typically divide by 2.
    """
    import numpy as np

    # Normalize by the largest edge weight in the whole graph.
    if weight is None or G.number_of_edges() == 0:
        max_weight = 1
    else:
        max_weight = max(d.get(weight, 1) for u, v, d in G.edges(data=True))

    if nodes is None:
        node_nbr_pairs = G.adj.items()
    else:
        node_nbr_pairs = ((n, G[n]) for n in G.nbunch_iter(nodes))

    def wt(u, v):
        # Normalized edge weight; missing attributes default to 1.
        return G[u][v].get(weight, 1) / max_weight

    for u, nbrdict in node_nbr_pairs:
        unbrs = set(nbrdict)
        unbrs.discard(u)  # self loops are ignored
        total = 0
        done = set()
        for v in unbrs:
            done.add(v)
            # Exclude already-processed neighbors so each triangle at `u`
            # is enumerated once here; the factor of 2 is applied below.
            vnbrs = set(G[v]) - done
            # Hoist the (u, v) weight out of the innermost comprehension.
            wuv = wt(u, v)
            total += np.cbrt(
                [wuv * wt(v, k) * wt(k, u) for k in unbrs & vnbrs]
            ).sum()
        yield (u, len(unbrs), 2 * float(total))
150
+
151
+
152
@not_implemented_for("multigraph")
def _directed_triangles_and_degree_iter(G, nodes=None):
    """Yield (node, total_degree, reciprocal_degree, directed_triangles).

    Helper for directed clustering. Unlike `_triangles_and_degree_iter()`,
    each directed triangle is counted exactly once here.
    """
    node_pred_succ = ((n, G._pred[n], G._succ[n]) for n in G.nbunch_iter(nodes))

    for u, preds, succs in node_pred_succ:
        upreds = set(preds) - {u}
        usuccs = set(succs) - {u}

        n_triangles = 0
        for v in chain(upreds, usuccs):
            vpreds = set(G._pred[v]) - {v}
            vsuccs = set(G._succ[v]) - {v}
            # Each common in-/out-neighbor of u and v closes a directed
            # triangle through the u-v edge; count all four combinations.
            n_triangles += (
                len(upreds & vpreds)
                + len(upreds & vsuccs)
                + len(usuccs & vpreds)
                + len(usuccs & vsuccs)
            )
        total_deg = len(upreds) + len(usuccs)
        recip_deg = len(upreds & usuccs)
        yield (u, total_deg, recip_deg, n_triangles)
184
+
185
+
186
@not_implemented_for("multigraph")
def _directed_weighted_triangles_and_degree_iter(G, nodes=None, weight="weight"):
    """Return an iterator of
    (node, total_degree, reciprocal_degree, directed_weighted_triangles).

    Used for directed weighted clustering. Each directed triangle
    contributes the geometric mean (cube root of the product) of its
    three normalized edge weights.
    Note that unlike `_weighted_triangles_and_degree_iter()`, this function counts
    directed triangles so does not count triangles twice.

    """
    import numpy as np

    # Normalize edge weights by the maximum weight in the graph; with
    # weight=None or an edgeless graph, all weights are effectively 1.
    if weight is None or G.number_of_edges() == 0:
        max_weight = 1
    else:
        max_weight = max(d.get(weight, 1) for u, v, d in G.edges(data=True))

    nodes_nbrs = ((n, G._pred[n], G._succ[n]) for n in G.nbunch_iter(nodes))

    def wt(u, v):
        # Normalized weight of directed edge (u, v); missing attrs default to 1.
        return G[u][v].get(weight, 1) / max_weight

    for i, preds, succs in nodes_nbrs:
        # In-/out-neighbors with self loops removed.
        ipreds = set(preds) - {i}
        isuccs = set(succs) - {i}

        directed_triangles = 0
        # Case 1: the i-j edge points into i (j is a predecessor). The
        # four sums below cover the four possible orientations of the
        # remaining two edges through the third node k.
        for j in ipreds:
            jpreds = set(G._pred[j]) - {j}
            jsuccs = set(G._succ[j]) - {j}
            directed_triangles += np.cbrt(
                [(wt(j, i) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(j, i) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(j, i) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(j, i) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs]
            ).sum()

        # Case 2: the i-j edge points out of i (j is a successor), again
        # with all four orientations of the edges touching k.
        for j in isuccs:
            jpreds = set(G._pred[j]) - {j}
            jsuccs = set(G._succ[j]) - {j}
            directed_triangles += np.cbrt(
                [(wt(i, j) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(i, j) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(i, j) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds]
            ).sum()
            directed_triangles += np.cbrt(
                [(wt(i, j) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs]
            ).sum()

        # Total degree = in-degree + out-degree; reciprocal degree counts
        # bidirectional (mutual) neighbors.
        dtotal = len(ipreds) + len(isuccs)
        dbidirectional = len(ipreds & isuccs)
        yield (i, dtotal, dbidirectional, float(directed_triangles))
248
+
249
+
250
@nx._dispatchable(edge_attrs="weight")
def average_clustering(G, nodes=None, weight=None, count_zeros=True):
    r"""Compute the average clustering coefficient for the graph G.

    The graph-level clustering coefficient is the mean of the per-node
    coefficients,

    .. math::

       C = \frac{1}{n}\sum_{v \in G} c_v,

    where :math:`n` is the number of nodes in `G`.

    Parameters
    ----------
    G : graph

    nodes : container of nodes, optional (default=all nodes in G)
       Compute average clustering for nodes in this container.

    weight : string or None, optional (default=None)
       The edge attribute that holds the numerical value used as a weight.
       If None, then each edge has weight 1.

    count_zeros : bool
       If False include only the nodes with nonzero clustering in the average.

    Returns
    -------
    avg : float
       Average clustering

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.average_clustering(G))
    1.0

    Notes
    -----
    This is a space saving routine; it might be faster
    to use the clustering function to get a list and then take the average.

    Self loops are ignored.

    References
    ----------
    .. [1] Generalizations of the clustering coefficient to weighted
       complex networks by J. Saramäki, M. Kivelä, J.-P. Onnela,
       K. Kaski, and J. Kertész, Physical Review E, 75 027105 (2007).
       http://jponnela.com/web_documents/a9.pdf
    .. [2] Marcus Kaiser,  Mean clustering coefficients: the role of isolated
       nodes and leafs on clustering measures for small-world networks.
       https://arxiv.org/abs/0802.2512
    """
    coefficients = list(clustering(G, nodes, weight=weight).values())
    if not count_zeros:
        # Drop exact zeros before averaging.
        coefficients = [cc for cc in coefficients if abs(cc) > 0]
    return sum(coefficients) / len(coefficients)
308
+
309
+
310
@nx._dispatchable(edge_attrs="weight")
def clustering(G, nodes=None, weight=None):
    r"""Compute the clustering coefficient for nodes.

    For unweighted graphs, the clustering of a node :math:`u`
    is the fraction of possible triangles through that node that exist,

    .. math::

      c_u = \frac{2 T(u)}{deg(u)(deg(u)-1)},

    where :math:`T(u)` is the number of triangles through node :math:`u` and
    :math:`deg(u)` is the degree of :math:`u`.

    For weighted graphs, there are several ways to define clustering [1]_.
    the one used here is defined
    as the geometric average of the subgraph edge weights [2]_,

    .. math::

       c_u = \frac{1}{deg(u)(deg(u)-1))}
             \sum_{vw} (\hat{w}_{uv} \hat{w}_{uw} \hat{w}_{vw})^{1/3}.

    The edge weights :math:`\hat{w}_{uv}` are normalized by the maximum weight
    in the network :math:`\hat{w}_{uv} = w_{uv}/\max(w)`.

    The value of :math:`c_u` is assigned to 0 if :math:`deg(u) < 2`.

    Additionally, this weighted definition has been generalized to support negative edge weights [3]_.

    For directed graphs, the clustering is similarly defined as the fraction
    of all possible directed triangles or geometric average of the subgraph
    edge weights for unweighted and weighted directed graph respectively [4]_.

    .. math::

       c_u = \frac{T(u)}{2(deg^{tot}(u)(deg^{tot}(u)-1) - 2deg^{\leftrightarrow}(u))},

    where :math:`T(u)` is the number of directed triangles through node
    :math:`u`, :math:`deg^{tot}(u)` is the sum of in degree and out degree of
    :math:`u` and :math:`deg^{\leftrightarrow}(u)` is the reciprocal degree of
    :math:`u`.


    Parameters
    ----------
    G : graph

    nodes : node, iterable of nodes, or None (default=None)
        If a singleton node, return the number of triangles for that node.
        If an iterable, compute the number of triangles for each of those nodes.
        If `None` (the default) compute the number of triangles for all nodes in `G`.

    weight : string or None, optional (default=None)
       The edge attribute that holds the numerical value used as a weight.
       If None, then each edge has weight 1.

    Returns
    -------
    out : float, or dictionary
       Clustering coefficient at specified nodes

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.clustering(G, 0))
    1.0
    >>> print(nx.clustering(G))
    {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}

    Notes
    -----
    Self loops are ignored.

    References
    ----------
    .. [1] Generalizations of the clustering coefficient to weighted
       complex networks by J. Saramäki, M. Kivelä, J.-P. Onnela,
       K. Kaski, and J. Kertész, Physical Review E, 75 027105 (2007).
       http://jponnela.com/web_documents/a9.pdf
    .. [2] Intensity and coherence of motifs in weighted complex
       networks by J. P. Onnela, J. Saramäki, J. Kertész, and K. Kaski,
       Physical Review E, 71(6), 065103 (2005).
    .. [3] Generalization of Clustering Coefficients to Signed Correlation Networks
       by G. Costantini and M. Perugini, PloS one, 9(2), e88669 (2014).
    .. [4] Clustering in complex directed networks by G. Fagiolo,
       Physical Review E, 76(2), 026107 (2007).
    """
    if G.is_directed():
        # Both directed helpers yield (v, total_degree, reciprocal_degree,
        # triangles) with the same normalization, so select the iterator
        # once instead of duplicating the identical dict comprehension.
        if weight is not None:
            td_iter = _directed_weighted_triangles_and_degree_iter(G, nodes, weight)
        else:
            td_iter = _directed_triangles_and_degree_iter(G, nodes)
        clusterc = {
            v: 0 if t == 0 else t / ((dt * (dt - 1) - 2 * db) * 2)
            for v, dt, db, t in td_iter
        }
    else:
        # The formula 2*T/(d*(d-1)) from docs is t/(d*(d-1)) here b/c t==2*T
        if weight is not None:
            td_iter = _weighted_triangles_and_degree_iter(G, nodes, weight)
            clusterc = {v: 0 if t == 0 else t / (d * (d - 1)) for v, d, t in td_iter}
        else:
            td_iter = _triangles_and_degree_iter(G, nodes)
            clusterc = {v: 0 if t == 0 else t / (d * (d - 1)) for v, d, t, _ in td_iter}
    if nodes in G:
        # Return the value of the sole entry in the dictionary.
        return clusterc[nodes]
    return clusterc
423
+
424
+
425
@nx._dispatchable
def transitivity(G):
    r"""Compute graph transitivity, the fraction of all possible triangles
    present in G.

    Possible triangles are identified by the number of "triads"
    (two edges with a shared vertex).

    The transitivity is

    .. math::

        T = 3\frac{\#triangles}{\#triads}.

    Parameters
    ----------
    G : graph

    Returns
    -------
    out : float
       Transitivity

    Notes
    -----
    Self loops are ignored.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.transitivity(G))
    1.0
    """
    # Accumulate in a single pass: t is twice the triangle count at each
    # node, d * (d - 1) is twice the number of triads centered there, so
    # the factor-of-3 from the formula cancels into the ratio below.
    double_triangles = 0
    double_triads = 0
    seen_any = False
    for _, d, t, _ in _triangles_and_degree_iter(G):
        seen_any = True
        double_triangles += t
        double_triads += d * (d - 1)
    # An empty graph, or one with no triangles, has transitivity 0.
    if not seen_any or double_triangles == 0:
        return 0
    return double_triangles / double_triads
466
+
467
+
468
@nx._dispatchable
def square_clustering(G, nodes=None):
    r"""Compute the squares clustering coefficient for nodes.

    For each node return the fraction of possible squares that exist at
    the node [1]_

    .. math::
       C_4(v) = \frac{ \sum_{u=1}^{k_v}
       \sum_{w=u+1}^{k_v} q_v(u,w) }{ \sum_{u=1}^{k_v}
       \sum_{w=u+1}^{k_v} [a_v(u,w) + q_v(u,w)]},

    where :math:`q_v(u,w)` are the number of common neighbors of :math:`u` and
    :math:`w` other than :math:`v` (ie squares), and :math:`a_v(u,w) = (k_u -
    (1+q_v(u,w)+\theta_{uv})) + (k_w - (1+q_v(u,w)+\theta_{uw}))`, where
    :math:`\theta_{uw} = 1` if :math:`u` and :math:`w` are connected and 0
    otherwise. [2]_

    Parameters
    ----------
    G : graph

    nodes : container of nodes, optional (default=all nodes in G)
       Compute clustering for nodes in this container.

    Returns
    -------
    c4 : dictionary
       A dictionary keyed by node with the square clustering coefficient value.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.square_clustering(G, 0))
    1.0
    >>> print(nx.square_clustering(G))
    {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}

    Notes
    -----
    While :math:`C_3(v)` (triangle clustering) gives the probability that
    two neighbors of node v are connected with each other, :math:`C_4(v)` is
    the probability that two neighbors of node v share a common
    neighbor different from v. This algorithm can be applied to both
    bipartite and unipartite networks.

    References
    ----------
    .. [1] Pedro G. Lind, Marta C. González, and Hans J. Herrmann. 2005
        Cycles and clustering in bipartite networks.
        Physical Review E (72) 056127.
    .. [2] Zhang, Peng et al. Clustering Coefficient and Community Structure of
        Bipartite Networks. Physica A: Statistical Mechanics and its Applications 387.27 (2008): 6869–6875.
        https://arxiv.org/abs/0710.0117v1
    """
    node_iter = G if nodes is None else G.nbunch_iter(nodes)
    clustering = {}
    for v in node_iter:
        squares_total = 0
        potential = 0
        for u, w in combinations(G[v], 2):
            # Squares through v via the pair (u, w): their common
            # neighbors other than v itself.
            n_sq = len((set(G[u]) & set(G[w])) - {v})
            squares_total += n_sq
            # degm accounts for v itself, the shared square partners,
            # and the u-w edge if present (theta in the formula).
            degm = n_sq + 2 if w in G[u] else n_sq + 1
            potential += (len(G[u]) - degm) + (len(G[w]) - degm) + n_sq
        clustering[v] = squares_total / potential if potential > 0 else 0
    if nodes in G:
        # `nodes` named a single node: unwrap its coefficient.
        return clustering[nodes]
    return clustering
544
+
545
+
546
@not_implemented_for("directed")
@nx._dispatchable
def generalized_degree(G, nodes=None):
    r"""Compute the generalized degree for nodes.

    For each node, the generalized degree shows how many edges of given
    triangle multiplicity the node is connected to. The triangle multiplicity
    of an edge is the number of triangles an edge participates in. The
    generalized degree of node :math:`i` can be written as a vector
    :math:`\mathbf{k}_i=(k_i^{(0)}, \dotsc, k_i^{(N-2)})` where
    :math:`k_i^{(j)}` is the number of edges attached to node :math:`i` that
    participate in :math:`j` triangles.

    Parameters
    ----------
    G : graph

    nodes : container of nodes, optional (default=all nodes in G)
       Compute the generalized degree for nodes in this container.

    Returns
    -------
    out : Counter, or dictionary of Counters
       Generalized degree of specified nodes. The Counter is keyed by edge
       triangle multiplicity.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> print(nx.generalized_degree(G, 0))
    Counter({3: 4})
    >>> print(nx.generalized_degree(G))
    {0: Counter({3: 4}), 1: Counter({3: 4}), 2: Counter({3: 4}), 3: Counter({3: 4}), 4: Counter({3: 4})}

    To recover the number of triangles attached to a node:

    >>> k1 = nx.generalized_degree(G, 0)
    >>> sum([k * v for k, v in k1.items()]) / 2 == nx.triangles(G, 0)
    True

    Notes
    -----
    Self loops are ignored.

    In a network of N nodes, the highest triangle multiplicity an edge can have
    is N-2.

    The return value does not include a `zero` entry if no edges of a
    particular triangle multiplicity are present.

    The number of triangles node :math:`i` is attached to can be recovered from
    the generalized degree :math:`\mathbf{k}_i=(k_i^{(0)}, \dotsc,
    k_i^{(N-2)})` by :math:`(k_i^{(1)}+2k_i^{(2)}+\dotsc +(N-2)k_i^{(N-2)})/2`.

    References
    ----------
    .. [1] Networks with arbitrary edge multiplicities by V. Zlatić,
        D. Garlaschelli and G. Caldarelli, EPL (Europhysics Letters),
        Volume 97, Number 2 (2012).
        https://iopscience.iop.org/article/10.1209/0295-5075/97/28005
    """
    if nodes in G:
        # Single node requested: unwrap its generalized-degree Counter.
        _, _, _, gd = next(_triangles_and_degree_iter(G, nodes))
        return gd
    return {node: gd for node, _, _, gd in _triangles_and_degree_iter(G, nodes)}
wemm/lib/python3.10/site-packages/networkx/algorithms/communicability_alg.py ADDED
@@ -0,0 +1,163 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Communicability.
3
+ """
4
+
5
+ import networkx as nx
6
+ from networkx.utils import not_implemented_for
7
+
8
+ __all__ = ["communicability", "communicability_exp"]
9
+
10
+
11
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def communicability(G):
    r"""Returns communicability between all pairs of nodes in G.

    The communicability between pairs of nodes in G is the sum of
    walks of different lengths starting at node u and ending at node v.

    Parameters
    ----------
    G: graph

    Returns
    -------
    comm: dictionary of dictionaries
        Dictionary of dictionaries keyed by nodes with communicability
        as the value.

    Raises
    ------
    NetworkXError
       If the graph is not undirected and simple.

    See Also
    --------
    communicability_exp:
       Communicability between all pairs of nodes in G  using spectral
       decomposition.
    communicability_betweenness_centrality:
       Communicability betweenness centrality for each node in G.

    Notes
    -----
    This algorithm uses a spectral decomposition of the adjacency matrix.
    Let G=(V,E) be a simple undirected graph.  Using the connection between
    the powers  of the adjacency matrix and the number of walks in the graph,
    the communicability  between nodes `u` and `v` based on the graph spectrum
    is [1]_

    .. math::
        C(u,v)=\sum_{j=1}^{n}\phi_{j}(u)\phi_{j}(v)e^{\lambda_{j}},

    where `\phi_{j}(u)` is the `u\rm{th}` element of the `j\rm{th}` orthonormal
    eigenvector of the adjacency matrix associated with the eigenvalue
    `\lambda_{j}`.

    References
    ----------
    .. [1] Ernesto Estrada, Naomichi Hatano,
       "Communicability in complex networks",
       Phys. Rev. E 77, 036111 (2008).
       https://arxiv.org/abs/0707.0756

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
    >>> c = nx.communicability(G)
    """
    import numpy as np

    nodelist = list(G)  # ordering of nodes in matrix
    A = nx.to_numpy_array(G, nodelist)
    # convert to 0-1 matrix
    A[A != 0.0] = 1
    w, vec = np.linalg.eigh(A)
    expw = np.exp(w)
    mapping = dict(zip(nodelist, range(len(nodelist))))
    # C = V diag(e^w) V^T, computed as (V * e^w) @ V^T.  A single matrix
    # product replaces the previous per-pair Python loop over eigenvectors,
    # which was O(n) per pair and hence O(n^3) overall in interpreted code.
    comm_matrix = (vec * expw) @ vec.T
    c = {}
    for u in G:
        c[u] = {}
        p = mapping[u]
        for v in G:
            c[u][v] = float(comm_matrix[p, mapping[v]])
    return c
91
+
92
+
93
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def communicability_exp(G):
    r"""Returns communicability between all pairs of nodes in G.

    Communicability between pair of node (u,v) of node in G is the sum of
    walks of different lengths starting at node u and ending at node v.

    Parameters
    ----------
    G: graph

    Returns
    -------
    comm: dictionary of dictionaries
        Dictionary of dictionaries keyed by nodes with communicability
        as the value.

    Raises
    ------
    NetworkXError
        If the graph is not undirected and simple.

    See Also
    --------
    communicability:
       Communicability between pairs of nodes in G.
    communicability_betweenness_centrality:
       Communicability betweenness centrality for each node in G.

    Notes
    -----
    This algorithm uses matrix exponentiation of the adjacency matrix.

    Let G=(V,E) be a simple undirected graph.  Using the connection between
    the powers of the adjacency matrix and the number of walks in the graph,
    the communicability between nodes u and v is [1]_,

    .. math::
        C(u,v) = (e^A)_{uv},

    where `A` is the adjacency matrix of G.

    References
    ----------
    .. [1] Ernesto Estrada, Naomichi Hatano,
       "Communicability in complex networks",
       Phys. Rev. E 77, 036111 (2008).
       https://arxiv.org/abs/0707.0756

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
    >>> c = nx.communicability_exp(G)
    """
    import scipy as sp

    nodelist = list(G)  # fixes the row/column ordering of the matrix
    A = nx.to_numpy_array(G, nodelist)
    # Binarize: edge weights are ignored by this measure.
    A[A != 0.0] = 1
    # The communicability matrix is the matrix exponential of A.
    expA = sp.linalg.expm(A)
    index = {n: pos for pos, n in enumerate(nodelist)}
    # Unpack the dense matrix into a dict-of-dicts keyed by node.
    return {u: {v: float(expA[index[u], index[v]]) for v in G} for u in G}
wemm/lib/python3.10/site-packages/networkx/algorithms/core.py ADDED
@@ -0,0 +1,649 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Find the k-cores of a graph.
3
+
4
+ The k-core is found by recursively pruning nodes with degrees less than k.
5
+
6
+ See the following references for details:
7
+
8
+ An O(m) Algorithm for Cores Decomposition of Networks
9
+ Vladimir Batagelj and Matjaz Zaversnik, 2003.
10
+ https://arxiv.org/abs/cs.DS/0310049
11
+
12
+ Generalized Cores
13
+ Vladimir Batagelj and Matjaz Zaversnik, 2002.
14
+ https://arxiv.org/pdf/cs/0202039
15
+
16
+ For directed graphs a more general notion is that of D-cores which
17
+ looks at (k, l) restrictions on (in, out) degree. The (k, k) D-core
18
+ is the k-core.
19
+
20
+ D-cores: Measuring Collaboration of Directed Graphs Based on Degeneracy
21
+ Christos Giatsidis, Dimitrios M. Thilikos, Michalis Vazirgiannis, ICDM 2011.
22
+ http://www.graphdegeneracy.org/dcores_ICDM_2011.pdf
23
+
24
+ Multi-scale structure and topological anomaly detection via a new network \
25
+ statistic: The onion decomposition
26
+ L. Hébert-Dufresne, J. A. Grochow, and A. Allard
27
+ Scientific Reports 6, 31708 (2016)
28
+ http://doi.org/10.1038/srep31708
29
+
30
+ """
31
+
32
+ import networkx as nx
33
+
34
+ __all__ = [
35
+ "core_number",
36
+ "k_core",
37
+ "k_shell",
38
+ "k_crust",
39
+ "k_corona",
40
+ "k_truss",
41
+ "onion_layers",
42
+ ]
43
+
44
+
45
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable
def core_number(G):
    """Returns the core number for each node.

    A k-core is a maximal subgraph that contains nodes of degree k or more.

    The core number of a node is the largest value k of a k-core containing
    that node.

    Implements the O(m) bin-sort algorithm of Batagelj & Zaversnik [1]_:
    nodes are processed in nondecreasing degree order, and each processed
    node lowers the provisional core number of its higher-valued neighbors.

    Parameters
    ----------
    G : NetworkX graph
       An undirected or directed graph

    Returns
    -------
    core_number : dictionary
       A dictionary keyed by node to the core number.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a multigraph or contains self loops.

    Notes
    -----
    For directed graphs the node degree is defined to be the
    in-degree + out-degree.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> nx.core_number(H)
    {0: 1, 1: 2, 2: 2, 3: 2, 4: 1, 5: 2, 6: 0}
    >>> G = nx.DiGraph()
    >>> G.add_edges_from([(1, 2), (2, 1), (2, 3), (2, 4), (3, 4), (4, 3)])
    >>> nx.core_number(G)
    {1: 2, 2: 2, 3: 2, 4: 2}

    References
    ----------
    .. [1] An O(m) Algorithm for Cores Decomposition of Networks
       Vladimir Batagelj and Matjaz Zaversnik, 2003.
       https://arxiv.org/abs/cs.DS/0310049
    """
    # Self loops would make a node its own neighbor and break the pruning
    # logic below, so reject them up front.
    if nx.number_of_selfloops(G) > 0:
        msg = (
            "Input graph has self loops which is not permitted; "
            "Consider using G.remove_edges_from(nx.selfloop_edges(G))."
        )
        raise nx.NetworkXNotImplemented(msg)
    degrees = dict(G.degree())
    # Sort nodes by degree.
    nodes = sorted(degrees, key=degrees.get)
    # bin_boundaries[d] is the index in `nodes` where the block of nodes
    # currently holding value d begins (a counting-sort bucket layout).
    bin_boundaries = [0]
    curr_degree = 0
    for i, v in enumerate(nodes):
        if degrees[v] > curr_degree:
            bin_boundaries.extend([i] * (degrees[v] - curr_degree))
            curr_degree = degrees[v]
    # node_pos[v] is v's current index within `nodes`; kept in sync as
    # nodes are swapped between buckets below.
    node_pos = {v: pos for pos, v in enumerate(nodes)}
    # The initial guess for the core number of a node is its degree.
    # NOTE: `core` aliases `degrees` deliberately — decrements below update
    # both names, and `degrees` is not read again after this point.
    core = degrees
    nbrs = {v: list(nx.all_neighbors(G, v)) for v in G}
    for v in nodes:
        for u in nbrs[v]:
            # Processing v (the lowest remaining value) can only lower
            # neighbors whose provisional core number is still larger.
            if core[u] > core[v]:
                nbrs[u].remove(v)
                # Move u to the front of its bucket, then shift the bucket
                # boundary right so that decrementing core[u] effectively
                # transfers u into the (core[u]-1) bucket in O(1).
                pos = node_pos[u]
                bin_start = bin_boundaries[core[u]]
                node_pos[u] = bin_start
                node_pos[nodes[bin_start]] = pos
                nodes[bin_start], nodes[pos] = nodes[pos], nodes[bin_start]
                bin_boundaries[core[u]] += 1
                core[u] -= 1
    return core
123
+
124
+
125
def _core_subgraph(G, k_filter, k=None, core=None):
    """Returns the subgraph induced by nodes passing filter `k_filter`.

    Parameters
    ----------
    G : NetworkX graph
       The graph or directed graph to process
    k_filter : filter function
       This function filters the nodes chosen. It takes three inputs:
       A node of G, the filter's cutoff, and the core dict of the graph.
       The function should return a Boolean value.
    k : int, optional
      The order of the core.  If not specified use the max core number.
      This value is used as the cutoff for the filter.
    core : dict, optional
      Precomputed core numbers keyed by node for the graph `G`.
      If not specified, the core numbers will be computed from `G`.

    """
    # Fall back to computing core numbers when none were supplied.
    core_numbers = core if core is not None else core_number(G)
    # Default cutoff is the maximum core number (the "main" core).
    cutoff = k if k is not None else max(core_numbers.values())
    keep = (v for v in core_numbers if k_filter(v, cutoff, core_numbers))
    return G.subgraph(keep).copy()
150
+
151
+
152
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def k_core(G, k=None, core_number=None):
    """Returns the k-core of G.

    A k-core is a maximal subgraph that contains nodes of degree `k` or more.

    .. deprecated:: 3.3
       `k_core` will not accept `MultiGraph` objects in version 3.5.

    Parameters
    ----------
    G : NetworkX graph
      A graph or directed graph
    k : int, optional
      The order of the core.  If not specified return the main core.
    core_number : dictionary, optional
      Precomputed core numbers for the graph G.

    Returns
    -------
    G : NetworkX graph
      The k-core subgraph

    Raises
    ------
    NetworkXNotImplemented
      The k-core is not defined for multigraphs or graphs with self loops.

    Notes
    -----
    The main core is the core with `k` as the largest core_number.

    For directed graphs the node degree is defined to be the
    in-degree + out-degree.

    Graph, node, and edge attributes are copied to the subgraph.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.k_core(H).nodes
    NodeView((1, 2, 3, 5))

    See Also
    --------
    core_number

    References
    ----------
    .. [1] An O(m) Algorithm for Cores Decomposition of Networks
       Vladimir Batagelj and Matjaz Zaversnik,  2003.
       https://arxiv.org/abs/cs.DS/0310049
    """

    import warnings

    # MultiGraph support is deprecated; warn but keep working for now.
    if G.is_multigraph():
        warnings.warn(
            (
                "\n\n`k_core` will not accept `MultiGraph` objects in version 3.5.\n"
                "Convert it to an undirected graph instead, using::\n\n"
                "\tG = nx.Graph(G)\n"
            ),
            category=DeprecationWarning,
            stacklevel=5,
        )

    def in_k_core(v, cutoff, core):
        # Keep nodes whose core number meets the cutoff.
        return core[v] >= cutoff

    return _core_subgraph(G, in_k_core, k, core_number)
226
+
227
+
228
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def k_shell(G, k=None, core_number=None):
    """Returns the k-shell of G.

    The k-shell is the subgraph induced by nodes with core number k.
    That is, nodes in the k-core that are not in the (k+1)-core.

    .. deprecated:: 3.3
       `k_shell` will not accept `MultiGraph` objects in version 3.5.

    Parameters
    ----------
    G : NetworkX graph
      A graph or directed graph.
    k : int, optional
      The order of the shell. If not specified return the outer shell.
    core_number : dictionary, optional
      Precomputed core numbers for the graph G.


    Returns
    -------
    G : NetworkX graph
       The k-shell subgraph

    Raises
    ------
    NetworkXNotImplemented
        The k-shell is not implemented for multigraphs or graphs with self loops.

    Notes
    -----
    This is similar to k_corona but in that case only neighbors in the
    k-core are considered.

    For directed graphs the node degree is defined to be the
    in-degree + out-degree.

    Graph, node, and edge attributes are copied to the subgraph.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.k_shell(H, k=1).nodes
    NodeView((0, 4))

    See Also
    --------
    core_number
    k_corona


    References
    ----------
    .. [1] A model of Internet topology using k-shell decomposition
       Shai Carmi,  Shlomo Havlin, Scott Kirkpatrick, Yuval Shavitt,
       and Eran Shir, PNAS  July 3, 2007   vol. 104  no. 27  11150-11154
       http://www.pnas.org/content/104/27/11150.full
    """

    import warnings

    # MultiGraph support is deprecated; warn but keep working for now.
    if G.is_multigraph():
        warnings.warn(
            (
                "\n\n`k_shell` will not accept `MultiGraph` objects in version 3.5.\n"
                "Convert it to an undirected graph instead, using::\n\n"
                "\tG = nx.Graph(G)\n"
            ),
            category=DeprecationWarning,
            stacklevel=5,
        )

    def in_k_shell(v, cutoff, core):
        # Keep exactly the nodes whose core number equals the cutoff.
        return core[v] == cutoff

    return _core_subgraph(G, in_k_shell, k, core_number)
308
+
309
+
310
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def k_crust(G, k=None, core_number=None):
    """Return the k-crust of `G`.

    The k-crust is what remains of `G` after the edges of the k-core are
    removed; nodes left isolated by that removal are dropped as well.

    .. deprecated:: 3.3
       `k_crust` will not accept `MultiGraph` objects in version 3.5.

    Parameters
    ----------
    G : NetworkX graph
        A graph or directed graph.
    k : int, optional
        The order of the shell. If not given, the main crust is returned.
    core_number : dictionary, optional
        Precomputed core numbers keyed by node for the graph `G`.

    Returns
    -------
    G : NetworkX graph
        The k-crust subgraph

    Raises
    ------
    NetworkXNotImplemented
        The k-crust is not implemented for multigraphs or graphs with self loops.

    Notes
    -----
    This definition of k-crust differs from the one in [1]_: the k-crust
    of [1]_ corresponds to the (k+1)-crust of this algorithm.

    For directed graphs the degree of a node is taken to be its
    in-degree plus its out-degree.

    Graph, node, and edge attributes are copied to the subgraph.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.k_crust(H, k=1).nodes
    NodeView((0, 4, 6))

    See Also
    --------
    core_number

    References
    ----------
    .. [1] A model of Internet topology using k-shell decomposition
       Shai Carmi, Shlomo Havlin, Scott Kirkpatrick, Yuval Shavitt,
       and Eran Shir, PNAS  July 3, 2007   vol. 104  no. 27  11150-11154
       http://www.pnas.org/content/104/27/11150.full
    """

    import warnings

    if G.is_multigraph():
        # Deprecation path: multigraph support is scheduled for removal.
        warnings.warn(
            "\n\n`k_crust` will not accept `MultiGraph` objects in version 3.5.\n"
            "Convert it to an undirected graph instead, using::\n\n"
            "\tG = nx.Graph(G)\n",
            category=DeprecationWarning,
            stacklevel=5,
        )

    # The default value of k here is one smaller than _core_subgraph would
    # use, so the subgraph construction is inlined with the filter c[v] <= k.
    if core_number is None:
        core_number = nx.core_number(G)
    if k is None:
        k = max(core_number.values()) - 1
    return G.subgraph(v for v in core_number if core_number[v] <= k).copy()
391
+
392
+
393
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def k_corona(G, k, core_number=None):
    """Return the k-corona of `G`.

    The k-corona is the subgraph of the k-core restricted to the nodes
    that have exactly k neighbors inside the k-core.

    .. deprecated:: 3.3
       `k_corona` will not accept `MultiGraph` objects in version 3.5.

    Parameters
    ----------
    G : NetworkX graph
        A graph or directed graph
    k : int
        The order of the corona.
    core_number : dictionary, optional
        Precomputed core numbers keyed by node for the graph `G`.

    Returns
    -------
    G : NetworkX graph
        The k-corona subgraph

    Raises
    ------
    NetworkXNotImplemented
        The k-corona is not defined for multigraphs or graphs with self loops.

    Notes
    -----
    For directed graphs the degree of a node is taken to be its
    in-degree plus its out-degree.

    Graph, node, and edge attributes are copied to the subgraph.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.k_corona(H, k=2).nodes
    NodeView((1, 2, 3, 5))

    See Also
    --------
    core_number

    References
    ----------
    .. [1]  k -core (bootstrap) percolation on complex networks:
       Critical phenomena and nonlocal effects,
       A. V. Goltsev, S. N. Dorogovtsev, and J. F. F. Mendes,
       Phys. Rev. E 73, 056101 (2006)
       http://link.aps.org/doi/10.1103/PhysRevE.73.056101
    """

    import warnings

    if G.is_multigraph():
        # Deprecation path: multigraph support is scheduled for removal.
        warnings.warn(
            "\n\n`k_corona` will not accept `MultiGraph` objects in version 3.5.\n"
            "Convert it to an undirected graph instead, using::\n\n"
            "\tG = nx.Graph(G)\n",
            category=DeprecationWarning,
            stacklevel=5,
        )

    def in_corona(v, k, c):
        # Node must have core number k AND exactly k neighbors in the k-core.
        return c[v] == k and sum(1 for w in G[v] if c[w] >= k) == k

    return _core_subgraph(G, in_corona, k, core_number)
468
+
469
+
470
@nx.utils.not_implemented_for("directed")
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def k_truss(G, k):
    """Return the k-truss of `G`.

    The k-truss is the maximal induced subgraph of `G` containing at least
    three vertices in which every edge participates in at least `k-2`
    triangles.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph
    k : int
        The order of the truss

    Returns
    -------
    H : NetworkX graph
        The k-truss subgraph

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a multigraph or directed graph or if it contains self loops.

    Notes
    -----
    A k-clique is a (k-2)-truss and a k-truss is a (k+1)-core.

    Graph, node, and edge attributes are copied to the subgraph.

    K-trusses were originally defined in [2] which states that the k-truss
    is the maximal induced subgraph where each edge belongs to at least
    `k-2` triangles. A more recent paper, [1], uses a slightly different
    definition requiring that each edge belong to at least `k` triangles.
    This implementation follows the original `k-2` definition.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.k_truss(H, k=2).nodes
    NodeView((0, 1, 2, 3, 4, 5))

    References
    ----------
    .. [1] Bounds and Algorithms for k-truss. Paul Burkhardt, Vance Faber,
       David G. Harris, 2018. https://arxiv.org/abs/1806.05523v2
    .. [2] Trusses: Cohesive Subgraphs for Social Network Analysis. Jonathan
       Cohen, 2005.
    """
    if nx.number_of_selfloops(G) > 0:
        msg = (
            "Input graph has self loops which is not permitted; "
            "Consider using G.remove_edges_from(nx.selfloop_edges(G))."
        )
        raise nx.NetworkXNotImplemented(msg)

    H = G.copy()

    # Repeatedly strip edges with fewer than k-2 shared neighbors (i.e.
    # triangles), then drop isolated nodes, until a pass removes nothing.
    while True:
        seen = set()
        weak_edges = []
        for u in H:
            u_nbrs = set(H[u])
            seen.add(u)
            # Each undirected edge is examined once: skip already-seen ends.
            for v in u_nbrs - seen:
                # Common neighbors of u and v == triangles through edge (u, v).
                if len(u_nbrs & set(H[v])) < k - 2:
                    weak_edges.append((u, v))
        H.remove_edges_from(weak_edges)
        H.remove_nodes_from(list(nx.isolates(H)))
        if not weak_edges:
            break

    return H
550
+
551
+
552
@nx.utils.not_implemented_for("multigraph")
@nx.utils.not_implemented_for("directed")
@nx._dispatchable
def onion_layers(G):
    """Return the layer of each vertex in an onion decomposition of the graph.

    The onion decomposition refines the k-core decomposition by exposing
    the internal organization of each k-shell. It is usually used together
    with the `core numbers`.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph without self loops.

    Returns
    -------
    od_layers : dictionary
        A dictionary keyed by node to the onion layer. The layers are
        contiguous integers starting at 1.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a multigraph or directed graph or if it contains self loops.

    Examples
    --------
    >>> degrees = [0, 1, 2, 2, 2, 2, 3]
    >>> H = nx.havel_hakimi_graph(degrees)
    >>> H.degree
    DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0})
    >>> nx.onion_layers(H)
    {6: 1, 0: 2, 4: 3, 1: 4, 2: 4, 3: 4, 5: 4}

    See Also
    --------
    core_number

    References
    ----------
    .. [1] Multi-scale structure and topological anomaly detection via a new
       network statistic: The onion decomposition
       L. Hébert-Dufresne, J. A. Grochow, and A. Allard
       Scientific Reports 6, 31708 (2016)
       http://doi.org/10.1038/srep31708
    .. [2] Percolation and the effective structure of complex networks
       A. Allard and L. Hébert-Dufresne
       Physical Review X 9, 011023 (2019)
       http://doi.org/10.1103/PhysRevX.9.011023
    """
    if nx.number_of_selfloops(G) > 0:
        msg = (
            "Input graph contains self loops which is not permitted; "
            "Consider using G.remove_edges_from(nx.selfloop_edges(G))."
        )
        raise nx.NetworkXNotImplemented(msg)
    # Result: node -> onion layer.
    layers = {}
    # Mutable adjacency lists; entries are removed as vertices are peeled.
    adjacency = {v: list(nx.all_neighbors(G, v)) for v in G}
    # Effective (residual) degree of each remaining vertex.
    residual = dict(G.degree())
    core = 1
    layer = 1
    # Degree-zero vertices, when present, form layer 1 on their own.
    isolated = list(nx.isolates(G))
    if len(isolated) > 0:
        for v in isolated:
            layers[v] = layer
            del residual[v]
        layer = 2
    # Peel the remaining vertices, one layer per pass.
    while len(residual) > 0:
        # Visit vertices in order of increasing residual degree.
        order = sorted(residual, key=residual.get)
        # The current core follows the minimum residual degree upward.
        lowest = residual[order[0]]
        if lowest > core:
            core = lowest
        # The current layer consists of every vertex at or below the core.
        peel = []
        for n in order:
            if residual[n] > core:
                break
            peel.append(n)
        # Record the layer and detach the peeled vertices from the graph.
        for v in peel:
            layers[v] = layer
            for n in adjacency[v]:
                adjacency[n].remove(v)
                residual[n] -= 1
            del residual[v]
        layer += 1
    # Dictionary mapping each vertex to its onion layer.
    return layers
wemm/lib/python3.10/site-packages/networkx/algorithms/covering.py ADDED
@@ -0,0 +1,142 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions related to graph covers."""
2
+
3
+ from functools import partial
4
+ from itertools import chain
5
+
6
+ import networkx as nx
7
+ from networkx.utils import arbitrary_element, not_implemented_for
8
+
9
+ __all__ = ["min_edge_cover", "is_edge_cover"]
10
+
11
+
12
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def min_edge_cover(G, matching_algorithm=None):
    """Return a minimum-cardinality edge cover of the graph as a set of edges.

    A smallest edge cover can be found in polynomial time by computing a
    maximum matching and then greedily extending it until every node is
    covered; that is the procedure used here. The matching step can be
    customized via `matching_algorithm`. The result usually contains a
    single 2-tuple per edge; when a bipartite matching algorithm is given
    as `matching_algorithm`, both orientations `(u, v)` and `(v, u)` are
    included for each edge.

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph.

    matching_algorithm : function
        A function that returns a maximum cardinality matching for `G`.
        The function must take one input, the graph `G`, and return
        either a set of edges (with only one direction for the pair of nodes)
        or a dictionary mapping each node to its mate. If not specified,
        :func:`~networkx.algorithms.matching.max_weight_matching` is used.
        Common bipartite matching functions include
        :func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching`
        or
        :func:`~networkx.algorithms.bipartite.matching.eppstein_matching`.

    Returns
    -------
    min_cover : set
        A set of the edges in a minimum edge cover in the form of tuples.
        It contains only one of the equivalent 2-tuples `(u, v)` and `(v, u)`
        for each edge. If a bipartite method is used to compute the matching,
        the returned set contains both the 2-tuples `(u, v)` and `(v, u)`
        for each edge of a minimum edge cover.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> sorted(nx.min_edge_cover(G))
    [(2, 1), (3, 0)]

    Notes
    -----
    An edge cover of a graph is a set of edges such that every node of
    the graph is incident to at least one edge of the set.
    The minimum edge cover is an edge covering of smallest cardinality.

    Due to its implementation, the worst-case running time of this algorithm
    is bounded by the worst-case running time of the function
    ``matching_algorithm``.

    Minimum edge cover for `G` can also be found using the `min_edge_covering`
    function in :mod:`networkx.algorithms.bipartite.covering` which is
    simply this function with a default matching algorithm of
    :func:`~networkx.algorithms.bipartite.matching.hopcraft_karp_matching`
    """
    if len(G) == 0:
        return set()
    if nx.number_of_isolates(G) > 0:
        # An isolated node can never be covered, so no edge cover exists.
        raise nx.NetworkXException(
            "Graph has a node with no edge incident on it, so no edge cover exists."
        )
    if matching_algorithm is None:
        matching_algorithm = partial(nx.max_weight_matching, maxcardinality=True)
    matching = matching_algorithm(G)
    # The cover starts as a superset of the maximum matching. Bipartite
    # matchers return a node->mate dict, so convert it when needed.
    try:
        min_cover = set(matching.items())
        bipartite_cover = True
    except AttributeError:
        min_cover = matching
        bipartite_cover = False
    # Extend the matching until every node is incident to a cover edge.
    covered = {u for u, v in min_cover} | {v for u, v in min_cover}
    for v in set(G) - covered:
        # Since `v` is uncovered, each edge incident to `v` will join it
        # with a covered node (otherwise, if there were an edge joining
        # uncovered nodes `u` and `v`, the maximum matching algorithm
        # would have found it), so we can choose an arbitrary edge
        # incident to `v`. (This applies only in a simple graph, not a
        # multigraph.)
        u = arbitrary_element(G[v])
        min_cover.add((u, v))
        if bipartite_cover:
            min_cover.add((v, u))
    return min_cover
+ return min_cover
106
+
107
+
108
@not_implemented_for("directed")
@nx._dispatchable
def is_edge_cover(G, cover):
    """Decide whether a set of edges is a valid edge cover of the graph.

    A set of edges is an edge cover exactly when every node of the graph
    has at least one edge of the set incident on it, which is what this
    function checks.

    Parameters
    ----------
    G : NetworkX graph
        An undirected bipartite graph.

    cover : set
        Set of edges to be checked.

    Returns
    -------
    bool
        Whether the set of edges is a valid edge cover of the graph.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> cover = {(2, 1), (3, 0)}
    >>> nx.is_edge_cover(G, cover)
    True

    Notes
    -----
    An edge cover of a graph is a set of edges such that every node of
    the graph is incident to at least one edge of the set.
    """
    # Collect every endpoint appearing in the cover, then test that the
    # graph's node set is contained in it.
    touched = set()
    for edge in cover:
        touched.update(edge)
    return set(G) <= touched
+ return set(G) <= set(chain.from_iterable(cover))
wemm/lib/python3.10/site-packages/networkx/algorithms/dag.py ADDED
@@ -0,0 +1,1418 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Algorithms for directed acyclic graphs (DAGs).
2
+
3
+ Note that most of these functions are only guaranteed to work for DAGs.
4
+ In general, these functions do not check for acyclic-ness, so it is up
5
+ to the user to check for that.
6
+ """
7
+
8
+ import heapq
9
+ from collections import deque
10
+ from functools import partial
11
+ from itertools import chain, combinations, product, starmap
12
+ from math import gcd
13
+
14
+ import networkx as nx
15
+ from networkx.utils import arbitrary_element, not_implemented_for, pairwise
16
+
17
+ __all__ = [
18
+ "descendants",
19
+ "ancestors",
20
+ "topological_sort",
21
+ "lexicographical_topological_sort",
22
+ "all_topological_sorts",
23
+ "topological_generations",
24
+ "is_directed_acyclic_graph",
25
+ "is_aperiodic",
26
+ "transitive_closure",
27
+ "transitive_closure_dag",
28
+ "transitive_reduction",
29
+ "antichains",
30
+ "dag_longest_path",
31
+ "dag_longest_path_length",
32
+ "dag_to_branching",
33
+ "compute_v_structures",
34
+ ]
35
+
36
+ chaini = chain.from_iterable
37
+
38
+
39
@nx._dispatchable
def descendants(G, source):
    """Return the set of all nodes reachable from `source` in `G`.

    Parameters
    ----------
    G : NetworkX Graph
    source : node in `G`

    Returns
    -------
    set()
        The descendants of `source` in `G`

    Raises
    ------
    NetworkXError
        If node `source` is not in `G`.

    Examples
    --------
    >>> DG = nx.path_graph(5, create_using=nx.DiGraph)
    >>> sorted(nx.descendants(DG, 2))
    [3, 4]

    The `source` node is not a descendant of itself, but can be included manually:

    >>> sorted(nx.descendants(DG, 2) | {2})
    [2, 3, 4]

    See also
    --------
    ancestors
    """
    # Every node appearing as a BFS-edge target is reachable from source.
    reached = set()
    for _, child in nx.bfs_edges(G, source):
        reached.add(child)
    return reached
74
+
75
+
76
@nx._dispatchable
def ancestors(G, source):
    """Return the set of all nodes that have a path to `source` in `G`.

    Parameters
    ----------
    G : NetworkX Graph
    source : node in `G`

    Returns
    -------
    set()
        The ancestors of `source` in `G`

    Raises
    ------
    NetworkXError
        If node `source` is not in `G`.

    Examples
    --------
    >>> DG = nx.path_graph(5, create_using=nx.DiGraph)
    >>> sorted(nx.ancestors(DG, 2))
    [0, 1]

    The `source` node is not an ancestor of itself, but can be included manually:

    >>> sorted(nx.ancestors(DG, 2) | {2})
    [0, 1, 2]

    See also
    --------
    descendants
    """
    # A reversed BFS from source visits exactly the nodes with a path to it.
    reached = set()
    for _, child in nx.bfs_edges(G, source, reverse=True):
        reached.add(child)
    return reached
111
+
112
+
113
@nx._dispatchable
def has_cycle(G):
    """Decides whether the directed graph has a cycle."""
    # A topological sort succeeds iff the graph is acyclic; exhausting the
    # generator forces the whole sort to run.
    try:
        for _ in topological_sort(G):
            pass
    except nx.NetworkXUnfeasible:
        return True
    return False
123
+
124
+
125
@nx._dispatchable
def is_directed_acyclic_graph(G):
    """Return True when the graph `G` is a directed acyclic graph (DAG),
    and False otherwise.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    bool
        True if `G` is a DAG, False otherwise

    Examples
    --------
    Undirected graph::

        >>> G = nx.Graph([(1, 2), (2, 3)])
        >>> nx.is_directed_acyclic_graph(G)
        False

    Directed graph with cycle::

        >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
        >>> nx.is_directed_acyclic_graph(G)
        False

    Directed acyclic graph::

        >>> G = nx.DiGraph([(1, 2), (2, 3)])
        >>> nx.is_directed_acyclic_graph(G)
        True

    See also
    --------
    topological_sort
    """
    # Undirected graphs are never DAGs by definition.
    if not G.is_directed():
        return False
    return not has_cycle(G)
164
+
165
+
166
@nx._dispatchable
def topological_generations(G):
    """Stratify a DAG into generations.

    A topological generation is a collection of nodes such that all
    ancestors of a node in a generation are guaranteed to lie in an earlier
    generation, and all descendants in a later one. Every node is placed
    in the earliest generation it can belong to.

    Parameters
    ----------
    G : NetworkX digraph
        A directed acyclic graph (DAG)

    Yields
    ------
    sets of nodes
        Yields sets of nodes representing each generation.

    Raises
    ------
    NetworkXError
        Generations are defined for directed graphs only. If the graph
        `G` is undirected, a :exc:`NetworkXError` is raised.

    NetworkXUnfeasible
        If `G` is not a directed acyclic graph (DAG) no topological generations
        exist and a :exc:`NetworkXUnfeasible` exception is raised.  This can also
        be raised if `G` is changed while the returned iterator is being processed

    RuntimeError
        If `G` is changed while the returned iterator is being processed.

    Examples
    --------
    >>> DG = nx.DiGraph([(2, 1), (3, 1)])
    >>> [sorted(generation) for generation in nx.topological_generations(DG)]
    [[2, 3], [1]]

    Notes
    -----
    The generation a node belongs to equals the maximum path distance from
    that node to the farthest leaf; it can be recovered with
    `enumerate(topological_generations(G))`.

    See also
    --------
    topological_sort
    """
    if not G.is_directed():
        raise nx.NetworkXError("Topological sort not defined on undirected graphs.")

    is_multi = G.is_multigraph()
    # Residual in-degree of each node that still has unprocessed ancestors.
    remaining = {v: d for v, d in G.in_degree() if d > 0}
    # Nodes with no unprocessed ancestors form the next generation.
    frontier = [v for v, d in G.in_degree() if d == 0]

    while frontier:
        generation, frontier = frontier, []
        for node in generation:
            if node not in G:
                raise RuntimeError("Graph changed during iteration")
            for child in G.neighbors(node):
                try:
                    # Multigraphs may contribute several parallel edges.
                    remaining[child] -= len(G[node][child]) if is_multi else 1
                except KeyError as err:
                    raise RuntimeError("Graph changed during iteration") from err
                if remaining[child] == 0:
                    frontier.append(child)
                    del remaining[child]
        yield generation

    # Leftover in-degree means some nodes were never released: a cycle.
    if remaining:
        raise nx.NetworkXUnfeasible(
            "Graph contains a cycle or graph changed during iteration"
        )
242
+
243
+
244
@nx._dispatchable
def topological_sort(G):
    """Return a generator of nodes in topologically sorted order.

    A topological sort is a (generally non-unique) permutation of the nodes
    of a directed graph in which an edge from u to v guarantees that u
    appears before v. Such an ordering exists only when the graph contains
    no directed cycles.

    Parameters
    ----------
    G : NetworkX digraph
        A directed acyclic graph (DAG)

    Yields
    ------
    nodes
        Yields the nodes in topological sorted order.

    Raises
    ------
    NetworkXError
        Topological sort is defined for directed graphs only. If the graph `G`
        is undirected, a :exc:`NetworkXError` is raised.

    NetworkXUnfeasible
        If `G` is not a directed acyclic graph (DAG) no topological sort exists
        and a :exc:`NetworkXUnfeasible` exception is raised.  This can also be
        raised if `G` is changed while the returned iterator is being processed

    RuntimeError
        If `G` is changed while the returned iterator is being processed.

    Examples
    --------
    To get the reverse order of the topological sort:

    >>> DG = nx.DiGraph([(1, 2), (2, 3)])
    >>> list(reversed(list(nx.topological_sort(DG))))
    [3, 2, 1]

    If your DiGraph naturally has the edges representing tasks/inputs
    and nodes representing people/processes that initiate tasks, then
    topological_sort is not quite what you need. You will have to change
    the tasks to nodes with dependence reflected by edges. The result is
    a kind of topological sort of the edges. This can be done
    with :func:`networkx.line_graph` as follows:

    >>> list(nx.topological_sort(nx.line_graph(DG)))
    [(1, 2), (2, 3)]

    Notes
    -----
    This algorithm is based on a description and proof in
    "Introduction to Algorithms: A Creative Approach" [1]_ .

    See also
    --------
    is_directed_acyclic_graph, lexicographical_topological_sort

    References
    ----------
    .. [1] Manber, U. (1989).
       *Introduction to Algorithms - A Creative Approach.* Addison-Wesley.
    """
    # Flatten the topological generations into a single node stream.
    for layer in nx.topological_generations(G):
        for node in layer:
            yield node
311
+
312
+
313
@nx._dispatchable
def lexicographical_topological_sort(G, key=None):
    """Generate the nodes in the unique lexicographical topological sort order.

    Generates a unique ordering of nodes by first sorting topologically (for which there are often
    multiple valid orderings) and then additionally by sorting lexicographically.

    A topological sort arranges the nodes of a directed graph so that the
    upstream node of each directed edge precedes the downstream node.
    It is always possible to find a solution for directed graphs that have no cycles.
    There may be more than one valid solution.

    Lexicographical sorting is just sorting alphabetically. It is used here to break ties in the
    topological sort and to determine a single, unique ordering.  This can be useful in comparing
    sort results.

    The lexicographical order can be customized by providing a function to the `key=` parameter.
    The definition of the key function is the same as used in python's built-in `sort()`.
    The function takes a single argument and returns a key to use for sorting purposes.

    Lexicographical sorting can fail if the node names are un-sortable. See the example below.
    The solution is to provide a function to the `key=` argument that returns sortable keys.


    Parameters
    ----------
    G : NetworkX digraph
        A directed acyclic graph (DAG)

    key : function, optional
        A function of one argument that converts a node name to a comparison key.
        It defines and resolves ambiguities in the sort order.  Defaults to the identity function.

    Yields
    ------
    nodes
        Yields the nodes of G in lexicographical topological sort order.

    Raises
    ------
    NetworkXError
        Topological sort is defined for directed graphs only. If the graph `G`
        is undirected, a :exc:`NetworkXError` is raised.

    NetworkXUnfeasible
        If `G` is not a directed acyclic graph (DAG) no topological sort exists
        and a :exc:`NetworkXUnfeasible` exception is raised.  This can also be
        raised if `G` is changed while the returned iterator is being processed

    RuntimeError
        If `G` is changed while the returned iterator is being processed.

    TypeError
        Results from un-sortable node names.
        Consider using `key=` parameter to resolve ambiguities in the sort order.

    Examples
    --------
    >>> DG = nx.DiGraph([(2, 1), (2, 5), (1, 3), (1, 4), (5, 4)])
    >>> list(nx.lexicographical_topological_sort(DG))
    [2, 1, 3, 5, 4]
    >>> list(nx.lexicographical_topological_sort(DG, key=lambda x: -x))
    [2, 5, 1, 4, 3]

    The sort will fail for any graph with integer and string nodes. Comparison of integer to strings
    is not defined in python.  Is 3 greater or less than 'red'?

    >>> DG = nx.DiGraph([(1, "red"), (3, "red"), (1, "green"), (2, "blue")])
    >>> list(nx.lexicographical_topological_sort(DG))
    Traceback (most recent call last):
    ...
    TypeError: '<' not supported between instances of 'str' and 'int'
    ...

    Incomparable nodes can be resolved using a `key` function. This example function
    allows comparison of integers and strings by returning a tuple where the first
    element is True for `str`, False otherwise. The second element is the node name.
    This groups the strings and integers separately so they can be compared only among themselves.

    >>> key = lambda node: (isinstance(node, str), node)
    >>> list(nx.lexicographical_topological_sort(DG, key=key))
    [1, 2, 3, 'blue', 'green', 'red']

    Notes
    -----
    This algorithm is based on a description and proof in
    "Introduction to Algorithms: A Creative Approach" [1]_ .

    See also
    --------
    topological_sort

    References
    ----------
    .. [1] Manber, U. (1989).
       *Introduction to Algorithms - A Creative Approach.* Addison-Wesley.
    """
    if not G.is_directed():
        msg = "Topological sort not defined on undirected graphs."
        raise nx.NetworkXError(msg)

    if key is None:
        # Default: compare the node objects themselves.
        def key(node):
            return node

    # Enumeration order is used as a deterministic tie-breaker so the heap
    # never has to compare raw node objects when keys are equal.
    nodeid_map = {n: i for i, n in enumerate(G)}

    def create_tuple(node):
        # (key, insertion index, node) -- only the first two fields are
        # ever compared by the heap; the node rides along as payload.
        return key(node), nodeid_map[node], node

    indegree_map = {v: d for v, d in G.in_degree() if d > 0}
    # These nodes have zero indegree and ready to be returned.
    zero_indegree = [create_tuple(v) for v, d in G.in_degree() if d == 0]
    heapq.heapify(zero_indegree)

    while zero_indegree:
        _, _, node = heapq.heappop(zero_indegree)

        if node not in G:
            raise RuntimeError("Graph changed during iteration")
        for _, child in G.edges(node):
            try:
                indegree_map[child] -= 1
            except KeyError as err:
                raise RuntimeError("Graph changed during iteration") from err
            if indegree_map[child] == 0:
                try:
                    heapq.heappush(zero_indegree, create_tuple(child))
                except TypeError as err:
                    # Chain explicitly so the original comparison error is
                    # preserved as the direct cause.
                    raise TypeError(
                        f"{err}\nConsider using `key=` parameter to resolve ambiguities in the sort order."
                    ) from err
                del indegree_map[child]

        yield node

    # Any nodes still holding positive in-degree were never released,
    # which means they sit on a cycle (or the graph was mutated).
    if indegree_map:
        msg = "Graph contains a cycle or graph changed during iteration"
        raise nx.NetworkXUnfeasible(msg)
453
+
454
+
455
@not_implemented_for("undirected")
@nx._dispatchable
def all_topological_sorts(G):
    """Returns a generator of _all_ topological sorts of the directed graph G.

    A topological sort is a nonunique permutation of the nodes such that an
    edge from u to v implies that u appears before v in the topological sort
    order.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed graph

    Yields
    ------
    topological_sort_order : list
        a list of nodes in `G`, representing one of the topological sort orders

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed
    NetworkXUnfeasible
        If `G` is not acyclic

    Examples
    --------
    To enumerate all topological sorts of directed graph:

    >>> DG = nx.DiGraph([(1, 2), (2, 3), (2, 4)])
    >>> list(nx.all_topological_sorts(DG))
    [[1, 2, 4, 3], [1, 2, 3, 4]]

    Notes
    -----
    Implements an iterative version of the algorithm given in [1].

    References
    ----------
    .. [1] Knuth, Donald E., Szwarcfiter, Jayme L. (1974).
       "A Structured Program to Generate All Topological Sorting Arrangements"
       Information Processing Letters, Volume 2, Issue 6, 1974, Pages 153-157,
       ISSN 0020-0190,
       https://doi.org/10.1016/0020-0190(74)90001-5.
       Elsevier (North-Holland), Amsterdam
    """
    if not G.is_directed():
        raise nx.NetworkXError("Topological sort not defined on undirected graphs.")

    # the names of count and D are chosen to match the global variables in [1]
    # number of edges originating in a vertex v
    count = dict(G.in_degree())
    # vertices with indegree 0
    D = deque([v for v, d in G.in_degree() if d == 0])
    # stack of first value chosen at a position k in the topological sort
    bases = []
    # the (partial) topological order built so far; doubles as the
    # backtracking stack -- positions are undone by popping from it
    current_sort = []

    # do-while construct
    while True:
        # every candidate in D must currently have all predecessors placed
        assert all(count[v] == 0 for v in D)

        if len(current_sort) == len(G):
            # a complete ordering has been built -- emit a copy so the
            # caller cannot observe later backtracking mutations
            yield list(current_sort)

            # clean-up stack: backtrack until a position with untried
            # alternatives is found
            while len(current_sort) > 0:
                assert len(bases) == len(current_sort)
                q = current_sort.pop()

                # "restores" all edges (q, x)
                # NOTE: it is important to iterate over edges instead
                # of successors, so count is updated correctly in multigraphs
                for _, j in G.out_edges(q):
                    count[j] += 1
                    assert count[j] >= 0
                # remove entries from D
                while len(D) > 0 and count[D[-1]] > 0:
                    D.pop()

                # corresponds to a circular shift of the values in D
                # if the first value chosen (the base) is in the first
                # position of D again, we are done and need to consider the
                # previous condition
                D.appendleft(q)
                if D[-1] == bases[-1]:
                    # all possible values have been chosen at current position
                    # remove corresponding marker
                    bases.pop()
                else:
                    # there are still elements that have not been fixed
                    # at the current position in the topological sort
                    # stop removing elements, escape inner loop
                    break

        else:
            if len(D) == 0:
                # no candidate has indegree 0 but the sort is incomplete
                raise nx.NetworkXUnfeasible("Graph contains a cycle.")

            # choose next node
            q = D.pop()
            # "erase" all edges (q, x)
            # NOTE: it is important to iterate over edges instead
            # of successors, so count is updated correctly in multigraphs
            for _, j in G.out_edges(q):
                count[j] -= 1
                assert count[j] >= 0
                if count[j] == 0:
                    D.append(j)
            current_sort.append(q)

            # base for current position might _not_ be fixed yet
            if len(bases) < len(current_sort):
                bases.append(q)

        # bases empty means every alternative at every position was tried
        if len(bases) == 0:
            break
573
+
574
+
575
@nx._dispatchable
def is_aperiodic(G):
    """Returns True if `G` is aperiodic.

    A directed graph is aperiodic if there is no integer k > 1 that
    divides the length of every cycle in the graph.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed graph

    Returns
    -------
    bool
        True if the graph is aperiodic False otherwise

    Raises
    ------
    NetworkXError
        If `G` is not directed

    Examples
    --------
    A graph consisting of one cycle, the length of which is 2. Therefore ``k = 2``
    divides the length of every cycle in the graph and thus the graph
    is *not aperiodic*::

        >>> DG = nx.DiGraph([(1, 2), (2, 1)])
        >>> nx.is_aperiodic(DG)
        False

    A graph consisting of two cycles: one of length 2 and the other of length 3.
    The cycle lengths are coprime, so there is no single value of k where ``k > 1``
    that divides each cycle length and therefore the graph is *aperiodic*::

        >>> DG = nx.DiGraph([(1, 2), (2, 3), (3, 1), (1, 4), (4, 1)])
        >>> nx.is_aperiodic(DG)
        True

    A graph consisting of two cycles: one of length 2 and the other of length 4.
    The lengths of the cycles share a common factor ``k = 2``, and therefore
    the graph is *not aperiodic*::

        >>> DG = nx.DiGraph([(1, 2), (2, 1), (3, 4), (4, 5), (5, 6), (6, 3)])
        >>> nx.is_aperiodic(DG)
        False

    An acyclic graph, therefore the graph is *not aperiodic*::

        >>> DG = nx.DiGraph([(1, 2), (2, 3)])
        >>> nx.is_aperiodic(DG)
        False

    Notes
    -----
    This uses the method outlined in [1]_, which runs in $O(m)$ time
    given $m$ edges in `G`. Note that a graph is not aperiodic if it is
    acyclic as every integer trivially divides length 0 cycles.

    References
    ----------
    .. [1] Jarvis, J. P.; Shier, D. R. (1996),
       "Graph-theoretic analysis of finite Markov chains,"
       in Shier, D. R.; Wallenius, K. T., Applied Mathematical Modeling:
       A Multidisciplinary Approach, CRC Press.
    """
    if not G.is_directed():
        raise nx.NetworkXError("is_aperiodic not defined for undirected graphs")
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("Graph has no nodes.")
    # BFS from an arbitrary start node, recording each node's BFS level.
    s = arbitrary_element(G)
    levels = {s: 0}
    this_level = [s]
    # g accumulates the gcd of "cycle length witnesses": each non-tree
    # edge (u, v) closes a cycle of length levels[u] - levels[v] + 1.
    g = 0
    lev = 1
    while this_level:
        next_level = []
        for u in this_level:
            for v in G[u]:
                if v in levels:  # Non-Tree Edge
                    g = gcd(g, levels[u] - levels[v] + 1)
                else:  # Tree Edge
                    next_level.append(v)
                    levels[v] = lev
        this_level = next_level
        lev += 1
    if len(levels) == len(G):  # All nodes in tree
        # aperiodic iff the gcd of all witnessed cycle lengths is 1
        return g == 1
    else:
        # some nodes were unreachable from s; recurse on that remainder
        return g == 1 and nx.is_aperiodic(G.subgraph(set(G) - set(levels)))
666
+
667
+
668
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def transitive_closure(G, reflexive=False):
    """Returns transitive closure of a graph

    The transitive closure of G = (V,E) is a graph G+ = (V,E+) such that
    for all v, w in V there is an edge (v, w) in E+ if and only if there
    is a path from v to w in G.

    Handling of paths from v to v has some flexibility within this definition.
    A reflexive transitive closure creates a self-loop for the path
    from v to v of length 0. The usual transitive closure creates a
    self-loop only if a cycle exists (a path from v to v with length > 0).
    We also allow an option for no self-loops.

    Parameters
    ----------
    G : NetworkX Graph
        A directed/undirected graph/multigraph.
    reflexive : Bool or None, optional (default: False)
        Determines when cycles create self-loops in the Transitive Closure.
        If True, trivial cycles (length 0) create self-loops. The result
        is a reflexive transitive closure of G.
        If False (the default) non-trivial cycles create self-loops.
        If None, self-loops are not created.

    Returns
    -------
    NetworkX graph
        The transitive closure of `G`

    Raises
    ------
    NetworkXError
        If `reflexive` not in `{None, True, False}`

    Examples
    --------
    The treatment of trivial (i.e. length 0) cycles is controlled by the
    `reflexive` parameter.

    Trivial (i.e. length 0) cycles do not create self-loops when
    ``reflexive=False`` (the default)::

        >>> DG = nx.DiGraph([(1, 2), (2, 3)])
        >>> TC = nx.transitive_closure(DG, reflexive=False)
        >>> TC.edges()
        OutEdgeView([(1, 2), (1, 3), (2, 3)])

    However, nontrivial (i.e. length greater than 0) cycles create self-loops
    when ``reflexive=False`` (the default)::

        >>> DG = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
        >>> TC = nx.transitive_closure(DG, reflexive=False)
        >>> TC.edges()
        OutEdgeView([(1, 2), (1, 3), (1, 1), (2, 3), (2, 1), (2, 2), (3, 1), (3, 2), (3, 3)])

    Trivial cycles (length 0) create self-loops when ``reflexive=True``::

        >>> DG = nx.DiGraph([(1, 2), (2, 3)])
        >>> TC = nx.transitive_closure(DG, reflexive=True)
        >>> TC.edges()
        OutEdgeView([(1, 2), (1, 1), (1, 3), (2, 3), (2, 2), (3, 3)])

    And the third option is not to create self-loops at all when ``reflexive=None``::

        >>> DG = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
        >>> TC = nx.transitive_closure(DG, reflexive=None)
        >>> TC.edges()
        OutEdgeView([(1, 2), (1, 3), (2, 3), (2, 1), (3, 1), (3, 2)])

    References
    ----------
    .. [1] https://www.ics.uci.edu/~eppstein/PADS/PartialOrder.py
    """
    # Validate the parameter *before* copying G, so a bad argument fails
    # fast instead of after a potentially expensive copy.
    if reflexive not in {None, True, False}:
        raise nx.NetworkXError("Incorrect value for the parameter `reflexive`")

    TC = G.copy()

    # `reflexive` is loop-invariant, so branch once instead of re-testing
    # it for every node.
    if reflexive is None:
        # Never add self-loops.
        for v in G:
            TC.add_edges_from((v, u) for u in nx.descendants(G, v) if u not in TC[v])
    elif reflexive is True:
        # Trivial (length-0) cycles also create self-loops: include v itself.
        for v in G:
            TC.add_edges_from(
                (v, u) for u in nx.descendants(G, v) | {v} if u not in TC[v]
            )
    else:  # reflexive is False
        # Only non-trivial cycles create self-loops; edge_bfs revisits v
        # exactly when v lies on a cycle.
        for v in G:
            TC.add_edges_from((v, e[1]) for e in nx.edge_bfs(G, v) if e[1] not in TC[v])

    return TC
758
+
759
+
760
@not_implemented_for("undirected")
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def transitive_closure_dag(G, topo_order=None):
    """Returns the transitive closure of a directed acyclic graph.

    This function is faster than the function `transitive_closure`, but fails
    if the graph has a cycle.

    The transitive closure of G = (V,E) is a graph G+ = (V,E+) such that
    for all v, w in V there is an edge (v, w) in E+ if and only if there
    is a non-null path from v to w in G.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed acyclic graph (DAG)

    topo_order: list or tuple, optional
        A topological order for G (if None, the function will compute one)

    Returns
    -------
    NetworkX DiGraph
        The transitive closure of `G`

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed
    NetworkXUnfeasible
        If `G` has a cycle

    Examples
    --------
    >>> DG = nx.DiGraph([(1, 2), (2, 3)])
    >>> TC = nx.transitive_closure_dag(DG)
    >>> TC.edges()
    OutEdgeView([(1, 2), (1, 3), (2, 3)])

    Notes
    -----
    This algorithm is probably simple enough to be well-known but I didn't find
    a mention in the literature.
    """
    if topo_order is None:
        topo_order = list(topological_sort(G))

    TC = G.copy()

    # Walk the vertices in reverse topological order.  When we reach a
    # vertex, every vertex after it has already been fully closed, so
    # linking it to its distance-2 descendants in TC is enough to reach
    # everything further away.
    for node in reversed(topo_order):
        two_hop_targets = nx.descendants_at_distance(TC, node, 2)
        TC.add_edges_from((node, target) for target in two_hop_targets)

    return TC
815
+
816
+
817
@not_implemented_for("undirected")
@nx._dispatchable(returns_graph=True)
def transitive_reduction(G):
    """Returns transitive reduction of a directed graph

    The transitive reduction of G = (V,E) is a graph G- = (V,E-) such that
    for all v,w in V there is an edge (v,w) in E- if and only if (v,w) is
    in E and there is no path from v to w in G with length greater than 1.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed acyclic graph (DAG)

    Returns
    -------
    NetworkX DiGraph
        The transitive reduction of `G`

    Raises
    ------
    NetworkXError
        If `G` is not a directed acyclic graph (DAG) transitive reduction is
        not uniquely defined and a :exc:`NetworkXError` exception is raised.

    Examples
    --------
    To perform transitive reduction on a DiGraph:

    >>> DG = nx.DiGraph([(1, 2), (2, 3), (1, 3)])
    >>> TR = nx.transitive_reduction(DG)
    >>> list(TR.edges)
    [(1, 2), (2, 3)]

    To avoid unnecessary data copies, this implementation does not return a
    DiGraph with node/edge data.
    To perform transitive reduction on a DiGraph and transfer node/edge data:

    >>> DG = nx.DiGraph()
    >>> DG.add_edges_from([(1, 2), (2, 3), (1, 3)], color="red")
    >>> TR = nx.transitive_reduction(DG)
    >>> TR.add_nodes_from(DG.nodes(data=True))
    >>> TR.add_edges_from((u, v, DG.edges[u, v]) for u, v in TR.edges)
    >>> list(TR.edges(data=True))
    [(1, 2, {'color': 'red'}), (2, 3, {'color': 'red'})]

    References
    ----------
    https://en.wikipedia.org/wiki/Transitive_reduction

    """
    if not is_directed_acyclic_graph(G):
        msg = "Directed Acyclic Graph required for transitive_reduction"
        raise nx.NetworkXError(msg)
    TR = nx.DiGraph()
    TR.add_nodes_from(G.nodes())
    # memoized descendant sets, evicted once no longer needed (see below)
    descendants = {}
    # count before removing set stored in descendants
    check_count = dict(G.in_degree)
    for u in G:
        # start with all direct successors; prune the ones reachable
        # through some other successor
        u_nbrs = set(G[u])
        for v in G[u]:
            if v in u_nbrs:
                if v not in descendants:
                    # lazily compute and cache v's full descendant set
                    descendants[v] = {y for x, y in nx.dfs_edges(G, v)}
                # anything reachable through v is redundant as a direct edge
                u_nbrs -= descendants[v]
            # each predecessor of v consumes one use of its cached set;
            # free the memory once every predecessor has been processed
            check_count[v] -= 1
            if check_count[v] == 0:
                del descendants[v]
        TR.add_edges_from((u, v) for v in u_nbrs)
    return TR
888
+
889
+
890
@not_implemented_for("undirected")
@nx._dispatchable
def antichains(G, topo_order=None):
    """Generates antichains from a directed acyclic graph (DAG).

    An antichain is a subset of a partially ordered set such that any
    two elements in the subset are incomparable.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed acyclic graph (DAG)

    topo_order: list or tuple, optional
        A topological order for G (if None, the function will compute one)

    Yields
    ------
    antichain : list
        a list of nodes in `G` representing an antichain

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed

    NetworkXUnfeasible
        If `G` contains a cycle

    Examples
    --------
    >>> DG = nx.DiGraph([(1, 2), (1, 3)])
    >>> list(nx.antichains(DG))
    [[], [3], [2], [2, 3], [1]]

    Notes
    -----
    This function was originally developed by Peter Jipsen and Franco Saliola
    for the SAGE project. It's included in NetworkX with permission from the
    authors. Original SAGE code at:

    https://github.com/sagemath/sage/blob/master/src/sage/combinat/posets/hasse_diagram.py

    References
    ----------
    .. [1] Free Lattices, by R. Freese, J. Jezek and J. B. Nation,
       AMS, Vol 42, 1995, p. 226.
    """
    if topo_order is None:
        topo_order = list(nx.topological_sort(G))

    # The transitive closure gives O(1) comparability tests:
    # x and t are comparable iff t in TC[x] or x in TC[t].
    TC = nx.transitive_closure_dag(G, topo_order)
    antichains_stacks = [([], list(reversed(topo_order)))]

    while antichains_stacks:
        (antichain, stack) = antichains_stacks.pop()
        # Invariant:
        #  - the elements of antichain are independent
        #  - the elements of stack are independent from those of antichain
        yield antichain
        while stack:
            x = stack.pop()
            new_antichain = antichain + [x]
            # keep only candidates incomparable with x, preserving the invariant
            new_stack = [t for t in stack if not ((t in TC[x]) or (x in TC[t]))]
            antichains_stacks.append((new_antichain, new_stack))
955
+
956
+
957
@not_implemented_for("undirected")
@nx._dispatchable(edge_attrs={"weight": "default_weight"})
def dag_longest_path(G, weight="weight", default_weight=1, topo_order=None):
    """Returns the longest path in a directed acyclic graph (DAG).

    If `G` has edges with `weight` attribute the edge data are used as
    weight values.

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed acyclic graph (DAG)

    weight : str, optional
        Edge data key to use for weight

    default_weight : int, optional
        The weight of edges that do not have a weight attribute

    topo_order: list or tuple, optional
        A topological order for `G` (if None, the function will compute one)

    Returns
    -------
    list
        Longest path

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed

    Examples
    --------
    >>> DG = nx.DiGraph(
    ...     [(0, 1, {"cost": 1}), (1, 2, {"cost": 1}), (0, 2, {"cost": 42})]
    ... )
    >>> list(nx.all_simple_paths(DG, 0, 2))
    [[0, 1, 2], [0, 2]]
    >>> nx.dag_longest_path(DG)
    [0, 1, 2]
    >>> nx.dag_longest_path(DG, weight="cost")
    [0, 2]

    In the case where multiple valid topological orderings exist, `topo_order`
    can be used to specify a specific ordering:

    >>> DG = nx.DiGraph([(0, 1), (0, 2)])
    >>> sorted(nx.all_topological_sorts(DG))  # Valid topological orderings
    [[0, 1, 2], [0, 2, 1]]
    >>> nx.dag_longest_path(DG, topo_order=[0, 1, 2])
    [0, 1]
    >>> nx.dag_longest_path(DG, topo_order=[0, 2, 1])
    [0, 2]

    See also
    --------
    dag_longest_path_length

    """
    # empty graph -> empty path
    if not G:
        return []

    if topo_order is None:
        topo_order = nx.topological_sort(G)

    dist = {}  # stores {v : (length, u)}
    for v in topo_order:
        # candidate (distance, predecessor) pairs for v; in a multigraph
        # the heaviest parallel edge between u and v is the one that counts
        us = [
            (
                dist[u][0]
                + (
                    max(data.values(), key=lambda x: x.get(weight, default_weight))
                    if G.is_multigraph()
                    else data
                ).get(weight, default_weight),
                u,
            )
            for u, data in G.pred[v].items()
        ]

        # Use the best predecessor if there is one and its distance is
        # non-negative, otherwise terminate.
        maxu = max(us, key=lambda x: x[0]) if us else (0, v)
        # (0, v) is a self-sentinel: a node pointing at itself marks the
        # start of a path
        dist[v] = maxu if maxu[0] >= 0 else (0, v)

    # Walk predecessor links back from the farthest node until the
    # self-sentinel (u == v) is reached.
    u = None
    v = max(dist, key=lambda x: dist[x][0])
    path = []
    while u != v:
        path.append(v)
        u = v
        v = dist[v][1]

    path.reverse()
    return path
1053
+
1054
+
1055
@not_implemented_for("undirected")
@nx._dispatchable(edge_attrs={"weight": "default_weight"})
def dag_longest_path_length(G, weight="weight", default_weight=1):
    """Returns the longest path length in a DAG

    Parameters
    ----------
    G : NetworkX DiGraph
        A directed acyclic graph (DAG)

    weight : string, optional
        Edge data key to use for weight

    default_weight : int, optional
        The weight of edges that do not have a weight attribute

    Returns
    -------
    int
        Longest path length

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed

    Examples
    --------
    >>> DG = nx.DiGraph(
    ...     [(0, 1, {"cost": 1}), (1, 2, {"cost": 1}), (0, 2, {"cost": 42})]
    ... )
    >>> list(nx.all_simple_paths(DG, 0, 2))
    [[0, 1, 2], [0, 2]]
    >>> nx.dag_longest_path_length(DG)
    2
    >>> nx.dag_longest_path_length(DG, weight="cost")
    42

    See also
    --------
    dag_longest_path
    """
    # Recover the longest path itself, then total its edge weights.
    path = nx.dag_longest_path(G, weight, default_weight)

    if G.is_multigraph():
        # Between consecutive path nodes, the heaviest parallel edge is
        # the one the longest-path computation used.
        path_length = sum(
            max(attrs.get(weight, default_weight) for attrs in G[u][v].values())
            for u, v in pairwise(path)
        )
    else:
        path_length = sum(
            G[u][v].get(weight, default_weight) for u, v in pairwise(path)
        )

    return path_length
1108
+
1109
+
1110
@nx._dispatchable
def root_to_leaf_paths(G):
    """Yields root-to-leaf paths in a directed acyclic graph.

    `G` must be a directed acyclic graph. If not, the behavior of this
    function is undefined. A "root" in this graph is a node of in-degree
    zero and a "leaf" a node of out-degree zero.

    When invoked, this function iterates over each path from any root to
    any leaf. A path is a list of nodes.

    """
    roots = (v for v, d in G.in_degree() if d == 0)
    leaves = (v for v, d in G.out_degree() if d == 0)
    # Yield every simple path for every (root, leaf) pair.  This replaces
    # the former chain(starmap(...)) construction with the direct
    # `yield from` idiom the original TODO comment asked for.
    for root, leaf in product(roots, leaves):
        yield from nx.all_simple_paths(G, root, leaf)
1127
+
1128
+
1129
@not_implemented_for("multigraph")
@not_implemented_for("undirected")
@nx._dispatchable(returns_graph=True)
def dag_to_branching(G):
    """Returns a branching representing all (overlapping) paths from
    root nodes to leaf nodes in the given directed acyclic graph.

    As described in :mod:`networkx.algorithms.tree.recognition`, a
    *branching* is a directed forest in which each node has at most one
    parent.  In other words, a branching is a disjoint union of
    *arborescences*.  Each node of in-degree zero in `G` becomes the root
    of one arborescence, and the branching contains one leaf node for
    each distinct root-to-leaf path in `G`.

    A node `v` of `G` with *k* parents appears *k* times in the result,
    once per parent, with the sub-DAG below `v` duplicated for each copy.

    Parameters
    ----------
    G : NetworkX graph
        A directed acyclic graph.

    Returns
    -------
    DiGraph
        A branching whose root-to-leaf paths are in bijection with the
        root-to-leaf paths of `G`.  Each node carries a ``'source'``
        attribute naming the original node it was copied from; no other
        graph, node, or edge attributes are transferred.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is not directed, or if `G` is a multigraph.

    HasACycle
        If `G` is not acyclic.

    Examples
    --------
    To examine which nodes in the returned branching were produced by
    which original node in the directed acyclic graph, collect the mapping
    from source node to new nodes into a dictionary.  For example, consider
    the directed diamond graph::

        >>> from collections import defaultdict
        >>> from operator import itemgetter
        >>>
        >>> G = nx.DiGraph(nx.utils.pairwise("abd"))
        >>> G.add_edges_from(nx.utils.pairwise("acd"))
        >>> B = nx.dag_to_branching(G)
        >>>
        >>> sources = defaultdict(set)
        >>> for v, source in B.nodes(data="source"):
        ...     sources[source].add(v)
        >>> len(sources["a"])
        1
        >>> len(sources["d"])
        2

    To copy node attributes from the original graph to the new graph,
    you can use a dictionary like the one constructed in the above
    example::

        >>> for source, nodes in sources.items():
        ...     for v in nodes:
        ...         B.nodes[v].update(G.nodes[source])

    Notes
    -----
    This function is not idempotent: node labels in the returned branching
    may be freshly generated on every invocation, and may not be integers.
    Use :func:`networkx.convert_node_labels_to_integers` to relabel them.

    The implementation delegates to :func:`networkx.prefix_tree` and is
    therefore subject to that function's limitations.

    """
    if has_cycle(G):
        msg = "dag_to_branching is only defined for acyclic graphs"
        raise nx.HasACycle(msg)
    # Feed every root-to-leaf path into a prefix tree; shared prefixes of
    # the paths collapse into shared branches.
    branching = nx.prefix_tree(root_to_leaf_paths(G))
    # prefix_tree adds a synthetic root (0) and a synthetic sink (-1);
    # neither corresponds to a node of G, so drop both.
    branching.remove_node(0)
    branching.remove_node(-1)
    return branching
1226
+
1227
+
1228
@not_implemented_for("undirected")
@nx._dispatchable
def compute_v_structures(G):
    """Yields 3-node tuples that represent the v-structures in `G`.

    .. deprecated:: 3.4

       `compute_v_structures` actually yields colliders. It will be removed in
       version 3.6. Use `nx.dag.v_structures` or `nx.dag.colliders` instead.

    Colliders are triples in the directed acyclic graph (DAG) where two parent nodes
    point to the same child node. V-structures are colliders where the two parent
    nodes are not adjacent. In a causal graph setting, the parents do not directly
    depend on each other, but conditioning on the child node provides an association.

    Parameters
    ----------
    G : graph
        A networkx `~networkx.DiGraph`.

    Yields
    ------
    A 3-tuple representation of a v-structure
        Each v-structure is a 3-tuple with the parent, collider, and other parent.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is an undirected graph.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (0, 4), (3, 1), (2, 4), (0, 5), (4, 5), (1, 5)])
    >>> nx.is_directed_acyclic_graph(G)
    True
    >>> list(nx.compute_v_structures(G))
    [(0, 4, 2), (0, 5, 4), (0, 5, 1), (4, 5, 1)]

    See Also
    --------
    v_structures
    colliders

    Notes
    -----
    This function was written to be used on DAGs, however it works on cyclic graphs
    too. Since colliders are referred to in the cyclic causal graph literature
    [2]_ we allow cyclic graphs in this function. It is suggested that you test if
    your input graph is acyclic as in the example if you want that property.

    References
    ----------
    .. [1] `Pearl's PRIMER <https://bayes.cs.ucla.edu/PRIMER/primer-ch2.pdf>`_
        Ch-2 page 50: v-structures def.
    .. [2] A Hyttinen, P.O. Hoyer, F. Eberhardt, M Järvisalo, (2013)
        "Discovering cyclic causal models with latent variables:
        a general SAT-based procedure", UAI'13: Proceedings of the Twenty-Ninth
        Conference on Uncertainty in Artificial Intelligence, pg 301–310,
        `doi:10.5555/3023638.3023669 <https://dl.acm.org/doi/10.5555/3023638.3023669>`_
    """
    # Local import keeps `warnings` off the module's import-time cost.
    import warnings

    warnings.warn(
        (
            "\n\n`compute_v_structures` actually yields colliders. It will be\n"
            "removed in version 3.6. Use `nx.dag.v_structures` or `nx.dag.colliders`\n"
            "instead.\n"
        ),
        category=DeprecationWarning,
        # NOTE(review): stacklevel=5 presumably skips the dispatch/decorator
        # wrapper frames so the warning points at the user's call site --
        # confirm against the decorator stack if those wrappers change.
        stacklevel=5,
    )

    # Deprecated alias: delegate to colliders() unchanged.
    return colliders(G)
1301
+
1302
+
1303
@not_implemented_for("undirected")
@nx._dispatchable
def v_structures(G):
    """Yields 3-node tuples that represent the v-structures in `G`.

    Colliders are triples in the directed acyclic graph (DAG) where two parent nodes
    point to the same child node. V-structures are colliders where the two parent
    nodes are not adjacent. In a causal graph setting, the parents do not directly
    depend on each other, but conditioning on the child node provides an association.

    Parameters
    ----------
    G : graph
        A networkx `~networkx.DiGraph`.

    Yields
    ------
    A 3-tuple representation of a v-structure
        Each v-structure is a 3-tuple with the parent, collider, and other parent.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is an undirected graph.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (0, 4), (3, 1), (2, 4), (0, 5), (4, 5), (1, 5)])
    >>> nx.is_directed_acyclic_graph(G)
    True
    >>> list(nx.dag.v_structures(G))
    [(0, 4, 2), (0, 5, 1), (4, 5, 1)]

    See Also
    --------
    colliders

    Notes
    -----
    This function was written to be used on DAGs, however it works on cyclic graphs
    too. Since colliders are referred to in the cyclic causal graph literature
    [2]_ we allow cyclic graphs in this function. It is suggested that you test if
    your input graph is acyclic as in the example if you want that property.

    References
    ----------
    .. [1] `Pearl's PRIMER <https://bayes.cs.ucla.edu/PRIMER/primer-ch2.pdf>`_
        Ch-2 page 50: v-structures def.
    .. [2] A Hyttinen, P.O. Hoyer, F. Eberhardt, M Järvisalo, (2013)
        "Discovering cyclic causal models with latent variables:
        a general SAT-based procedure", UAI'13: Proceedings of the Twenty-Ninth
        Conference on Uncertainty in Artificial Intelligence, pg 301–310,
        `doi:10.5555/3023638.3023669 <https://dl.acm.org/doi/10.5555/3023638.3023669>`_
    """
    # A v-structure is a collider whose two parents are non-adjacent
    # (no edge in either direction between them).
    for parent_a, collider_node, parent_b in colliders(G):
        parents_adjacent = G.has_edge(parent_a, parent_b) or G.has_edge(
            parent_b, parent_a
        )
        if not parents_adjacent:
            yield (parent_a, collider_node, parent_b)
1360
+
1361
+
1362
@not_implemented_for("undirected")
@nx._dispatchable
def colliders(G):
    """Yield the colliders of the directed graph `G` as 3-tuples.

    Given three nodes A, B and C with edges ``A -> C`` and ``B -> C``, node C
    is a collider [1]_. In a causal-graph setting this means both A and B
    "cause" C, and conditioning on C induces an association between A and B
    even when no direct causal relationship exists between them.

    Parameters
    ----------
    G : graph
        A networkx `~networkx.DiGraph`.

    Yields
    ------
    tuple
        A 3-tuple ``(parent, collider, other_parent)`` for each collider.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is an undirected graph.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (0, 4), (3, 1), (2, 4), (0, 5), (4, 5), (1, 5)])
    >>> nx.is_directed_acyclic_graph(G)
    True
    >>> list(nx.dag.colliders(G))
    [(0, 4, 2), (0, 5, 4), (0, 5, 1), (4, 5, 1)]

    See Also
    --------
    v_structures

    Notes
    -----
    Although written with DAGs in mind, cyclic directed graphs are accepted
    as well, since colliders are also discussed in the cyclic causal-graph
    literature [2]_. Test acyclicity yourself (as in the example) if your
    application requires it.

    References
    ----------
    .. [1] `Wikipedia: Collider in causal graphs <https://en.wikipedia.org/wiki/Collider_(statistics)>`_
    .. [2] A Hyttinen, P.O. Hoyer, F. Eberhardt, M J ̈arvisalo, (2013)
        "Discovering cyclic causal models with latent variables:
        a general SAT-based procedure", UAI'13: Proceedings of the Twenty-Ninth
        Conference on Uncertainty in Artificial Intelligence, pg 301–310,
        `doi:10.5555/3023638.3023669 <https://dl.acm.org/doi/10.5555/3023638.3023669>`_
    """
    # Each unordered pair of parents of a node forms one collider triple.
    for child in G.nodes:
        parents = list(G.predecessors(child))
        for index, first_parent in enumerate(parents):
            for second_parent in parents[index + 1 :]:
                yield (first_parent, child, second_parent)
wemm/lib/python3.10/site-packages/networkx/algorithms/distance_measures.py ADDED
@@ -0,0 +1,1022 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Graph diameter, radius, eccentricity and other properties."""
2
+
3
+ import math
4
+
5
+ import networkx as nx
6
+ from networkx.utils import not_implemented_for
7
+
8
+ __all__ = [
9
+ "eccentricity",
10
+ "diameter",
11
+ "harmonic_diameter",
12
+ "radius",
13
+ "periphery",
14
+ "center",
15
+ "barycenter",
16
+ "resistance_distance",
17
+ "kemeny_constant",
18
+ "effective_graph_resistance",
19
+ ]
20
+
21
+
22
def _extrema_bounding(G, compute="diameter", weight=None):
    """Compute requested extreme distance metric of undirected graph G

    Computation is based on smart lower and upper bounds, and in practice
    linear in the number of nodes, rather than quadratic (except for some
    border cases such as complete graphs or circle shaped graphs).

    Parameters
    ----------
    G : NetworkX graph
        An undirected graph

    compute : string denoting the requesting metric
        "diameter" for the maximal eccentricity value,
        "radius" for the minimal eccentricity value,
        "periphery" for the set of nodes with eccentricity equal to the diameter,
        "center" for the set of nodes with eccentricity equal to the radius,
        "eccentricities" for the maximum distance from each node to all other nodes in G

    weight : string, function, or None
        If this is a string, then edge weights will be accessed via the
        edge attribute with this key (that is, the weight of the edge
        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
        such edge attribute exists, the weight of the edge is assumed to
        be one.

        If this is a function, the weight of an edge is the value
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
        return a number.

        If this is None, every edge has weight/distance/cost 1.

        Weights stored as floating point values can lead to small round-off
        errors in distances. Use integer weights to avoid this.

        Weights should be positive, since they are distances.

    Returns
    -------
    value : value of the requested metric
        int for "diameter" and "radius" or
        list of nodes for "center" and "periphery" or
        dictionary of eccentricity values keyed by node for "eccentricities"

    Raises
    ------
    NetworkXError
        If the graph consists of multiple components
    ValueError
        If `compute` is not one of "diameter", "radius", "periphery", "center", or "eccentricities".

    Notes
    -----
    This algorithm was proposed in [1]_ and discussed further in [2]_ and [3]_.

    References
    ----------
    .. [1] F. W. Takes, W. A. Kosters,
       "Determining the diameter of small world networks."
       Proceedings of the 20th ACM international conference on Information and knowledge management, 2011
       https://dl.acm.org/doi/abs/10.1145/2063576.2063748
    .. [2] F. W. Takes, W. A. Kosters,
       "Computing the Eccentricity Distribution of Large Graphs."
       Algorithms, 2013
       https://www.mdpi.com/1999-4893/6/1/100
    .. [3] M. Borassi, P. Crescenzi, M. Habib, W. A. Kosters, A. Marino, F. W. Takes,
       "Fast diameter and radius BFS-based computation in (weakly connected) real-world graphs: With an application to the six degrees of separation games. "
       Theoretical Computer Science, 2015
       https://www.sciencedirect.com/science/article/pii/S0304397515001644
    """
    # init variables
    degrees = dict(G.degree())  # start with the highest degree node
    minlowernode = max(degrees, key=degrees.get)
    N = len(degrees)  # number of nodes
    # alternate between smallest lower and largest upper bound
    high = False
    # status variables: per-node eccentricity bounds and the set of nodes
    # whose exact eccentricity is still unresolved
    ecc_lower = dict.fromkeys(G, 0)
    ecc_upper = dict.fromkeys(G, N)
    candidates = set(G)

    # (re)set bound extremes
    minlower = N
    maxlower = 0
    minupper = N
    maxupper = 0

    # repeat the following until there are no more candidates
    while candidates:
        if high:
            current = maxuppernode  # select node with largest upper bound
        else:
            current = minlowernode  # select node with smallest lower bound
        high = not high

        # get distances from/to current node and derive eccentricity
        dist = nx.shortest_path_length(G, source=current, weight=weight)

        if len(dist) != N:
            msg = "Cannot compute metric because graph is not connected."
            raise nx.NetworkXError(msg)
        current_ecc = max(dist.values())

        # (re)set bound extremes
        maxuppernode = None
        minlowernode = None

        # update node bounds using the triangle inequality:
        # |ecc(current) - d| <= ecc(i) <= ecc(current) + d
        for i in candidates:
            d = dist[i]
            ecc_lower[i] = max(ecc_lower[i], max(d, (current_ecc - d)))
            ecc_upper[i] = min(ecc_upper[i], current_ecc + d)

            # update min/max values of lower and upper bounds
            minlower = min(ecc_lower[i], minlower)
            maxlower = max(ecc_lower[i], maxlower)
            minupper = min(ecc_upper[i], minupper)
            maxupper = max(ecc_upper[i], maxupper)

        # update candidate set: drop nodes whose bounds prove they cannot
        # change the requested metric (pruning rules from [1]_/[2]_)
        if compute == "diameter":
            ruled_out = {
                i
                for i in candidates
                if ecc_upper[i] <= maxlower and 2 * ecc_lower[i] >= maxupper
            }
        elif compute == "radius":
            ruled_out = {
                i
                for i in candidates
                if ecc_lower[i] >= minupper and ecc_upper[i] + 1 <= 2 * minlower
            }
        elif compute == "periphery":
            ruled_out = {
                i
                for i in candidates
                if ecc_upper[i] < maxlower
                and (maxlower == maxupper or ecc_lower[i] > maxupper)
            }
        elif compute == "center":
            ruled_out = {
                i
                for i in candidates
                if ecc_lower[i] > minupper
                and (minlower == minupper or ecc_upper[i] + 1 < 2 * minlower)
            }
        elif compute == "eccentricities":
            # every node's exact eccentricity is needed; only converged
            # bounds (handled below) remove candidates
            ruled_out = set()
        else:
            msg = "compute must be one of 'diameter', 'radius', 'periphery', 'center', 'eccentricities'"
            raise ValueError(msg)

        # nodes whose bounds have converged are fully resolved
        ruled_out.update(i for i in candidates if ecc_lower[i] == ecc_upper[i])
        candidates -= ruled_out

        # updating maxuppernode and minlowernode for selection in next round;
        # ties are broken by preferring the higher-degree node
        for i in candidates:
            if (
                minlowernode is None
                or (
                    ecc_lower[i] == ecc_lower[minlowernode]
                    and degrees[i] > degrees[minlowernode]
                )
                or (ecc_lower[i] < ecc_lower[minlowernode])
            ):
                minlowernode = i

            if (
                maxuppernode is None
                or (
                    ecc_upper[i] == ecc_upper[maxuppernode]
                    and degrees[i] > degrees[maxuppernode]
                )
                or (ecc_upper[i] > ecc_upper[maxuppernode])
            ):
                maxuppernode = i

    # return the correct value of the requested metric
    if compute == "diameter":
        return maxlower
    if compute == "radius":
        return minupper
    if compute == "periphery":
        p = [v for v in G if ecc_lower[v] == maxlower]
        return p
    if compute == "center":
        c = [v for v in G if ecc_upper[v] == minupper]
        return c
    if compute == "eccentricities":
        # all bounds have converged, so the lower bounds are exact values
        return ecc_lower
    return None
241
+
242
+
243
@nx._dispatchable(edge_attrs="weight")
def eccentricity(G, v=None, sp=None, weight=None):
    """Returns the eccentricity of nodes in G.

    The eccentricity of a node v is the maximum distance from v to
    all other nodes in G.

    Parameters
    ----------
    G : NetworkX graph
        A graph

    v : node, optional
        Return value of specified node

    sp : dict of dicts, optional
        All pairs shortest path lengths as a dictionary of dictionaries

    weight : string, function, or None (default=None)
        If this is a string, edge weights are read from the edge attribute
        with this key (missing attributes default to weight 1). If this is
        a function, it must accept the two edge endpoints and the edge-data
        dict and return a number. If None, every edge has weight 1.

        Weights stored as floating point values can lead to small round-off
        errors in distances; weights should be positive, since they are
        distances.

    Returns
    -------
    ecc : dictionary
        A dictionary of eccentricity values keyed by node, or a single
        value if `v` is a single node of `G`.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> dict(nx.eccentricity(G))
    {1: 2, 2: 3, 3: 2, 4: 2, 5: 3}

    >>> dict(
    ...     nx.eccentricity(G, v=[1, 5])
    ... )  # This returns the eccentricity of node 1 & 5
    {1: 2, 5: 3}

    """
    n_nodes = G.order()
    ecc = {}
    for node in G.nbunch_iter(v):
        if sp is None:
            dist = nx.shortest_path_length(G, source=node, weight=weight)
            reached = len(dist)
        else:
            # `sp` must map each node to a dict of distances; anything else
            # (e.g. a list entry) trips a TypeError here.
            try:
                dist = sp[node]
                reached = len(dist)
            except TypeError as err:
                raise nx.NetworkXError('Format of "sp" is invalid.') from err
        if reached != n_nodes:
            # Some node is unreachable, so the eccentricity is infinite.
            if G.is_directed():
                msg = (
                    "Found infinite path length because the digraph is not"
                    " strongly connected"
                )
            else:
                msg = "Found infinite path length because the graph is not connected"
            raise nx.NetworkXError(msg)

        ecc[node] = max(dist.values())

    # A single node argument gets a scalar back instead of a dict.
    if v in G:
        return ecc[v]
    return ecc
332
+
333
+
334
@nx._dispatchable(edge_attrs="weight")
def diameter(G, e=None, usebounds=False, weight=None):
    """Returns the diameter of the graph G.

    The diameter is the maximum eccentricity.

    Parameters
    ----------
    G : NetworkX graph
        A graph

    e : eccentricity dictionary, optional
        A precomputed dictionary of eccentricities.

    usebounds : bool, optional (default=False)
        If True (and `e` is None and `G` is undirected), use the much
        faster bounds-based algorithm.

    weight : string, function, or None
        If this is a string, edge weights are read from the edge attribute
        with this key (missing attributes default to weight 1). If this is
        a function, it must accept the two edge endpoints and the edge-data
        dict and return a number. If None, every edge has weight 1.

        Weights stored as floating point values can lead to small round-off
        errors in distances; weights should be positive, since they are
        distances.

    Returns
    -------
    d : integer
        Diameter of graph

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> nx.diameter(G)
    3

    See Also
    --------
    eccentricity
    """
    # The bounds-based shortcut applies only when no eccentricities were
    # supplied and the graph is undirected.
    if usebounds is True and e is None and not G.is_directed():
        return _extrema_bounding(G, compute="diameter", weight=weight)
    ecc = eccentricity(G, weight=weight) if e is None else e
    return max(ecc.values())
388
+
389
+
390
@nx._dispatchable
def harmonic_diameter(G, sp=None):
    """Returns the harmonic diameter of the graph G.

    The harmonic diameter is the harmonic mean of the distances between all
    pairs of distinct vertices. Unlike the ordinary diameter or mean
    distance, it handles infinite distances gracefully (a graph that is a
    perfect matching, for instance, has harmonic diameter equal to the number
    of vertices minus one), so a meaningful value can be assigned to every
    graph, including disconnected ones.

    In [1]_ this quantity is called "connectivity length"; "harmonic
    diameter" is the more standard name from the theory of metric spaces.

    Parameters
    ----------
    G : NetworkX graph
        A graph

    sp : dict of dicts, optional
        All-pairs shortest path lengths as a dictionary of dictionaries

    Returns
    -------
    hd : float
        Harmonic diameter of graph

    References
    ----------
    .. [1] Massimo Marchiori and Vito Latora, "Harmony in the small-world".
           *Physica A: Statistical Mechanics and Its Applications*
           285(3-4), pages 539-546, 2000.
           <https://doi.org/10.1016/S0378-4371(00)00311-3>
    """
    n = G.order()

    inverse_sum = 0
    for node in G:
        if sp is None:
            lengths = nx.single_source_shortest_path_length(G, node)
        else:
            try:
                lengths = sp[node]
                len(lengths)  # probe: a malformed `sp` raises TypeError here
            except TypeError as err:
                raise nx.NetworkXError('Format of "sp" is invalid.') from err

        for d in lengths.values():
            # Zero distances contribute nothing: this skips the distance
            # from `node` to itself, and also zero-weight paths in
            # weighted graphs.
            if d != 0:
                inverse_sum += 1 / d

    if inverse_sum != 0:
        return n * (n - 1) / inverse_sum
    # No finite nonzero distances at all: infinite for multi-node graphs,
    # undefined (NaN) for the trivial single-node graph.
    return math.inf if n > 1 else math.nan
454
+
455
+
456
@nx._dispatchable(edge_attrs="weight")
def periphery(G, e=None, usebounds=False, weight=None):
    """Returns the periphery of the graph G.

    The periphery is the set of nodes with eccentricity equal to the diameter.

    Parameters
    ----------
    G : NetworkX graph
        A graph

    e : eccentricity dictionary, optional
        A precomputed dictionary of eccentricities.

    usebounds : bool, optional (default=False)
        If True (and `e` is None and `G` is undirected), use the much
        faster bounds-based algorithm.

    weight : string, function, or None
        If this is a string, edge weights are read from the edge attribute
        with this key (missing attributes default to weight 1). If this is
        a function, it must accept the two edge endpoints and the edge-data
        dict and return a number. If None, every edge has weight 1.

        Weights stored as floating point values can lead to small round-off
        errors in distances; weights should be positive, since they are
        distances.

    Returns
    -------
    p : list
        List of nodes in periphery

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> nx.periphery(G)
    [2, 5]

    See Also
    --------
    barycenter
    center
    """
    # The bounds-based shortcut applies only when no eccentricities were
    # supplied and the graph is undirected.
    if usebounds is True and e is None and not G.is_directed():
        return _extrema_bounding(G, compute="periphery", weight=weight)
    ecc = eccentricity(G, weight=weight) if e is None else e
    dia = max(ecc.values())
    return [node for node, value in ecc.items() if value == dia]
513
+
514
+
515
@nx._dispatchable(edge_attrs="weight")
def radius(G, e=None, usebounds=False, weight=None):
    """Returns the radius of the graph G.

    The radius is the minimum eccentricity.

    Parameters
    ----------
    G : NetworkX graph
        A graph

    e : eccentricity dictionary, optional
        A precomputed dictionary of eccentricities.

    usebounds : bool, optional (default=False)
        If True (and `e` is None and `G` is undirected), use the much
        faster bounds-based algorithm.

    weight : string, function, or None
        If this is a string, edge weights are read from the edge attribute
        with this key (missing attributes default to weight 1). If this is
        a function, it must accept the two edge endpoints and the edge-data
        dict and return a number. If None, every edge has weight 1.

        Weights stored as floating point values can lead to small round-off
        errors in distances; weights should be positive, since they are
        distances.

    Returns
    -------
    r : integer
        Radius of graph

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> nx.radius(G)
    2

    """
    # The bounds-based shortcut applies only when no eccentricities were
    # supplied and the graph is undirected.
    if usebounds is True and e is None and not G.is_directed():
        return _extrema_bounding(G, compute="radius", weight=weight)
    ecc = eccentricity(G, weight=weight) if e is None else e
    return min(ecc.values())
566
+
567
+
568
@nx._dispatchable(edge_attrs="weight")
def center(G, e=None, usebounds=False, weight=None):
    """Returns the center of the graph G.

    The center is the set of nodes with eccentricity equal to radius.

    Parameters
    ----------
    G : NetworkX graph
        A graph

    e : eccentricity dictionary, optional
        A precomputed dictionary of eccentricities.

    usebounds : bool, optional (default=False)
        If True (and `e` is None and `G` is undirected), use the much
        faster bounds-based algorithm.

    weight : string, function, or None
        If this is a string, edge weights are read from the edge attribute
        with this key (missing attributes default to weight 1). If this is
        a function, it must accept the two edge endpoints and the edge-data
        dict and return a number. If None, every edge has weight 1.

        Weights stored as floating point values can lead to small round-off
        errors in distances; weights should be positive, since they are
        distances.

    Returns
    -------
    c : list
        List of nodes in center

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> list(nx.center(G))
    [1, 3, 4]

    See Also
    --------
    barycenter
    periphery
    """
    # The bounds-based shortcut applies only when no eccentricities were
    # supplied and the graph is undirected.
    if usebounds is True and e is None and not G.is_directed():
        return _extrema_bounding(G, compute="center", weight=weight)
    ecc = eccentricity(G, weight=weight) if e is None else e
    rad = min(ecc.values())
    return [node for node, value in ecc.items() if value == rad]
625
+
626
+
627
@nx._dispatchable(edge_attrs="weight", mutates_input={"attr": 2})
def barycenter(G, weight=None, attr=None, sp=None):
    """Calculate barycenter of a connected graph, optionally with edge weights.

    The barycenter of a connected graph G is the subgraph induced by the
    nodes v minimizing the sum, over all nodes u of G, of the (possibly
    weighted) shortest-path distance from u to v. It is also called the
    median. See [West01]_, p. 78.

    Parameters
    ----------
    G : :class:`networkx.Graph`
        The connected graph.
    weight : :class:`str`, optional
        Passed through to
        :func:`~networkx.algorithms.shortest_paths.generic.shortest_path_length`.
    attr : :class:`str`, optional
        If given, write the value of the objective function to each node's
        `attr` attribute. Otherwise do not store the value.
    sp : dict of dicts, optional
        All pairs shortest path lengths as a dictionary of dictionaries

    Returns
    -------
    list
        Nodes of `G` that induce the barycenter of `G`.

    Raises
    ------
    NetworkXNoPath
        If `G` is disconnected. `G` may appear disconnected to
        :func:`barycenter` if `sp` is given but is missing shortest path
        lengths for any pairs.
    ValueError
        If `sp` and `weight` are both given.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> nx.barycenter(G)
    [1, 3, 4]

    See Also
    --------
    center
    periphery
    """
    if sp is None:
        sp = nx.shortest_path_length(G, weight=weight)
    else:
        sp = sp.items()
        # A caller-supplied `sp` already encodes the weighting; mixing the
        # two would be ambiguous.
        if weight is not None:
            raise ValueError("Cannot use both sp, weight arguments together")

    best = float("inf")
    best_nodes = []
    n = len(G)
    for node, dists in sp:
        # Fewer than n reachable nodes means some distance is infinite.
        if len(dists) < n:
            raise nx.NetworkXNoPath(
                f"Input graph {G} is disconnected, so every induced subgraph "
                "has infinite barycentricity."
            )
        barycentricity = sum(dists.values())
        if attr is not None:
            G.nodes[node][attr] = barycentricity
        if barycentricity < best:
            best = barycentricity
            best_nodes = [node]
        elif barycentricity == best:
            best_nodes.append(node)

    if attr is not None:
        # Node attributes changed; invalidate any cached graph properties.
        nx._clear_cache(G)
    return best_nodes
706
+
707
+
708
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def resistance_distance(G, nodeA=None, nodeB=None, weight=None, invert_weight=True):
    """Returns the resistance distance between pairs of nodes in graph G.

    The resistance distance between two nodes of a graph is akin to treating
    the graph as a grid of resistors with a resistance equal to the provided
    weight [1]_, [2]_.

    If weight is not provided, then a weight of 1 is used for all edges.

    If two nodes are the same, the resistance distance is zero.

    Parameters
    ----------
    G : NetworkX graph
        A graph

    nodeA : node or None, optional (default=None)
        A node within graph G.
        If None, compute resistance distance using all nodes as source nodes.

    nodeB : node or None, optional (default=None)
        A node within graph G.
        If None, compute resistance distance using all nodes as target nodes.

    weight : string or None, optional (default=None)
        The edge data key used to compute the resistance distance.
        If None, then each edge has weight 1.

    invert_weight : boolean (default=True)
        Proper calculation of resistance distance requires building the
        Laplacian matrix with the reciprocal of the weight. Not required
        if the weight is already inverted. Weight cannot be zero.

    Returns
    -------
    rd : dict or float
        If `nodeA` and `nodeB` are given, resistance distance between `nodeA`
        and `nodeB`. If `nodeA` or `nodeB` is unspecified (the default), a
        dictionary of nodes with resistance distances as the value.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a directed graph.

    NetworkXError
        If `G` is not connected, or contains no nodes,
        or `nodeA` is not in `G` or `nodeB` is not in `G`.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> round(nx.resistance_distance(G, 1, 3), 10)
    0.625

    Notes
    -----
    The implementation is based on Theorem A in [2]_. Self-loops are ignored.
    Multi-edges are contracted in one edge with weight equal to the harmonic sum of the weights.

    References
    ----------
    .. [1] Wikipedia
       "Resistance distance."
       https://en.wikipedia.org/wiki/Resistance_distance
    .. [2] D. J. Klein and M. Randic.
       Resistance distance.
       J. of Math. Chem. 12:81-95, 1993.
    """
    import numpy as np

    if len(G) == 0:
        raise nx.NetworkXError("Graph G must contain at least one node.")
    if not nx.is_connected(G):
        # G is undirected here (decorator rejects directed graphs), so plain
        # connectivity is the right notion; "strongly connected" would only
        # make sense for a directed graph.
        raise nx.NetworkXError("Graph G must be connected.")
    if nodeA is not None and nodeA not in G:
        raise nx.NetworkXError("Node A is not in graph G.")
    if nodeB is not None and nodeB not in G:
        raise nx.NetworkXError("Node B is not in graph G.")

    # Invert weights. Only copy G when we actually mutate edge data so the
    # caller's graph is never modified.
    if invert_weight and weight is not None:
        G = G.copy()
        if G.is_multigraph():
            for _u, _v, _k, d in G.edges(keys=True, data=True):
                d[weight] = 1 / d[weight]
        else:
            for _u, _v, d in G.edges(data=True):
                d[weight] = 1 / d[weight]

    # Map each node to its row/column index once; list.index() inside the
    # loops below would cost O(n) per lookup (O(n^3) for the all-pairs case).
    node_index = {node: i for i, node in enumerate(G)}

    # Compute resistance distance using the Pseudo-inverse of the Laplacian
    # Self-loops are ignored
    L = nx.laplacian_matrix(G, weight=weight).todense()
    Linv = np.linalg.pinv(L, hermitian=True)

    def _rd(i, j):
        # Theorem A in [2]_: R(i, j) from entries of the pseudo-inverse.
        return Linv.item(i, i) + Linv.item(j, j) - Linv.item(i, j) - Linv.item(j, i)

    # Return relevant distances
    if nodeA is not None and nodeB is not None:
        return _rd(node_index[nodeA], node_index[nodeB])

    if nodeA is not None:
        i = node_index[nodeA]
        return {n: _rd(i, node_index[n]) for n in G}

    if nodeB is not None:
        j = node_index[nodeB]
        return {n: _rd(node_index[n], j) for n in G}

    return {n: {n2: _rd(node_index[n], node_index[n2]) for n2 in G} for n in G}
843
+
844
+
845
@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def effective_graph_resistance(G, weight=None, invert_weight=True):
    """Returns the Effective graph resistance of G.

    Also known as the Kirchhoff index.

    The effective graph resistance is defined as the sum
    of the resistance distance of every node pair in G [1]_.

    If weight is not provided, then a weight of 1 is used for all edges.

    The effective graph resistance of a disconnected graph is infinite.

    Parameters
    ----------
    G : NetworkX graph
        A graph

    weight : string or None, optional (default=None)
        The edge data key used to compute the effective graph resistance.
        If None, then each edge has weight 1.

    invert_weight : boolean (default=True)
        Proper calculation of resistance distance requires building the
        Laplacian matrix with the reciprocal of the weight. Not required
        if the weight is already inverted. Weight cannot be zero.

    Returns
    -------
    RG : float
        The effective graph resistance of `G`.

    Raises
    ------
    NetworkXNotImplemented
        If `G` is a directed graph.

    NetworkXError
        If `G` does not contain any nodes.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> round(nx.effective_graph_resistance(G), 10)
    10.25

    Notes
    -----
    The implementation is based on Theorem 2.2 in [2]_. Self-loops are ignored.
    Multi-edges are contracted in one edge with weight equal to the harmonic sum of the weights.

    References
    ----------
    .. [1] Wolfram
       "Kirchhoff Index."
       https://mathworld.wolfram.com/KirchhoffIndex.html
    .. [2] W. Ellens, F. M. Spieksma, P. Van Mieghem, A. Jamakovic, R. E. Kooij.
       Effective graph resistance.
       Lin. Alg. Appl. 435:2491-2506, 2011.
    """
    import numpy as np

    if len(G) == 0:
        raise nx.NetworkXError("Graph G must contain at least one node.")

    # A disconnected graph has infinite effective graph resistance.
    if not nx.is_connected(G):
        return float("inf")

    # Work on a copy so the reciprocal weights never touch the caller's graph.
    G = G.copy()
    if invert_weight and weight is not None:
        if G.is_multigraph():
            edge_data = (d for *_, d in G.edges(keys=True, data=True))
        else:
            edge_data = (d for *_, d in G.edges(data=True))
        for data in edge_data:
            data[weight] = 1 / data[weight]

    # Sorted Laplacian eigenvalues; the smallest one is 0 for a connected
    # graph and is excluded from the sum below.
    mu = np.sort(nx.laplacian_spectrum(G, weight=weight))

    # Theorem 2.2 in [2]_: R(G) = n * sum over non-zero eigenvalues of 1/mu.
    # Self-loops are ignored by the Laplacian.
    return float(np.sum(1 / mu[1:]) * G.number_of_nodes())
931
+
932
+
933
@nx.utils.not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def kemeny_constant(G, *, weight=None):
    """Returns the Kemeny constant of the given graph.

    The *Kemeny constant* (or Kemeny's constant) of a graph `G`
    can be computed by regarding the graph as a Markov chain.
    The Kemeny constant is then the expected number of time steps
    to transition from a starting state i to a random destination state
    sampled from the Markov chain's stationary distribution.
    The Kemeny constant is independent of the chosen initial state [1]_.

    The Kemeny constant measures the time needed for spreading
    across a graph. Low values indicate a closely connected graph
    whereas high values indicate a spread-out graph.

    If weight is not provided, then a weight of 1 is used for all edges.

    Since `G` represents a Markov chain, the weights must be positive.

    Parameters
    ----------
    G : NetworkX graph

    weight : string or None, optional (default=None)
        The edge data key used to compute the Kemeny constant.
        If None, then each edge has weight 1.

    Returns
    -------
    float
        The Kemeny constant of the graph `G`.

    Raises
    ------
    NetworkXNotImplemented
        If the graph `G` is directed.

    NetworkXError
        If the graph `G` is not connected, or contains no nodes,
        or has edges with negative weights.

    Examples
    --------
    >>> G = nx.complete_graph(5)
    >>> round(nx.kemeny_constant(G), 10)
    3.2

    Notes
    -----
    The implementation is based on equation (3.3) in [2]_.
    Self-loops are allowed and indicate a Markov chain where
    the state can remain the same. Multi-edges are contracted
    in one edge with weight equal to the sum of the weights.

    References
    ----------
    .. [1] Wikipedia
       "Kemeny's constant."
       https://en.wikipedia.org/wiki/Kemeny%27s_constant
    .. [2] Lovász L.
       Random walks on graphs: A survey.
       Paul Erdös is Eighty, vol. 2, Bolyai Society,
       Mathematical Studies, Keszthely, Hungary (1993), pp. 1-46
    """
    import numpy as np
    import scipy as sp

    if len(G) == 0:
        raise nx.NetworkXError("Graph G must contain at least one node.")
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph G must be connected.")
    if nx.is_negatively_weighted(G, weight=weight):
        raise nx.NetworkXError("The weights of graph G must be nonnegative.")

    # Compute matrix H = D^-1/2 A D^-1/2.
    # H is symmetric and similar to the random-walk transition matrix
    # P = D^-1 A, so the two share eigenvalues; the symmetric form lets us
    # use the (faster, more stable) Hermitian eigensolver below.
    A = nx.adjacency_matrix(G, weight=weight)
    n, m = A.shape
    diags = A.sum(axis=1)
    with np.errstate(divide="ignore"):
        # 1/sqrt(0) -> inf for degree-0 nodes; silenced here and zeroed next.
        diags_sqrt = 1.0 / np.sqrt(diags)
    diags_sqrt[np.isinf(diags_sqrt)] = 0
    DH = sp.sparse.csr_array(sp.sparse.spdiags(diags_sqrt, 0, m, n, format="csr"))
    H = DH @ (A @ DH)

    # Compute eigenvalues of H (ascending); the largest eigenvalue is 1 for
    # a connected graph and is excluded from the sum below.
    eig = np.sort(sp.linalg.eigvalsh(H.todense()))

    # Compute the Kemeny constant: equation (3.3) in [2]_,
    # K = sum over non-unit eigenvalues of 1 / (1 - lambda_i).
    return float(np.sum(1 / (1 - eig[:-1])))
wemm/lib/python3.10/site-packages/networkx/algorithms/efficiency_measures.py ADDED
@@ -0,0 +1,167 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Provides functions for computing the efficiency of nodes and graphs."""
2
+
3
+ import networkx as nx
4
+ from networkx.exception import NetworkXNoPath
5
+
6
+ from ..utils import not_implemented_for
7
+
8
+ __all__ = ["efficiency", "local_efficiency", "global_efficiency"]
9
+
10
+
11
@not_implemented_for("directed")
@nx._dispatchable
def efficiency(G, u, v):
    """Returns the efficiency of a pair of nodes in a graph.

    The *efficiency* of a pair of nodes is the multiplicative inverse of the
    shortest path distance between the nodes [1]_. Returns 0 if no path
    between nodes.

    Parameters
    ----------
    G : :class:`networkx.Graph`
        An undirected graph for which to compute the average local efficiency.
    u, v : node
        Nodes in the graph ``G``.

    Returns
    -------
    float
        Multiplicative inverse of the shortest path distance between the nodes.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.efficiency(G, 2, 3)  # this gives efficiency for node 2 and 3
    0.5

    Notes
    -----
    Edge weights are ignored when computing the shortest path distances.

    See also
    --------
    local_efficiency
    global_efficiency

    References
    ----------
    .. [1] Latora, Vito, and Massimo Marchiori.
           "Efficient behavior of small-world networks."
           *Physical Review Letters* 87.19 (2001): 198701.
           <https://doi.org/10.1103/PhysRevLett.87.198701>

    """
    # EAFP: let the shortest-path computation signal unreachability rather
    # than testing connectivity up front.
    try:
        return 1 / nx.shortest_path_length(G, u, v)
    except NetworkXNoPath:
        # Disconnected pairs are defined to have zero efficiency.
        return 0
60
+
61
+
62
@not_implemented_for("directed")
@nx._dispatchable
def global_efficiency(G):
    """Returns the average global efficiency of the graph.

    The *efficiency* of a pair of nodes in a graph is the multiplicative
    inverse of the shortest path distance between the nodes. The *average
    global efficiency* of a graph is the average efficiency of all pairs of
    nodes [1]_.

    Parameters
    ----------
    G : :class:`networkx.Graph`
        An undirected graph for which to compute the average global efficiency.

    Returns
    -------
    float
        The average global efficiency of the graph.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> round(nx.global_efficiency(G), 12)
    0.916666666667

    Notes
    -----
    Edge weights are ignored when computing the shortest path distances.

    See also
    --------
    local_efficiency

    References
    ----------
    .. [1] Latora, Vito, and Massimo Marchiori.
           "Efficient behavior of small-world networks."
           *Physical Review Letters* 87.19 (2001): 198701.
           <https://doi.org/10.1103/PhysRevLett.87.198701>

    """
    n = len(G)
    # Number of ordered node pairs; zero for graphs with fewer than 2 nodes,
    # in which case the average is defined to be 0.
    denom = n * (n - 1)
    if denom == 0:
        return 0
    # TODO This can be made more efficient by computing all pairs shortest
    # path lengths in parallel.
    lengths = nx.all_pairs_shortest_path_length(G)
    # Sum 1/d over all reachable ordered pairs; d == 0 (a node to itself)
    # contributes nothing and unreachable pairs are simply absent from the
    # per-source distance dicts.
    return (
        sum(1 / d for _source, targets in lengths for d in targets.values() if d > 0)
        / denom
    )
121
+
122
+
123
@not_implemented_for("directed")
@nx._dispatchable
def local_efficiency(G):
    """Returns the average local efficiency of the graph.

    The *efficiency* of a pair of nodes in a graph is the multiplicative
    inverse of the shortest path distance between the nodes. The *local
    efficiency* of a node in the graph is the average global efficiency of the
    subgraph induced by the neighbors of the node. The *average local
    efficiency* is the average of the local efficiencies of each node [1]_.

    Parameters
    ----------
    G : :class:`networkx.Graph`
        An undirected graph for which to compute the average local efficiency.

    Returns
    -------
    float
        The average local efficiency of the graph.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
    >>> nx.local_efficiency(G)
    0.9166666666666667

    Notes
    -----
    Edge weights are ignored when computing the shortest path distances.

    See also
    --------
    global_efficiency

    References
    ----------
    .. [1] Latora, Vito, and Massimo Marchiori.
           "Efficient behavior of small-world networks."
           *Physical Review Letters* 87.19 (2001): 198701.
           <https://doi.org/10.1103/PhysRevLett.87.198701>

    """
    # For each node, the local efficiency is the global efficiency of the
    # subgraph induced by its neighborhood (G[node] is the neighbor view).
    total = 0
    for node in G:
        total += global_efficiency(G.subgraph(G[node]))
    return total / len(G)
wemm/lib/python3.10/site-packages/networkx/algorithms/graph_hashing.py ADDED
@@ -0,0 +1,328 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Functions for hashing graphs to strings.
3
+ Isomorphic graphs should be assigned identical hashes.
4
+ For now, only Weisfeiler-Lehman hashing is implemented.
5
+ """
6
+
7
+ from collections import Counter, defaultdict
8
+ from hashlib import blake2b
9
+
10
+ import networkx as nx
11
+
12
+ __all__ = ["weisfeiler_lehman_graph_hash", "weisfeiler_lehman_subgraph_hashes"]
13
+
14
+
15
+ def _hash_label(label, digest_size):
16
+ return blake2b(label.encode("ascii"), digest_size=digest_size).hexdigest()
17
+
18
+
19
def _init_node_labels(G, edge_attr, node_attr):
    """Return the initial WL label for every node in ``G``.

    A node attribute takes priority; with edge-only hashing every node
    starts from an empty label; otherwise node degrees seed the labels.
    """
    if node_attr:
        return {node: str(data[node_attr]) for node, data in G.nodes(data=True)}
    if edge_attr:
        return dict.fromkeys(G, "")
    return {node: str(degree) for node, degree in G.degree()}
26
+
27
+
28
def _neighborhood_aggregate(G, node, node_labels, edge_attr=None):
    """Return the node's label concatenated with its neighbors' sorted labels.

    When ``edge_attr`` is given, each neighbor's label is prefixed with the
    value of that attribute on the connecting edge.
    """
    if edge_attr is None:
        neighbor_labels = (node_labels[nbr] for nbr in G.neighbors(node))
    else:
        neighbor_labels = (
            str(G[node][nbr][edge_attr]) + node_labels[nbr]
            for nbr in G.neighbors(node)
        )
    # Sorting makes the aggregate independent of neighbor iteration order.
    return node_labels[node] + "".join(sorted(neighbor_labels))
38
+
39
+
40
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr")
def weisfeiler_lehman_graph_hash(
    G, edge_attr=None, node_attr=None, iterations=3, digest_size=16
):
    """Return Weisfeiler Lehman (WL) graph hash.

    The function iteratively aggregates and hashes neighborhoods of each node.
    After each node's neighbors are hashed to obtain updated node labels,
    a hashed histogram of resulting labels is returned as the final hash.

    Hashes are identical for isomorphic graphs and strong guarantees that
    non-isomorphic graphs will get different hashes. See [1]_ for details.

    If no node or edge attributes are provided, the degree of each node
    is used as its initial label.
    Otherwise, node and/or edge labels are used to compute the hash.

    Parameters
    ----------
    G : graph
        The graph to be hashed.
        Can have node and/or edge attributes. Can also have no attributes.
    edge_attr : string, optional (default=None)
        The key in edge attribute dictionary to be used for hashing.
        If None, edge labels are ignored.
    node_attr: string, optional (default=None)
        The key in node attribute dictionary to be used for hashing.
        If None, and no edge_attr given, use the degrees of the nodes as labels.
    iterations: int, optional (default=3)
        Number of neighbor aggregations to perform.
        Should be larger for larger graphs.
    digest_size: int, optional (default=16)
        Size (in bytes) of the blake2b hash digest to use for hashing
        node labels.

    Returns
    -------
    h : string
        Hexadecimal string corresponding to hash of the input graph.

    Examples
    --------
    Two graphs with edge attributes that are isomorphic, except for
    differences in the edge labels.

    >>> G1 = nx.Graph()
    >>> G1.add_edges_from(
    ...     [
    ...         (1, 2, {"label": "A"}),
    ...         (2, 3, {"label": "A"}),
    ...         (3, 1, {"label": "A"}),
    ...         (1, 4, {"label": "B"}),
    ...     ]
    ... )
    >>> G2 = nx.Graph()
    >>> G2.add_edges_from(
    ...     [
    ...         (5, 6, {"label": "B"}),
    ...         (6, 7, {"label": "A"}),
    ...         (7, 5, {"label": "A"}),
    ...         (7, 8, {"label": "A"}),
    ...     ]
    ... )

    Omitting the `edge_attr` option, results in identical hashes.

    >>> nx.weisfeiler_lehman_graph_hash(G1)
    '7bc4dde9a09d0b94c5097b219891d81a'
    >>> nx.weisfeiler_lehman_graph_hash(G2)
    '7bc4dde9a09d0b94c5097b219891d81a'

    With edge labels, the graphs are no longer assigned
    the same hash digest.

    >>> nx.weisfeiler_lehman_graph_hash(G1, edge_attr="label")
    'c653d85538bcf041d88c011f4f905f10'
    >>> nx.weisfeiler_lehman_graph_hash(G2, edge_attr="label")
    '3dcd84af1ca855d0eff3c978d88e7ec7'

    Notes
    -----
    To return the WL hashes of each subgraph of a graph, use
    `weisfeiler_lehman_subgraph_hashes`

    Similarity between hashes does not imply similarity between graphs.

    References
    ----------
    .. [1] Shervashidze, Nino, Pascal Schweitzer, Erik Jan Van Leeuwen,
       Kurt Mehlhorn, and Karsten M. Borgwardt. Weisfeiler Lehman
       Graph Kernels. Journal of Machine Learning Research. 2011.
       http://www.jmlr.org/papers/volume12/shervashidze11a/shervashidze11a.pdf

    See also
    --------
    weisfeiler_lehman_subgraph_hashes
    """

    def weisfeiler_lehman_step(G, labels, edge_attr=None):
        """
        Apply neighborhood aggregation to each node
        in the graph.
        Computes a dictionary with labels for each node.
        """
        new_labels = {}
        for node in G.nodes():
            label = _neighborhood_aggregate(G, node, labels, edge_attr=edge_attr)
            new_labels[node] = _hash_label(label, digest_size)
        return new_labels

    # set initial node labels
    node_labels = _init_node_labels(G, edge_attr, node_attr)

    subgraph_hash_counts = []
    for _ in range(iterations):
        node_labels = weisfeiler_lehman_step(G, node_labels, edge_attr=edge_attr)
        counter = Counter(node_labels.values())
        # sort the counter, extend total counts
        # (sorting makes the histogram canonical, i.e. independent of the
        # node iteration order, so isomorphic graphs hash identically)
        subgraph_hash_counts.extend(sorted(counter.items(), key=lambda x: x[0]))

    # hash the final counter
    return _hash_label(str(tuple(subgraph_hash_counts)), digest_size)
162
+
163
+
164
@nx.utils.not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr")
def weisfeiler_lehman_subgraph_hashes(
    G,
    edge_attr=None,
    node_attr=None,
    iterations=3,
    digest_size=16,
    include_initial_labels=False,
):
    """
    Return a dictionary of subgraph hashes by node.

    Dictionary keys are nodes in `G`, and values are a list of hashes.
    Each hash corresponds to a subgraph rooted at a given node u in `G`.
    Lists of subgraph hashes are sorted in increasing order of depth from
    their root node, with the hash at index i corresponding to a subgraph
    of nodes at most i edges distance from u. Thus, each list will contain
    `iterations` elements - a hash for a subgraph at each depth. If
    `include_initial_labels` is set to `True`, each list will additionally
    contain a hash of the initial node label (or equivalently a
    subgraph of depth 0) prepended, totalling ``iterations + 1`` elements.

    The function iteratively aggregates and hashes neighborhoods of each node.
    This is achieved for each step by replacing for each node its label from
    the previous iteration with its hashed 1-hop neighborhood aggregate.
    The new node label is then appended to a list of node labels for each
    node.

    To aggregate neighborhoods for a node $u$ at each step, all labels of
    nodes adjacent to $u$ are concatenated. If the `edge_attr` parameter is set,
    labels for each neighboring node are prefixed with the value of this attribute
    along the connecting edge from this neighbor to node $u$. The resulting string
    is then hashed to compress this information into a fixed digest size.

    Thus, at the $i$-th iteration, nodes within $i$ hops influence any given
    hashed node label. We can therefore say that at depth $i$ for node $u$
    we have a hash for a subgraph induced by the $i$-hop neighborhood of $u$.

    The output can be used to create general Weisfeiler-Lehman graph kernels,
    or generate features for graphs or nodes - for example to generate 'words' in
    a graph as seen in the 'graph2vec' algorithm.
    See [1]_ & [2]_ respectively for details.

    Hashes are identical for isomorphic subgraphs and there exist strong
    guarantees that non-isomorphic graphs will get different hashes.
    See [1]_ for details.

    If no node or edge attributes are provided, the degree of each node
    is used as its initial label.
    Otherwise, node and/or edge labels are used to compute the hash.

    Parameters
    ----------
    G : graph
        The graph to be hashed.
        Can have node and/or edge attributes. Can also have no attributes.
    edge_attr : string, optional (default=None)
        The key in edge attribute dictionary to be used for hashing.
        If None, edge labels are ignored.
    node_attr : string, optional (default=None)
        The key in node attribute dictionary to be used for hashing.
        If None, and no edge_attr given, use the degrees of the nodes as labels.
        If None, and edge_attr is given, each node starts with an identical label.
    iterations : int, optional (default=3)
        Number of neighbor aggregations to perform.
        Should be larger for larger graphs.
    digest_size : int, optional (default=16)
        Size (in bytes) of the blake2b hash digest to use for hashing
        node labels. The default size is 16 bytes.
    include_initial_labels : bool, optional (default=False)
        If True, include the hashed initial node label as the first subgraph
        hash for each node.

    Returns
    -------
    node_subgraph_hashes : dict
        A dictionary with each key given by a node in G, and each value given
        by the subgraph hashes in order of depth from the key node.

    Examples
    --------
    Finding similar nodes in different graphs:

    >>> G1 = nx.Graph()
    >>> G1.add_edges_from([(1, 2), (2, 3), (2, 4), (3, 5), (4, 6), (5, 7), (6, 7)])
    >>> G2 = nx.Graph()
    >>> G2.add_edges_from([(1, 3), (2, 3), (1, 6), (1, 5), (4, 6)])
    >>> g1_hashes = nx.weisfeiler_lehman_subgraph_hashes(
    ...     G1, iterations=3, digest_size=8
    ... )
    >>> g2_hashes = nx.weisfeiler_lehman_subgraph_hashes(
    ...     G2, iterations=3, digest_size=8
    ... )

    Even though G1 and G2 are not isomorphic (they have different numbers of edges),
    the hash sequence of depth 3 for node 1 in G1 and node 5 in G2 are similar:

    >>> g1_hashes[1]
    ['a93b64973cfc8897', 'db1b43ae35a1878f', '57872a7d2059c1c0']
    >>> g2_hashes[5]
    ['a93b64973cfc8897', 'db1b43ae35a1878f', '1716d2a4012fa4bc']

    The first 2 WL subgraph hashes match. From this we can conclude that it's very
    likely the neighborhood of 2 hops around these nodes are isomorphic.

    However the 3-hop neighborhoods of ``G1`` and ``G2`` are not isomorphic since the
    3rd hashes in the lists above are not equal.

    These nodes may be candidates to be classified together since their local topology
    is similar.

    Notes
    -----
    To hash the full graph when subgraph hashes are not needed, use
    `weisfeiler_lehman_graph_hash` for efficiency.

    Similarity between hashes does not imply similarity between graphs.

    References
    ----------
    .. [1] Shervashidze, Nino, Pascal Schweitzer, Erik Jan Van Leeuwen,
       Kurt Mehlhorn, and Karsten M. Borgwardt. Weisfeiler Lehman
       Graph Kernels. Journal of Machine Learning Research. 2011.
       http://www.jmlr.org/papers/volume12/shervashidze11a/shervashidze11a.pdf
    .. [2] Annamalai Narayanan, Mahinthan Chandramohan, Rajasekar Venkatesan,
       Lihui Chen, Yang Liu and Shantanu Jaiswa. graph2vec: Learning
       Distributed Representations of Graphs. arXiv. 2017
       https://arxiv.org/pdf/1707.05005.pdf

    See also
    --------
    weisfeiler_lehman_graph_hash
    """

    def weisfeiler_lehman_step(G, labels, node_subgraph_hashes, edge_attr=None):
        """
        Apply neighborhood aggregation to each node
        in the graph.
        Computes a dictionary with labels for each node.
        Appends the new hashed label to the dictionary of subgraph hashes
        originating from and indexed by each node in G
        """
        new_labels = {}
        for node in G.nodes():
            label = _neighborhood_aggregate(G, node, labels, edge_attr=edge_attr)
            hashed_label = _hash_label(label, digest_size)
            new_labels[node] = hashed_label
            node_subgraph_hashes[node].append(hashed_label)
        return new_labels

    node_labels = _init_node_labels(G, edge_attr, node_attr)
    if include_initial_labels:
        # Depth-0 "subgraph" hash: just the node's own initial label.
        node_subgraph_hashes = {
            k: [_hash_label(v, digest_size)] for k, v in node_labels.items()
        }
    else:
        node_subgraph_hashes = defaultdict(list)

    for _ in range(iterations):
        node_labels = weisfeiler_lehman_step(
            G, node_labels, node_subgraph_hashes, edge_attr
        )

    # Convert to a plain dict so missing-key access fails loudly for callers.
    return dict(node_subgraph_hashes)
wemm/lib/python3.10/site-packages/networkx/algorithms/graphical.py ADDED
@@ -0,0 +1,483 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Test sequences for graphiness."""
2
+
3
+ import heapq
4
+
5
+ import networkx as nx
6
+
7
+ __all__ = [
8
+ "is_graphical",
9
+ "is_multigraphical",
10
+ "is_pseudographical",
11
+ "is_digraphical",
12
+ "is_valid_degree_sequence_erdos_gallai",
13
+ "is_valid_degree_sequence_havel_hakimi",
14
+ ]
15
+
16
+
17
@nx._dispatchable(graphs=None)
def is_graphical(sequence, method="eg"):
    """Returns True if sequence is a valid degree sequence.

    A degree sequence is valid if some graph can realize it.

    Parameters
    ----------
    sequence : list or iterable container
        A sequence of integer node degrees

    method : "eg" | "hh"  (default: 'eg')
        The method used to validate the degree sequence.
        "eg" corresponds to the Erdős-Gallai algorithm
        [EG1960]_, [choudum1986]_, and
        "hh" to the Havel-Hakimi algorithm
        [havel1955]_, [hakimi1962]_, [CL1996]_.

    Returns
    -------
    valid : bool
        True if the sequence is a valid degree sequence and False if not.

    Examples
    --------
    >>> G = nx.path_graph(4)
    >>> sequence = (d for n, d in G.degree())
    >>> nx.is_graphical(sequence)
    True

    To test a non-graphical sequence:
    >>> sequence_list = [d for n, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_graphical(sequence_list)
    False

    References
    ----------
    .. [EG1960] Erdős and Gallai, Mat. Lapok 11 264, 1960.
    .. [choudum1986] S.A. Choudum. "A simple proof of the Erdős-Gallai theorem on
       graph sequences." Bulletin of the Australian Mathematical Society, 33,
       pp 67-70, 1986. https://doi.org/10.1017/S0004972700002872
    .. [havel1955] Havel, V. "A Remark on the Existence of Finite Graphs"
       Casopis Pest. Mat. 80, 477-480, 1955.
    .. [hakimi1962] Hakimi, S. "On the Realizability of a Set of Integers as
       Degrees of the Vertices of a Graph." SIAM J. Appl. Math. 10, 496-506, 1962.
    .. [CL1996] G. Chartrand and L. Lesniak, "Graphs and Digraphs",
       Chapman and Hall/CRC, 1996.
    """
    # Dispatch on the requested validation algorithm; the sequence is only
    # materialized once a valid method has been selected.
    if method == "eg":
        return is_valid_degree_sequence_erdos_gallai(list(sequence))
    if method == "hh":
        return is_valid_degree_sequence_havel_hakimi(list(sequence))
    raise nx.NetworkXException("`method` must be 'eg' or 'hh'")
74
+
75
+
76
def _basic_graphical_tests(deg_sequence):
    """Run cheap sanity checks shared by the graphicality tests.

    Returns ``(dmax, dmin, dsum, n, num_degs)`` computed over the non-zero
    degrees only, raising ``nx.NetworkXUnfeasible`` for any sequence that is
    obviously not realizable by a simple graph.
    """
    degrees = nx.utils.make_list_of_ints(deg_sequence)
    p = len(degrees)
    num_degs = [0] * p
    dmax = 0
    dmin = p
    dsum = 0
    n = 0
    for degree in degrees:
        # A simple graph on p nodes admits no degree outside [0, p - 1].
        if degree < 0 or degree >= p:
            raise nx.NetworkXUnfeasible
        if degree > 0:
            # Only the non-zero degrees contribute to the statistics.
            if degree > dmax:
                dmax = degree
            if degree < dmin:
                dmin = degree
            dsum += degree
            n += 1
            num_degs[degree] += 1
    # Handshake lemma: the degree sum must be even, and it cannot exceed the
    # number of ordered pairs among the n non-isolated nodes.
    if dsum % 2 or dsum > n * (n - 1):
        raise nx.NetworkXUnfeasible
    return dmax, dmin, dsum, n, num_degs
94
+
95
+
96
@nx._dispatchable(graphs=None)
def is_valid_degree_sequence_havel_hakimi(deg_sequence):
    r"""Returns True if deg_sequence can be realized by a simple graph.

    The validation proceeds using the Havel-Hakimi theorem
    [havel1955]_, [hakimi1962]_, [CL1996]_.
    Worst-case run time is $O(s)$ where $s$ is the sum of the sequence.

    Parameters
    ----------
    deg_sequence : list
        A list of integers where each element specifies the degree of a node
        in a graph.

    Returns
    -------
    valid : bool
        True if deg_sequence is graphical and False if not.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_valid_degree_sequence_havel_hakimi(sequence)
    True

    To test a non-valid sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_valid_degree_sequence_havel_hakimi(sequence_list)
    False

    Notes
    -----
    The ZZ condition says that for the sequence d if

    .. math::
        |d| >= \frac{(\max(d) + \min(d) + 1)^2}{4*\min(d)}

    then d is graphical.  This was shown in Theorem 6 in [1]_.

    References
    ----------
    .. [1] I.E. Zverovich and V.E. Zverovich. "Contributions to the theory
       of graphic sequences", Discrete Mathematics, 105, pp. 292-303 (1992).
    .. [havel1955] Havel, V. "A Remark on the Existence of Finite Graphs"
       Casopis Pest. Mat. 80, 477-480, 1955.
    .. [hakimi1962] Hakimi, S. "On the Realizability of a Set of Integers as
       Degrees of the Vertices of a Graph." SIAM J. Appl. Math. 10, 496-506, 1962.
    .. [CL1996] G. Chartrand and L. Lesniak, "Graphs and Digraphs",
       Chapman and Hall/CRC, 1996.
    """
    try:
        dmax, dmin, dsum, n, num_degs = _basic_graphical_tests(deg_sequence)
    except nx.NetworkXUnfeasible:
        return False
    # Accept if sequence has no non-zero degrees or passes the ZZ condition,
    # which certifies graphicality without running the reduction below.
    if n == 0 or 4 * dmin * n >= (dmax + dmin + 1) * (dmax + dmin + 1):
        return True

    # Scratch buffer for the (decremented) degrees removed during one
    # reduction round, so they can be re-inserted afterwards.
    modstubs = [0] * (dmax + 1)
    # Successively reduce degree sequence by removing the maximum degree
    while n > 0:
        # Retrieve the maximum degree in the sequence
        while num_degs[dmax] == 0:
            dmax -= 1
        # If there are not enough stubs to connect to, then the sequence is
        # not graphical
        if dmax > n - 1:
            return False

        # Remove largest stub in list
        num_degs[dmax], n = num_degs[dmax] - 1, n - 1
        # Reduce the next dmax largest stubs
        mslen = 0
        k = dmax
        for i in range(dmax):
            # Walk down to the next largest remaining degree.
            while num_degs[k] == 0:
                k -= 1
            num_degs[k], n = num_degs[k] - 1, n - 1
            # Degrees that drop to zero leave the sequence entirely;
            # only still-positive reduced degrees are saved for re-insertion.
            if k > 1:
                modstubs[mslen] = k - 1
                mslen += 1
        # Add back to the list any non-zero stubs that were removed
        for i in range(mslen):
            stub = modstubs[i]
            num_degs[stub], n = num_degs[stub] + 1, n + 1
    return True
184
+
185
+
186
@nx._dispatchable(graphs=None)
def is_valid_degree_sequence_erdos_gallai(deg_sequence):
    r"""Returns True if deg_sequence can be realized by a simple graph.

    The validation is done using the Erdős-Gallai theorem [EG1960]_.

    Parameters
    ----------
    deg_sequence : list
        A list of integers

    Returns
    -------
    valid : bool
        True if deg_sequence is graphical and False if not.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_valid_degree_sequence_erdos_gallai(sequence)
    True

    To test a non-valid sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_valid_degree_sequence_erdos_gallai(sequence_list)
    False

    Notes
    -----

    This implementation uses an equivalent form of the Erdős-Gallai criterion.
    Worst-case run time is $O(n)$ where $n$ is the length of the sequence.

    Specifically, a sequence d is graphical if and only if the
    sum of the sequence is even and for all strong indices k in the sequence,

    .. math::

       \sum_{i=1}^{k} d_i \leq k(k-1) + \sum_{j=k+1}^{n} \min(d_i,k)
             = k(n-1) - ( k \sum_{j=0}^{k-1} n_j - \sum_{j=0}^{k-1} j n_j )

    A strong index k is any index where d_k >= k and the value n_j is the
    number of occurrences of j in d.  The maximal strong index is called the
    Durfee index.

    This particular rearrangement comes from the proof of Theorem 3 in [2]_.

    The ZZ condition says that for the sequence d if

    .. math::
        |d| >= \frac{(\max(d) + \min(d) + 1)^2}{4*\min(d)}

    then d is graphical.  This was shown in Theorem 6 in [2]_.

    References
    ----------
    .. [1] A. Tripathi and S. Vijay. "A note on a theorem of Erdős & Gallai",
       Discrete Mathematics, 265, pp. 417-420 (2003).
    .. [2] I.E. Zverovich and V.E. Zverovich. "Contributions to the theory
       of graphic sequences", Discrete Mathematics, 105, pp. 292-303 (1992).
    .. [EG1960] Erdős and Gallai, Mat. Lapok 11 264, 1960.
    """
    try:
        dmax, dmin, dsum, n, num_degs = _basic_graphical_tests(deg_sequence)
    except nx.NetworkXUnfeasible:
        return False
    # Accept if sequence has no non-zero degrees or passes the ZZ condition,
    # which certifies graphicality without checking every strong index.
    if n == 0 or 4 * dmin * n >= (dmax + dmin + 1) * (dmax + dmin + 1):
        return True

    # Perform the EG checks using the reformulation of Zverovich and Zverovich.
    # Degrees are scanned from largest to smallest while maintaining running
    # sums of the left-hand side (sum_deg) and right-hand side components.
    k, sum_deg, sum_nj, sum_jnj = 0, 0, 0, 0
    for dk in range(dmax, dmin - 1, -1):
        if dk < k + 1:  # Check if already past Durfee index
            return True
        if num_degs[dk] > 0:
            run_size = num_degs[dk]  # Process a run of identical-valued degrees
            if dk < k + run_size:  # Check if end of run is past Durfee index
                run_size = dk - k  # Adjust back to Durfee index
            sum_deg += run_size * dk
            for v in range(run_size):
                sum_nj += num_degs[k + v]
                sum_jnj += (k + v) * num_degs[k + v]
            k += run_size
            # A violated inequality at any strong index disproves graphicality.
            if sum_deg > k * (n - 1) - k * sum_nj + sum_jnj:
                return False
    return True
275
+
276
+
277
@nx._dispatchable(graphs=None)
def is_multigraphical(sequence):
    """Returns True if some multigraph can realize the sequence.

    By Hakimi's criterion [1]_, a sequence of nonnegative integers is
    multigraphical exactly when its sum is even and the largest entry does
    not exceed the sum of all the others.

    Parameters
    ----------
    sequence : list
        A list of integers

    Returns
    -------
    valid : bool
        True if deg_sequence is a multigraphic degree sequence and False if not.

    Examples
    --------
    >>> G = nx.MultiGraph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_multigraphical(sequence)
    True

    To test a non-multigraphical sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_multigraphical(sequence_list)
    False

    Notes
    -----
    The worst-case run time is $O(n)$ where $n$ is the length of the sequence.

    References
    ----------
    .. [1] S. L. Hakimi. "On the realizability of a set of integers as
       degrees of the vertices of a linear graph", J. SIAM, 10, pp. 496-506
       (1962).
    """
    try:
        degrees = nx.utils.make_list_of_ints(sequence)
    except nx.NetworkXError:
        # Non-integer entries cannot form a degree sequence.
        return False
    if any(d < 0 for d in degrees):
        return False
    total = sum(degrees)
    largest = max(degrees, default=0)
    # Even sum, and no single degree larger than the sum of the rest.
    return total % 2 == 0 and total >= 2 * largest
326
+
327
+
328
@nx._dispatchable(graphs=None)
def is_pseudographical(sequence):
    """Returns True if some pseudograph can realize the sequence.

    Every nonnegative integer sequence with an even sum is pseudographical
    (see [1]_).

    Parameters
    ----------
    sequence : list or iterable container
        A sequence of integer node degrees

    Returns
    -------
    valid : bool
        True if the sequence is a pseudographic degree sequence and False if not.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_pseudographical(sequence)
    True

    To test a non-pseudographical sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_pseudographical(sequence_list)
    False

    Notes
    -----
    The worst-case run time is $O(n)$ where n is the length of the sequence.

    References
    ----------
    .. [1] F. Boesch and F. Harary. "Line removal algorithms for graphs
       and their degree lists", IEEE Trans. Circuits and Systems, CAS-23(12),
       pp. 778-782 (1976).
    """
    try:
        deg_sequence = nx.utils.make_list_of_ints(sequence)
    except nx.NetworkXError:
        # Non-integer entries cannot form a degree sequence.
        return False
    # The empty sequence is realized by the empty pseudograph (consistent
    # with is_graphical and is_multigraphical).  This guard also prevents
    # min() from raising ValueError on an empty list below.
    if not deg_sequence:
        return True
    # Pseudographs allow loops and multiedges, so the only requirements are
    # nonnegative degrees and an even degree sum.
    return sum(deg_sequence) % 2 == 0 and min(deg_sequence) >= 0
373
+
374
+
375
@nx._dispatchable(graphs=None)
def is_digraphical(in_sequence, out_sequence):
    r"""Returns True if some directed graph can realize the in- and out-degree
    sequences.

    Parameters
    ----------
    in_sequence : list or iterable container
        A sequence of integer node in-degrees

    out_sequence : list or iterable container
        A sequence of integer node out-degrees

    Returns
    -------
    valid : bool
        True if in and out-sequences are digraphic False if not.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> in_seq = (d for n, d in G.in_degree())
    >>> out_seq = (d for n, d in G.out_degree())
    >>> nx.is_digraphical(in_seq, out_seq)
    True

    To test a non-digraphical scenario:
    >>> in_seq_list = [d for n, d in G.in_degree()]
    >>> in_seq_list[-1] += 1
    >>> nx.is_digraphical(in_seq_list, out_seq)
    False

    Notes
    -----
    This algorithm is from Kleitman and Wang [1]_.
    The worst case runtime is $O(s \times \log n)$ where $s$ and $n$ are the
    sum and length of the sequences respectively.

    References
    ----------
    .. [1] D.J. Kleitman and D.L. Wang
       Algorithms for Constructing Graphs and Digraphs with Given Valences
       and Factors, Discrete Mathematics, 6(1), pp. 79-88 (1973)
    """
    try:
        in_deg_sequence = nx.utils.make_list_of_ints(in_sequence)
        out_deg_sequence = nx.utils.make_list_of_ints(out_sequence)
    except nx.NetworkXError:
        return False
    # Process the sequences and form two heaps to store degree pairs with
    # either zero or non-zero out degrees.
    # NOTE: heapq provides a min-heap, so all degrees are pushed negated to
    # simulate max-heap behavior (largest degree popped first).
    sumin, sumout, nin, nout = 0, 0, len(in_deg_sequence), len(out_deg_sequence)
    maxn = max(nin, nout)
    maxin = 0
    # Two empty sequences are realized by the empty digraph.
    if maxn == 0:
        return True
    stubheap, zeroheap = [], []
    for n in range(maxn):
        # Missing entries in the shorter sequence are treated as degree 0.
        in_deg, out_deg = 0, 0
        if n < nout:
            out_deg = out_deg_sequence[n]
        if n < nin:
            in_deg = in_deg_sequence[n]
        if in_deg < 0 or out_deg < 0:
            return False
        sumin, sumout, maxin = sumin + in_deg, sumout + out_deg, max(maxin, in_deg)
        if in_deg > 0:
            stubheap.append((-1 * out_deg, -1 * in_deg))
        elif out_deg > 0:
            zeroheap.append(-1 * out_deg)
    # The total number of arc heads must equal the total number of tails.
    if sumin != sumout:
        return False
    heapq.heapify(stubheap)
    heapq.heapify(zeroheap)

    # Scratch buffer for nodes whose degrees were reduced in one round.
    modstubs = [(0, 0)] * (maxin + 1)
    # Successively reduce degree sequence by removing the maximum
    while stubheap:
        # Take the first value in the sequence with non-zero in degree
        (freeout, freein) = heapq.heappop(stubheap)
        freein *= -1
        # Not enough other nodes to absorb all the in-stubs.
        if freein > len(stubheap) + len(zeroheap):
            return False

        # Attach out stubs to the nodes with the most in stubs
        mslen = 0
        for i in range(freein):
            # Prefer whichever heap currently exposes the larger out-degree
            # (values are negated, so the smaller stored value wins).
            if zeroheap and (not stubheap or stubheap[0][0] > zeroheap[0]):
                stubout = heapq.heappop(zeroheap)
                stubin = 0
            else:
                (stubout, stubin) = heapq.heappop(stubheap)
            if stubout == 0:
                return False
            # Check if target is now totally connected
            if stubout + 1 < 0 or stubin < 0:
                modstubs[mslen] = (stubout + 1, stubin)
                mslen += 1

        # Add back the nodes to the heap that still have available stubs
        for i in range(mslen):
            stub = modstubs[i]
            if stub[1] < 0:
                heapq.heappush(stubheap, stub)
            else:
                heapq.heappush(zeroheap, stub[0])
        if freeout < 0:
            heapq.heappush(zeroheap, freeout)
    return True
wemm/lib/python3.10/site-packages/networkx/algorithms/hierarchy.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Flow Hierarchy.
3
+ """
4
+
5
+ import networkx as nx
6
+
7
+ __all__ = ["flow_hierarchy"]
8
+
9
+
10
@nx._dispatchable(edge_attrs="weight")
def flow_hierarchy(G, weight=None):
    """Returns the flow hierarchy of a directed network.

    Flow hierarchy is defined as the fraction of edges not participating
    in cycles in a directed graph [1]_.

    Parameters
    ----------
    G : DiGraph or MultiDiGraph
        A directed graph

    weight : string, optional (default=None)
        Attribute to use for edge weights. If None the weight defaults to 1.

    Returns
    -------
    h : float
        Flow hierarchy value

    Raises
    ------
    NetworkXError
        If `G` is not a directed graph or if `G` has no edges.

    Notes
    -----
    The algorithm described in [1]_ computes the flow hierarchy through
    exponentiation of the adjacency matrix.  This function implements an
    alternative approach that finds strongly connected components.
    An edge is in a cycle if and only if it is in a strongly connected
    component, which can be found in $O(m)$ time using Tarjan's algorithm.

    References
    ----------
    .. [1] Luo, J.; Magee, C.L. (2011),
       Detecting evolving patterns of self-organizing networks by flow
       hierarchy measurement, Complexity, Volume 16 Issue 6 53-61.
       DOI: 10.1002/cplx.20368
       http://web.mit.edu/~cmagee/www/documents/28-DetectingEvolvingPatterns_FlowHierarchy.pdf
    """
    # corner case: G has no edges
    if nx.is_empty(G):
        raise nx.NetworkXError("flow_hierarchy not applicable to empty graphs")
    if not G.is_directed():
        raise nx.NetworkXError("G must be a digraph in flow_hierarchy")
    # An edge lies on a cycle iff both endpoints are in the same strongly
    # connected component, so summing component sizes counts cyclic edges.
    cyclic_weight = sum(
        G.subgraph(component).size(weight)
        for component in nx.strongly_connected_components(G)
    )
    return 1 - cyclic_weight / G.size(weight)
wemm/lib/python3.10/site-packages/networkx/algorithms/hybrid.py ADDED
@@ -0,0 +1,196 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Provides functions for finding and testing for locally `(k, l)`-connected
3
+ graphs.
4
+
5
+ """
6
+
7
+ import copy
8
+
9
+ import networkx as nx
10
+
11
+ __all__ = ["kl_connected_subgraph", "is_kl_connected"]
12
+
13
+
14
@nx._dispatchable(returns_graph=True)
def kl_connected_subgraph(G, k, l, low_memory=False, same_as_graph=False):
    """Returns the maximum locally `(k, l)`-connected subgraph of `G`.

    A graph is locally `(k, l)`-connected if for each edge `(u, v)` in the
    graph there are at least `l` edge-disjoint paths of length at most `k`
    joining `u` to `v`.

    Parameters
    ----------
    G : NetworkX graph
        The graph in which to find a maximum locally `(k, l)`-connected
        subgraph.

    k : integer
        The maximum length of paths to consider. A higher number means a looser
        connectivity requirement.

    l : integer
        The number of edge-disjoint paths. A higher number means a stricter
        connectivity requirement.

    low_memory : bool
        If this is True, this function uses an algorithm that uses slightly
        more time but less memory.

    same_as_graph : bool
        If True then return a tuple of the form `(H, is_same)`,
        where `H` is the maximum locally `(k, l)`-connected subgraph and
        `is_same` is a Boolean representing whether `G` is locally `(k,
        l)`-connected (and hence, whether `H` is simply a copy of the input
        graph `G`).

    Returns
    -------
    NetworkX graph or two-tuple
        If `same_as_graph` is True, then this function returns a
        two-tuple as described above. Otherwise, it returns only the maximum
        locally `(k, l)`-connected subgraph.

    See also
    --------
    is_kl_connected

    References
    ----------
    .. [1] Chung, Fan and Linyuan Lu. "The Small World Phenomenon in Hybrid
       Power Law Graphs." *Complex Networks*. Springer Berlin Heidelberg,
       2004. 89--104.

    """
    H = copy.deepcopy(G)  # subgraph we construct by removing from G

    graphOK = True
    deleted_some = True  # hack to start off the while loop
    # Removing one edge can invalidate another, so keep sweeping the edge
    # set until a full pass removes nothing.
    while deleted_some:
        deleted_some = False
        # We use `for edge in list(H.edges()):` instead of
        # `for edge in H.edges():` because we edit the graph `H` in
        # the loop. Hence using an iterator will result in
        # `RuntimeError: dictionary changed size during iteration`
        for edge in list(H.edges()):
            (u, v) = edge
            # Get copy of graph needed for this search
            if low_memory:
                # Restrict the search to the k-hop neighborhood of the edge;
                # no path of length <= k can leave it.
                verts = {u, v}
                for i in range(k):
                    for w in verts.copy():
                        verts.update(G[w])
                G2 = G.subgraph(verts).copy()
            else:
                G2 = copy.deepcopy(G)
            ###
            path = [u, v]
            cnt = 0
            accept = 0
            # Repeatedly find a shortest u-v path and delete its edges;
            # each success is one edge-disjoint path.
            while path:
                cnt += 1  # Found a path
                if cnt >= l:
                    accept = 1
                    break
                # record edges along this graph
                prev = u
                for w in path:
                    if prev != w:
                        G2.remove_edge(prev, w)
                    prev = w
                # path = shortest_path(G2, u, v, k) # ??? should "Cutoff" be k+1?
                try:
                    path = nx.shortest_path(G2, u, v)  # ??? should "Cutoff" be k+1?
                except nx.NetworkXNoPath:
                    path = False
            # No Other Paths
            if accept == 0:
                # Fewer than l disjoint paths exist: the edge cannot stay.
                H.remove_edge(u, v)
                deleted_some = True
                if graphOK:
                    graphOK = False
    # We looked through all edges and removed none of them.
    # So, H is the maximal (k,l)-connected subgraph of G
    if same_as_graph:
        return (H, graphOK)
    return H
117
+
118
+
119
@nx._dispatchable
def is_kl_connected(G, k, l, low_memory=False):
    """Returns True if and only if `G` is locally `(k, l)`-connected.

    A graph is locally `(k, l)`-connected if for each edge `(u, v)` in the
    graph there are at least `l` edge-disjoint paths of length at most `k`
    joining `u` to `v`.

    Parameters
    ----------
    G : NetworkX graph
        The graph to test for local `(k, l)`-connectedness.

    k : integer
        The maximum length of paths to consider. A higher number means a looser
        connectivity requirement.

    l : integer
        The number of edge-disjoint paths. A higher number means a stricter
        connectivity requirement.

    low_memory : bool
        If this is True, this function uses an algorithm that uses slightly
        more time but less memory.

    Returns
    -------
    bool
        Whether the graph is locally `(k, l)`-connected subgraph.

    See also
    --------
    kl_connected_subgraph

    References
    ----------
    .. [1] Chung, Fan and Linyuan Lu. "The Small World Phenomenon in Hybrid
       Power Law Graphs." *Complex Networks*. Springer Berlin Heidelberg,
       2004. 89--104.

    """
    graphOK = True
    for edge in G.edges():
        (u, v) = edge
        # Get copy of graph needed for this search
        if low_memory:
            # Restrict the search to the k-hop neighborhood of the edge;
            # no path of length <= k can leave it.
            verts = {u, v}
            for i in range(k):
                for w in verts.copy():
                    verts.update(G.neighbors(w))
            # A plain subgraph() returns a frozen view which cannot be
            # modified; copy it so remove_edge below works (and does not
            # touch G), matching kl_connected_subgraph.
            G2 = G.subgraph(verts).copy()
        else:
            G2 = copy.deepcopy(G)
        ###
        path = [u, v]
        cnt = 0
        accept = 0
        # Repeatedly find a shortest u-v path and delete its edges;
        # each success is one edge-disjoint path.
        while path:
            cnt += 1  # Found a path
            if cnt >= l:
                accept = 1
                break
            # record edges along this graph
            prev = u
            for w in path:
                if w != prev:
                    G2.remove_edge(prev, w)
                prev = w
            # path = shortest_path(G2, u, v, k) # ??? should "Cutoff" be k+1?
            try:
                path = nx.shortest_path(G2, u, v)  # ??? should "Cutoff" be k+1?
            except nx.NetworkXNoPath:
                path = False
        # No Other Paths
        if accept == 0:
            # Fewer than l disjoint paths: G is not locally (k, l)-connected.
            graphOK = False
            break
    # return status
    return graphOK
wemm/lib/python3.10/site-packages/networkx/algorithms/isolate.py ADDED
@@ -0,0 +1,107 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Functions for identifying isolate (degree zero) nodes.
3
+ """
4
+
5
+ import networkx as nx
6
+
7
+ __all__ = ["is_isolate", "isolates", "number_of_isolates"]
8
+
9
+
10
+ @nx._dispatchable
11
+ def is_isolate(G, n):
12
+ """Determines whether a node is an isolate.
13
+
14
+ An *isolate* is a node with no neighbors (that is, with degree
15
+ zero). For directed graphs, this means no in-neighbors and no
16
+ out-neighbors.
17
+
18
+ Parameters
19
+ ----------
20
+ G : NetworkX graph
21
+
22
+ n : node
23
+ A node in `G`.
24
+
25
+ Returns
26
+ -------
27
+ is_isolate : bool
28
+ True if and only if `n` has no neighbors.
29
+
30
+ Examples
31
+ --------
32
+ >>> G = nx.Graph()
33
+ >>> G.add_edge(1, 2)
34
+ >>> G.add_node(3)
35
+ >>> nx.is_isolate(G, 2)
36
+ False
37
+ >>> nx.is_isolate(G, 3)
38
+ True
39
+ """
40
+ return G.degree(n) == 0
41
+
42
+
43
+ @nx._dispatchable
44
+ def isolates(G):
45
+ """Iterator over isolates in the graph.
46
+
47
+ An *isolate* is a node with no neighbors (that is, with degree
48
+ zero). For directed graphs, this means no in-neighbors and no
49
+ out-neighbors.
50
+
51
+ Parameters
52
+ ----------
53
+ G : NetworkX graph
54
+
55
+ Returns
56
+ -------
57
+ iterator
58
+ An iterator over the isolates of `G`.
59
+
60
+ Examples
61
+ --------
62
+ To get a list of all isolates of a graph, use the :class:`list`
63
+ constructor::
64
+
65
+ >>> G = nx.Graph()
66
+ >>> G.add_edge(1, 2)
67
+ >>> G.add_node(3)
68
+ >>> list(nx.isolates(G))
69
+ [3]
70
+
71
+ To remove all isolates in the graph, first create a list of the
72
+ isolates, then use :meth:`Graph.remove_nodes_from`::
73
+
74
+ >>> G.remove_nodes_from(list(nx.isolates(G)))
75
+ >>> list(G)
76
+ [1, 2]
77
+
78
+ For digraphs, isolates have zero in-degree and zero out_degre::
79
+
80
+ >>> G = nx.DiGraph([(0, 1), (1, 2)])
81
+ >>> G.add_node(3)
82
+ >>> list(nx.isolates(G))
83
+ [3]
84
+
85
+ """
86
+ return (n for n, d in G.degree() if d == 0)
87
+
88
+
89
+ @nx._dispatchable
90
+ def number_of_isolates(G):
91
+ """Returns the number of isolates in the graph.
92
+
93
+ An *isolate* is a node with no neighbors (that is, with degree
94
+ zero). For directed graphs, this means no in-neighbors and no
95
+ out-neighbors.
96
+
97
+ Parameters
98
+ ----------
99
+ G : NetworkX graph
100
+
101
+ Returns
102
+ -------
103
+ int
104
+ The number of degree zero nodes in the graph `G`.
105
+
106
+ """
107
+ return sum(1 for v in isolates(G))
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-310.pyc ADDED
Binary file (10.9 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2pp.cpython-310.pyc ADDED
Binary file (28.5 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2userfunc.cpython-310.pyc ADDED
Binary file (6.47 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py ADDED
@@ -0,0 +1,1238 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ *************
3
+ VF2 Algorithm
4
+ *************
5
+
6
+ An implementation of VF2 algorithm for graph isomorphism testing.
7
+
8
+ The simplest interface to use this module is to call the
9
+ :func:`is_isomorphic <networkx.algorithms.isomorphism.is_isomorphic>`
10
+ function.
11
+
12
+ Introduction
13
+ ------------
14
+
15
+ The GraphMatcher and DiGraphMatcher are responsible for matching
16
+ graphs or directed graphs in a predetermined manner. This
17
+ usually means a check for an isomorphism, though other checks
18
+ are also possible. For example, a subgraph of one graph
19
+ can be checked for isomorphism to a second graph.
20
+
21
+ Matching is done via syntactic feasibility. It is also possible
22
+ to check for semantic feasibility. Feasibility, then, is defined
23
+ as the logical AND of the two functions.
24
+
25
+ To include a semantic check, the (Di)GraphMatcher class should be
26
+ subclassed, and the
27
+ :meth:`semantic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.semantic_feasibility>`
28
+ function should be redefined. By default, the semantic feasibility function always
29
+ returns ``True``. The effect of this is that semantics are not
30
+ considered in the matching of G1 and G2.
31
+
32
+ Examples
33
+ --------
34
+
35
+ Suppose G1 and G2 are isomorphic graphs. Verification is as follows:
36
+
37
+ >>> from networkx.algorithms import isomorphism
38
+ >>> G1 = nx.path_graph(4)
39
+ >>> G2 = nx.path_graph(4)
40
+ >>> GM = isomorphism.GraphMatcher(G1, G2)
41
+ >>> GM.is_isomorphic()
42
+ True
43
+
44
+ GM.mapping stores the isomorphism mapping from G1 to G2.
45
+
46
+ >>> GM.mapping
47
+ {0: 0, 1: 1, 2: 2, 3: 3}
48
+
49
+
50
+ Suppose G1 and G2 are isomorphic directed graphs.
51
+ Verification is as follows:
52
+
53
+ >>> G1 = nx.path_graph(4, create_using=nx.DiGraph)
54
+ >>> G2 = nx.path_graph(4, create_using=nx.DiGraph)
55
+ >>> DiGM = isomorphism.DiGraphMatcher(G1, G2)
56
+ >>> DiGM.is_isomorphic()
57
+ True
58
+
59
+ DiGM.mapping stores the isomorphism mapping from G1 to G2.
60
+
61
+ >>> DiGM.mapping
62
+ {0: 0, 1: 1, 2: 2, 3: 3}
63
+
64
+
65
+
66
+ Subgraph Isomorphism
67
+ --------------------
68
+ Graph theory literature can be ambiguous about the meaning of the
69
+ above statement, and we seek to clarify it now.
70
+
71
+ In the VF2 literature, a mapping ``M`` is said to be a graph-subgraph
72
+ isomorphism iff ``M`` is an isomorphism between ``G2`` and a subgraph of ``G1``.
73
+ Thus, to say that ``G1`` and ``G2`` are graph-subgraph isomorphic is to say
74
+ that a subgraph of ``G1`` is isomorphic to ``G2``.
75
+
76
+ Other literature uses the phrase 'subgraph isomorphic' as in '``G1`` does
77
+ not have a subgraph isomorphic to ``G2``'. Another use is as an in adverb
78
+ for isomorphic. Thus, to say that ``G1`` and ``G2`` are subgraph isomorphic
79
+ is to say that a subgraph of ``G1`` is isomorphic to ``G2``.
80
+
81
+ Finally, the term 'subgraph' can have multiple meanings. In this
82
+ context, 'subgraph' always means a 'node-induced subgraph'. Edge-induced
83
+ subgraph isomorphisms are not directly supported, but one should be
84
+ able to perform the check by making use of
85
+ :func:`line_graph <networkx.generators.line.line_graph>`. For
86
+ subgraphs which are not induced, the term 'monomorphism' is preferred
87
+ over 'isomorphism'.
88
+
89
+ Let ``G = (N, E)`` be a graph with a set of nodes ``N`` and set of edges ``E``.
90
+
91
+ If ``G' = (N', E')`` is a subgraph, then:
92
+ ``N'`` is a subset of ``N`` and
93
+ ``E'`` is a subset of ``E``.
94
+
95
+ If ``G' = (N', E')`` is a node-induced subgraph, then:
96
+ ``N'`` is a subset of ``N`` and
97
+ ``E'`` is the subset of edges in ``E`` relating nodes in ``N'``.
98
+
99
+ If ``G' = (N', E')`` is an edge-induced subgraph, then:
100
+ ``N'`` is the subset of nodes in ``N`` related by edges in ``E'`` and
101
+ ``E'`` is a subset of ``E``.
102
+
103
+ If ``G' = (N', E')`` is a monomorphism, then:
104
+ ``N'`` is a subset of ``N`` and
105
+ ``E'`` is a subset of the set of edges in ``E`` relating nodes in ``N'``.
106
+
107
+ Note that if ``G'`` is a node-induced subgraph of ``G``, then it is always a
108
+ subgraph monomorphism of ``G``, but the opposite is not always true, as a
109
+ monomorphism can have fewer edges.
110
+
111
+ References
112
+ ----------
113
+ [1] Luigi P. Cordella, Pasquale Foggia, Carlo Sansone, Mario Vento,
114
+ "A (Sub)Graph Isomorphism Algorithm for Matching Large Graphs",
115
+ IEEE Transactions on Pattern Analysis and Machine Intelligence,
116
+ vol. 26, no. 10, pp. 1367-1372, Oct., 2004.
117
+ http://ieeexplore.ieee.org/iel5/34/29305/01323804.pdf
118
+
119
+ [2] L. P. Cordella, P. Foggia, C. Sansone, M. Vento, "An Improved
120
+ Algorithm for Matching Large Graphs", 3rd IAPR-TC15 Workshop
121
+ on Graph-based Representations in Pattern Recognition, Cuen,
122
+ pp. 149-159, 2001.
123
+ https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs
124
+
125
+ See Also
126
+ --------
127
+ :meth:`semantic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.semantic_feasibility>`
128
+ :meth:`syntactic_feasibility <networkx.algorithms.isomorphism.GraphMatcher.syntactic_feasibility>`
129
+
130
+ Notes
131
+ -----
132
+
133
+ The implementation handles both directed and undirected graphs as well
134
+ as multigraphs.
135
+
136
+ In general, the subgraph isomorphism problem is NP-complete whereas the
137
+ graph isomorphism problem is most likely not NP-complete (although no
138
+ polynomial-time algorithm is known to exist).
139
+
140
+ """
141
+
142
+ # This work was originally coded by Christopher Ellison
143
+ # as part of the Computational Mechanics Python (CMPy) project.
144
+ # James P. Crutchfield, principal investigator.
145
+ # Complexity Sciences Center and Physics Department, UC Davis.
146
+
147
+ import sys
148
+
149
+ __all__ = ["GraphMatcher", "DiGraphMatcher"]
150
+
151
+
152
class GraphMatcher:
    """Implementation of VF2 algorithm for matching undirected graphs.

    Suitable for Graph and MultiGraph instances.
    """

    def __init__(self, G1, G2):
        """Initialize GraphMatcher.

        Parameters
        ----------
        G1,G2: NetworkX Graph or MultiGraph instances.
            The two graphs to check for isomorphism or monomorphism.

        Examples
        --------
        To create a GraphMatcher which checks for syntactic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> G1 = nx.path_graph(4)
        >>> G2 = nx.path_graph(4)
        >>> GM = isomorphism.GraphMatcher(G1, G2)
        """
        self.G1 = G1
        self.G2 = G2
        self.G1_nodes = set(G1.nodes())
        self.G2_nodes = set(G2.nodes())
        # Fixed ordering of G2's nodes; used as a deterministic tie-breaker
        # when selecting the candidate node from G2 in candidate_pairs_iter().
        self.G2_node_order = {n: i for i, n in enumerate(G2)}

        # Set recursion limit.  The match() search recurses once per mapped
        # node, so the depth can reach len(G2).
        self.old_recursion_limit = sys.getrecursionlimit()
        expected_max_recursion_level = len(self.G2)
        if self.old_recursion_limit < 1.5 * expected_max_recursion_level:
            # Give some breathing room.
            sys.setrecursionlimit(int(1.5 * expected_max_recursion_level))

        # Declare that we will be searching for a graph-graph isomorphism.
        self.test = "graph"

        # Initialize state
        self.initialize()

    def reset_recursion_limit(self):
        """Restores the recursion limit."""
        # TODO:
        # Currently, we use recursion and set the recursion level higher.
        # It would be nice to restore the level, but because the
        # (Di)GraphMatcher classes make use of cyclic references, garbage
        # collection will never happen when we define __del__() to
        # restore the recursion level. The result is a memory leak.
        # So for now, we do not automatically restore the recursion level,
        # and instead provide a method to do this manually. Eventually,
        # we should turn this into a non-recursive implementation.
        sys.setrecursionlimit(self.old_recursion_limit)

    def candidate_pairs_iter(self):
        """Iterator over candidate pairs of nodes in G1 and G2."""

        # All computations are done using the current state!

        G1_nodes = self.G1_nodes
        G2_nodes = self.G2_nodes
        min_key = self.G2_node_order.__getitem__

        # First we compute the inout-terminal sets: nodes adjacent to the
        # current partial mapping but not yet in it.
        T1_inout = [node for node in self.inout_1 if node not in self.core_1]
        T2_inout = [node for node in self.inout_2 if node not in self.core_2]

        # If T1_inout and T2_inout are both nonempty.
        # P(s) = T1_inout x {min T2_inout}
        if T1_inout and T2_inout:
            node_2 = min(T2_inout, key=min_key)
            for node_1 in T1_inout:
                yield node_1, node_2

        else:
            # If T1_inout and T2_inout were both empty....
            # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
            # if not (T1_inout or T2_inout):  # as suggested by [2], incorrect
            if 1:  # as inferred from [1], correct
                # First we determine the candidate node for G2
                other_node = min(G2_nodes - set(self.core_2), key=min_key)
                for node in self.G1:
                    if node not in self.core_1:
                        yield node, other_node

        # For all other cases, we don't have any candidate pairs.

    def initialize(self):
        """Reinitializes the state of the algorithm.

        This method should be redefined if using something other than GMState.
        If only subclassing GraphMatcher, a redefinition is not necessary.

        """

        # core_1[n] contains the index of the node paired with n, which is m,
        # provided n is in the mapping.
        # core_2[m] contains the index of the node paired with m, which is n,
        # provided m is in the mapping.
        self.core_1 = {}
        self.core_2 = {}

        # See the paper for definitions of M_x and T_x^{y}

        # inout_1[n] is non-zero if n is in M_1 or in T_1^{inout}
        # inout_2[m] is non-zero if m is in M_2 or in T_2^{inout}
        #
        # The value stored is the depth of the SSR tree when the node became
        # part of the corresponding set.
        self.inout_1 = {}
        self.inout_2 = {}
        # Practically, these sets simply store the nodes in the subgraph.

        self.state = GMState(self)

        # Provide a convenient way to access the isomorphism mapping.
        self.mapping = self.core_1.copy()

    def is_isomorphic(self):
        """Returns True if G1 and G2 are isomorphic graphs."""

        # Let's do two very quick checks!
        # QUESTION: Should we call faster_graph_could_be_isomorphic(G1,G2)?
        # For now, I just copy the code.

        # Check global properties: isomorphic graphs must have the same
        # number of nodes.
        if self.G1.order() != self.G2.order():
            return False

        # Check local properties: the sorted degree sequences must agree.
        d1 = sorted(d for n, d in self.G1.degree())
        d2 = sorted(d for n, d in self.G2.degree())
        if d1 != d2:
            return False

        try:
            x = next(self.isomorphisms_iter())
            return True
        except StopIteration:
            return False

    def isomorphisms_iter(self):
        """Generator over isomorphisms between G1 and G2."""
        # Declare that we are looking for a graph-graph isomorphism.
        self.test = "graph"
        self.initialize()
        yield from self.match()

    def match(self):
        """Extends the isomorphism mapping.

        This function is called recursively to determine if a complete
        isomorphism can be found between G1 and G2.  It cleans up the class
        variables after each recursive call. If an isomorphism is found,
        we yield the mapping.

        """
        if len(self.core_1) == len(self.G2):
            # Save the final mapping, otherwise garbage collection deletes it.
            self.mapping = self.core_1.copy()
            # The mapping is complete.
            yield self.mapping
        else:
            for G1_node, G2_node in self.candidate_pairs_iter():
                if self.syntactic_feasibility(G1_node, G2_node):
                    if self.semantic_feasibility(G1_node, G2_node):
                        # Recursive call, adding the feasible state.
                        # Constructing the state object mutates the matcher's
                        # core/terminal sets; restore() undoes that below.
                        newstate = self.state.__class__(self, G1_node, G2_node)
                        yield from self.match()

                        # restore data structures
                        newstate.restore()

    def semantic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is semantically feasible.

        The semantic feasibility function should return True if it is
        acceptable to add the candidate pair (G1_node, G2_node) to the current
        partial isomorphism mapping.  The logic should focus on semantic
        information contained in the edge data or a formalized node class.

        By acceptable, we mean that the subsequent mapping can still become a
        complete isomorphism mapping.  Thus, if adding the candidate pair
        definitely makes it so that the subsequent mapping cannot become a
        complete isomorphism mapping, then this function must return False.

        The default semantic feasibility function always returns True. The
        effect is that semantics are not considered in the matching of G1
        and G2.

        The semantic checks might differ based on the what type of test is
        being performed.  A keyword description of the test is stored in
        self.test.  Here is a quick description of the currently implemented
        tests::

          test='graph'
            Indicates that the graph matcher is looking for a graph-graph
            isomorphism.

          test='subgraph'
            Indicates that the graph matcher is looking for a subgraph-graph
            isomorphism such that a subgraph of G1 is isomorphic to G2.

          test='mono'
            Indicates that the graph matcher is looking for a subgraph-graph
            monomorphism such that a subgraph of G1 is monomorphic to G2.

        Any subclass which redefines semantic_feasibility() must maintain
        the above form to keep the match() method functional. Implementations
        should consider multigraphs.
        """
        return True

    def subgraph_is_isomorphic(self):
        """Returns `True` if a subgraph of ``G1`` is isomorphic to ``G2``.

        Examples
        --------
        When creating the `GraphMatcher`, the order of the arguments is important

        >>> G = nx.Graph([("A", "B"), ("B", "C"), ("A", "C")])
        >>> H = nx.Graph([(0, 1), (1, 2), (0, 2), (1, 3), (0, 4)])

        Check whether a subgraph of G is isomorphic to H:

        >>> isomatcher = nx.isomorphism.GraphMatcher(G, H)
        >>> isomatcher.subgraph_is_isomorphic()
        False

        Check whether a subgraph of H is isomorphic to G:

        >>> isomatcher = nx.isomorphism.GraphMatcher(H, G)
        >>> isomatcher.subgraph_is_isomorphic()
        True
        """
        try:
            x = next(self.subgraph_isomorphisms_iter())
            return True
        except StopIteration:
            return False

    def subgraph_is_monomorphic(self):
        """Returns `True` if a subgraph of ``G1`` is monomorphic to ``G2``.

        Examples
        --------
        When creating the `GraphMatcher`, the order of the arguments is important.

        >>> G = nx.Graph([("A", "B"), ("B", "C")])
        >>> H = nx.Graph([(0, 1), (1, 2), (0, 2)])

        Check whether a subgraph of G is monomorphic to H:

        >>> isomatcher = nx.isomorphism.GraphMatcher(G, H)
        >>> isomatcher.subgraph_is_monomorphic()
        False

        Check whether a subgraph of H is isomorphic to G:

        >>> isomatcher = nx.isomorphism.GraphMatcher(H, G)
        >>> isomatcher.subgraph_is_monomorphic()
        True
        """
        try:
            x = next(self.subgraph_monomorphisms_iter())
            return True
        except StopIteration:
            return False

    def subgraph_isomorphisms_iter(self):
        """Generator over isomorphisms between a subgraph of ``G1`` and ``G2``.

        Examples
        --------
        When creating the `GraphMatcher`, the order of the arguments is important

        >>> G = nx.Graph([("A", "B"), ("B", "C"), ("A", "C")])
        >>> H = nx.Graph([(0, 1), (1, 2), (0, 2), (1, 3), (0, 4)])

        Yield isomorphic mappings between ``H`` and subgraphs of ``G``:

        >>> isomatcher = nx.isomorphism.GraphMatcher(G, H)
        >>> list(isomatcher.subgraph_isomorphisms_iter())
        []

        Yield isomorphic mappings between ``G`` and subgraphs of ``H``:

        >>> isomatcher = nx.isomorphism.GraphMatcher(H, G)
        >>> next(isomatcher.subgraph_isomorphisms_iter())
        {0: 'A', 1: 'B', 2: 'C'}

        """
        # Declare that we are looking for graph-subgraph isomorphism.
        self.test = "subgraph"
        self.initialize()
        yield from self.match()

    def subgraph_monomorphisms_iter(self):
        """Generator over monomorphisms between a subgraph of ``G1`` and ``G2``.

        Examples
        --------
        When creating the `GraphMatcher`, the order of the arguments is important.

        >>> G = nx.Graph([("A", "B"), ("B", "C")])
        >>> H = nx.Graph([(0, 1), (1, 2), (0, 2)])

        Yield monomorphic mappings between ``H`` and subgraphs of ``G``:

        >>> isomatcher = nx.isomorphism.GraphMatcher(G, H)
        >>> list(isomatcher.subgraph_monomorphisms_iter())
        []

        Yield monomorphic mappings between ``G`` and subgraphs of ``H``:

        >>> isomatcher = nx.isomorphism.GraphMatcher(H, G)
        >>> next(isomatcher.subgraph_monomorphisms_iter())
        {0: 'A', 1: 'B', 2: 'C'}
        """
        # Declare that we are looking for graph-subgraph monomorphism.
        self.test = "mono"
        self.initialize()
        yield from self.match()

    def syntactic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is syntactically feasible.

        This function returns True if it is adding the candidate pair
        to the current partial isomorphism/monomorphism mapping is allowable.
        The addition is allowable if the inclusion of the candidate pair does
        not make it impossible for an isomorphism/monomorphism to be found.
        """

        # The VF2 algorithm was designed to work with graphs having, at most,
        # one edge connecting any two nodes.  This is not the case when
        # dealing with an MultiGraphs.
        #
        # Basically, when we test the look-ahead rules R_neighbor, we will
        # make sure that the number of edges are checked. We also add
        # a R_self check to verify that the number of selfloops is acceptable.
        #
        # Users might be comparing Graph instances with MultiGraph instances.
        # So the generic GraphMatcher class must work with MultiGraphs.
        # Care must be taken since the value in the innermost dictionary is a
        # singlet for Graph instances.  For MultiGraphs, the value in the
        # innermost dictionary is a list.

        ###
        # Test at each step to get a return value as soon as possible.
        ###

        # Look ahead 0

        # R_self

        # The number of selfloops for G1_node must equal the number of
        # self-loops for G2_node. Without this check, we would fail on
        # R_neighbor at the next recursion level. But it is good to prune the
        # search tree now.

        if self.test == "mono":
            # Monomorphism only requires G1 to have *at least* as many
            # self-loop edges as G2.
            if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges(
                G2_node, G2_node
            ):
                return False
        else:
            if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(
                G2_node, G2_node
            ):
                return False

        # R_neighbor

        # For each neighbor n' of n in the partial mapping, the corresponding
        # node m' is a neighbor of m, and vice versa. Also, the number of
        # edges must be equal.
        if self.test != "mono":
            for neighbor in self.G1[G1_node]:
                if neighbor in self.core_1:
                    if self.core_1[neighbor] not in self.G2[G2_node]:
                        return False
                    elif self.G1.number_of_edges(
                        neighbor, G1_node
                    ) != self.G2.number_of_edges(self.core_1[neighbor], G2_node):
                        return False

        for neighbor in self.G2[G2_node]:
            if neighbor in self.core_2:
                if self.core_2[neighbor] not in self.G1[G1_node]:
                    return False
                elif self.test == "mono":
                    if self.G1.number_of_edges(
                        self.core_2[neighbor], G1_node
                    ) < self.G2.number_of_edges(neighbor, G2_node):
                        return False
                else:
                    if self.G1.number_of_edges(
                        self.core_2[neighbor], G1_node
                    ) != self.G2.number_of_edges(neighbor, G2_node):
                        return False

        if self.test != "mono":
            # Look ahead 1

            # R_terminout
            # The number of neighbors of n in T_1^{inout} is equal to the
            # number of neighbors of m that are in T_2^{inout}, and vice versa.
            num1 = 0
            for neighbor in self.G1[G1_node]:
                if (neighbor in self.inout_1) and (neighbor not in self.core_1):
                    num1 += 1
            num2 = 0
            for neighbor in self.G2[G2_node]:
                if (neighbor in self.inout_2) and (neighbor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # Look ahead 2

            # R_new

            # The number of neighbors of n that are neither in the core_1 nor
            # T_1^{inout} is equal to the number of neighbors of m
            # that are neither in core_2 nor T_2^{inout}.
            num1 = 0
            for neighbor in self.G1[G1_node]:
                if neighbor not in self.inout_1:
                    num1 += 1
            num2 = 0
            for neighbor in self.G2[G2_node]:
                if neighbor not in self.inout_2:
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

        # Otherwise, this node pair is syntactically feasible!
        return True
599
+
600
+
601
class DiGraphMatcher(GraphMatcher):
    """Implementation of VF2 algorithm for matching directed graphs.

    Suitable for DiGraph and MultiDiGraph instances.
    """

    def __init__(self, G1, G2):
        """Initialize DiGraphMatcher.

        G1 and G2 should be nx.Graph or nx.MultiGraph instances.

        Examples
        --------
        To create a GraphMatcher which checks for syntactic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
        >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
        >>> DiGM = isomorphism.DiGraphMatcher(G1, G2)
        """
        super().__init__(G1, G2)

    def candidate_pairs_iter(self):
        """Iterator over candidate pairs of nodes in G1 and G2."""

        # All computations are done using the current state!

        G1_nodes = self.G1_nodes
        G2_nodes = self.G2_nodes
        min_key = self.G2_node_order.__getitem__

        # First we compute the out-terminal sets: nodes reachable from the
        # partial mapping via outgoing edges but not yet mapped.
        T1_out = [node for node in self.out_1 if node not in self.core_1]
        T2_out = [node for node in self.out_2 if node not in self.core_2]

        # If T1_out and T2_out are both nonempty.
        # P(s) = T1_out x {min T2_out}
        if T1_out and T2_out:
            node_2 = min(T2_out, key=min_key)
            for node_1 in T1_out:
                yield node_1, node_2

        # If T1_out and T2_out were both empty....
        # We compute the in-terminal sets.

        # elif not (T1_out or T2_out):   # as suggested by [2], incorrect
        else:  # as suggested by [1], correct
            T1_in = [node for node in self.in_1 if node not in self.core_1]
            T2_in = [node for node in self.in_2 if node not in self.core_2]

            # If T1_in and T2_in are both nonempty.
            # P(s) = T1_out x {min T2_out}
            if T1_in and T2_in:
                node_2 = min(T2_in, key=min_key)
                for node_1 in T1_in:
                    yield node_1, node_2

            # If all terminal sets are empty...
            # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}

            # elif not (T1_in or T2_in):   # as suggested by [2], incorrect
            else:  # as inferred from [1], correct
                node_2 = min(G2_nodes - set(self.core_2), key=min_key)
                for node_1 in G1_nodes:
                    if node_1 not in self.core_1:
                        yield node_1, node_2

        # For all other cases, we don't have any candidate pairs.

    def initialize(self):
        """Reinitializes the state of the algorithm.

        This method should be redefined if using something other than DiGMState.
        If only subclassing GraphMatcher, a redefinition is not necessary.
        """

        # core_1[n] contains the index of the node paired with n, which is m,
        # provided n is in the mapping.
        # core_2[m] contains the index of the node paired with m, which is n,
        # provided m is in the mapping.
        self.core_1 = {}
        self.core_2 = {}

        # See the paper for definitions of M_x and T_x^{y}

        # in_1[n] is non-zero if n is in M_1 or in T_1^{in}
        # out_1[n] is non-zero if n is in M_1 or in T_1^{out}
        #
        # in_2[m] is non-zero if m is in M_2 or in T_2^{in}
        # out_2[m] is non-zero if m is in M_2 or in T_2^{out}
        #
        # The value stored is the depth of the search tree when the node became
        # part of the corresponding set.
        self.in_1 = {}
        self.in_2 = {}
        self.out_1 = {}
        self.out_2 = {}

        self.state = DiGMState(self)

        # Provide a convenient way to access the isomorphism mapping.
        self.mapping = self.core_1.copy()

    def syntactic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is syntactically feasible.

        This function returns True if it is adding the candidate pair
        to the current partial isomorphism/monomorphism mapping is allowable.
        The addition is allowable if the inclusion of the candidate pair does
        not make it impossible for an isomorphism/monomorphism to be found.
        """

        # The VF2 algorithm was designed to work with graphs having, at most,
        # one edge connecting any two nodes.  This is not the case when
        # dealing with an MultiGraphs.
        #
        # Basically, when we test the look-ahead rules R_pred and R_succ, we
        # will make sure that the number of edges are checked.  We also add
        # a R_self check to verify that the number of selfloops is acceptable.

        # Users might be comparing DiGraph instances with MultiDiGraph
        # instances. So the generic DiGraphMatcher class must work with
        # MultiDiGraphs. Care must be taken since the value in the innermost
        # dictionary is a singlet for DiGraph instances.  For MultiDiGraphs,
        # the value in the innermost dictionary is a list.

        ###
        # Test at each step to get a return value as soon as possible.
        ###

        # Look ahead 0

        # R_self

        # The number of selfloops for G1_node must equal the number of
        # self-loops for G2_node. Without this check, we would fail on R_pred
        # at the next recursion level. This should prune the tree even further.
        if self.test == "mono":
            # Monomorphism only requires G1 to have *at least* as many
            # self-loop edges as G2.
            if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges(
                G2_node, G2_node
            ):
                return False
        else:
            if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(
                G2_node, G2_node
            ):
                return False

        # R_pred

        # For each predecessor n' of n in the partial mapping, the
        # corresponding node m' is a predecessor of m, and vice versa. Also,
        # the number of edges must be equal
        if self.test != "mono":
            for predecessor in self.G1.pred[G1_node]:
                if predecessor in self.core_1:
                    if self.core_1[predecessor] not in self.G2.pred[G2_node]:
                        return False
                    elif self.G1.number_of_edges(
                        predecessor, G1_node
                    ) != self.G2.number_of_edges(self.core_1[predecessor], G2_node):
                        return False

        for predecessor in self.G2.pred[G2_node]:
            if predecessor in self.core_2:
                if self.core_2[predecessor] not in self.G1.pred[G1_node]:
                    return False
                elif self.test == "mono":
                    if self.G1.number_of_edges(
                        self.core_2[predecessor], G1_node
                    ) < self.G2.number_of_edges(predecessor, G2_node):
                        return False
                else:
                    if self.G1.number_of_edges(
                        self.core_2[predecessor], G1_node
                    ) != self.G2.number_of_edges(predecessor, G2_node):
                        return False

        # R_succ

        # For each successor n' of n in the partial mapping, the corresponding
        # node m' is a successor of m, and vice versa. Also, the number of
        # edges must be equal.
        if self.test != "mono":
            for successor in self.G1[G1_node]:
                if successor in self.core_1:
                    if self.core_1[successor] not in self.G2[G2_node]:
                        return False
                    elif self.G1.number_of_edges(
                        G1_node, successor
                    ) != self.G2.number_of_edges(G2_node, self.core_1[successor]):
                        return False

        for successor in self.G2[G2_node]:
            if successor in self.core_2:
                if self.core_2[successor] not in self.G1[G1_node]:
                    return False
                elif self.test == "mono":
                    if self.G1.number_of_edges(
                        G1_node, self.core_2[successor]
                    ) < self.G2.number_of_edges(G2_node, successor):
                        return False
                else:
                    if self.G1.number_of_edges(
                        G1_node, self.core_2[successor]
                    ) != self.G2.number_of_edges(G2_node, successor):
                        return False

        if self.test != "mono":
            # Look ahead 1

            # R_termin
            # The number of predecessors of n that are in T_1^{in} is equal to the
            # number of predecessors of m that are in T_2^{in}.
            num1 = 0
            for predecessor in self.G1.pred[G1_node]:
                if (predecessor in self.in_1) and (predecessor not in self.core_1):
                    num1 += 1
            num2 = 0
            for predecessor in self.G2.pred[G2_node]:
                if (predecessor in self.in_2) and (predecessor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # The number of successors of n that are in T_1^{in} is equal to the
            # number of successors of m that are in T_2^{in}.
            num1 = 0
            for successor in self.G1[G1_node]:
                if (successor in self.in_1) and (successor not in self.core_1):
                    num1 += 1
            num2 = 0
            for successor in self.G2[G2_node]:
                if (successor in self.in_2) and (successor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # R_termout

            # The number of predecessors of n that are in T_1^{out} is equal to the
            # number of predecessors of m that are in T_2^{out}.
            num1 = 0
            for predecessor in self.G1.pred[G1_node]:
                if (predecessor in self.out_1) and (predecessor not in self.core_1):
                    num1 += 1
            num2 = 0
            for predecessor in self.G2.pred[G2_node]:
                if (predecessor in self.out_2) and (predecessor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # The number of successors of n that are in T_1^{out} is equal to the
            # number of successors of m that are in T_2^{out}.
            num1 = 0
            for successor in self.G1[G1_node]:
                if (successor in self.out_1) and (successor not in self.core_1):
                    num1 += 1
            num2 = 0
            for successor in self.G2[G2_node]:
                if (successor in self.out_2) and (successor not in self.core_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # Look ahead 2

            # R_new

            # The number of predecessors of n that are neither in the core_1 nor
            # T_1^{in} nor T_1^{out} is equal to the number of predecessors of m
            # that are neither in core_2 nor T_2^{in} nor T_2^{out}.
            num1 = 0
            for predecessor in self.G1.pred[G1_node]:
                if (predecessor not in self.in_1) and (predecessor not in self.out_1):
                    num1 += 1
            num2 = 0
            for predecessor in self.G2.pred[G2_node]:
                if (predecessor not in self.in_2) and (predecessor not in self.out_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

            # The number of successors of n that are neither in the core_1 nor
            # T_1^{in} nor T_1^{out} is equal to the number of successors of m
            # that are neither in core_2 nor T_2^{in} nor T_2^{out}.
            num1 = 0
            for successor in self.G1[G1_node]:
                if (successor not in self.in_1) and (successor not in self.out_1):
                    num1 += 1
            num2 = 0
            for successor in self.G2[G2_node]:
                if (successor not in self.in_2) and (successor not in self.out_2):
                    num2 += 1
            if self.test == "graph":
                if num1 != num2:
                    return False
            else:  # self.test == 'subgraph'
                if not (num1 >= num2):
                    return False

        # Otherwise, this node pair is syntactically feasible!
        return True

    def subgraph_is_isomorphic(self):
        """Returns `True` if a subgraph of ``G1`` is isomorphic to ``G2``.

        Examples
        --------
        When creating the `DiGraphMatcher`, the order of the arguments is important

        >>> G = nx.DiGraph([("A", "B"), ("B", "A"), ("B", "C"), ("C", "B")])
        >>> H = nx.DiGraph(nx.path_graph(5))

        Check whether a subgraph of G is isomorphic to H:

        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
        >>> isomatcher.subgraph_is_isomorphic()
        False

        Check whether a subgraph of H is isomorphic to G:

        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
        >>> isomatcher.subgraph_is_isomorphic()
        True
        """
        return super().subgraph_is_isomorphic()

    def subgraph_is_monomorphic(self):
        """Returns `True` if a subgraph of ``G1`` is monomorphic to ``G2``.

        Examples
        --------
        When creating the `DiGraphMatcher`, the order of the arguments is important.

        >>> G = nx.DiGraph([("A", "B"), ("C", "B"), ("D", "C")])
        >>> H = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 2)])

        Check whether a subgraph of G is monomorphic to H:

        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
        >>> isomatcher.subgraph_is_monomorphic()
        False

        Check whether a subgraph of H is isomorphic to G:

        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
        >>> isomatcher.subgraph_is_monomorphic()
        True
        """
        return super().subgraph_is_monomorphic()

    def subgraph_isomorphisms_iter(self):
        """Generator over isomorphisms between a subgraph of ``G1`` and ``G2``.

        Examples
        --------
        When creating the `DiGraphMatcher`, the order of the arguments is important

        >>> G = nx.DiGraph([("B", "C"), ("C", "B"), ("C", "D"), ("D", "C")])
        >>> H = nx.DiGraph(nx.path_graph(5))

        Yield isomorphic mappings between ``H`` and subgraphs of ``G``:

        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
        >>> list(isomatcher.subgraph_isomorphisms_iter())
        []

        Yield isomorphic mappings between ``G`` and subgraphs of ``H``:

        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
        >>> next(isomatcher.subgraph_isomorphisms_iter())
        {0: 'B', 1: 'C', 2: 'D'}
        """
        return super().subgraph_isomorphisms_iter()

    def subgraph_monomorphisms_iter(self):
        """Generator over monomorphisms between a subgraph of ``G1`` and ``G2``.

        Examples
        --------
        When creating the `DiGraphMatcher`, the order of the arguments is important.

        >>> G = nx.DiGraph([("A", "B"), ("C", "B"), ("D", "C")])
        >>> H = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 2)])

        Yield monomorphic mappings between ``H`` and subgraphs of ``G``:

        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
        >>> list(isomatcher.subgraph_monomorphisms_iter())
        []

        Yield monomorphic mappings between ``G`` and subgraphs of ``H``:

        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
        >>> next(isomatcher.subgraph_monomorphisms_iter())
        {3: 'A', 2: 'B', 1: 'C', 0: 'D'}
        """
        return super().subgraph_monomorphisms_iter()
1021
+
1022
+
1023
class GMState:
    """Internal representation of state for the GraphMatcher class.

    Each instance records one step of the VF2 depth-first search so that
    the matcher's shared vectors (``core_1``/``core_2`` and the
    ``inout_1``/``inout_2`` frontier maps) can be rolled back later via
    :meth:`restore`.  Due to the depth-first strategy, at most
    ``G2.order()`` of these objects exist in memory at a time.
    """

    def __init__(self, GM, G1_node=None, G2_node=None):
        """Record the addition of the pair (G1_node, G2_node) to GM's mapping.

        Pass in the GraphMatcher to which this GMState belongs and the new
        node pair to add to its current isomorphism mapping.  Calling with
        either node omitted resets the matcher's shared state instead.
        """
        self.GM = GM

        # Last node pair pushed by this state; stays None on a reset call.
        self.G1_node = None
        self.G2_node = None
        self.depth = len(GM.core_1)

        if G1_node is None or G2_node is None:
            # Reset the shared matcher vectors.
            GM.core_1 = {}
            GM.core_2 = {}
            GM.inout_1 = {}
            GM.inout_2 = {}

        # Watch out! G1_node == 0 should evaluate to True.
        if G1_node is not None and G2_node is not None:
            # Extend the isomorphism mapping with the new pair.
            GM.core_1[G1_node] = G2_node
            GM.core_2[G2_node] = G1_node
            self.G1_node = G1_node
            self.G2_node = G2_node

            # Depth after insertion; tags every frontier entry added here
            # so restore() can find and remove exactly these entries.
            self.depth = len(GM.core_1)

            # Register the newly mapped nodes themselves (first writer wins).
            GM.inout_1.setdefault(G1_node, self.depth)
            GM.inout_2.setdefault(G2_node, self.depth)

            # Update T_1^{inout}: unmapped neighbors of mapped G1 nodes.
            frontier = {
                nbr
                for mapped in GM.core_1
                for nbr in GM.G1[mapped]
                if nbr not in GM.core_1
            }
            for nbr in frontier:
                GM.inout_1.setdefault(nbr, self.depth)

            # Update T_2^{inout} symmetrically.
            frontier = {
                nbr
                for mapped in GM.core_2
                for nbr in GM.G2[mapped]
                if nbr not in GM.core_2
            }
            for nbr in frontier:
                GM.inout_2.setdefault(nbr, self.depth)

    def restore(self):
        """Deletes the GMState object and restores the class variables."""
        # Pop the stored pair from the core mappings, if one was pushed.
        # Watch out! G1_node == 0 should evaluate to True.
        if self.G1_node is not None and self.G2_node is not None:
            del self.GM.core_1[self.G1_node]
            del self.GM.core_2[self.G2_node]

        # Drop every frontier entry tagged with this state's depth.
        for vector in (self.GM.inout_1, self.GM.inout_2):
            stale = [node for node, d in vector.items() if d == self.depth]
            for node in stale:
                del vector[node]
1109
+
1110
+
1111
class DiGMState:
    """Internal representation of state for the DiGraphMatcher class.

    Each instance records one step of the VF2 depth-first search so that
    the matcher's shared vectors (``core_1``/``core_2`` plus the directed
    frontier maps ``in_1``/``in_2``/``out_1``/``out_2``) can be rolled back
    later via :meth:`restore`.  Due to the depth-first strategy, at most
    ``G2.order()`` of these objects exist in memory at a time.
    """

    def __init__(self, GM, G1_node=None, G2_node=None):
        """Record the addition of the pair (G1_node, G2_node) to GM's mapping.

        Pass in the DiGraphMatcher to which this DiGMState belongs and the
        new node pair to add to its current isomorphism mapping.  Calling
        with either node omitted resets the matcher's shared state instead.
        """
        self.GM = GM

        # Last node pair pushed by this state; stays None on a reset call.
        self.G1_node = None
        self.G2_node = None
        self.depth = len(GM.core_1)

        if G1_node is None or G2_node is None:
            # Reset the shared matcher vectors.
            GM.core_1 = {}
            GM.core_2 = {}
            GM.in_1 = {}
            GM.in_2 = {}
            GM.out_1 = {}
            GM.out_2 = {}

        # Watch out! G1_node == 0 should evaluate to True.
        if G1_node is not None and G2_node is not None:
            # Extend the isomorphism mapping with the new pair.
            GM.core_1[G1_node] = G2_node
            GM.core_2[G2_node] = G1_node
            self.G1_node = G1_node
            self.G2_node = G2_node

            # Depth after insertion; tags every frontier entry added here
            # so restore() can find and remove exactly these entries.
            self.depth = len(GM.core_1)

            # Register the newly mapped nodes themselves (first writer wins).
            for vector in (GM.in_1, GM.out_1):
                vector.setdefault(G1_node, self.depth)
            for vector in (GM.in_2, GM.out_2):
                vector.setdefault(G2_node, self.depth)

            # Update T_1^{in}: unmapped predecessors of mapped G1 nodes.
            frontier = {
                pred
                for mapped in GM.core_1
                for pred in GM.G1.predecessors(mapped)
                if pred not in GM.core_1
            }
            for node in frontier:
                GM.in_1.setdefault(node, self.depth)

            # Update T_2^{in} symmetrically.
            frontier = {
                pred
                for mapped in GM.core_2
                for pred in GM.G2.predecessors(mapped)
                if pred not in GM.core_2
            }
            for node in frontier:
                GM.in_2.setdefault(node, self.depth)

            # Update T_1^{out}: unmapped successors of mapped G1 nodes.
            frontier = {
                succ
                for mapped in GM.core_1
                for succ in GM.G1.successors(mapped)
                if succ not in GM.core_1
            }
            for node in frontier:
                GM.out_1.setdefault(node, self.depth)

            # Update T_2^{out} symmetrically.
            frontier = {
                succ
                for mapped in GM.core_2
                for succ in GM.G2.successors(mapped)
                if succ not in GM.core_2
            }
            for node in frontier:
                GM.out_2.setdefault(node, self.depth)

    def restore(self):
        """Deletes the DiGMState object and restores the class variables."""
        # Pop the stored pair from the core mappings, if one was pushed.
        # Watch out! G1_node == 0 should evaluate to True.
        if self.G1_node is not None and self.G2_node is not None:
            del self.GM.core_1[self.G1_node]
            del self.GM.core_2[self.G2_node]

        # Drop every frontier entry tagged with this state's depth.
        for vector in (self.GM.in_1, self.GM.in_2, self.GM.out_1, self.GM.out_2):
            stale = [node for node, d in vector.items() if d == self.depth]
            for node in stale:
                del vector[node]
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/matchhelpers.py ADDED
@@ -0,0 +1,352 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions which help end users define customize node_match and
2
+ edge_match functions to use during isomorphism checks.
3
+ """
4
+
5
+ import math
6
+ import types
7
+ from itertools import permutations
8
+
9
+ __all__ = [
10
+ "categorical_node_match",
11
+ "categorical_edge_match",
12
+ "categorical_multiedge_match",
13
+ "numerical_node_match",
14
+ "numerical_edge_match",
15
+ "numerical_multiedge_match",
16
+ "generic_node_match",
17
+ "generic_edge_match",
18
+ "generic_multiedge_match",
19
+ ]
20
+
21
+
22
def copyfunc(f, name=None):
    """Returns a copy of function *f*, optionally renamed to *name*.

    The copy shares *f*'s code object, globals, positional defaults and
    closure — it is a shallow re-wrap, not a deep copy.  A distinct
    function object is needed so each copy can carry its own ``__doc__``.
    """
    g = types.FunctionType(
        f.__code__, f.__globals__, name or f.__name__, f.__defaults__, f.__closure__
    )
    # types.FunctionType() does not carry over keyword-only defaults, so a
    # copied function with keyword-only parameters would raise TypeError
    # when called without them; copy them explicitly.
    g.__kwdefaults__ = f.__kwdefaults__
    return g
27
+
28
+
29
def allclose(x, y, rtol=1.0000000000000001e-05, atol=1e-08):
    """Returns True if x and y are sufficiently close, elementwise.

    Parameters
    ----------
    rtol : float
        The relative error tolerance.
    atol : float
        The absolute error tolerance.

    """
    # Assume finite weights; see numpy.allclose() for reference.
    # Sequences of unequal length compare only over the shorter prefix
    # (zip truncates), matching the original behavior.
    for xi, yi in zip(x, y):
        if not math.isclose(xi, yi, rel_tol=rtol, abs_tol=atol):
            return False
    return True
42
+
43
+
44
# Shared docstring template for the categorical matchers; attached to the
# three categorical_* functions below (with "node" rewritten to "edge" for
# the edge/multiedge variants).
categorical_doc = """
Returns a comparison function for a categorical node attribute.

The value(s) of the attr(s) must be hashable and comparable via the ==
operator since they are placed into a set([]) object. If the sets from
G1 and G2 are the same, then the constructed function returns True.

Parameters
----------
attr : string | list
    The categorical node attribute to compare, or a list of categorical
    node attributes to compare.
default : value | list
    The default value for the categorical node attribute, or a list of
    default values for the categorical node attributes.

Returns
-------
match : function
    The customized, categorical `node_match` function.

Examples
--------
>>> import networkx.algorithms.isomorphism as iso
>>> nm = iso.categorical_node_match("size", 1)
>>> nm = iso.categorical_node_match(["color", "size"], ["red", 2])

"""
72
+
73
+
74
def categorical_node_match(attr, default):
    # Docstring is attached at module level from `categorical_doc`.
    if isinstance(attr, str):
        # Single attribute: compare the two values directly.
        def match(data1, data2):
            return data1.get(attr, default) == data2.get(attr, default)

        return match

    # Multiple attributes: every (attribute, default) pair must agree.
    pairs = list(zip(attr, default))

    def match(data1, data2):
        for key, fallback in pairs:
            if data1.get(key, fallback) != data2.get(key, fallback):
                return False
        return True

    return match
87
+
88
+
89
# Edge matching is identical to node matching; a function copy is made so
# the edge variant can carry its own (edge-flavored) docstring, set below.
categorical_edge_match = copyfunc(categorical_node_match, "categorical_edge_match")
90
+
91
+
92
def categorical_multiedge_match(attr, default):
    # Docstring is attached at module level from `categorical_doc`.
    if isinstance(attr, str):
        # Single attribute: compare the sets of values over all parallel edges.
        def match(datasets1, datasets2):
            left = {data.get(attr, default) for data in datasets1.values()}
            right = {data.get(attr, default) for data in datasets2.values()}
            return left == right

        return match

    # Multiple attributes: compare sets of per-edge value tuples.
    pairs = list(zip(attr, default))

    def match(datasets1, datasets2):
        left = {
            tuple(data.get(key, fallback) for key, fallback in pairs)
            for data in datasets1.values()
        }
        right = {
            tuple(data.get(key, fallback) for key, fallback in pairs)
            for data in datasets2.values()
        }
        return left == right

    return match
115
+
116
+
117
# Docstrings for categorical functions.
# The edge variant reuses the node docstring with "node" -> "edge"; the
# multiedge variant additionally rewrites the function name in the examples.
categorical_node_match.__doc__ = categorical_doc
categorical_edge_match.__doc__ = categorical_doc.replace("node", "edge")
tmpdoc = categorical_doc.replace("node", "edge")
tmpdoc = tmpdoc.replace("categorical_edge_match", "categorical_multiedge_match")
categorical_multiedge_match.__doc__ = tmpdoc
123
+
124
+
125
# Shared docstring template for the numerical matchers; attached to the
# three numerical_* functions below (with "node" rewritten to "edge" for
# the edge/multiedge variants).
numerical_doc = """
Returns a comparison function for a numerical node attribute.

The value(s) of the attr(s) must be numerical and sortable. If the
sorted list of values from G1 and G2 are the same within some
tolerance, then the constructed function returns True.

Parameters
----------
attr : string | list
    The numerical node attribute to compare, or a list of numerical
    node attributes to compare.
default : value | list
    The default value for the numerical node attribute, or a list of
    default values for the numerical node attributes.
rtol : float
    The relative error tolerance.
atol : float
    The absolute error tolerance.

Returns
-------
match : function
    The customized, numerical `node_match` function.

Examples
--------
>>> import networkx.algorithms.isomorphism as iso
>>> nm = iso.numerical_node_match("weight", 1.0)
>>> nm = iso.numerical_node_match(["weight", "linewidth"], [0.25, 0.5])

"""
157
+
158
+
159
def numerical_node_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08):
    # Docstring is attached at module level from `numerical_doc`.
    if isinstance(attr, str):
        # Single attribute: compare the two values with math.isclose.
        def match(data1, data2):
            lhs = data1.get(attr, default)
            rhs = data2.get(attr, default)
            return math.isclose(lhs, rhs, rel_tol=rtol, abs_tol=atol)

        return match

    # Multiple attributes: compare the value vectors elementwise.
    pairs = list(zip(attr, default))

    def match(data1, data2):
        left = [data1.get(key, fallback) for key, fallback in pairs]
        right = [data2.get(key, fallback) for key, fallback in pairs]
        return allclose(left, right, rtol=rtol, atol=atol)

    return match
179
+
180
+
181
# Edge matching is identical to node matching; a function copy is made so
# the edge variant can carry its own (edge-flavored) docstring, set below.
numerical_edge_match = copyfunc(numerical_node_match, "numerical_edge_match")
182
+
183
+
184
def numerical_multiedge_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08):
    # Docstring is attached at module level from `numerical_doc`.
    if isinstance(attr, str):
        # Single attribute: compare the sorted value lists within tolerance.
        def match(datasets1, datasets2):
            left = sorted(data.get(attr, default) for data in datasets1.values())
            right = sorted(data.get(attr, default) for data in datasets2.values())
            return allclose(left, right, rtol=rtol, atol=atol)

        return match

    # Multiple attributes: compare sorted per-edge value tuples pairwise.
    pairs = list(zip(attr, default))

    def match(datasets1, datasets2):
        left = sorted(
            tuple(data.get(key, fallback) for key, fallback in pairs)
            for data in datasets1.values()
        )
        right = sorted(
            tuple(data.get(key, fallback) for key, fallback in pairs)
            for data in datasets2.values()
        )
        # zip truncates to the shorter list, matching the original loop.
        return all(
            allclose(xi, yi, rtol=rtol, atol=atol) for xi, yi in zip(left, right)
        )

    return match
213
+
214
+
215
# Docstrings for numerical functions.
# The edge variant reuses the node docstring with "node" -> "edge"; the
# multiedge variant additionally rewrites the function name in the examples.
numerical_node_match.__doc__ = numerical_doc
numerical_edge_match.__doc__ = numerical_doc.replace("node", "edge")
tmpdoc = numerical_doc.replace("node", "edge")
tmpdoc = tmpdoc.replace("numerical_edge_match", "numerical_multiedge_match")
numerical_multiedge_match.__doc__ = tmpdoc
221
+
222
+
223
# Shared docstring template for the generic matchers; attached to
# generic_node_match and generic_edge_match below ("node" rewritten to
# "edge" for the edge variant). generic_multiedge_match has its own doc.
generic_doc = """
Returns a comparison function for a generic attribute.

The value(s) of the attr(s) are compared using the specified
operators. If all the attributes are equal, then the constructed
function returns True.

Parameters
----------
attr : string | list
    The node attribute to compare, or a list of node attributes
    to compare.
default : value | list
    The default value for the node attribute, or a list of
    default values for the node attributes.
op : callable | list
    The operator to use when comparing attribute values, or a list
    of operators to use when comparing values for each attribute.

Returns
-------
match : function
    The customized, generic `node_match` function.

Examples
--------
>>> from operator import eq
>>> from math import isclose
>>> from networkx.algorithms.isomorphism import generic_node_match
>>> nm = generic_node_match("weight", 1.0, isclose)
>>> nm = generic_node_match("color", "red", eq)
>>> nm = generic_node_match(["weight", "color"], [1.0, "red"], [isclose, eq])

"""
257
+
258
+
259
def generic_node_match(attr, default, op):
    # Docstring is attached at module level from `generic_doc`.
    if isinstance(attr, str):
        # Single attribute: apply the single comparison operator.
        def match(data1, data2):
            return op(data1.get(attr, default), data2.get(attr, default))

        return match

    # Multiple attributes: every (attribute, default, operator) triple
    # must report a successful comparison.
    triples = list(zip(attr, default, op))

    def match(data1, data2):
        return all(
            compare(data1.get(key, fallback), data2.get(key, fallback))
            for key, fallback, compare in triples
        )

    return match
276
+
277
+
278
# Edge matching is identical to node matching; a function copy is made so
# the edge variant can carry its own (edge-flavored) docstring, set below.
generic_edge_match = copyfunc(generic_node_match, "generic_edge_match")
279
+
280
+
281
def generic_multiedge_match(attr, default, op):
    """Returns a comparison function for a generic attribute.

    The value(s) of the attr(s) are compared using the specified
    operators. If all the attributes are equal, then the constructed
    function returns True. Potentially, the constructed edge_match
    function can be slow since it must verify that no isomorphism
    exists between the multiedges before it returns False.

    Parameters
    ----------
    attr : string | list
        The edge attribute to compare, or a list of node attributes
        to compare.
    default : value | list
        The default value for the edge attribute, or a list of
        default values for the edge attributes.
    op : callable | list
        The operator to use when comparing attribute values, or a list
        of operators to use when comparing values for each attribute.

    Returns
    -------
    match : function
        The customized, generic `edge_match` function.

    Examples
    --------
    >>> from operator import eq
    >>> from math import isclose
    >>> from networkx.algorithms.isomorphism import generic_multiedge_match
    >>> nm = generic_multiedge_match("weight", 1.0, isclose)
    >>> nm = generic_multiedge_match("color", "red", eq)
    >>> nm = generic_multiedge_match(["weight", "color"], [1.0, "red"], [isclose, eq])

    """
    # NOTE: original docstring examples showed generic_node_match here
    # (copy-paste slip); corrected to generic_multiedge_match.

    # This is slow, but generic.
    # We must test every possible isomorphism between the edges.
    # Normalize the scalar form to the list form so one code path suffices.
    if isinstance(attr, str):
        attr = [attr]
        default = [default]
        op = [op]
    attrs = list(zip(attr, default))  # op is zipped in elementwise below.

    def match(datasets1, datasets2):
        # Per-edge tuples of attribute values for each multiedge set.
        values1 = [
            tuple(data1.get(attr, d) for attr, d in attrs)
            for data1 in datasets1.values()
        ]
        values2 = [
            tuple(data2.get(attr, d) for attr, d in attrs)
            for data2 in datasets2.values()
        ]
        # Brute force: accept if some ordering of values2 matches values1
        # attribute-by-attribute under the corresponding operators.
        for candidate in permutations(values2):
            if all(
                all(map(lambda x, y, z: z(x, y), xi, yi, op))
                for xi, yi in zip(values1, candidate)
            ):
                # Found an isomorphism between the multiedges.
                return True
        # No isomorphism between the multiedges exists.
        return False

    return match
348
+
349
+
350
# Docstrings for generic functions.
# (The original comment said "numerical" — a copy-paste slip.)  The edge
# variant reuses the node docstring with "node" rewritten to "edge".
generic_node_match.__doc__ = generic_doc
generic_edge_match.__doc__ = generic_doc.replace("node", "edge")
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py ADDED
@@ -0,0 +1,308 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ *****************************
3
+ Time-respecting VF2 Algorithm
4
+ *****************************
5
+
6
+ An extension of the VF2 algorithm for time-respecting graph isomorphism
7
+ testing in temporal graphs.
8
+
9
+ A temporal graph is one in which edges contain a datetime attribute,
10
+ denoting when interaction occurred between the incident nodes. A
11
+ time-respecting subgraph of a temporal graph is a subgraph such that
12
+ all interactions incident to a node occurred within a time threshold,
13
+ delta, of each other. A directed time-respecting subgraph has the
14
+ added constraint that incoming interactions to a node must precede
15
+ outgoing interactions from the same node - this enforces a sense of
16
+ directed flow.
17
+
18
+ Introduction
19
+ ------------
20
+
21
+ The TimeRespectingGraphMatcher and TimeRespectingDiGraphMatcher
22
+ extend the GraphMatcher and DiGraphMatcher classes, respectively,
23
+ to include temporal constraints on matches. This is achieved through
24
+ a semantic check, via the semantic_feasibility() function.
25
+
26
+ As well as including G1 (the graph in which to seek embeddings) and
27
+ G2 (the subgraph structure of interest), the name of the temporal
28
+ attribute on the edges and the time threshold, delta, must be supplied
29
+ as arguments to the matching constructors.
30
+
31
+ A delta of zero is the strictest temporal constraint on the match -
32
+ only embeddings in which all interactions occur at the same time will
33
+ be returned. A delta of one day will allow embeddings in which
34
+ adjacent interactions occur up to a day apart.
35
+
36
+ Examples
37
+ --------
38
+
39
+ Examples will be provided when the datetime type has been incorporated.
40
+
41
+
42
+ Temporal Subgraph Isomorphism
43
+ -----------------------------
44
+
45
+ A brief discussion of the somewhat diverse current literature will be
46
+ included here.
47
+
48
+ References
49
+ ----------
50
+
51
+ [1] Redmond, U. and Cunningham, P. Temporal subgraph isomorphism. In:
52
+ The 2013 IEEE/ACM International Conference on Advances in Social
53
+ Networks Analysis and Mining (ASONAM). Niagara Falls, Canada; 2013:
54
+ pages 1451 - 1452. [65]
55
+
56
+ For a discussion of the literature on temporal networks:
57
+
58
+ [3] P. Holme and J. Saramaki. Temporal networks. Physics Reports,
59
+ 519(3):97–125, 2012.
60
+
61
+ Notes
62
+ -----
63
+
64
+ Handles directed and undirected graphs and graphs with parallel edges.
65
+
66
+ """
67
+
68
+ import networkx as nx
69
+
70
+ from .isomorphvf2 import DiGraphMatcher, GraphMatcher
71
+
72
+ __all__ = ["TimeRespectingGraphMatcher", "TimeRespectingDiGraphMatcher"]
73
+
74
+
75
class TimeRespectingGraphMatcher(GraphMatcher):
    def __init__(self, G1, G2, temporal_attribute_name, delta):
        """Initialize TimeRespectingGraphMatcher.

        G1 and G2 should be nx.Graph or nx.MultiGraph instances.

        Parameters
        ----------
        G1, G2 : graph
            The graph in which to seek embeddings and the subgraph
            structure of interest, respectively.
        temporal_attribute_name : string
            Name of the edge attribute holding each edge's datetime.
        delta : timedelta
            Maximum allowed spread between datetimes of edges incident
            to a matched node.

        Examples
        --------
        To create a TimeRespectingGraphMatcher which checks for
        syntactic and semantic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> from datetime import timedelta
        >>> G1 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))

        >>> G2 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))

        >>> GM = isomorphism.TimeRespectingGraphMatcher(
        ...     G1, G2, "date", timedelta(days=1)
        ... )
        """
        self.temporal_attribute_name = temporal_attribute_name
        self.delta = delta
        super().__init__(G1, G2)

    def one_hop(self, Gx, Gx_node, neighbors):
        """
        Edges one hop out from a node in the mapping should be
        time-respecting with respect to each other.

        Raises ValueError if any incident edge is missing the temporal
        attribute (i.e. its value is None).
        """
        # BUG FIX: the original branched on isinstance(Gx, nx.Graph), but
        # nx.MultiGraph subclasses nx.Graph, so the multigraph branch was
        # dead code and MultiGraph inputs hit the wrong adjacency layout.
        # Branch on the is_multigraph() predicate instead.
        is_multi = Gx.is_multigraph()
        dates = []
        for n in neighbors:
            if not is_multi:  # Graph: G[u][v] returns the data dictionary.
                dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
            else:  # MultiGraph: G[u][v] maps edge key -> data dictionary.
                for edge in Gx[Gx_node][n].values():  # All parallel edges.
                    dates.append(edge[self.temporal_attribute_name])
        if any(x is None for x in dates):
            raise ValueError("Datetime not supplied for at least one edge.")
        # Vacuously time-respecting with no dates; otherwise the spread
        # between the earliest and latest date must be within delta.
        return not dates or max(dates) - min(dates) <= self.delta

    def two_hop(self, Gx, core_x, Gx_node, neighbors):
        """
        Paths of length 2 from Gx_node should be time-respecting.
        """
        # Re-check each mapped neighbor's incident edges, including the
        # edge back to Gx_node that the candidate mapping would add.
        return all(
            self.one_hop(Gx, v, [n for n in Gx[v] if n in core_x] + [Gx_node])
            for v in neighbors
        )

    def semantic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is semantically
        feasible.

        Any subclass which redefines semantic_feasibility() must
        maintain the self.tests if needed, to keep the match() method
        functional. Implementations should consider multigraphs.
        """
        neighbors = [n for n in self.G1[G1_node] if n in self.core_1]
        if not self.one_hop(self.G1, G1_node, neighbors):  # Fail fast on first node.
            return False
        if not self.two_hop(self.G1, self.core_1, G1_node, neighbors):
            return False
        # Otherwise, this node is semantically feasible!
        return True
142
+
143
+
144
class TimeRespectingDiGraphMatcher(DiGraphMatcher):
    def __init__(self, G1, G2, temporal_attribute_name, delta):
        """Initialize TimeRespectingDiGraphMatcher.

        G1 and G2 should be nx.DiGraph or nx.MultiDiGraph instances.

        Parameters
        ----------
        G1, G2 : graph
            The graph in which to seek embeddings and the subgraph
            structure of interest, respectively.
        temporal_attribute_name : string
            Name of the edge attribute holding each edge's datetime.
        delta : timedelta
            Maximum allowed spread between datetimes of edges incident
            to a matched node.

        Examples
        --------
        To create a TimeRespectingDiGraphMatcher which checks for
        syntactic and semantic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> from datetime import timedelta
        >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))

        >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))

        >>> GM = isomorphism.TimeRespectingDiGraphMatcher(
        ...     G1, G2, "date", timedelta(days=1)
        ... )
        """
        self.temporal_attribute_name = temporal_attribute_name
        self.delta = delta
        super().__init__(G1, G2)

    def get_pred_dates(self, Gx, Gx_node, core_x, pred):
        """
        Get the dates of edges from predecessors.
        """
        pred_dates = []
        # BUG FIX: the original branched on isinstance(Gx, nx.DiGraph),
        # but nx.MultiDiGraph subclasses nx.DiGraph, so the multigraph
        # branch was dead code; branch on is_multigraph() instead.
        if not Gx.is_multigraph():  # DiGraph: G[u][v] is the data dict.
            for n in pred:
                pred_dates.append(Gx[n][Gx_node][self.temporal_attribute_name])
        else:  # MultiDiGraph: G[u][v] maps edge key -> data dictionary.
            for n in pred:
                for edge in Gx[n][Gx_node].values():  # All parallel edges.
                    pred_dates.append(edge[self.temporal_attribute_name])
        return pred_dates

    def get_succ_dates(self, Gx, Gx_node, core_x, succ):
        """
        Get the dates of edges to successors.
        """
        succ_dates = []
        # Same is_multigraph() fix as in get_pred_dates above.
        if not Gx.is_multigraph():  # DiGraph: G[u][v] is the data dict.
            for n in succ:
                succ_dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
        else:  # MultiDiGraph: G[u][v] maps edge key -> data dictionary.
            for n in succ:
                for edge in Gx[Gx_node][n].values():  # All parallel edges.
                    succ_dates.append(edge[self.temporal_attribute_name])
        return succ_dates

    def one_hop(self, Gx, Gx_node, core_x, pred, succ):
        """
        The ego node: incident edge dates must be mutually
        time-respecting (test_one) and directed-flow ordered (test_two).
        """
        pred_dates = self.get_pred_dates(Gx, Gx_node, core_x, pred)
        succ_dates = self.get_succ_dates(Gx, Gx_node, core_x, succ)
        return self.test_one(pred_dates, succ_dates) and self.test_two(
            pred_dates, succ_dates
        )

    def two_hop_pred(self, Gx, Gx_node, core_x, pred):
        """
        The predecessors of the ego node, re-checked with the edge the
        candidate mapping would add.
        """
        return all(
            self.one_hop(
                Gx,
                p,
                core_x,
                self.preds(Gx, core_x, p),
                self.succs(Gx, core_x, p, Gx_node),
            )
            for p in pred
        )

    def two_hop_succ(self, Gx, Gx_node, core_x, succ):
        """
        The successors of the ego node, re-checked with the edge the
        candidate mapping would add.
        """
        return all(
            self.one_hop(
                Gx,
                s,
                core_x,
                self.preds(Gx, core_x, s, Gx_node),
                self.succs(Gx, core_x, s),
            )
            for s in succ
        )

    def preds(self, Gx, core_x, v, Gx_node=None):
        """Mapped predecessors of v, optionally plus the candidate node."""
        pred = [n for n in Gx.predecessors(v) if n in core_x]
        # BUG FIX: was `if Gx_node:`, which silently dropped falsy node
        # labels such as 0 — cf. the "G1_node == 0 should evaluate to
        # True" warnings elsewhere in this package.
        if Gx_node is not None:
            pred.append(Gx_node)
        return pred

    def succs(self, Gx, core_x, v, Gx_node=None):
        """Mapped successors of v, optionally plus the candidate node."""
        succ = [n for n in Gx.successors(v) if n in core_x]
        # Same falsy-node fix as in preds above.
        if Gx_node is not None:
            succ.append(Gx_node)
        return succ

    def test_one(self, pred_dates, succ_dates):
        """
        Edges one hop out from Gx_node in the mapping should be
        time-respecting with respect to each other, regardless of
        direction.

        Raises ValueError if any edge is missing its date.
        """
        time_respecting = True
        dates = pred_dates + succ_dates

        if any(x is None for x in dates):
            raise ValueError("Date or datetime not supplied for at least one edge.")

        dates.sort()  # Small to large.
        if 0 < len(dates) and not (dates[-1] - dates[0] <= self.delta):
            time_respecting = False
        return time_respecting

    def test_two(self, pred_dates, succ_dates):
        """
        Edges from a dual Gx_node in the mapping should be ordered in
        a time-respecting manner.
        """
        time_respecting = True
        pred_dates.sort()
        succ_dates.sort()
        # First out before last in; negative of the necessary condition for time-respect.
        if (
            0 < len(succ_dates)
            and 0 < len(pred_dates)
            and succ_dates[0] < pred_dates[-1]
        ):
            time_respecting = False
        return time_respecting

    def semantic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is semantically
        feasible.

        Any subclass which redefines semantic_feasibility() must
        maintain the self.tests if needed, to keep the match() method
        functional. Implementations should consider multigraphs.
        """
        pred, succ = (
            [n for n in self.G1.predecessors(G1_node) if n in self.core_1],
            [n for n in self.G1.successors(G1_node) if n in self.core_1],
        )
        if not self.one_hop(
            self.G1, G1_node, self.core_1, pred, succ
        ):  # Fail fast on first node.
            return False
        if not self.two_hop_pred(self.G1, G1_node, self.core_1, pred):
            return False
        if not self.two_hop_succ(self.G1, G1_node, self.core_1, succ):
            return False
        # Otherwise, this node is semantically feasible!
        return True
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__init__.py ADDED
File without changes
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_ismags.cpython-310.pyc ADDED
Binary file (9.17 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_match_helpers.cpython-310.pyc ADDED
Binary file (2.25 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_temporalisomorphvf2.cpython-310.pyc ADDED
Binary file (7.11 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp.cpython-310.pyc ADDED
Binary file (31.8 kB). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99 ADDED
Binary file (310 Bytes). View file
 
wemm/lib/python3.10/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py ADDED
@@ -0,0 +1,410 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Tests for VF2 isomorphism algorithm.
3
+ """
4
+
5
+ import importlib.resources
6
+ import os
7
+ import random
8
+ import struct
9
+
10
+ import networkx as nx
11
+ from networkx.algorithms import isomorphism as iso
12
+
13
+
14
class TestWikipediaExample:
    # Source: https://en.wikipedia.org/wiki/Graph_isomorphism

    # Nodes 'a', 'b', 'c' and 'd' form a column.
    # Nodes 'g', 'h', 'i' and 'j' form a column.
    g1edges = [
        ["a", "g"], ["a", "h"], ["a", "i"],
        ["b", "g"], ["b", "h"], ["b", "j"],
        ["c", "g"], ["c", "i"], ["c", "j"],
        ["d", "h"], ["d", "i"], ["d", "j"],
    ]

    # Nodes 1,2,3,4 form the clockwise corners of a large square.
    # Nodes 5,6,7,8 form the clockwise corners of a small square
    g2edges = [
        [1, 2], [2, 3], [3, 4], [4, 1],  # outer square
        [5, 6], [6, 7], [7, 8], [8, 5],  # inner square
        [1, 5], [2, 6], [3, 7], [4, 8],  # spokes joining them
    ]

    def _build_pair(self):
        """Return freshly built (g1, g2) graphs from the two edge lists."""
        g1, g2 = nx.Graph(), nx.Graph()
        g1.add_edges_from(self.g1edges)
        g2.add_edges_from(self.g2edges)
        return g1, g2

    def test_graph(self):
        g1, g2 = self._build_pair()
        matcher = iso.GraphMatcher(g1, g2)
        assert matcher.is_isomorphic()
        # Just testing some cases
        assert matcher.subgraph_is_monomorphic()

        # Many node mappings witness the isomorphism, so the exact content
        # of matcher.mapping is not asserted here; just exercise the access.
        _ = sorted(matcher.mapping.items())

    def test_subgraph(self):
        g1, g2 = self._build_pair()
        inner_square = g2.subgraph([1, 2, 3, 4])
        assert iso.GraphMatcher(g1, inner_square).subgraph_is_isomorphic()

    def test_subgraph_mono(self):
        g1 = nx.Graph()
        g1.add_edges_from(self.g1edges)
        path = nx.Graph()
        path.add_edges_from([[1, 2], [2, 3], [3, 4]])
        assert iso.GraphMatcher(g1, path).subgraph_is_monomorphic()
85
+
86
+
87
class TestVF2GraphDB:
    # https://web.archive.org/web/20090303210205/http://amalfi.dis.unina.it/graph/db/

    @staticmethod
    def create_graph(filename):
        """Creates a Graph instance from the filename.

        The file is assumed to be in the format from the VF2 graph database:
        a stream of little-endian unsigned short ints (16 bits each, struct
        format ``"<H"``).  The first word is the number of nodes; node
        numeration is 0-based.  Then, for each source node in order, one word
        gives its edge count, followed by that many words naming the target
        nodes.
        """
        graph = nx.Graph()
        # Use a context manager so the handle is always closed, even if the
        # file is truncated and struct.unpack raises (the original leaked
        # the descriptor in that case).
        with open(filename, mode="rb") as fh:
            # Grab the number of nodes.
            nodes = struct.unpack("<H", fh.read(2))[0]
            for from_node in range(nodes):
                # Get the number of edges for this node.
                edges = struct.unpack("<H", fh.read(2))[0]
                for _ in range(edges):
                    # Get the terminal node.
                    to_node = struct.unpack("<H", fh.read(2))[0]
                    graph.add_edge(from_node, to_node)
        return graph

    def test_graph(self):
        """The two r01_s80 database graphs are isomorphic."""
        head = importlib.resources.files("networkx.algorithms.isomorphism.tests")
        g1 = self.create_graph(head / "iso_r01_s80.A99")
        g2 = self.create_graph(head / "iso_r01_s80.B99")
        gm = iso.GraphMatcher(g1, g2)
        assert gm.is_isomorphic()

    def test_subgraph(self):
        # A is the subgraph
        # B is the full graph
        head = importlib.resources.files("networkx.algorithms.isomorphism.tests")
        subgraph = self.create_graph(head / "si2_b06_m200.A99")
        graph = self.create_graph(head / "si2_b06_m200.B99")
        gm = iso.GraphMatcher(graph, subgraph)
        assert gm.subgraph_is_isomorphic()
        # Just testing some cases
        assert gm.subgraph_is_monomorphic()

    # There isn't a similar test implemented for subgraph monomorphism,
    # feel free to create one.
141
+
142
+
143
class TestAtlas:
    @classmethod
    def setup_class(cls):
        # The lazy import is deliberately promoted to module scope via
        # ``global``, matching the original behaviour.
        global atlas
        from networkx.generators import atlas

        cls.GAG = atlas.graph_atlas_g()

    def test_graph_atlas(self):
        # First 100 atlas graphs; all have 6 nodes or fewer.
        # Atlas = nx.graph_atlas_g()[0:208] # 208, 6 nodes or less
        alphabet = list(range(26))
        for graph in self.GAG[0:100]:
            original_nodes = list(graph)
            labels = alphabet[: len(original_nodes)]
            for _ in range(10):
                random.shuffle(labels)
                relabeled = nx.relabel_nodes(graph, dict(zip(original_nodes, labels)))
                # Every relabelling must remain isomorphic to the original.
                assert iso.GraphMatcher(graph, relabeled).is_isomorphic()
164
+
165
+
166
def test_multiedge():
    # Simple test for multigraphs
    # Need something much more rigorous
    # A 20-cycle: single edges along 0..10, parallel (doubled) edges for
    # the remainder — the same sequence the original spelled out literally.
    edges = [(i, i + 1) for i in range(10)]
    for i in range(10, 20):
        edges += [(i, (i + 1) % 20)] * 2
    nodes = list(range(20))

    for g1 in (nx.MultiGraph(), nx.MultiDiGraph()):
        g1.add_edges_from(edges)
        matcher_cls = iso.DiGraphMatcher if g1.is_directed() else iso.GraphMatcher
        for _ in range(10):
            shuffled = list(nodes)
            random.shuffle(shuffled)
            g2 = nx.relabel_nodes(g1, dict(zip(nodes, shuffled)))
            gm = matcher_cls(g1, g2)
            assert gm.is_isomorphic()
            # Testing if monomorphism works in multigraphs
            assert gm.subgraph_is_monomorphic()
217
+
218
+
219
def test_selfloop():
    # Simple test for graphs with selfloops
    edges = [
        (0, 1), (0, 2), (1, 2), (1, 3), (2, 2), (2, 4),
        (3, 1), (3, 2), (4, 2), (4, 5), (5, 4),
    ]
    nodes = list(range(6))

    for g1 in (nx.Graph(), nx.DiGraph()):
        g1.add_edges_from(edges)
        matcher_cls = iso.DiGraphMatcher if g1.is_directed() else iso.GraphMatcher
        for _ in range(100):
            permuted = list(nodes)
            random.shuffle(permuted)
            g2 = nx.relabel_nodes(g1, dict(zip(nodes, permuted)))
            assert matcher_cls(g1, g2).is_isomorphic()
248
+
249
+
250
def test_selfloop_mono():
    # Simple test for graphs with selfloops: once the selfloop is stripped
    # from the would-be host graph, monomorphism must fail.
    edges0 = [
        (0, 1), (0, 2), (1, 2), (1, 3), (2, 4),
        (3, 1), (3, 2), (4, 2), (4, 5), (5, 4),
    ]
    edges = edges0 + [(2, 2)]
    nodes = list(range(6))

    for g1 in (nx.Graph(), nx.DiGraph()):
        g1.add_edges_from(edges)
        matcher_cls = iso.DiGraphMatcher if g1.is_directed() else iso.GraphMatcher
        for _ in range(100):
            permuted = list(nodes)
            random.shuffle(permuted)
            g2 = nx.relabel_nodes(g1, dict(zip(nodes, permuted)))
            g2.remove_edges_from(nx.selfloop_edges(g2))
            # g2 lost its selfloop, so g1 cannot embed into it.
            assert not matcher_cls(g2, g1).subgraph_is_monomorphic()
280
+
281
+
282
def test_isomorphism_iter1():
    # As described in:
    # http://groups.google.com/group/networkx-discuss/browse_thread/thread/2ff65c67f5e3b99f/d674544ebea359bb?fwc=1
    g1 = nx.DiGraph([("A", "B"), ("B", "C")])
    g2 = nx.DiGraph([("Y", "Z")])
    g3 = nx.DiGraph([("Z", "Y")])
    forward = list(iso.DiGraphMatcher(g1, g2).subgraph_isomorphisms_iter())
    backward = list(iso.DiGraphMatcher(g1, g3).subgraph_isomorphisms_iter())
    # The single directed edge embeds into the 2-path in two positions,
    # in each orientation.
    assert {"A": "Y", "B": "Z"} in forward
    assert {"B": "Y", "C": "Z"} in forward
    assert {"A": "Z", "B": "Y"} in backward
    assert {"B": "Z", "C": "Y"} in backward
    assert len(forward) == len(backward) == 2
302
+
303
+
304
def test_monomorphism_iter1():
    triangle = nx.DiGraph([("A", "B"), ("B", "C"), ("C", "A")])
    path = nx.DiGraph([("X", "Y"), ("Y", "Z")])
    found = list(iso.DiGraphMatcher(triangle, path).subgraph_monomorphisms_iter())
    # The 2-path embeds into the 3-cycle in exactly its three rotations.
    assert {"A": "X", "B": "Y", "C": "Z"} in found
    assert {"A": "Y", "B": "Z", "C": "X"} in found
    assert {"A": "Z", "B": "X", "C": "Y"} in found
    assert len(found) == 3
    # Check if StopIteration exception returns False
    assert not iso.DiGraphMatcher(path, triangle).subgraph_is_monomorphic()
321
+
322
+
323
def test_isomorphism_iter2():
    # A path has exactly two automorphisms (identity and reversal)...
    for length in range(2, 10):
        path = nx.path_graph(length)
        count = len(list(iso.GraphMatcher(path, path).isomorphisms_iter()))
        assert count == 2
    # ...while a cycle has 2 * L of them (rotations times reflections).
    for length in range(3, 10):
        cycle = nx.cycle_graph(length)
        count = len(list(iso.GraphMatcher(cycle, cycle).isomorphisms_iter()))
        assert count == 2 * length
336
+
337
+
338
def test_multiple():
    # Verify that we can use the graph matcher multiple times
    edges = [("A", "B"), ("B", "A"), ("B", "C")]
    for g1, g2 in [(nx.Graph(), nx.Graph()), (nx.DiGraph(), nx.DiGraph())]:
        g1.add_edges_from(edges)
        g2.add_edges_from(edges)
        g3 = nx.subgraph(g2, ["A", "B"])
        matcher_cls = iso.DiGraphMatcher if g1.is_directed() else iso.GraphMatcher
        gmA = matcher_cls(g1, g2)
        gmB = matcher_cls(g1, g3)
        assert gmA.is_isomorphic()
        g2.remove_node("C")
        # Rebuild the first matcher against the mutated g2.
        gmA = matcher_cls(g1, g2)
        assert gmA.subgraph_is_isomorphic()
        assert gmB.subgraph_is_isomorphic()
        assert gmA.subgraph_is_monomorphic()
        assert gmB.subgraph_is_monomorphic()
361
+
362
+
363
+ # for m in [gmB.mapping, gmB.mapping]:
364
+ # assert_true(m['A'] == 'A')
365
+ # assert_true(m['B'] == 'B')
366
+ # assert_true('C' not in m)
367
+
368
+
369
def test_noncomparable_nodes():
    """VF2 must not rely on node ordering: bare ``object``s are unorderable."""
    node1 = object()
    node2 = object()
    node3 = object()

    # Graph
    G = nx.path_graph([node1, node2, node3])
    gm = iso.GraphMatcher(G, G)
    assert gm.is_isomorphic()
    # Just testing some cases
    assert gm.subgraph_is_monomorphic()

    # DiGraph
    G = nx.path_graph([node1, node2, node3], create_using=nx.DiGraph)
    H = nx.path_graph([node3, node2, node1], create_using=nx.DiGraph)
    dgm = iso.DiGraphMatcher(G, H)
    assert dgm.is_isomorphic()
    # Just testing some cases.  The original re-tested the undirected
    # matcher (``gm``) here by copy-paste mistake; exercise the directed
    # matcher that was just built instead.
    assert dgm.subgraph_is_monomorphic()
388
+
389
+
390
def test_monomorphism_edge_match():
    G = nx.DiGraph()
    G.add_nodes_from([1, 2])
    G.add_edge(1, 2, label="A")
    G.add_edge(2, 1, label="B")
    G.add_edge(2, 2, label="C")

    SG = nx.DiGraph()
    SG.add_nodes_from([5, 6])
    SG.add_edge(5, 6, label="A")

    # Only edges whose "label" attributes agree may be matched.
    edge_match = iso.categorical_edge_match("label", None)
    assert iso.DiGraphMatcher(G, SG, edge_match=edge_match).subgraph_is_monomorphic()
405
+
406
+
407
def test_isomorphvf2pp_multidigraphs():
    # The two multi-digraphs share all out-edges except the last: graph_a
    # has 2->3 where graph_b has 3->2, so they are not isomorphic.
    graph_a = nx.MultiDiGraph({0: [1, 1, 2, 2, 3], 1: [2, 3, 3], 2: [3]})
    graph_b = nx.MultiDiGraph({0: [1, 1, 2, 2, 3], 1: [2, 3, 3], 3: [2]})
    assert not nx.vf2pp_is_isomorphic(graph_a, graph_b)