Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +3 -0
- llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/INSTALLER +1 -0
- llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/LICENSE +201 -0
- llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/METADATA +359 -0
- llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/RECORD +151 -0
- llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/REQUESTED +0 -0
- llava_next/lib/python3.10/site-packages/httpx-0.24.0.dist-info/RECORD +57 -0
- llava_next/lib/python3.10/site-packages/idna/__pycache__/__init__.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/networkx/__init__.py +53 -0
- llava_next/lib/python3.10/site-packages/networkx/algorithms/chains.py +172 -0
- llava_next/lib/python3.10/site-packages/networkx/algorithms/chordal.py +443 -0
- llava_next/lib/python3.10/site-packages/networkx/algorithms/graphical.py +483 -0
- llava_next/lib/python3.10/site-packages/networkx/algorithms/non_randomness.py +98 -0
- llava_next/lib/python3.10/site-packages/networkx/algorithms/reciprocity.py +98 -0
- llava_next/lib/python3.10/site-packages/networkx/algorithms/smetric.py +30 -0
- llava_next/lib/python3.10/site-packages/networkx/algorithms/structuralholes.py +283 -0
- llava_next/lib/python3.10/site-packages/networkx/algorithms/tournament.py +403 -0
- llava_next/lib/python3.10/site-packages/networkx/algorithms/triads.py +604 -0
- llava_next/lib/python3.10/site-packages/networkx/conftest.py +284 -0
- llava_next/lib/python3.10/site-packages/networkx/convert.py +502 -0
- llava_next/lib/python3.10/site-packages/networkx/convert_matrix.py +1317 -0
- llava_next/lib/python3.10/site-packages/networkx/exception.py +131 -0
- llava_next/lib/python3.10/site-packages/networkx/lazy_imports.py +188 -0
- llava_next/lib/python3.10/site-packages/networkx/relabel.py +285 -0
- llava_next/lib/python3.10/site-packages/pillow.libs/libXau-154567c4.so.6.0.0 +0 -0
- llava_next/lib/python3.10/site-packages/pillow.libs/libbrotlicommon-3ecfe81c.so.1 +3 -0
- llava_next/lib/python3.10/site-packages/pillow.libs/libbrotlidec-ba690955.so.1 +0 -0
- llava_next/lib/python3.10/site-packages/pillow.libs/libfreetype-be14bf51.so.6.20.1 +3 -0
- llava_next/lib/python3.10/site-packages/pillow.libs/libsharpyuv-898c0cb5.so.0.1.0 +0 -0
- llava_next/lib/python3.10/site-packages/pillow.libs/libwebpdemux-f2642bcc.so.2.0.15 +0 -0
- llava_next/lib/python3.10/site-packages/pillow.libs/libwebpmux-d524b4d5.so.3.1.0 +0 -0
- llava_next/lib/python3.10/site-packages/pillow.libs/libxcb-b8a56d01.so.1.1.0 +3 -0
- llava_next/lib/python3.10/site-packages/rpds/__init__.py +5 -0
- llava_next/lib/python3.10/site-packages/rpds/__pycache__/__init__.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/rpds/py.typed +0 -0
- llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/PKG-INFO +142 -0
- llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/SOURCES.txt +571 -0
- llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/dependency_links.txt +1 -0
- llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/entry_points.txt +51 -0
- llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/requires.txt +85 -0
- llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/top_level.txt +3 -0
- llava_next/lib/python3.10/site-packages/shellingham/__init__.py +23 -0
- llava_next/lib/python3.10/site-packages/shellingham/__pycache__/__init__.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/shellingham/posix/__init__.py +112 -0
- llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/__init__.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/_core.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/proc.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/ps.cpython-310.pyc +0 -0
- llava_next/lib/python3.10/site-packages/shellingham/posix/_core.py +3 -0
- llava_next/lib/python3.10/site-packages/shellingham/posix/proc.py +83 -0
.gitattributes
CHANGED
|
@@ -1140,3 +1140,6 @@ mgm/lib/python3.10/__pycache__/_pydecimal.cpython-310.pyc filter=lfs diff=lfs me
|
|
| 1140 |
mgm/lib/python3.10/lib-dynload/readline.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1141 |
wemm/lib/python3.10/site-packages/torch/lib/libcudnn_adv_infer.so.8 filter=lfs diff=lfs merge=lfs -text
|
| 1142 |
vlmpy310/lib/python3.10/site-packages/decord/libdecord.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
| 1140 |
mgm/lib/python3.10/lib-dynload/readline.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1141 |
wemm/lib/python3.10/site-packages/torch/lib/libcudnn_adv_infer.so.8 filter=lfs diff=lfs merge=lfs -text
|
| 1142 |
vlmpy310/lib/python3.10/site-packages/decord/libdecord.so filter=lfs diff=lfs merge=lfs -text
|
| 1143 |
+
llava_next/lib/python3.10/site-packages/pillow.libs/libxcb-b8a56d01.so.1.1.0 filter=lfs diff=lfs merge=lfs -text
|
| 1144 |
+
llava_next/lib/python3.10/site-packages/pillow.libs/libfreetype-be14bf51.so.6.20.1 filter=lfs diff=lfs merge=lfs -text
|
| 1145 |
+
llava_next/lib/python3.10/site-packages/pillow.libs/libbrotlicommon-3ecfe81c.so.1 filter=lfs diff=lfs merge=lfs -text
|
llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,201 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Apache License
|
| 2 |
+
Version 2.0, January 2004
|
| 3 |
+
http://www.apache.org/licenses/
|
| 4 |
+
|
| 5 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
| 6 |
+
|
| 7 |
+
1. Definitions.
|
| 8 |
+
|
| 9 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
| 10 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
| 11 |
+
|
| 12 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
| 13 |
+
the copyright owner that is granting the License.
|
| 14 |
+
|
| 15 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
| 16 |
+
other entities that control, are controlled by, or are under common
|
| 17 |
+
control with that entity. For the purposes of this definition,
|
| 18 |
+
"control" means (i) the power, direct or indirect, to cause the
|
| 19 |
+
direction or management of such entity, whether by contract or
|
| 20 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
| 21 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
| 22 |
+
|
| 23 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
| 24 |
+
exercising permissions granted by this License.
|
| 25 |
+
|
| 26 |
+
"Source" form shall mean the preferred form for making modifications,
|
| 27 |
+
including but not limited to software source code, documentation
|
| 28 |
+
source, and configuration files.
|
| 29 |
+
|
| 30 |
+
"Object" form shall mean any form resulting from mechanical
|
| 31 |
+
transformation or translation of a Source form, including but
|
| 32 |
+
not limited to compiled object code, generated documentation,
|
| 33 |
+
and conversions to other media types.
|
| 34 |
+
|
| 35 |
+
"Work" shall mean the work of authorship, whether in Source or
|
| 36 |
+
Object form, made available under the License, as indicated by a
|
| 37 |
+
copyright notice that is included in or attached to the work
|
| 38 |
+
(an example is provided in the Appendix below).
|
| 39 |
+
|
| 40 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
| 41 |
+
form, that is based on (or derived from) the Work and for which the
|
| 42 |
+
editorial revisions, annotations, elaborations, or other modifications
|
| 43 |
+
represent, as a whole, an original work of authorship. For the purposes
|
| 44 |
+
of this License, Derivative Works shall not include works that remain
|
| 45 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
| 46 |
+
the Work and Derivative Works thereof.
|
| 47 |
+
|
| 48 |
+
"Contribution" shall mean any work of authorship, including
|
| 49 |
+
the original version of the Work and any modifications or additions
|
| 50 |
+
to that Work or Derivative Works thereof, that is intentionally
|
| 51 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
| 52 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
| 53 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
| 54 |
+
means any form of electronic, verbal, or written communication sent
|
| 55 |
+
to the Licensor or its representatives, including but not limited to
|
| 56 |
+
communication on electronic mailing lists, source code control systems,
|
| 57 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
| 58 |
+
Licensor for the purpose of discussing and improving the Work, but
|
| 59 |
+
excluding communication that is conspicuously marked or otherwise
|
| 60 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
| 61 |
+
|
| 62 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
| 63 |
+
on behalf of whom a Contribution has been received by Licensor and
|
| 64 |
+
subsequently incorporated within the Work.
|
| 65 |
+
|
| 66 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
| 67 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 68 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 69 |
+
copyright license to reproduce, prepare Derivative Works of,
|
| 70 |
+
publicly display, publicly perform, sublicense, and distribute the
|
| 71 |
+
Work and such Derivative Works in Source or Object form.
|
| 72 |
+
|
| 73 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
| 74 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 75 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 76 |
+
(except as stated in this section) patent license to make, have made,
|
| 77 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
| 78 |
+
where such license applies only to those patent claims licensable
|
| 79 |
+
by such Contributor that are necessarily infringed by their
|
| 80 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
| 81 |
+
with the Work to which such Contribution(s) was submitted. If You
|
| 82 |
+
institute patent litigation against any entity (including a
|
| 83 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
| 84 |
+
or a Contribution incorporated within the Work constitutes direct
|
| 85 |
+
or contributory patent infringement, then any patent licenses
|
| 86 |
+
granted to You under this License for that Work shall terminate
|
| 87 |
+
as of the date such litigation is filed.
|
| 88 |
+
|
| 89 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
| 90 |
+
Work or Derivative Works thereof in any medium, with or without
|
| 91 |
+
modifications, and in Source or Object form, provided that You
|
| 92 |
+
meet the following conditions:
|
| 93 |
+
|
| 94 |
+
(a) You must give any other recipients of the Work or
|
| 95 |
+
Derivative Works a copy of this License; and
|
| 96 |
+
|
| 97 |
+
(b) You must cause any modified files to carry prominent notices
|
| 98 |
+
stating that You changed the files; and
|
| 99 |
+
|
| 100 |
+
(c) You must retain, in the Source form of any Derivative Works
|
| 101 |
+
that You distribute, all copyright, patent, trademark, and
|
| 102 |
+
attribution notices from the Source form of the Work,
|
| 103 |
+
excluding those notices that do not pertain to any part of
|
| 104 |
+
the Derivative Works; and
|
| 105 |
+
|
| 106 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
| 107 |
+
distribution, then any Derivative Works that You distribute must
|
| 108 |
+
include a readable copy of the attribution notices contained
|
| 109 |
+
within such NOTICE file, excluding those notices that do not
|
| 110 |
+
pertain to any part of the Derivative Works, in at least one
|
| 111 |
+
of the following places: within a NOTICE text file distributed
|
| 112 |
+
as part of the Derivative Works; within the Source form or
|
| 113 |
+
documentation, if provided along with the Derivative Works; or,
|
| 114 |
+
within a display generated by the Derivative Works, if and
|
| 115 |
+
wherever such third-party notices normally appear. The contents
|
| 116 |
+
of the NOTICE file are for informational purposes only and
|
| 117 |
+
do not modify the License. You may add Your own attribution
|
| 118 |
+
notices within Derivative Works that You distribute, alongside
|
| 119 |
+
or as an addendum to the NOTICE text from the Work, provided
|
| 120 |
+
that such additional attribution notices cannot be construed
|
| 121 |
+
as modifying the License.
|
| 122 |
+
|
| 123 |
+
You may add Your own copyright statement to Your modifications and
|
| 124 |
+
may provide additional or different license terms and conditions
|
| 125 |
+
for use, reproduction, or distribution of Your modifications, or
|
| 126 |
+
for any such Derivative Works as a whole, provided Your use,
|
| 127 |
+
reproduction, and distribution of the Work otherwise complies with
|
| 128 |
+
the conditions stated in this License.
|
| 129 |
+
|
| 130 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
| 131 |
+
any Contribution intentionally submitted for inclusion in the Work
|
| 132 |
+
by You to the Licensor shall be under the terms and conditions of
|
| 133 |
+
this License, without any additional terms or conditions.
|
| 134 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
| 135 |
+
the terms of any separate license agreement you may have executed
|
| 136 |
+
with Licensor regarding such Contributions.
|
| 137 |
+
|
| 138 |
+
6. Trademarks. This License does not grant permission to use the trade
|
| 139 |
+
names, trademarks, service marks, or product names of the Licensor,
|
| 140 |
+
except as required for reasonable and customary use in describing the
|
| 141 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
| 142 |
+
|
| 143 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
| 144 |
+
agreed to in writing, Licensor provides the Work (and each
|
| 145 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
| 146 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
| 147 |
+
implied, including, without limitation, any warranties or conditions
|
| 148 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
| 149 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
| 150 |
+
appropriateness of using or redistributing the Work and assume any
|
| 151 |
+
risks associated with Your exercise of permissions under this License.
|
| 152 |
+
|
| 153 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
| 154 |
+
whether in tort (including negligence), contract, or otherwise,
|
| 155 |
+
unless required by applicable law (such as deliberate and grossly
|
| 156 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
| 157 |
+
liable to You for damages, including any direct, indirect, special,
|
| 158 |
+
incidental, or consequential damages of any character arising as a
|
| 159 |
+
result of this License or out of the use or inability to use the
|
| 160 |
+
Work (including but not limited to damages for loss of goodwill,
|
| 161 |
+
work stoppage, computer failure or malfunction, or any and all
|
| 162 |
+
other commercial damages or losses), even if such Contributor
|
| 163 |
+
has been advised of the possibility of such damages.
|
| 164 |
+
|
| 165 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
| 166 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
| 167 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
| 168 |
+
or other liability obligations and/or rights consistent with this
|
| 169 |
+
License. However, in accepting such obligations, You may act only
|
| 170 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
| 171 |
+
of any other Contributor, and only if You agree to indemnify,
|
| 172 |
+
defend, and hold each Contributor harmless for any liability
|
| 173 |
+
incurred by, or claims asserted against, such Contributor by reason
|
| 174 |
+
of your accepting any such warranty or additional liability.
|
| 175 |
+
|
| 176 |
+
END OF TERMS AND CONDITIONS
|
| 177 |
+
|
| 178 |
+
APPENDIX: How to apply the Apache License to your work.
|
| 179 |
+
|
| 180 |
+
To apply the Apache License to your work, attach the following
|
| 181 |
+
boilerplate notice, with the fields enclosed by brackets "[]"
|
| 182 |
+
replaced with your own identifying information. (Don't include
|
| 183 |
+
the brackets!) The text should be enclosed in the appropriate
|
| 184 |
+
comment syntax for the file format. We also recommend that a
|
| 185 |
+
file or class name and description of purpose be included on the
|
| 186 |
+
same "printed page" as the copyright notice for easier
|
| 187 |
+
identification within third-party archives.
|
| 188 |
+
|
| 189 |
+
Copyright [yyyy] [name of copyright owner]
|
| 190 |
+
|
| 191 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
| 192 |
+
you may not use this file except in compliance with the License.
|
| 193 |
+
You may obtain a copy of the License at
|
| 194 |
+
|
| 195 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
| 196 |
+
|
| 197 |
+
Unless required by applicable law or agreed to in writing, software
|
| 198 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
| 199 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 200 |
+
See the License for the specific language governing permissions and
|
| 201 |
+
limitations under the License.
|
llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,359 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: accelerate
|
| 3 |
+
Version: 0.21.0
|
| 4 |
+
Summary: Accelerate
|
| 5 |
+
Home-page: https://github.com/huggingface/accelerate
|
| 6 |
+
Author: The HuggingFace team
|
| 7 |
+
Author-email: sylvain@huggingface.co
|
| 8 |
+
License: Apache
|
| 9 |
+
Keywords: deep learning
|
| 10 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 11 |
+
Classifier: Intended Audience :: Developers
|
| 12 |
+
Classifier: Intended Audience :: Education
|
| 13 |
+
Classifier: Intended Audience :: Science/Research
|
| 14 |
+
Classifier: License :: OSI Approved :: Apache Software License
|
| 15 |
+
Classifier: Operating System :: OS Independent
|
| 16 |
+
Classifier: Programming Language :: Python :: 3
|
| 17 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 18 |
+
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
| 19 |
+
Requires-Python: >=3.8.0
|
| 20 |
+
Description-Content-Type: text/markdown
|
| 21 |
+
License-File: LICENSE
|
| 22 |
+
Requires-Dist: numpy (>=1.17)
|
| 23 |
+
Requires-Dist: packaging (>=20.0)
|
| 24 |
+
Requires-Dist: psutil
|
| 25 |
+
Requires-Dist: pyyaml
|
| 26 |
+
Requires-Dist: torch (>=1.10.0)
|
| 27 |
+
Provides-Extra: dev
|
| 28 |
+
Requires-Dist: black (~=23.1) ; extra == 'dev'
|
| 29 |
+
Requires-Dist: ruff (>=0.0.241) ; extra == 'dev'
|
| 30 |
+
Requires-Dist: hf-doc-builder (>=0.3.0) ; extra == 'dev'
|
| 31 |
+
Requires-Dist: urllib3 (<2.0.0) ; extra == 'dev'
|
| 32 |
+
Requires-Dist: pytest ; extra == 'dev'
|
| 33 |
+
Requires-Dist: pytest-xdist ; extra == 'dev'
|
| 34 |
+
Requires-Dist: pytest-subtests ; extra == 'dev'
|
| 35 |
+
Requires-Dist: parameterized ; extra == 'dev'
|
| 36 |
+
Requires-Dist: datasets ; extra == 'dev'
|
| 37 |
+
Requires-Dist: evaluate ; extra == 'dev'
|
| 38 |
+
Requires-Dist: transformers ; extra == 'dev'
|
| 39 |
+
Requires-Dist: scipy ; extra == 'dev'
|
| 40 |
+
Requires-Dist: scikit-learn ; extra == 'dev'
|
| 41 |
+
Requires-Dist: deepspeed ; extra == 'dev'
|
| 42 |
+
Requires-Dist: tqdm ; extra == 'dev'
|
| 43 |
+
Requires-Dist: rich ; extra == 'dev'
|
| 44 |
+
Provides-Extra: docs
|
| 45 |
+
Provides-Extra: quality
|
| 46 |
+
Requires-Dist: black (~=23.1) ; extra == 'quality'
|
| 47 |
+
Requires-Dist: ruff (>=0.0.241) ; extra == 'quality'
|
| 48 |
+
Requires-Dist: hf-doc-builder (>=0.3.0) ; extra == 'quality'
|
| 49 |
+
Requires-Dist: urllib3 (<2.0.0) ; extra == 'quality'
|
| 50 |
+
Provides-Extra: rich
|
| 51 |
+
Requires-Dist: rich ; extra == 'rich'
|
| 52 |
+
Provides-Extra: sagemaker
|
| 53 |
+
Requires-Dist: sagemaker ; extra == 'sagemaker'
|
| 54 |
+
Provides-Extra: test_dev
|
| 55 |
+
Requires-Dist: datasets ; extra == 'test_dev'
|
| 56 |
+
Requires-Dist: evaluate ; extra == 'test_dev'
|
| 57 |
+
Requires-Dist: transformers ; extra == 'test_dev'
|
| 58 |
+
Requires-Dist: scipy ; extra == 'test_dev'
|
| 59 |
+
Requires-Dist: scikit-learn ; extra == 'test_dev'
|
| 60 |
+
Requires-Dist: deepspeed ; extra == 'test_dev'
|
| 61 |
+
Requires-Dist: tqdm ; extra == 'test_dev'
|
| 62 |
+
Provides-Extra: test_prod
|
| 63 |
+
Requires-Dist: pytest ; extra == 'test_prod'
|
| 64 |
+
Requires-Dist: pytest-xdist ; extra == 'test_prod'
|
| 65 |
+
Requires-Dist: pytest-subtests ; extra == 'test_prod'
|
| 66 |
+
Requires-Dist: parameterized ; extra == 'test_prod'
|
| 67 |
+
Provides-Extra: test_trackers
|
| 68 |
+
Requires-Dist: wandb ; extra == 'test_trackers'
|
| 69 |
+
Requires-Dist: comet-ml ; extra == 'test_trackers'
|
| 70 |
+
Requires-Dist: tensorboard ; extra == 'test_trackers'
|
| 71 |
+
Provides-Extra: testing
|
| 72 |
+
Requires-Dist: pytest ; extra == 'testing'
|
| 73 |
+
Requires-Dist: pytest-xdist ; extra == 'testing'
|
| 74 |
+
Requires-Dist: pytest-subtests ; extra == 'testing'
|
| 75 |
+
Requires-Dist: parameterized ; extra == 'testing'
|
| 76 |
+
Requires-Dist: datasets ; extra == 'testing'
|
| 77 |
+
Requires-Dist: evaluate ; extra == 'testing'
|
| 78 |
+
Requires-Dist: transformers ; extra == 'testing'
|
| 79 |
+
Requires-Dist: scipy ; extra == 'testing'
|
| 80 |
+
Requires-Dist: scikit-learn ; extra == 'testing'
|
| 81 |
+
Requires-Dist: deepspeed ; extra == 'testing'
|
| 82 |
+
Requires-Dist: tqdm ; extra == 'testing'
|
| 83 |
+
|
| 84 |
+
<!---
|
| 85 |
+
Copyright 2021 The HuggingFace Team. All rights reserved.
|
| 86 |
+
|
| 87 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
| 88 |
+
you may not use this file except in compliance with the License.
|
| 89 |
+
You may obtain a copy of the License at
|
| 90 |
+
|
| 91 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
| 92 |
+
|
| 93 |
+
Unless required by applicable law or agreed to in writing, software
|
| 94 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
| 95 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 96 |
+
See the License for the specific language governing permissions and
|
| 97 |
+
limitations under the License.
|
| 98 |
+
-->
|
| 99 |
+
|
| 100 |
+
<p align="center">
|
| 101 |
+
<br>
|
| 102 |
+
<img src="https://raw.githubusercontent.com/huggingface/accelerate/main/docs/source/imgs/accelerate_logo.png" width="400"/>
|
| 103 |
+
<br>
|
| 104 |
+
<p>
|
| 105 |
+
|
| 106 |
+
<p align="center">
|
| 107 |
+
<!-- Uncomment when CircleCI is set up
|
| 108 |
+
<a href="https://circleci.com/gh/huggingface/accelerate">
|
| 109 |
+
<img alt="Build" src="https://img.shields.io/circleci/build/github/huggingface/transformers/master">
|
| 110 |
+
</a>
|
| 111 |
+
-->
|
| 112 |
+
<a href="https://github.com/huggingface/accelerate/blob/main/LICENSE">
|
| 113 |
+
<img alt="License" src="https://img.shields.io/github/license/huggingface/accelerate.svg?color=blue">
|
| 114 |
+
</a>
|
| 115 |
+
<a href="https://huggingface.co/docs/accelerate/index.html">
|
| 116 |
+
<img alt="Documentation" src="https://img.shields.io/website/http/huggingface.co/docs/accelerate/index.html.svg?down_color=red&down_message=offline&up_message=online">
|
| 117 |
+
</a>
|
| 118 |
+
<a href="https://github.com/huggingface/accelerate/releases">
|
| 119 |
+
<img alt="GitHub release" src="https://img.shields.io/github/release/huggingface/accelerate.svg">
|
| 120 |
+
</a>
|
| 121 |
+
<a href="https://github.com/huggingface/accelerate/blob/main/CODE_OF_CONDUCT.md">
|
| 122 |
+
<img alt="Contributor Covenant" src="https://img.shields.io/badge/Contributor%20Covenant-v2.0%20adopted-ff69b4.svg">
|
| 123 |
+
</a>
|
| 124 |
+
</p>
|
| 125 |
+
|
| 126 |
+
<h3 align="center">
|
| 127 |
+
<p>Run your *raw* PyTorch training script on any kind of device
|
| 128 |
+
</h3>
|
| 129 |
+
|
| 130 |
+
<h3 align="center">
|
| 131 |
+
<a href="https://hf.co/course"><img src="https://raw.githubusercontent.com/huggingface/accelerate/main/docs/source/imgs/course_banner.png"></a>
|
| 132 |
+
</h3>
|
| 133 |
+
|
| 134 |
+
## Easy to integrate
|
| 135 |
+
|
| 136 |
+
🤗 Accelerate was created for PyTorch users who like to write the training loop of PyTorch models but are reluctant to write and maintain the boilerplate code needed to use multi-GPUs/TPU/fp16.
|
| 137 |
+
|
| 138 |
+
🤗 Accelerate abstracts exactly and only the boilerplate code related to multi-GPUs/TPU/fp16 and leaves the rest of your code unchanged.
|
| 139 |
+
|
| 140 |
+
Here is an example:
|
| 141 |
+
|
| 142 |
+
```diff
|
| 143 |
+
import torch
|
| 144 |
+
import torch.nn.functional as F
|
| 145 |
+
from datasets import load_dataset
|
| 146 |
+
+ from accelerate import Accelerator
|
| 147 |
+
|
| 148 |
+
+ accelerator = Accelerator()
|
| 149 |
+
- device = 'cpu'
|
| 150 |
+
+ device = accelerator.device
|
| 151 |
+
|
| 152 |
+
model = torch.nn.Transformer().to(device)
|
| 153 |
+
optimizer = torch.optim.Adam(model.parameters())
|
| 154 |
+
|
| 155 |
+
dataset = load_dataset('my_dataset')
|
| 156 |
+
data = torch.utils.data.DataLoader(dataset, shuffle=True)
|
| 157 |
+
|
| 158 |
+
+ model, optimizer, data = accelerator.prepare(model, optimizer, data)
|
| 159 |
+
|
| 160 |
+
model.train()
|
| 161 |
+
for epoch in range(10):
|
| 162 |
+
for source, targets in data:
|
| 163 |
+
source = source.to(device)
|
| 164 |
+
targets = targets.to(device)
|
| 165 |
+
|
| 166 |
+
optimizer.zero_grad()
|
| 167 |
+
|
| 168 |
+
output = model(source)
|
| 169 |
+
loss = F.cross_entropy(output, targets)
|
| 170 |
+
|
| 171 |
+
- loss.backward()
|
| 172 |
+
+ accelerator.backward(loss)
|
| 173 |
+
|
| 174 |
+
optimizer.step()
|
| 175 |
+
```
|
| 176 |
+
|
| 177 |
+
As you can see in this example, by adding 5-lines to any standard PyTorch training script you can now run on any kind of single or distributed node setting (single CPU, single GPU, multi-GPUs and TPUs) as well as with or without mixed precision (fp8, fp16, bf16).
|
| 178 |
+
|
| 179 |
+
In particular, the same code can then be run without modification on your local machine for debugging or your training environment.
|
| 180 |
+
|
| 181 |
+
🤗 Accelerate even handles the device placement for you (which requires a few more changes to your code, but is safer in general), so you can even simplify your training loop further:
|
| 182 |
+
|
| 183 |
+
```diff
|
| 184 |
+
import torch
|
| 185 |
+
import torch.nn.functional as F
|
| 186 |
+
from datasets import load_dataset
|
| 187 |
+
+ from accelerate import Accelerator
|
| 188 |
+
|
| 189 |
+
- device = 'cpu'
|
| 190 |
+
+ accelerator = Accelerator()
|
| 191 |
+
|
| 192 |
+
- model = torch.nn.Transformer().to(device)
|
| 193 |
+
+ model = torch.nn.Transformer()
|
| 194 |
+
optimizer = torch.optim.Adam(model.parameters())
|
| 195 |
+
|
| 196 |
+
dataset = load_dataset('my_dataset')
|
| 197 |
+
data = torch.utils.data.DataLoader(dataset, shuffle=True)
|
| 198 |
+
|
| 199 |
+
+ model, optimizer, data = accelerator.prepare(model, optimizer, data)
|
| 200 |
+
|
| 201 |
+
model.train()
|
| 202 |
+
for epoch in range(10):
|
| 203 |
+
for source, targets in data:
|
| 204 |
+
- source = source.to(device)
|
| 205 |
+
- targets = targets.to(device)
|
| 206 |
+
|
| 207 |
+
optimizer.zero_grad()
|
| 208 |
+
|
| 209 |
+
output = model(source)
|
| 210 |
+
loss = F.cross_entropy(output, targets)
|
| 211 |
+
|
| 212 |
+
- loss.backward()
|
| 213 |
+
+ accelerator.backward(loss)
|
| 214 |
+
|
| 215 |
+
optimizer.step()
|
| 216 |
+
```
|
| 217 |
+
|
| 218 |
+
Want to learn more? Check out the [documentation](https://huggingface.co/docs/accelerate) or have a look at our [examples](https://github.com/huggingface/accelerate/tree/main/examples).
|
| 219 |
+
|
| 220 |
+
## Launching script
|
| 221 |
+
|
| 222 |
+
🤗 Accelerate also provides an optional CLI tool that allows you to quickly configure and test your training environment before launching the scripts. No need to remember how to use `torch.distributed.run` or to write a specific launcher for TPU training!
|
| 223 |
+
On your machine(s) just run:
|
| 224 |
+
|
| 225 |
+
```bash
|
| 226 |
+
accelerate config
|
| 227 |
+
```
|
| 228 |
+
|
| 229 |
+
and answer the questions asked. This will generate a config file that will be used automatically to properly set the default options when doing
|
| 230 |
+
|
| 231 |
+
```bash
|
| 232 |
+
accelerate launch my_script.py --args_to_my_script
|
| 233 |
+
```
|
| 234 |
+
|
| 235 |
+
For instance, here is how you would run the GLUE example on the MRPC task (from the root of the repo):
|
| 236 |
+
|
| 237 |
+
```bash
|
| 238 |
+
accelerate launch examples/nlp_example.py
|
| 239 |
+
```
|
| 240 |
+
|
| 241 |
+
This CLI tool is **optional**, and you can still use `python my_script.py` or `python -m torchrun my_script.py` at your convenience.
|
| 242 |
+
|
| 243 |
+
You can also directly pass in the arguments you would to `torchrun` as arguments to `accelerate launch` if you wish to not run `accelerate config`.
|
| 244 |
+
|
| 245 |
+
For example, here is how to launch on two GPUs:
|
| 246 |
+
|
| 247 |
+
```bash
|
| 248 |
+
accelerate launch --multi_gpu --num_processes 2 examples/nlp_example.py
|
| 249 |
+
```
|
| 250 |
+
|
| 251 |
+
To learn more, check the CLI documentation available [here](https://huggingface.co/docs/accelerate/package_reference/cli).
|
| 252 |
+
|
| 253 |
+
## Launching multi-CPU run using MPI
|
| 254 |
+
|
| 255 |
+
🤗 Here is another way to launch a multi-CPU run using MPI. You can learn how to install Open MPI on [this page](https://www.open-mpi.org/faq/?category=building#easy-build). You can use Intel MPI or MVAPICH as well.
|
| 256 |
+
Once you have MPI setup on your cluster, just run:
|
| 257 |
+
|
| 258 |
+
```bash
|
| 259 |
+
mpirun -np 2 python examples/nlp_example.py
|
| 260 |
+
```
|
| 261 |
+
|
| 262 |
+
## Launching training using DeepSpeed
|
| 263 |
+
|
| 264 |
+
🤗 Accelerate supports training on single/multiple GPUs using DeepSpeed. To use it, you don't need to change anything in your training code; you can set everything using just `accelerate config`. However, if you desire to tweak your DeepSpeed related args from your Python script, we provide you the `DeepSpeedPlugin`.
|
| 265 |
+
|
| 266 |
+
```python
|
| 267 |
+
from accelerate import Accelerator, DeepSpeedPlugin
|
| 268 |
+
|
| 269 |
+
# deepspeed needs to know your gradient accumulation steps beforehand, so don't forget to pass it
|
| 270 |
+
# Remember you still need to do gradient accumulation by yourself, just like you would have done without deepspeed
|
| 271 |
+
deepspeed_plugin = DeepSpeedPlugin(zero_stage=2, gradient_accumulation_steps=2)
|
| 272 |
+
accelerator = Accelerator(mixed_precision='fp16', deepspeed_plugin=deepspeed_plugin)
|
| 273 |
+
|
| 274 |
+
# How to save your 🤗 Transformer?
|
| 275 |
+
accelerator.wait_for_everyone()
|
| 276 |
+
unwrapped_model = accelerator.unwrap_model(model)
|
| 277 |
+
unwrapped_model.save_pretrained(save_dir, save_function=accelerator.save, state_dict=accelerator.get_state_dict(model))
|
| 278 |
+
```
|
| 279 |
+
|
| 280 |
+
Note: DeepSpeed support is experimental for now. In case you get into some problem, please open an issue.
|
| 281 |
+
|
| 282 |
+
## Launching your training from a notebook
|
| 283 |
+
|
| 284 |
+
🤗 Accelerate also provides a `notebook_launcher` function you can use in a notebook to launch a distributed training. This is especially useful for Colab or Kaggle notebooks with a TPU backend. Just define your training loop in a `training_function` then in your last cell, add:
|
| 285 |
+
|
| 286 |
+
```python
|
| 287 |
+
from accelerate import notebook_launcher
|
| 288 |
+
|
| 289 |
+
notebook_launcher(training_function)
|
| 290 |
+
```
|
| 291 |
+
|
| 292 |
+
An example can be found in [this notebook](https://github.com/huggingface/notebooks/blob/main/examples/accelerate_examples/simple_nlp_example.ipynb). [](https://colab.research.google.com/github/huggingface/notebooks/blob/main/examples/accelerate_examples/simple_nlp_example.ipynb)
|
| 293 |
+
|
| 294 |
+
## Why should I use 🤗 Accelerate?
|
| 295 |
+
|
| 296 |
+
You should use 🤗 Accelerate when you want to easily run your training scripts in a distributed environment without having to renounce full control over your training loop. This is not a high-level framework above PyTorch, just a thin wrapper so you don't have to learn a new library. In fact, the whole API of 🤗 Accelerate is in one class, the `Accelerator` object.
|
| 297 |
+
|
| 298 |
+
## Why shouldn't I use 🤗 Accelerate?
|
| 299 |
+
|
| 300 |
+
You shouldn't use 🤗 Accelerate if you don't want to write a training loop yourself. There are plenty of high-level libraries above PyTorch that will offer you that, 🤗 Accelerate is not one of them.
|
| 301 |
+
|
| 302 |
+
## Frameworks using 🤗 Accelerate
|
| 303 |
+
|
| 304 |
+
If you like the simplicity of 🤗 Accelerate but would prefer a higher-level abstraction around its capabilities, some frameworks and libraries that are built on top of 🤗 Accelerate are listed below:
|
| 305 |
+
|
| 306 |
+
* [Animus](https://github.com/Scitator/animus) is a minimalistic framework to run machine learning experiments. Animus highlights common "breakpoints" in ML experiments and provides a unified interface for them within [IExperiment](https://github.com/Scitator/animus/blob/main/animus/core.py#L76).
|
| 307 |
+
* [Catalyst](https://github.com/catalyst-team/catalyst#getting-started) is a PyTorch framework for Deep Learning Research and Development. It focuses on reproducibility, rapid experimentation, and codebase reuse so you can create something new rather than write yet another train loop. Catalyst provides a [Runner](https://catalyst-team.github.io/catalyst/api/core.html#runner) to connect all parts of the experiment: hardware backend, data transformations, model training, and inference logic.
|
| 308 |
+
* [fastai](https://github.com/fastai/fastai#installing) is a PyTorch framework for Deep Learning that simplifies training fast and accurate neural nets using modern best practices. fastai provides a [Learner](https://docs.fast.ai/learner.html#Learner) to handle the training, fine-tuning, and inference of deep learning algorithms.
|
| 309 |
+
* [Finetuner](https://github.com/jina-ai/finetuner) is a service that enables models to create higher-quality embeddings for semantic search, visual similarity search, cross-modal text<->image search, recommendation systems, clustering, duplication detection, anomaly detection, or other uses.
|
| 310 |
+
* [InvokeAI](https://github.com/invoke-ai/InvokeAI) is a creative engine for Stable Diffusion models, offering industry-leading WebUI, terminal usage support, and serves as the foundation for many commercial products.
|
| 311 |
+
* [Kornia](https://kornia.readthedocs.io/en/latest/get-started/introduction.html) is a differentiable library that allows classical computer vision to be integrated into deep learning models. Kornia provides a [Trainer](https://kornia.readthedocs.io/en/latest/x.html#kornia.x.Trainer) with the specific purpose to train and fine-tune the supported deep learning algorithms within the library.
|
| 312 |
+
* [Open Assistant](https://projects.laion.ai/Open-Assistant/) is a chat-based assistant that understands tasks, can interact with third-party systems, and retrieve information dynamically to do so.
|
| 313 |
+
* [pytorch-accelerated](https://github.com/Chris-hughes10/pytorch-accelerated) is a lightweight training library, with a streamlined feature set centered around a general-purpose [Trainer](https://pytorch-accelerated.readthedocs.io/en/latest/trainer.html), that places a huge emphasis on simplicity and transparency; enabling users to understand exactly what is going on under the hood, but without having to write and maintain the boilerplate themselves!
|
| 314 |
+
* [Stable Diffusion web UI](https://github.com/AUTOMATIC1111/stable-diffusion-webui) is an open-source browser-based easy-to-use interface based on the Gradio library for Stable Diffusion.
|
| 315 |
+
* [torchkeras](https://github.com/lyhue1991/torchkeras) is a simple tool for training pytorch model just in a keras style, a dynamic and beautiful plot is provided in notebook to monitor your loss or metric.
|
| 316 |
+
* [transformers](https://github.com/huggingface/transformers) as a tool for helping train state-of-the-art machine learning models in PyTorch, Tensorflow, and JAX. (Accelerate is the backend for the PyTorch side).
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
## Installation
|
| 320 |
+
|
| 321 |
+
This repository is tested on Python 3.8+ and PyTorch 1.10.0+
|
| 322 |
+
|
| 323 |
+
You should install 🤗 Accelerate in a [virtual environment](https://docs.python.org/3/library/venv.html). If you're unfamiliar with Python virtual environments, check out the [user guide](https://packaging.python.org/guides/installing-using-pip-and-virtual-environments/).
|
| 324 |
+
|
| 325 |
+
First, create a virtual environment with the version of Python you're going to use and activate it.
|
| 326 |
+
|
| 327 |
+
Then, you will need to install PyTorch: refer to the [official installation page](https://pytorch.org/get-started/locally/#start-locally) regarding the specific install command for your platform. Then 🤗 Accelerate can be installed using pip as follows:
|
| 328 |
+
|
| 329 |
+
```bash
|
| 330 |
+
pip install accelerate
|
| 331 |
+
```
|
| 332 |
+
|
| 333 |
+
## Supported integrations
|
| 334 |
+
|
| 335 |
+
- CPU only
|
| 336 |
+
- multi-CPU on one node (machine)
|
| 337 |
+
- multi-CPU on several nodes (machines)
|
| 338 |
+
- single GPU
|
| 339 |
+
- multi-GPU on one node (machine)
|
| 340 |
+
- multi-GPU on several nodes (machines)
|
| 341 |
+
- TPU
|
| 342 |
+
- FP16/BFloat16 mixed precision
|
| 343 |
+
- FP8 mixed precision with [Transformer Engine](https://github.com/NVIDIA/TransformerEngine)
|
| 344 |
+
- DeepSpeed support (Experimental)
|
| 345 |
+
- PyTorch Fully Sharded Data Parallel (FSDP) support (Experimental)
|
| 346 |
+
- Megatron-LM support (Experimental)
|
| 347 |
+
|
| 348 |
+
## Citing 🤗 Accelerate
|
| 349 |
+
|
| 350 |
+
If you use 🤗 Accelerate in your publication, please cite it by using the following BibTeX entry.
|
| 351 |
+
|
| 352 |
+
```bibtex
|
| 353 |
+
@Misc{accelerate,
|
| 354 |
+
title = {Accelerate: Training and inference at scale made simple, efficient and adaptable.},
|
| 355 |
+
  author = {Sylvain Gugger and Lysandre Debut and Thomas Wolf and Philipp Schmid and Zachary Mueller and Sourab Mangrulkar},
|
| 356 |
+
howpublished = {\url{https://github.com/huggingface/accelerate}},
|
| 357 |
+
year = {2022}
|
| 358 |
+
}
|
| 359 |
+
```
|
llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,151 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
../../../bin/accelerate,sha256=ykjGr7JOpA2q7Js29fX0TeeqlhovksFpWm1rYy0cS6I,249
|
| 2 |
+
../../../bin/accelerate-config,sha256=ILoBYa125VMtXzN3GXc1Ge-8W5TBU-oxTW62-HNyoBU,241
|
| 3 |
+
../../../bin/accelerate-launch,sha256=UGDafjw0crDny1FqhDGqkxPfR-fLzR_CpHwSaN6t4OM,241
|
| 4 |
+
accelerate-0.21.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 5 |
+
accelerate-0.21.0.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
|
| 6 |
+
accelerate-0.21.0.dist-info/METADATA,sha256=DmKcH62uEAfKORx0NLLvMCTfs8DWyKWaU5Tw81MC2hk,17758
|
| 7 |
+
accelerate-0.21.0.dist-info/RECORD,,
|
| 8 |
+
accelerate-0.21.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 9 |
+
accelerate-0.21.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
|
| 10 |
+
accelerate-0.21.0.dist-info/entry_points.txt,sha256=rG3RclT0BGKdw2nRZCtUGAsb1jzVWEG_ZX9WQBEeSAA,175
|
| 11 |
+
accelerate-0.21.0.dist-info/top_level.txt,sha256=esVfdxTidsjQ90zsN_rPpjLFJ4ijRlx4mnLrG09hlt4,11
|
| 12 |
+
accelerate/__init__.py,sha256=n484ccMKp-qETKV0RH-qSXnIi-ihHg8a-_8rJrzcDZk,764
|
| 13 |
+
accelerate/__pycache__/__init__.cpython-310.pyc,,
|
| 14 |
+
accelerate/__pycache__/accelerator.cpython-310.pyc,,
|
| 15 |
+
accelerate/__pycache__/big_modeling.cpython-310.pyc,,
|
| 16 |
+
accelerate/__pycache__/checkpointing.cpython-310.pyc,,
|
| 17 |
+
accelerate/__pycache__/data_loader.cpython-310.pyc,,
|
| 18 |
+
accelerate/__pycache__/hooks.cpython-310.pyc,,
|
| 19 |
+
accelerate/__pycache__/launchers.cpython-310.pyc,,
|
| 20 |
+
accelerate/__pycache__/local_sgd.cpython-310.pyc,,
|
| 21 |
+
accelerate/__pycache__/logging.cpython-310.pyc,,
|
| 22 |
+
accelerate/__pycache__/memory_utils.cpython-310.pyc,,
|
| 23 |
+
accelerate/__pycache__/optimizer.cpython-310.pyc,,
|
| 24 |
+
accelerate/__pycache__/scheduler.cpython-310.pyc,,
|
| 25 |
+
accelerate/__pycache__/state.cpython-310.pyc,,
|
| 26 |
+
accelerate/__pycache__/tracking.cpython-310.pyc,,
|
| 27 |
+
accelerate/accelerator.py,sha256=kzoZzXQRpE5G0jxU2_yMKrlLPykiXmas7tQp5R3UP4o,129438
|
| 28 |
+
accelerate/big_modeling.py,sha256=_KpgBUeoPfr7rqlUbr-IOWEbK28ETPU8LWcysRI8I64,23534
|
| 29 |
+
accelerate/checkpointing.py,sha256=zxBD37SrV0xJP79IyfaszvoAYLArECbLgHxlqV57KBE,8564
|
| 30 |
+
accelerate/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 31 |
+
accelerate/commands/__pycache__/__init__.cpython-310.pyc,,
|
| 32 |
+
accelerate/commands/__pycache__/accelerate_cli.cpython-310.pyc,,
|
| 33 |
+
accelerate/commands/__pycache__/env.cpython-310.pyc,,
|
| 34 |
+
accelerate/commands/__pycache__/launch.cpython-310.pyc,,
|
| 35 |
+
accelerate/commands/__pycache__/test.cpython-310.pyc,,
|
| 36 |
+
accelerate/commands/__pycache__/tpu.cpython-310.pyc,,
|
| 37 |
+
accelerate/commands/accelerate_cli.py,sha256=F6_h_3vCeAeJdY_i0xk8Wpy_dQtBdTjVzT2wHlwSzi0,1605
|
| 38 |
+
accelerate/commands/config/__init__.py,sha256=iJK8dgj3pc5Vdr1E7UuGoFu-BlybyXLxYDoTg9gXngE,1645
|
| 39 |
+
accelerate/commands/config/__pycache__/__init__.cpython-310.pyc,,
|
| 40 |
+
accelerate/commands/config/__pycache__/cluster.cpython-310.pyc,,
|
| 41 |
+
accelerate/commands/config/__pycache__/config.cpython-310.pyc,,
|
| 42 |
+
accelerate/commands/config/__pycache__/config_args.cpython-310.pyc,,
|
| 43 |
+
accelerate/commands/config/__pycache__/config_utils.cpython-310.pyc,,
|
| 44 |
+
accelerate/commands/config/__pycache__/default.cpython-310.pyc,,
|
| 45 |
+
accelerate/commands/config/__pycache__/sagemaker.cpython-310.pyc,,
|
| 46 |
+
accelerate/commands/config/__pycache__/update.cpython-310.pyc,,
|
| 47 |
+
accelerate/commands/config/cluster.py,sha256=QQeI2T7qB1Je-uM-Ip54GoKaJIJNtazM3ud8B4irFJU,26202
|
| 48 |
+
accelerate/commands/config/config.py,sha256=FuRlQvOjgATEtyqOSsGD-KEtOCvACOHjs2C-krrtldk,3035
|
| 49 |
+
accelerate/commands/config/config_args.py,sha256=UDzUi7on2MKOmFTGhuD2TSQvUfYebD3WjEQPKdoiNlw,8691
|
| 50 |
+
accelerate/commands/config/config_utils.py,sha256=-xBy8rdXGRNq4-AVDQKJYVUVXRy3WMRdp3skohBGRQg,2879
|
| 51 |
+
accelerate/commands/config/default.py,sha256=clFfkx8JuY39TfwtBnFrg027xvvsc4G4p3OHoWptqfI,5016
|
| 52 |
+
accelerate/commands/config/sagemaker.py,sha256=af3ZXG2ybfhFOGTp97BorbGZYd20kY_wKJZQykFv074,9947
|
| 53 |
+
accelerate/commands/config/update.py,sha256=NXW1J7GkUHpg71QlIXsmMB_0z8S8IZo2FWax5POwrhc,2395
|
| 54 |
+
accelerate/commands/env.py,sha256=7guiNUOE0SFe2CRexn2FNmI-4yuwGOXSJLd3QG8tVpA,3056
|
| 55 |
+
accelerate/commands/launch.py,sha256=xXgCcRfa73SkLoaqkRayGZmFDimmqVgLUOJbD3jH1Sw,38005
|
| 56 |
+
accelerate/commands/menu/__init__.py,sha256=5EhDZN5_e1TAuh9_KqJ4Ghs61offoeGZy1pktSBDpa0,39
|
| 57 |
+
accelerate/commands/menu/__pycache__/__init__.cpython-310.pyc,,
|
| 58 |
+
accelerate/commands/menu/__pycache__/cursor.cpython-310.pyc,,
|
| 59 |
+
accelerate/commands/menu/__pycache__/helpers.cpython-310.pyc,,
|
| 60 |
+
accelerate/commands/menu/__pycache__/input.cpython-310.pyc,,
|
| 61 |
+
accelerate/commands/menu/__pycache__/keymap.cpython-310.pyc,,
|
| 62 |
+
accelerate/commands/menu/__pycache__/selection_menu.cpython-310.pyc,,
|
| 63 |
+
accelerate/commands/menu/cursor.py,sha256=-lmpJVAzvNc0c3EOtSuLoKB59zqylVCbYyWLPnrOmvQ,2028
|
| 64 |
+
accelerate/commands/menu/helpers.py,sha256=KrSB5fJjH4MUEUAQJ6bYaN16AYcnl9UalDrPD3DYeeg,1483
|
| 65 |
+
accelerate/commands/menu/input.py,sha256=uW2ywuqWPOKjkS7XBjqNpuVWLTgVKici2_xLyltEbMs,2581
|
| 66 |
+
accelerate/commands/menu/keymap.py,sha256=c9YEMMmNlBGtMiWFk2rdhtTSCZ9w_uJ77cNCwAKguHk,4087
|
| 67 |
+
accelerate/commands/menu/selection_menu.py,sha256=UZKwSIZKKG60y2fuWbSoCx0RbrPS4MbY2DwvxWRBIBQ,4920
|
| 68 |
+
accelerate/commands/test.py,sha256=whf_g7X263A5OErEHRzKu_L5x6HWbIIVNS8N5ERtGao,2179
|
| 69 |
+
accelerate/commands/tpu.py,sha256=OnFQNu9zhlK5D7xXouZZXJevN5623Jgy_HsHTuy4HAE,5553
|
| 70 |
+
accelerate/data_loader.py,sha256=d0HUd6b7AvGPjlhx00i5Y_xmmlGCH0MQIhUlXwLEjLg,40793
|
| 71 |
+
accelerate/hooks.py,sha256=XfcTHugSE7rBm8Gpa8YQ5_4l0z9gHo5X908bK5eq6xo,24746
|
| 72 |
+
accelerate/launchers.py,sha256=jr0jXsWRbEYhSqwD8C3D0-_VUqGHkWy3ikRQ9thbxDw,8335
|
| 73 |
+
accelerate/local_sgd.py,sha256=znJcwwpRb0imRslW5_uQ4OYJmM8zxekMv4XTnbzXlZk,3924
|
| 74 |
+
accelerate/logging.py,sha256=NgFWX5C7yERtFQYkbgD9TgTST6JKtlJysruz868L-jI,4283
|
| 75 |
+
accelerate/memory_utils.py,sha256=3R5LoeHl6GgTZ-IMPrDZMdaEehWarGdPqODushb-6pg,862
|
| 76 |
+
accelerate/optimizer.py,sha256=OnLV0iyOsBcys9L-6fAgCJvdRwJk-N-DfSK34K-jkx0,6454
|
| 77 |
+
accelerate/scheduler.py,sha256=des_4M_Tt1W8gCYZZbLla0GHBEgJY3Wx2EGBQPTzeiY,4238
|
| 78 |
+
accelerate/state.py,sha256=aFx7O6kSzXOT3zNX8HrYNO68GzqbgVQgDriR0a2t2Hk,42891
|
| 79 |
+
accelerate/test_utils/__init__.py,sha256=XwDtBwCjU1wTbjyLng3knf3v0rPZkZX9yu8KmuWidEk,534
|
| 80 |
+
accelerate/test_utils/__pycache__/__init__.cpython-310.pyc,,
|
| 81 |
+
accelerate/test_utils/__pycache__/examples.cpython-310.pyc,,
|
| 82 |
+
accelerate/test_utils/__pycache__/testing.cpython-310.pyc,,
|
| 83 |
+
accelerate/test_utils/__pycache__/training.cpython-310.pyc,,
|
| 84 |
+
accelerate/test_utils/examples.py,sha256=PJAAy5MjIeyH5Sgj9sFqh0VGebfI7Tg4i_3OBABVVYg,7301
|
| 85 |
+
accelerate/test_utils/scripts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 86 |
+
accelerate/test_utils/scripts/__pycache__/__init__.cpython-310.pyc,,
|
| 87 |
+
accelerate/test_utils/scripts/__pycache__/test_cli.cpython-310.pyc,,
|
| 88 |
+
accelerate/test_utils/scripts/__pycache__/test_distributed_data_loop.cpython-310.pyc,,
|
| 89 |
+
accelerate/test_utils/scripts/__pycache__/test_ops.cpython-310.pyc,,
|
| 90 |
+
accelerate/test_utils/scripts/__pycache__/test_script.cpython-310.pyc,,
|
| 91 |
+
accelerate/test_utils/scripts/__pycache__/test_sync.cpython-310.pyc,,
|
| 92 |
+
accelerate/test_utils/scripts/external_deps/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 93 |
+
accelerate/test_utils/scripts/external_deps/__pycache__/__init__.cpython-310.pyc,,
|
| 94 |
+
accelerate/test_utils/scripts/external_deps/__pycache__/test_checkpointing.cpython-310.pyc,,
|
| 95 |
+
accelerate/test_utils/scripts/external_deps/__pycache__/test_metrics.cpython-310.pyc,,
|
| 96 |
+
accelerate/test_utils/scripts/external_deps/__pycache__/test_peak_memory_usage.cpython-310.pyc,,
|
| 97 |
+
accelerate/test_utils/scripts/external_deps/__pycache__/test_performance.cpython-310.pyc,,
|
| 98 |
+
accelerate/test_utils/scripts/external_deps/test_checkpointing.py,sha256=eJ8dpY6Bi9De7Vb9oDw435NELTjWegjWD7wuckvkaoQ,10686
|
| 99 |
+
accelerate/test_utils/scripts/external_deps/test_metrics.py,sha256=LXBzKN-TIXET9zzuQ856UTerLSSGcCqwytIcw0TIA4c,7199
|
| 100 |
+
accelerate/test_utils/scripts/external_deps/test_peak_memory_usage.py,sha256=lqOBqkU5g772-e30bl3G3lVu8YtG1pLsWerWKS3XYwY,9793
|
| 101 |
+
accelerate/test_utils/scripts/external_deps/test_performance.py,sha256=VqYjGaIK509389-iukIBb1397dFLbrWyHwjAKF3Fcvw,9093
|
| 102 |
+
accelerate/test_utils/scripts/test_cli.py,sha256=EJClouXlerf7cpgqY1P1VY2ohUcRXk56GoVkM6-jmrU,227
|
| 103 |
+
accelerate/test_utils/scripts/test_distributed_data_loop.py,sha256=TEqnW4WIlSDsMB9D2bcvAC_AB9hHtJRiERt3ZGKzK80,8236
|
| 104 |
+
accelerate/test_utils/scripts/test_ops.py,sha256=NGTAilGAt7f_8Q2gVJogth9wEmXP5wAHK4EmP4rW65E,3560
|
| 105 |
+
accelerate/test_utils/scripts/test_script.py,sha256=__b8IO__NwICAA90g6XbKMtwqMJXm3JUtShMMLxxSo4,23525
|
| 106 |
+
accelerate/test_utils/scripts/test_sync.py,sha256=rwcjWzJAnninOBAbUwBztNaTodEMVkBT0wrdgHtD8BM,14441
|
| 107 |
+
accelerate/test_utils/testing.py,sha256=KtWKTwulfEvsMw23o2LXInjkNnL-H3kztJ3oH3vPRNc,14217
|
| 108 |
+
accelerate/test_utils/training.py,sha256=7RNVMmRb6WFCvGzyR2tWTaPL5tKO4YGzjXN0GFWvI8U,4019
|
| 109 |
+
accelerate/tracking.py,sha256=PP8NaPFcW0bGdMPaGCM6oO4wt_3IbVKeU_KdSKI7LVY,28328
|
| 110 |
+
accelerate/utils/__init__.py,sha256=kqdnZPFPLPAaJjBGlrDP9L3WCVcs6vMFL6TpGIKz5ik,4556
|
| 111 |
+
accelerate/utils/__pycache__/__init__.cpython-310.pyc,,
|
| 112 |
+
accelerate/utils/__pycache__/bnb.cpython-310.pyc,,
|
| 113 |
+
accelerate/utils/__pycache__/constants.cpython-310.pyc,,
|
| 114 |
+
accelerate/utils/__pycache__/dataclasses.cpython-310.pyc,,
|
| 115 |
+
accelerate/utils/__pycache__/deepspeed.cpython-310.pyc,,
|
| 116 |
+
accelerate/utils/__pycache__/environment.cpython-310.pyc,,
|
| 117 |
+
accelerate/utils/__pycache__/fsdp_utils.cpython-310.pyc,,
|
| 118 |
+
accelerate/utils/__pycache__/imports.cpython-310.pyc,,
|
| 119 |
+
accelerate/utils/__pycache__/launch.cpython-310.pyc,,
|
| 120 |
+
accelerate/utils/__pycache__/megatron_lm.cpython-310.pyc,,
|
| 121 |
+
accelerate/utils/__pycache__/memory.cpython-310.pyc,,
|
| 122 |
+
accelerate/utils/__pycache__/modeling.cpython-310.pyc,,
|
| 123 |
+
accelerate/utils/__pycache__/offload.cpython-310.pyc,,
|
| 124 |
+
accelerate/utils/__pycache__/operations.cpython-310.pyc,,
|
| 125 |
+
accelerate/utils/__pycache__/other.cpython-310.pyc,,
|
| 126 |
+
accelerate/utils/__pycache__/random.cpython-310.pyc,,
|
| 127 |
+
accelerate/utils/__pycache__/rich.cpython-310.pyc,,
|
| 128 |
+
accelerate/utils/__pycache__/torch_xla.cpython-310.pyc,,
|
| 129 |
+
accelerate/utils/__pycache__/tqdm.cpython-310.pyc,,
|
| 130 |
+
accelerate/utils/__pycache__/transformer_engine.cpython-310.pyc,,
|
| 131 |
+
accelerate/utils/__pycache__/versions.cpython-310.pyc,,
|
| 132 |
+
accelerate/utils/bnb.py,sha256=pVUVJNB4h9Y_1R_i_qnavE25h_a3XZYeh_fpFdqwuiM,20402
|
| 133 |
+
accelerate/utils/constants.py,sha256=uJFvRq3h86h3P331v9_JxbIh9eNJksByXvLKcxCRt8c,2398
|
| 134 |
+
accelerate/utils/dataclasses.py,sha256=gwF344kbSfit3TKGrfiyqdw8qF7U7OCAWCwz3LN4OVo,62268
|
| 135 |
+
accelerate/utils/deepspeed.py,sha256=B-CDDAWZwAWe_gXKszeV4NcDJVe4ACPwvDg-5Q96OJQ,9939
|
| 136 |
+
accelerate/utils/environment.py,sha256=ZG-2HbPrTihFX2Ak_bP0-uKLjs5KdNUrRUkhuLOwPX8,1302
|
| 137 |
+
accelerate/utils/fsdp_utils.py,sha256=xMznlR-37wtYZmS_Rv40zBKULMTVn4u5VVeuaxoLGwI,8924
|
| 138 |
+
accelerate/utils/imports.py,sha256=CpGodEPc234u-h7ESwm7my89nQgZ-suTrwZSc6GTasI,8387
|
| 139 |
+
accelerate/utils/launch.py,sha256=AfvwsFUXUyNgpfM2hcum_XTP-gdQ5NLOcwzbP792QcI,23659
|
| 140 |
+
accelerate/utils/megatron_lm.py,sha256=yOrhJ2u9NKBO3LR_FWlIxh44PWUx_cgdcCVDAjrBiE8,57263
|
| 141 |
+
accelerate/utils/memory.py,sha256=d2DBzqkcoYAPlpK0aMQ5f5c-R-M6Wx9KBx_2UM6qhNw,4880
|
| 142 |
+
accelerate/utils/modeling.py,sha256=ccAz-a34fQ9nTtjYPqPcV9cAisLW0XSzsIpIGLW4-Vo,61743
|
| 143 |
+
accelerate/utils/offload.py,sha256=UoinJf_eUs_cIkdo48RA8kblwl3QGfRACQ8ncbdikeU,7596
|
| 144 |
+
accelerate/utils/operations.py,sha256=PFsfIS8QpnV69cHN5u7NS941h4DFoRzUHOVb689c5k4,22002
|
| 145 |
+
accelerate/utils/other.py,sha256=cdJFvN1cvjTq0JoBL-e064-pO3_JFyg878gRB33wuhA,5453
|
| 146 |
+
accelerate/utils/random.py,sha256=IWVnFFjRuZZOO8HI9L7suHRSM33Pk2NXYywOpU0BKIg,4292
|
| 147 |
+
accelerate/utils/rich.py,sha256=8JZX_uGMQX-BufdXxJpdne7BWd1KyLHSgbiGxrDMYr8,847
|
| 148 |
+
accelerate/utils/torch_xla.py,sha256=Pq1tuqN0X_pWDVza6YgjfO45uoJdoRVRForLeLQzFus,1908
|
| 149 |
+
accelerate/utils/tqdm.py,sha256=0cegNnuA93tKT3o6HDip90rPl8BODLFLu4jP1E3aJ08,1344
|
| 150 |
+
accelerate/utils/transformer_engine.py,sha256=TlbaYL85ppjFD3DUgkUopTJkVIWxQOk476EpGb2LJ58,3665
|
| 151 |
+
accelerate/utils/versions.py,sha256=UgmcbjBm--6CIx1ZamSAMjAK_B_2l48LbeaNygqej8M,2149
|
llava_next/lib/python3.10/site-packages/accelerate-0.21.0.dist-info/REQUESTED
ADDED
|
File without changes
|
llava_next/lib/python3.10/site-packages/httpx-0.24.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
../../../bin/httpx,sha256=guIUHe5POr5U3JDxGZvb1maB-fTl_I760cdYVBsxE8k,220
|
| 2 |
+
httpx-0.24.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 3 |
+
httpx-0.24.0.dist-info/METADATA,sha256=i1VGa3zTHR-dIW93Y9VApsMWu_AOOuzcWh4vz7_zqVA,8070
|
| 4 |
+
httpx-0.24.0.dist-info/RECORD,,
|
| 5 |
+
httpx-0.24.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 6 |
+
httpx-0.24.0.dist-info/WHEEL,sha256=EI2JsGydwUL5GP9t6kzZv7G3HDPi7FuZDDf9In6amRM,87
|
| 7 |
+
httpx-0.24.0.dist-info/entry_points.txt,sha256=2lVkdQmxLA1pNMgSN2eV89o90HCZezhmNwsy6ryKDSA,37
|
| 8 |
+
httpx-0.24.0.dist-info/licenses/LICENSE.md,sha256=TsWdVE8StfU5o6cW_TIaxYzNgDC0ZSIfLIgCAM3yjY0,1508
|
| 9 |
+
httpx/__init__.py,sha256=oCxVAsePEy5DE9eLhGAAq9H3RBGZUDaUROtGEyzbBRo,3210
|
| 10 |
+
httpx/__pycache__/__init__.cpython-310.pyc,,
|
| 11 |
+
httpx/__pycache__/__version__.cpython-310.pyc,,
|
| 12 |
+
httpx/__pycache__/_api.cpython-310.pyc,,
|
| 13 |
+
httpx/__pycache__/_auth.cpython-310.pyc,,
|
| 14 |
+
httpx/__pycache__/_client.cpython-310.pyc,,
|
| 15 |
+
httpx/__pycache__/_compat.cpython-310.pyc,,
|
| 16 |
+
httpx/__pycache__/_config.cpython-310.pyc,,
|
| 17 |
+
httpx/__pycache__/_content.cpython-310.pyc,,
|
| 18 |
+
httpx/__pycache__/_decoders.cpython-310.pyc,,
|
| 19 |
+
httpx/__pycache__/_exceptions.cpython-310.pyc,,
|
| 20 |
+
httpx/__pycache__/_main.cpython-310.pyc,,
|
| 21 |
+
httpx/__pycache__/_models.cpython-310.pyc,,
|
| 22 |
+
httpx/__pycache__/_multipart.cpython-310.pyc,,
|
| 23 |
+
httpx/__pycache__/_status_codes.cpython-310.pyc,,
|
| 24 |
+
httpx/__pycache__/_types.cpython-310.pyc,,
|
| 25 |
+
httpx/__pycache__/_urlparse.cpython-310.pyc,,
|
| 26 |
+
httpx/__pycache__/_urls.cpython-310.pyc,,
|
| 27 |
+
httpx/__pycache__/_utils.cpython-310.pyc,,
|
| 28 |
+
httpx/__version__.py,sha256=9Gk5Kj_c778Xjs4WJbvag1s_DVWXql64qRyncd3a6kA,108
|
| 29 |
+
httpx/_api.py,sha256=cVU9ErzaXve5rqoPoSHr9yJbovHtICrcxR7yBoNSeOw,13011
|
| 30 |
+
httpx/_auth.py,sha256=WnTcFM__63hDCex56w5udXociXGctfo3BQuE7v3d4OQ,11766
|
| 31 |
+
httpx/_client.py,sha256=R6Snj6msUWTWIdZIW2Lf5TYu_Zx6Oz8L6J5sfjaXrYw,68139
|
| 32 |
+
httpx/_compat.py,sha256=lQa4SnZhS-kNQ8HKpSwKrmJ00nYQKDVaWwwnOYEvjMI,1602
|
| 33 |
+
httpx/_config.py,sha256=9Tg0-pV93Hl5knjyZhCLcoEXymAMn-OLaDsEn2uPK14,12391
|
| 34 |
+
httpx/_content.py,sha256=olbWqawdWWweXeW6gDYHPiEGjip5lqFZKv9OmVd-zIg,8092
|
| 35 |
+
httpx/_decoders.py,sha256=dd8GSkEAe45BzRUF47zH_lg3-BcwXtxzPBSGP5Y4F90,9739
|
| 36 |
+
httpx/_exceptions.py,sha256=xKw-U6vW7zmdReUAGYHMegYWZuDAuE5039L087SHe4Q,7880
|
| 37 |
+
httpx/_main.py,sha256=m9C4RuqjOB6UqL3FFHMjmC45f4SDSO-iOREFLdw4IdM,15784
|
| 38 |
+
httpx/_models.py,sha256=Ho9YjmVMkS-lEMhCGpecfYsenVZy2jsLJmKCexO50tI,42696
|
| 39 |
+
httpx/_multipart.py,sha256=LTcxKvbIkVbleNDhb3_JEIayIdYxXfxr812uP_Hudz0,8978
|
| 40 |
+
httpx/_status_codes.py,sha256=XKArMrSoo8oKBQCHdFGA-wsM2PcSTaHE8svDYOUcwWk,5584
|
| 41 |
+
httpx/_transports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 42 |
+
httpx/_transports/__pycache__/__init__.cpython-310.pyc,,
|
| 43 |
+
httpx/_transports/__pycache__/asgi.cpython-310.pyc,,
|
| 44 |
+
httpx/_transports/__pycache__/base.cpython-310.pyc,,
|
| 45 |
+
httpx/_transports/__pycache__/default.cpython-310.pyc,,
|
| 46 |
+
httpx/_transports/__pycache__/mock.cpython-310.pyc,,
|
| 47 |
+
httpx/_transports/__pycache__/wsgi.cpython-310.pyc,,
|
| 48 |
+
httpx/_transports/asgi.py,sha256=lKAL-6dhxqSnZA2fMWtj-MokSTIzjnwwa3DTkkof5cE,5317
|
| 49 |
+
httpx/_transports/base.py,sha256=0BM8yZZEkdFT4tXXSm0h0dK0cSYA4hLgInj_BljGEGw,2510
|
| 50 |
+
httpx/_transports/default.py,sha256=fla9xvSAM3BuGtaMa4PhbX1gW_9oafl8vzujOhcE-H8,12626
|
| 51 |
+
httpx/_transports/mock.py,sha256=sDt3BDXbz8-W94kC8OXtGzF1PWH0y73h1De7Q-XkVtg,1179
|
| 52 |
+
httpx/_transports/wsgi.py,sha256=72ZMPBLPV-aZB4gfsz_SOrJpgKJb6Z9W5wFxhlMQcqg,4754
|
| 53 |
+
httpx/_types.py,sha256=BnX0adSAxLT9BzkxuX96S4odkC9UdLMgws6waxqEKuI,3333
|
| 54 |
+
httpx/_urlparse.py,sha256=sYl4v1ndRHbuXQswp3kN9I4VNWsj-Vu6icmHofOdUzM,15897
|
| 55 |
+
httpx/_urls.py,sha256=JAONd-2reXpB_WuQ7WuvhUcLuebiQeYJQPyszADmCow,21840
|
| 56 |
+
httpx/_utils.py,sha256=FbZDT9UCu_8EjyynU8g_YsQGzQmSgSp5dtexxBolgIA,14580
|
| 57 |
+
httpx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
llava_next/lib/python3.10/site-packages/idna/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (824 Bytes). View file
|
|
|
llava_next/lib/python3.10/site-packages/networkx/__init__.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
NetworkX
|
| 3 |
+
========
|
| 4 |
+
|
| 5 |
+
NetworkX is a Python package for the creation, manipulation, and study of the
|
| 6 |
+
structure, dynamics, and functions of complex networks.
|
| 7 |
+
|
| 8 |
+
See https://networkx.org for complete documentation.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
__version__ = "3.4.2"
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
# These are imported in order as listed
|
| 15 |
+
from networkx.lazy_imports import _lazy_import
|
| 16 |
+
|
| 17 |
+
from networkx.exception import *
|
| 18 |
+
|
| 19 |
+
from networkx import utils
|
| 20 |
+
from networkx.utils import _clear_cache, _dispatchable
|
| 21 |
+
|
| 22 |
+
# load_and_call entry_points, set configs
|
| 23 |
+
config = utils.backends._set_configs_from_environment()
|
| 24 |
+
utils.config = utils.configs.config = config # type: ignore[attr-defined]
|
| 25 |
+
|
| 26 |
+
from networkx import classes
|
| 27 |
+
from networkx.classes import filters
|
| 28 |
+
from networkx.classes import *
|
| 29 |
+
|
| 30 |
+
from networkx import convert
|
| 31 |
+
from networkx.convert import *
|
| 32 |
+
|
| 33 |
+
from networkx import convert_matrix
|
| 34 |
+
from networkx.convert_matrix import *
|
| 35 |
+
|
| 36 |
+
from networkx import relabel
|
| 37 |
+
from networkx.relabel import *
|
| 38 |
+
|
| 39 |
+
from networkx import generators
|
| 40 |
+
from networkx.generators import *
|
| 41 |
+
|
| 42 |
+
from networkx import readwrite
|
| 43 |
+
from networkx.readwrite import *
|
| 44 |
+
|
| 45 |
+
# Need to test with SciPy, when available
|
| 46 |
+
from networkx import algorithms
|
| 47 |
+
from networkx.algorithms import *
|
| 48 |
+
|
| 49 |
+
from networkx import linalg
|
| 50 |
+
from networkx.linalg import *
|
| 51 |
+
|
| 52 |
+
from networkx import drawing
|
| 53 |
+
from networkx.drawing import *
|
llava_next/lib/python3.10/site-packages/networkx/algorithms/chains.py
ADDED
|
@@ -0,0 +1,172 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for finding chains in a graph."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.utils import not_implemented_for
|
| 5 |
+
|
| 6 |
+
__all__ = ["chain_decomposition"]
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def chain_decomposition(G, root=None):
    """Returns the chain decomposition of a graph.

    The *chain decomposition* of a graph with respect a depth-first
    search tree is a set of cycles or paths derived from the set of
    fundamental cycles of the tree in the following manner. Consider
    each fundamental cycle with respect to the given tree, represented
    as a list of edges beginning with the nontree edge oriented away
    from the root of the tree. For each fundamental cycle, if it
    overlaps with any previous fundamental cycle, just take the initial
    non-overlapping segment, which is a path instead of a cycle. Each
    cycle or path is called a *chain*. For more information, see [1]_.

    Parameters
    ----------
    G : undirected graph

    root : node (optional)
        A node in the graph `G`. If specified, only the chain
        decomposition for the connected component containing this node
        will be returned. This node indicates the root of the depth-first
        search tree.

    Yields
    ------
    chain : list
        A list of edges representing a chain. There is no guarantee on
        the orientation of the edges in each chain (for example, if a
        chain includes the edge joining nodes 1 and 2, the chain may
        include either (1, 2) or (2, 1)).

    Raises
    ------
    NodeNotFound
        If `root` is not in the graph `G`.

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 4), (3, 4), (3, 5), (4, 5)])
    >>> list(nx.chain_decomposition(G))
    [[(4, 5), (5, 3), (3, 4)]]

    Notes
    -----
    The worst-case running time of this implementation is linear in the
    number of nodes and number of edges [1]_.

    References
    ----------
    .. [1] Jens M. Schmidt (2013). "A simple test on 2-vertex-
       and 2-edge-connectivity." *Information Processing Letters*,
       113, 241–244. Elsevier. <https://doi.org/10.1016/j.ipl.2013.01.016>

    """

    def _dfs_cycle_forest(G, root=None):
        """Builds a directed graph composed of cycles from the given graph.

        `G` is an undirected simple graph. `root` is a node in the graph
        from which the depth-first search is started.

        This function returns both the depth-first search cycle graph
        (as a :class:`~networkx.DiGraph`) and the list of nodes in
        depth-first preorder. The depth-first search cycle graph is a
        directed graph whose edges are the edges of `G` oriented toward
        the root if the edge is a tree edge and away from the root if
        the edge is a non-tree edge. If `root` is not specified, this
        performs a depth-first search on each connected component of `G`
        and returns a directed forest instead.

        If `root` is not in the graph, this raises :exc:`KeyError`.

        """
        # Create a directed graph from the depth-first search tree with
        # root node `root` in which tree edges are directed toward the
        # root and nontree edges are directed away from the root. For
        # each node with an incident nontree edge, this creates a
        # directed cycle starting with the nontree edge and returning to
        # that node.
        #
        # The `parent` node attribute stores the parent of each node in
        # the DFS tree. The `nontree` edge attribute indicates whether
        # the edge is a tree edge or a nontree edge.
        #
        # We also store the order of the nodes found in the depth-first
        # search in the `nodes` list.
        H = nx.DiGraph()
        nodes = []
        for u, v, d in nx.dfs_labeled_edges(G, source=root):
            if d == "forward":
                # `dfs_labeled_edges()` yields (root, root, 'forward')
                # if it is beginning the search on a new connected
                # component.
                if u == v:
                    H.add_node(v, parent=None)
                    nodes.append(v)
                else:
                    H.add_node(v, parent=u)
                    H.add_edge(v, u, nontree=False)
                    nodes.append(v)
            # `dfs_labeled_edges` considers nontree edges in both
            # orientations, so we need to not add the edge if it its
            # other orientation has been added.
            elif d == "nontree" and v not in H[u]:
                H.add_edge(v, u, nontree=True)
            else:
                # Do nothing on 'reverse' edges; we only care about
                # forward and nontree edges.
                pass
        return H, nodes

    def _build_chain(G, u, v, visited):
        """Generate the chain starting from the given nontree edge.

        `G` is a DFS cycle graph as constructed by
        :func:`_dfs_cycle_graph`. The edge (`u`, `v`) is a nontree edge
        that begins a chain. `visited` is a set representing the nodes
        in `G` that have already been visited.

        This function yields the edges in an initial segment of the
        fundamental cycle of `G` starting with the nontree edge (`u`,
        `v`) that includes all the edges up until the first node that
        appears in `visited`. The tree edges are given by the 'parent'
        node attribute. The `visited` set is updated to add each node in
        an edge yielded by this function.

        """
        # Walk tree edges (via the `parent` attribute) until hitting an
        # already-visited node; that final edge closes the chain.
        while v not in visited:
            yield u, v
            visited.add(v)
            u, v = v, G.nodes[v]["parent"]
        yield u, v

    # Check if the root is in the graph G. If not, raise NodeNotFound
    if root is not None and root not in G:
        raise nx.NodeNotFound(f"Root node {root} is not in graph")

    # Create a directed version of H that has the DFS edges directed
    # toward the root and the nontree edges directed away from the root
    # (in each connected component).
    H, nodes = _dfs_cycle_forest(G, root)

    # Visit the nodes again in DFS order. For each node, and for each
    # nontree edge leaving that node, compute the fundamental cycle for
    # that nontree edge starting with that edge. If the fundamental
    # cycle overlaps with any visited nodes, just take the prefix of the
    # cycle up to the point of visited nodes.
    #
    # We repeat this process for each connected component (implicitly,
    # since `nodes` already has a list of the nodes grouped by connected
    # component).
    visited = set()
    for u in nodes:
        visited.add(u)
        # For each nontree edge going out of node u...
        edges = ((u, v) for u, v, d in H.out_edges(u, data="nontree") if d)
        for u, v in edges:
            # Create the cycle or cycle prefix starting with the
            # nontree edge.
            chain = list(_build_chain(H, u, v, visited))
            yield chain
|
llava_next/lib/python3.10/site-packages/networkx/algorithms/chordal.py
ADDED
|
@@ -0,0 +1,443 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Algorithms for chordal graphs.
|
| 3 |
+
|
| 4 |
+
A graph is chordal if every cycle of length at least 4 has a chord
|
| 5 |
+
(an edge joining two nodes not adjacent in the cycle).
|
| 6 |
+
https://en.wikipedia.org/wiki/Chordal_graph
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import sys
|
| 10 |
+
|
| 11 |
+
import networkx as nx
|
| 12 |
+
from networkx.algorithms.components import connected_components
|
| 13 |
+
from networkx.utils import arbitrary_element, not_implemented_for
|
| 14 |
+
|
| 15 |
+
__all__ = [
|
| 16 |
+
"is_chordal",
|
| 17 |
+
"find_induced_nodes",
|
| 18 |
+
"chordal_graph_cliques",
|
| 19 |
+
"chordal_graph_treewidth",
|
| 20 |
+
"NetworkXTreewidthBoundExceeded",
|
| 21 |
+
"complete_to_chordal_graph",
|
| 22 |
+
]
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class NetworkXTreewidthBoundExceeded(nx.NetworkXException):
    """Exception raised when a treewidth bound has been provided and it has
    been exceeded.

    Raised during the maximum cardinality search (see
    ``_find_chordality_breaker``) when a discovered clique implies a
    treewidth larger than the caller-supplied ``treewidth_bound``.
    """
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def is_chordal(G):
    """Checks whether G is a chordal graph.

    A graph is chordal when every cycle with four or more nodes has a
    chord — an edge joining two nodes that are not adjacent in the
    cycle.

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    Returns
    -------
    chordal : bool
        True if G is a chordal graph and False otherwise.

    Raises
    ------
    NetworkXNotImplemented
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.

    Examples
    --------
    >>> e = [
    ...     (1, 2),
    ...     (1, 3),
    ...     (2, 3),
    ...     (2, 4),
    ...     (3, 4),
    ...     (3, 5),
    ...     (3, 6),
    ...     (4, 5),
    ...     (4, 6),
    ...     (5, 6),
    ... ]
    >>> G = nx.Graph(e)
    >>> nx.is_chordal(G)
    True

    Notes
    -----
    The nodes are visited in maximum cardinality search order; the graph
    is rejected as soon as the numbered neighborhood of some node fails
    to form a clique. Based on the algorithms in [1]_.

    Self loops are ignored.

    References
    ----------
    .. [1] R. E. Tarjan and M. Yannakakis, Simple linear-time algorithms
       to test chordality of graphs, test acyclicity of hypergraphs, and
       selectively reduce acyclic hypergraphs, SIAM J. Comput., 13 (1984),
       pp. 566–579.
    """
    # Any graph on at most three nodes cannot contain a chordless cycle
    # of length >= 4, so it is chordal by definition.
    # Otherwise, an empty result from the breaker search means no
    # chordless cycle was found.
    return len(G.nodes) <= 3 or not _find_chordality_breaker(G)
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
@nx._dispatchable
def find_induced_nodes(G, s, t, treewidth_bound=sys.maxsize):
    """Returns the set of induced nodes in the path from s to t.

    Parameters
    ----------
    G : graph
        A chordal NetworkX graph
    s : node
        Source node to look for induced nodes
    t : node
        Destination node to look for induced nodes
    treewidth_bound: float
        Maximum treewidth acceptable for the graph H. The search
        for induced nodes will end as soon as the treewidth_bound is exceeded.

    Returns
    -------
    induced_nodes : Set of nodes
        The set of induced nodes in the path from s to t in G

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        If the input graph is an instance of one of these classes, a
        :exc:`NetworkXError` is raised.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> G = nx.Graph()
    >>> G = nx.generators.classic.path_graph(10)
    >>> induced_nodes = nx.find_induced_nodes(G, 1, 9, 2)
    >>> sorted(induced_nodes)
    [1, 2, 3, 4, 5, 6, 7, 8, 9]

    Notes
    -----
    G must be a chordal graph and (s,t) an edge that is not in G.

    If a treewidth_bound is provided, the search for induced nodes will end
    as soon as the treewidth_bound is exceeded.

    The algorithm is inspired by Algorithm 4 in [1]_.
    A formal definition of induced node can also be found on that reference.

    Self Loops are ignored

    References
    ----------
    .. [1] Learning Bounded Treewidth Bayesian Networks.
       Gal Elidan, Stephen Gould; JMLR, 9(Dec):2699--2731, 2008.
       http://jmlr.csail.mit.edu/papers/volume9/elidan08a/elidan08a.pdf
    """
    if not is_chordal(G):
        raise nx.NetworkXError("Input graph is not chordal.")

    # Work on a copy with the chord (s, t) added; that edge creates the
    # chordless cycles whose breakers are the induced nodes.
    H = nx.Graph(G)
    H.add_edge(s, t)
    induced_nodes = set()
    triplet = _find_chordality_breaker(H, s, treewidth_bound)
    # Repeatedly collect chordality breakers; connecting s to each
    # breaker node lets the next search discover the next breaker along
    # the s-t path (per Algorithm 4 in the cited reference).
    while triplet:
        (u, v, w) = triplet
        induced_nodes.update(triplet)
        for n in triplet:
            if n != s:
                H.add_edge(s, n)
        triplet = _find_chordality_breaker(H, s, treewidth_bound)
    if induced_nodes:
        # Add t and the second node in the induced path from s to t.
        induced_nodes.add(t)
        for u in G[s]:
            # The second path node is the neighbor of s adjacent (in G)
            # to exactly two of the nodes collected so far.
            if len(induced_nodes & set(G[u])) == 2:
                induced_nodes.add(u)
                break
    return induced_nodes
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
@nx._dispatchable
def chordal_graph_cliques(G):
    """Returns all maximal cliques of a chordal graph.

    The algorithm breaks the graph in connected components and performs a
    maximum cardinality search in each component to get the cliques.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    Yields
    ------
    frozenset of nodes
        Maximal cliques, each of which is a frozenset of
        nodes in `G`. The order of cliques is arbitrary.

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> e = [
    ...     (1, 2),
    ...     (1, 3),
    ...     (2, 3),
    ...     (2, 4),
    ...     (3, 4),
    ...     (3, 5),
    ...     (3, 6),
    ...     (4, 5),
    ...     (4, 6),
    ...     (5, 6),
    ...     (7, 8),
    ... ]
    >>> G = nx.Graph(e)
    >>> G.add_node(9)
    >>> cliques = [c for c in chordal_graph_cliques(G)]
    >>> cliques[0]
    frozenset({1, 2, 3})
    """
    # Each connected component is processed independently; .copy() gives
    # a standalone graph rather than a view tied to G.
    for C in (G.subgraph(c).copy() for c in connected_components(G)):
        if C.number_of_nodes() == 1:
            # An isolated node is a singleton clique — unless it carries
            # a self loop, which this routine treats as non-chordal.
            if nx.number_of_selfloops(C) > 0:
                raise nx.NetworkXError("Input graph is not chordal.")
            yield frozenset(C.nodes())
        else:
            # Maximum cardinality search: repeatedly number the node
            # with the most already-numbered neighbors.
            unnumbered = set(C.nodes())
            v = arbitrary_element(C)
            unnumbered.remove(v)
            numbered = {v}
            clique_wanna_be = {v}
            while unnumbered:
                v = _max_cardinality_node(C, unnumbered, numbered)
                unnumbered.remove(v)
                numbered.add(v)
                new_clique_wanna_be = set(C.neighbors(v)) & numbered
                sg = C.subgraph(clique_wanna_be)
                if _is_complete_graph(sg):
                    # Emit the previous candidate when the new one does
                    # not contain it — the previous one is then maximal.
                    new_clique_wanna_be.add(v)
                    if not new_clique_wanna_be >= clique_wanna_be:
                        yield frozenset(clique_wanna_be)
                    clique_wanna_be = new_clique_wanna_be
                else:
                    # In a chordal graph the numbered neighborhood must
                    # always be a clique.
                    raise nx.NetworkXError("Input graph is not chordal.")
            # The last candidate clique of the component is maximal.
            yield frozenset(clique_wanna_be)
|
| 243 |
+
|
| 244 |
+
|
| 245 |
+
@nx._dispatchable
def chordal_graph_treewidth(G):
    """Returns the treewidth of the chordal graph G.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    Returns
    -------
    treewidth : int
        The size of the largest clique in the graph minus one.

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
        The algorithm can only be applied to chordal graphs. If the input
        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.

    Examples
    --------
    >>> e = [
    ...     (1, 2),
    ...     (1, 3),
    ...     (2, 3),
    ...     (2, 4),
    ...     (3, 4),
    ...     (3, 5),
    ...     (3, 6),
    ...     (4, 5),
    ...     (4, 6),
    ...     (5, 6),
    ...     (7, 8),
    ... ]
    >>> G = nx.Graph(e)
    >>> G.add_node(9)
    >>> nx.chordal_graph_treewidth(G)
    3

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Tree_decomposition#Treewidth
    """
    if not is_chordal(G):
        raise nx.NetworkXError("Input graph is not chordal.")

    # For a chordal graph the treewidth equals the size of the largest
    # maximal clique minus one.  `default=-1` reproduces the scan's
    # initial value when no cliques are produced.
    largest_clique = max(
        (len(clique) for clique in nx.chordal_graph_cliques(G)), default=-1
    )
    return largest_clique - 1
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
def _is_complete_graph(G):
    """Returns True if G is a complete graph.

    Raises
    ------
    NetworkXError
        If G contains a self loop (callers pass candidate-clique
        subgraphs, which must be loop-free).
    """
    if nx.number_of_selfloops(G) > 0:
        raise nx.NetworkXError("Self loop found in _is_complete_graph()")
    n = G.number_of_nodes()
    # Graphs with fewer than two nodes are trivially complete.
    if n < 2:
        return True
    e = G.number_of_edges()
    # Compare in exact integer arithmetic; the previous float expression
    # (n * (n - 1)) / 2 could lose precision for very large n.
    return e == n * (n - 1) // 2
|
| 309 |
+
|
| 310 |
+
|
| 311 |
+
def _find_missing_edge(G):
|
| 312 |
+
"""Given a non-complete graph G, returns a missing edge."""
|
| 313 |
+
nodes = set(G)
|
| 314 |
+
for u in G:
|
| 315 |
+
missing = nodes - set(list(G[u].keys()) + [u])
|
| 316 |
+
if missing:
|
| 317 |
+
return (u, missing.pop())
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
def _max_cardinality_node(G, choices, wanna_connect):
|
| 321 |
+
"""Returns a the node in choices that has more connections in G
|
| 322 |
+
to nodes in wanna_connect.
|
| 323 |
+
"""
|
| 324 |
+
max_number = -1
|
| 325 |
+
for x in choices:
|
| 326 |
+
number = len([y for y in G[x] if y in wanna_connect])
|
| 327 |
+
if number > max_number:
|
| 328 |
+
max_number = number
|
| 329 |
+
max_cardinality_node = x
|
| 330 |
+
return max_cardinality_node
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize):
    """Given a graph G, starts a max cardinality search
    (starting from s if s is given and from an arbitrary node otherwise)
    trying to find a non-chordal cycle.

    If it does find one, it returns (u,v,w) where u,v,w are the three
    nodes that together with s are involved in the cycle.

    It ignores any self loops.

    Returns an empty tuple when no breaker is found (G is chordal).
    May raise :exc:`NetworkXTreewidthBoundExceeded` if the running
    treewidth estimate exceeds `treewidth_bound`.
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept("Graph has no nodes.")
    unnumbered = set(G)
    if s is None:
        s = arbitrary_element(G)
    unnumbered.remove(s)
    numbered = {s}
    # Running lower bound on the treewidth, updated as cliques are seen.
    current_treewidth = -1
    while unnumbered:  # and current_treewidth <= treewidth_bound:
        # Number the node with the most already-numbered neighbors.
        v = _max_cardinality_node(G, unnumbered, numbered)
        unnumbered.remove(v)
        numbered.add(v)
        clique_wanna_be = set(G[v]) & numbered
        sg = G.subgraph(clique_wanna_be)
        if _is_complete_graph(sg):
            # The graph seems to be chordal by now. We update the treewidth
            current_treewidth = max(current_treewidth, len(clique_wanna_be))
            if current_treewidth > treewidth_bound:
                raise nx.NetworkXTreewidthBoundExceeded(
                    f"treewidth_bound exceeded: {current_treewidth}"
                )
        else:
            # sg is not a clique,
            # look for an edge that is not included in sg
            (u, w) = _find_missing_edge(sg)
            return (u, v, w)
    return ()
|
| 370 |
+
|
| 371 |
+
|
| 372 |
+
@not_implemented_for("directed")
@nx._dispatchable(returns_graph=True)
def complete_to_chordal_graph(G):
    """Return a copy of G completed to a chordal graph

    Adds edges to a copy of G to create a chordal graph. A graph G=(V,E) is
    called chordal if for each cycle with length bigger than 3, there exist
    two non-adjacent nodes connected by an edge (called a chord).

    Parameters
    ----------
    G : NetworkX graph
        Undirected graph

    Returns
    -------
    H : NetworkX graph
        The chordal enhancement of G
    alpha : Dictionary
        The elimination ordering of nodes of G

    Notes
    -----
    There are different approaches to calculate the chordal
    enhancement of a graph. The algorithm used here is called
    MCS-M and gives at least minimal (local) triangulation of graph. Note
    that this triangulation is not necessarily a global minimum.

    https://en.wikipedia.org/wiki/Chordal_graph

    References
    ----------
    .. [1] Berry, Anne & Blair, Jean & Heggernes, Pinar & Peyton, Barry. (2004)
       Maximum Cardinality Search for Computing Minimal Triangulations of
       Graphs. Algorithmica. 39. 287-298. 10.1007/s00453-004-1084-3.

    Examples
    --------
    >>> from networkx.algorithms.chordal import complete_to_chordal_graph
    >>> G = nx.wheel_graph(10)
    >>> H, alpha = complete_to_chordal_graph(G)
    """
    H = G.copy()
    # alpha maps each node to its (reverse) elimination number.
    alpha = {node: 0 for node in H}
    # A graph that is already chordal needs no chords added.
    if nx.is_chordal(H):
        return H, alpha
    chords = set()
    # MCS-M weights: incremented for nodes reachable from the current
    # node through strictly lower-weight intermediate nodes.
    weight = {node: 0 for node in H.nodes()}
    unnumbered_nodes = list(H.nodes())
    # Number nodes from len(G) down to 1 (reverse elimination order).
    for i in range(len(H.nodes()), 0, -1):
        # get the node in unnumbered_nodes with the maximum weight
        z = max(unnumbered_nodes, key=lambda node: weight[node])
        unnumbered_nodes.remove(z)
        alpha[z] = i
        update_nodes = []
        for y in unnumbered_nodes:
            if G.has_edge(y, z):
                update_nodes.append(y)
            else:
                # y_weight will be bigger than node weights between y and z
                y_weight = weight[y]
                lower_nodes = [
                    node for node in unnumbered_nodes if weight[node] < y_weight
                ]
                # If y reaches z through lower-weight nodes, (z, y)
                # becomes a chord of the triangulation.
                if nx.has_path(H.subgraph(lower_nodes + [z, y]), y, z):
                    update_nodes.append(y)
                    chords.add((z, y))
        # during calculation of paths the weights should not be updated
        for node in update_nodes:
            weight[node] += 1
    H.add_edges_from(chords)
    return H, alpha
|
llava_next/lib/python3.10/site-packages/networkx/algorithms/graphical.py
ADDED
|
@@ -0,0 +1,483 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Test sequences for graphiness."""
|
| 2 |
+
|
| 3 |
+
import heapq
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
|
| 7 |
+
__all__ = [
|
| 8 |
+
"is_graphical",
|
| 9 |
+
"is_multigraphical",
|
| 10 |
+
"is_pseudographical",
|
| 11 |
+
"is_digraphical",
|
| 12 |
+
"is_valid_degree_sequence_erdos_gallai",
|
| 13 |
+
"is_valid_degree_sequence_havel_hakimi",
|
| 14 |
+
]
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
@nx._dispatchable(graphs=None)
def is_graphical(sequence, method="eg"):
    """Returns True if sequence is a valid degree sequence.

    A degree sequence is valid if some simple graph can realize it.

    Parameters
    ----------
    sequence : list or iterable container
        A sequence of integer node degrees.

    method : "eg" | "hh" (default: 'eg')
        Validation algorithm to use.  "eg" applies the Erdős-Gallai
        criterion [EG1960]_, [choudum1986]_; "hh" applies the
        Havel-Hakimi reduction [havel1955]_, [hakimi1962]_, [CL1996]_.

    Returns
    -------
    valid : bool
        True if the sequence is a valid degree sequence, False otherwise.

    Raises
    ------
    NetworkXException
        If `method` is neither "eg" nor "hh".

    References
    ----------
    .. [EG1960] Erdős and Gallai, Mat. Lapok 11 264, 1960.
    .. [choudum1986] S.A. Choudum. "A simple proof of the Erdős-Gallai theorem on
       graph sequences." Bulletin of the Australian Mathematical Society, 33,
       pp 67-70, 1986. https://doi.org/10.1017/S0004972700002872
    .. [havel1955] Havel, V. "A Remark on the Existence of Finite Graphs"
       Casopis Pest. Mat. 80, 477-480, 1955.
    .. [hakimi1962] Hakimi, S. "On the Realizability of a Set of Integers as
       Degrees of the Vertices of a Graph." SIAM J. Appl. Math. 10, 496-506, 1962.
    .. [CL1996] G. Chartrand and L. Lesniak, "Graphs and Digraphs",
       Chapman and Hall/CRC, 1996.
    """
    # Materialize once so generator inputs work with either validator.
    degrees = list(sequence)
    if method == "eg":
        return is_valid_degree_sequence_erdos_gallai(degrees)
    if method == "hh":
        return is_valid_degree_sequence_havel_hakimi(degrees)
    raise nx.NetworkXException("`method` must be 'eg' or 'hh'")
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def _basic_graphical_tests(deg_sequence):
    """Run cheap sanity checks on a degree sequence.

    Returns the tuple ``(dmax, dmin, dsum, n, num_degs)`` computed over the
    *non-zero* degrees only, where ``num_degs[d]`` counts how many entries
    equal ``d``.  Raises ``nx.NetworkXUnfeasible`` when the sequence is
    trivially non-graphical: a degree is negative or at least the sequence
    length, the degree sum is odd, or the sum exceeds ``n * (n - 1)``.
    """
    degrees = nx.utils.make_list_of_ints(deg_sequence)
    p = len(degrees)
    num_degs = [0] * p
    dmax = 0
    dmin = p
    dsum = 0
    n = 0
    for d in degrees:
        # Degrees must lie in [0, p); anything else cannot be realized.
        if not 0 <= d < p:
            raise nx.NetworkXUnfeasible
        # Zero degrees are ignored: isolated nodes never affect feasibility.
        if d:
            if d > dmax:
                dmax = d
            if d < dmin:
                dmin = d
            dsum += d
            n += 1
            num_degs[d] += 1
    # Odd total or more stubs than a simple graph on n nodes can absorb.
    if dsum % 2 or dsum > n * (n - 1):
        raise nx.NetworkXUnfeasible
    return dmax, dmin, dsum, n, num_degs
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
@nx._dispatchable(graphs=None)
def is_valid_degree_sequence_havel_hakimi(deg_sequence):
    r"""Returns True if deg_sequence can be realized by a simple graph.

    The validation proceeds using the Havel-Hakimi theorem
    [havel1955]_, [hakimi1962]_, [CL1996]_.
    Worst-case run time is $O(s)$ where $s$ is the sum of the sequence.

    Parameters
    ----------
    deg_sequence : list
        A list of integers where each element specifies the degree of a node
        in a graph.

    Returns
    -------
    valid : bool
        True if deg_sequence is graphical and False if not.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_valid_degree_sequence_havel_hakimi(sequence)
    True

    To test a non-valid sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_valid_degree_sequence_havel_hakimi(sequence_list)
    False

    Notes
    -----
    The ZZ condition says that for the sequence d if

    .. math::
        |d| >= \frac{(\max(d) + \min(d) + 1)^2}{4*\min(d)}

    then d is graphical.  This was shown in Theorem 6 in [1]_.

    References
    ----------
    .. [1] I.E. Zverovich and V.E. Zverovich. "Contributions to the theory
       of graphic sequences", Discrete Mathematics, 105, pp. 292-303 (1992).
    .. [havel1955] Havel, V. "A Remark on the Existence of Finite Graphs"
       Casopis Pest. Mat. 80, 477-480, 1955.
    .. [hakimi1962] Hakimi, S. "On the Realizability of a Set of Integers as
       Degrees of the Vertices of a Graph." SIAM J. Appl. Math. 10, 496-506, 1962.
    .. [CL1996] G. Chartrand and L. Lesniak, "Graphs and Digraphs",
       Chapman and Hall/CRC, 1996.
    """
    try:
        # num_degs[d] counts remaining (virtual) nodes with d unconnected stubs.
        dmax, dmin, dsum, n, num_degs = _basic_graphical_tests(deg_sequence)
    except nx.NetworkXUnfeasible:
        return False
    # Accept if sequence has no non-zero degrees or passes the ZZ condition
    if n == 0 or 4 * dmin * n >= (dmax + dmin + 1) * (dmax + dmin + 1):
        return True

    # Scratch buffer for the reduced degrees produced by one Havel-Hakimi step.
    modstubs = [0] * (dmax + 1)
    # Successively reduce degree sequence by removing the maximum degree
    while n > 0:
        # Retrieve the maximum degree in the sequence
        while num_degs[dmax] == 0:
            dmax -= 1
        # If there are not enough stubs to connect to, then the sequence is
        # not graphical
        if dmax > n - 1:
            return False

        # Remove largest stub in list
        num_degs[dmax], n = num_degs[dmax] - 1, n - 1
        # Reduce the next dmax largest stubs
        mslen = 0
        k = dmax
        for i in range(dmax):
            # k walks downward to the next-largest available degree bucket.
            while num_degs[k] == 0:
                k -= 1
            num_degs[k], n = num_degs[k] - 1, n - 1
            # Degrees reduced to zero are dropped; others are staged for re-add.
            if k > 1:
                modstubs[mslen] = k - 1
                mslen += 1
        # Add back to the list any non-zero stubs that were removed
        for i in range(mslen):
            stub = modstubs[i]
            num_degs[stub], n = num_degs[stub] + 1, n + 1
    # All stubs were consumed without conflict: the sequence is graphical.
    return True
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
@nx._dispatchable(graphs=None)
def is_valid_degree_sequence_erdos_gallai(deg_sequence):
    r"""Returns True if deg_sequence can be realized by a simple graph.

    The validation is done using the Erdős-Gallai theorem [EG1960]_.

    Parameters
    ----------
    deg_sequence : list
        A list of integers

    Returns
    -------
    valid : bool
        True if deg_sequence is graphical and False if not.

    Examples
    --------
    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> sequence = (d for _, d in G.degree())
    >>> nx.is_valid_degree_sequence_erdos_gallai(sequence)
    True

    To test a non-valid sequence:
    >>> sequence_list = [d for _, d in G.degree()]
    >>> sequence_list[-1] += 1
    >>> nx.is_valid_degree_sequence_erdos_gallai(sequence_list)
    False

    Notes
    -----

    This implementation uses an equivalent form of the Erdős-Gallai criterion.
    Worst-case run time is $O(n)$ where $n$ is the length of the sequence.

    Specifically, a sequence d is graphical if and only if the
    sum of the sequence is even and for all strong indices k in the sequence,

    .. math::

       \sum_{i=1}^{k} d_i \leq k(k-1) + \sum_{j=k+1}^{n} \min(d_i,k)
             = k(n-1) - ( k \sum_{j=0}^{k-1} n_j - \sum_{j=0}^{k-1} j n_j )

    A strong index k is any index where d_k >= k and the value n_j is the
    number of occurrences of j in d.  The maximal strong index is called the
    Durfee index.

    This particular rearrangement comes from the proof of Theorem 3 in [2]_.

    The ZZ condition says that for the sequence d if

    .. math::
        |d| >= \frac{(\max(d) + \min(d) + 1)^2}{4*\min(d)}

    then d is graphical.  This was shown in Theorem 6 in [2]_.

    References
    ----------
    .. [1] A. Tripathi and S. Vijay. "A note on a theorem of Erdős & Gallai",
       Discrete Mathematics, 265, pp. 417-420 (2003).
    .. [2] I.E. Zverovich and V.E. Zverovich. "Contributions to the theory
       of graphic sequences", Discrete Mathematics, 105, pp. 292-303 (1992).
    .. [EG1960] Erdős and Gallai, Mat. Lapok 11 264, 1960.
    """
    try:
        # num_degs[d] counts how many entries of the sequence equal d.
        dmax, dmin, dsum, n, num_degs = _basic_graphical_tests(deg_sequence)
    except nx.NetworkXUnfeasible:
        return False
    # Accept if sequence has no non-zero degrees or passes the ZZ condition
    if n == 0 or 4 * dmin * n >= (dmax + dmin + 1) * (dmax + dmin + 1):
        return True

    # Perform the EG checks using the reformulation of Zverovich and Zverovich
    # k tracks how many leading (largest) degrees have been consumed so far;
    # sum_deg is their running sum; sum_nj and sum_jnj accumulate the n_j
    # counts and j*n_j products for j < k needed by the rearranged inequality.
    k, sum_deg, sum_nj, sum_jnj = 0, 0, 0, 0
    for dk in range(dmax, dmin - 1, -1):
        if dk < k + 1:  # Check if already past Durfee index
            return True
        if num_degs[dk] > 0:
            run_size = num_degs[dk]  # Process a run of identical-valued degrees
            if dk < k + run_size:  # Check if end of run is past Durfee index
                run_size = dk - k  # Adjust back to Durfee index
            sum_deg += run_size * dk
            for v in range(run_size):
                sum_nj += num_degs[k + v]
                sum_jnj += (k + v) * num_degs[k + v]
            k += run_size
            # Rearranged Erdős-Gallai inequality at strong index k.
            if sum_deg > k * (n - 1) - k * sum_nj + sum_jnj:
                return False
    return True
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
@nx._dispatchable(graphs=None)
def is_multigraphical(sequence):
    """Returns True if some multigraph can realize the sequence.

    By Hakimi's criterion [1]_ a sequence of nonnegative integers is
    multigraphical exactly when its sum is even and no single degree
    exceeds the sum of all the others.

    Parameters
    ----------
    sequence : list
        A list of integers

    Returns
    -------
    valid : bool
        True if the sequence is a multigraphic degree sequence, False if not.

    Notes
    -----
    The worst-case run time is $O(n)$ where $n$ is the length of the sequence.

    References
    ----------
    .. [1] S. L. Hakimi. "On the realizability of a set of integers as
       degrees of the vertices of a linear graph", J. SIAM, 10, pp. 496-506
       (1962).
    """
    try:
        degrees = nx.utils.make_list_of_ints(sequence)
    except nx.NetworkXError:
        return False
    if any(d < 0 for d in degrees):
        return False
    total = sum(degrees)
    largest = max(degrees, default=0)
    # Even parity, and the largest degree must be matchable by the rest.
    return total % 2 == 0 and total >= 2 * largest
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
@nx._dispatchable(graphs=None)
def is_pseudographical(sequence):
    """Returns True if some pseudograph can realize the sequence.

    Every nonnegative integer sequence with an even sum is pseudographical
    (see [1]_), since self-loops and parallel edges are both allowed.

    Parameters
    ----------
    sequence : list or iterable container
        A sequence of integer node degrees

    Returns
    -------
    valid : bool
        True if the sequence is a pseudographic degree sequence, False if not.

    Notes
    -----
    The worst-case run time is $O(n)$ where n is the length of the sequence.

    References
    ----------
    .. [1] F. Boesch and F. Harary. "Line removal algorithms for graphs
       and their degree lists", IEEE Trans. Circuits and Systems, CAS-23(12),
       pp. 778-782 (1976).
    """
    try:
        degrees = nx.utils.make_list_of_ints(sequence)
    except nx.NetworkXError:
        return False
    # Even sum and no negative entries.  NOTE: min() raises ValueError on an
    # empty sequence, matching the upstream behavior of this function.
    return not sum(degrees) % 2 and min(degrees) >= 0
|
| 373 |
+
|
| 374 |
+
|
| 375 |
+
@nx._dispatchable(graphs=None)
def is_digraphical(in_sequence, out_sequence):
    r"""Returns True if some directed graph can realize the in- and out-degree
    sequences.

    Parameters
    ----------
    in_sequence : list or iterable container
        A sequence of integer node in-degrees

    out_sequence : list or iterable container
        A sequence of integer node out-degrees

    Returns
    -------
    valid : bool
        True if in and out-sequences are digraphic False if not.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)])
    >>> in_seq = (d for n, d in G.in_degree())
    >>> out_seq = (d for n, d in G.out_degree())
    >>> nx.is_digraphical(in_seq, out_seq)
    True

    To test a non-digraphical scenario:
    >>> in_seq_list = [d for n, d in G.in_degree()]
    >>> in_seq_list[-1] += 1
    >>> nx.is_digraphical(in_seq_list, out_seq)
    False

    Notes
    -----
    This algorithm is from Kleitman and Wang [1]_.
    The worst case runtime is $O(s \times \log n)$ where $s$ and $n$ are the
    sum and length of the sequences respectively.

    References
    ----------
    .. [1] D.J. Kleitman and D.L. Wang
       Algorithms for Constructing Graphs and Digraphs with Given Valences
       and Factors, Discrete Mathematics, 6(1), pp. 79-88 (1973)
    """
    try:
        in_deg_sequence = nx.utils.make_list_of_ints(in_sequence)
        out_deg_sequence = nx.utils.make_list_of_ints(out_sequence)
    except nx.NetworkXError:
        return False
    # Process the sequences and form two heaps to store degree pairs with
    # either zero or non-zero out degrees
    sumin, sumout, nin, nout = 0, 0, len(in_deg_sequence), len(out_deg_sequence)
    maxn = max(nin, nout)
    maxin = 0
    if maxn == 0:
        return True
    # Degrees are negated so Python's min-heap pops the largest values first.
    stubheap, zeroheap = [], []
    for n in range(maxn):
        in_deg, out_deg = 0, 0
        # Shorter sequence is implicitly padded with zeros.
        if n < nout:
            out_deg = out_deg_sequence[n]
        if n < nin:
            in_deg = in_deg_sequence[n]
        if in_deg < 0 or out_deg < 0:
            return False
        sumin, sumout, maxin = sumin + in_deg, sumout + out_deg, max(maxin, in_deg)
        if in_deg > 0:
            stubheap.append((-1 * out_deg, -1 * in_deg))
        elif out_deg > 0:
            zeroheap.append(-1 * out_deg)
    # Every arc contributes one in-stub and one out-stub; totals must match.
    if sumin != sumout:
        return False
    heapq.heapify(stubheap)
    heapq.heapify(zeroheap)

    modstubs = [(0, 0)] * (maxin + 1)
    # Successively reduce degree sequence by removing the maximum out degree
    while stubheap:
        # Take the first value in the sequence with non-zero in degree
        (freeout, freein) = heapq.heappop(stubheap)
        freein *= -1
        # Not enough other nodes remain to supply freein incoming arcs.
        if freein > len(stubheap) + len(zeroheap):
            return False

        # Attach out stubs to the nodes with the most in stubs
        mslen = 0
        for i in range(freein):
            # Prefer whichever heap currently holds the larger out degree.
            if zeroheap and (not stubheap or stubheap[0][0] > zeroheap[0]):
                stubout = heapq.heappop(zeroheap)
                stubin = 0
            else:
                (stubout, stubin) = heapq.heappop(stubheap)
                if stubout == 0:
                    return False
            # Check if target is now totally connected
            if stubout + 1 < 0 or stubin < 0:
                modstubs[mslen] = (stubout + 1, stubin)
                mslen += 1

        # Add back the nodes to the heap that still have available stubs
        for i in range(mslen):
            stub = modstubs[i]
            if stub[1] < 0:
                heapq.heappush(stubheap, stub)
            else:
                heapq.heappush(zeroheap, stub[0])
        if freeout < 0:
            heapq.heappush(zeroheap, freeout)
    return True
|
llava_next/lib/python3.10/site-packages/networkx/algorithms/non_randomness.py
ADDED
|
@@ -0,0 +1,98 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
r"""Computation of graph non-randomness"""
|
| 2 |
+
|
| 3 |
+
import math
|
| 4 |
+
|
| 5 |
+
import networkx as nx
|
| 6 |
+
from networkx.utils import not_implemented_for
|
| 7 |
+
|
| 8 |
+
__all__ = ["non_randomness"]
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable(edge_attrs="weight")
def non_randomness(G, k=None, weight="weight"):
    """Compute the non-randomness of graph G.

    The first returned value is the sum of edge non-randomness values
    over the whole graph (Eq. (4.4) in [1]_); the second is a relative
    measure of how far G is from an Erdos-Renyi random graph
    (Eq. (4.5) in [1]_) -- values near 0 indicate a random-looking graph.

    Parameters
    ----------
    G : NetworkX graph
        Graph must be symmetric, connected, and without self-loops.
    k : int
        The number of communities in G.  When None, a label-propagation
        community detection pass supplies the value.
    weight : string or None, optional (default="weight")
        Edge attribute holding numerical weights; None treats every edge
        as weight 1.

    Returns
    -------
    (float, float) tuple
        Non-randomness and relative non-randomness w.r.t. Erdos-Renyi
        random graphs.

    Raises
    ------
    NetworkXException
        If the input graph is not connected.
    NetworkXError
        If the graph is empty or contains self-loops.

    References
    ----------
    .. [1] Xiaowei Ying and Xintao Wu,
       On Randomness Measures for Social Networks,
       SIAM International Conference on Data Mining. 2009
    """
    import numpy as np

    # Reject inputs for which the measure is undefined.
    if nx.is_empty(G):
        raise nx.NetworkXError("non_randomness not applicable to empty graphs")
    if not nx.is_connected(G):
        raise nx.NetworkXException("Non connected graph.")
    if any(True for _ in nx.selfloop_edges(G)):
        raise nx.NetworkXError("Graph must not contain self-loops")

    if k is None:
        # Default community count via label propagation.
        k = len(tuple(nx.community.label_propagation_communities(G)))

    # Eq. (4.4): sum of the k leading eigenvalues of the adjacency matrix.
    adjacency = nx.to_numpy_array(G, weight=weight)
    spectrum = np.linalg.eigvals(adjacency)
    nr = float(np.real(np.sum(spectrum[:k])))

    num_nodes = G.number_of_nodes()
    num_edges = G.number_of_edges()
    p = (2 * k * num_edges) / (num_nodes * (num_nodes - k))

    # Eq. (4.5): standardize nr against the Erdos-Renyi expectation.
    expected = (num_nodes - 2 * k) * p + k
    nr_rd = (nr - expected) / math.sqrt(2 * k * p * (1 - p))

    return nr, nr_rd
|
llava_next/lib/python3.10/site-packages/networkx/algorithms/reciprocity.py
ADDED
|
@@ -0,0 +1,98 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Algorithms to calculate reciprocity in a directed graph."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx import NetworkXError
|
| 5 |
+
|
| 6 |
+
from ..utils import not_implemented_for
|
| 7 |
+
|
| 8 |
+
__all__ = ["reciprocity", "overall_reciprocity"]
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@not_implemented_for("undirected", "multigraph")
@nx._dispatchable
def reciprocity(G, nodes=None):
    r"""Compute the reciprocity in a directed graph.

    The reciprocity of a directed graph is the ratio of edges pointing in
    both directions to the total number of edges:
    $r = |{(u,v) \in G|(v,u) \in G}| / |{(u,v) \in G}|$.
    The reciprocity of a single node u is defined analogously over the
    edges attached to u.

    Parameters
    ----------
    G : graph
        A networkx directed graph
    nodes : container of nodes, optional (default=whole graph)
        Compute reciprocity for nodes in this container.

    Returns
    -------
    out : dictionary
        Reciprocity keyed by node label (or a single float -- see Notes).

    Raises
    ------
    NetworkXError
        If a single requested node is isolated.

    Notes
    -----
    Reciprocity is not defined for isolated nodes; for such nodes inside a
    container this function maps them to None.
    """
    if nodes is None:
        # No restriction: report the graph-wide ratio.
        return overall_reciprocity(G)

    if nodes in G:
        # A single node: unwrap the (node, value) pair from the iterator.
        _, value = next(_reciprocity_iter(G, nodes))
        if value is None:
            raise NetworkXError("Not defined for isolated nodes.")
        return value

    # A container of nodes: map each node to its reciprocity.
    return dict(_reciprocity_iter(G, nodes))
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def _reciprocity_iter(G, nodes):
    """Yield (node, reciprocity) pairs; reciprocity is None for isolated nodes."""
    for node in G.nbunch_iter(nodes):
        preds = set(G.predecessors(node))
        succs = set(G.successors(node))
        degree_total = len(preds) + len(succs)
        if degree_total == 0:
            # Reciprocity is undefined for isolated nodes.
            yield node, None
        else:
            # Each reciprocated partner appears in both sets, hence the 2x.
            yield node, 2 * len(preds & succs) / degree_total
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
@not_implemented_for("undirected", "multigraph")
@nx._dispatchable
def overall_reciprocity(G):
    """Compute the reciprocity for the whole graph.

    See the doc of :func:`reciprocity` for the definition.

    Parameters
    ----------
    G : graph
        A networkx graph

    Raises
    ------
    NetworkXError
        If the graph has no edges.
    """
    total_edges = G.number_of_edges()
    if total_edges == 0:
        raise NetworkXError("Not defined for empty graphs")
    # Each reciprocated pair collapses to a single undirected edge, so the
    # directed-edge count lost in the conversion counts the mutual pairs.
    mutual_edges = 2 * (total_edges - G.to_undirected().number_of_edges())
    return mutual_edges / total_edges
|
llava_next/lib/python3.10/site-packages/networkx/algorithms/smetric.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import networkx as nx
|
| 2 |
+
|
| 3 |
+
__all__ = ["s_metric"]
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
@nx._dispatchable
def s_metric(G):
    """Returns the s-metric [1]_ of graph.

    The s-metric is the sum over every edge ``(u, v)`` in `G` of the
    product ``deg(u) * deg(v)``.

    Parameters
    ----------
    G : graph
        The graph used to compute the s-metric.

    Returns
    -------
    s : float
        The s-metric of the graph.

    References
    ----------
    .. [1] Lun Li, David Alderson, John C. Doyle, and Walter Willinger,
           Towards a Theory of Scale-Free Graphs:
           Definition, Properties, and Implications (Extended Version), 2005.
           https://arxiv.org/abs/cond-mat/0501169
    """
    degree = G.degree
    total = sum(degree(u) * degree(v) for u, v in G.edges())
    return float(total)
|
llava_next/lib/python3.10/site-packages/networkx/algorithms/structuralholes.py
ADDED
|
@@ -0,0 +1,283 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions for computing measures of structural holes."""
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
|
| 5 |
+
__all__ = ["constraint", "local_constraint", "effective_size"]
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
@nx._dispatchable(edge_attrs="weight")
def mutual_weight(G, u, v, weight=None):
    """Returns the sum of the weights of the edge from `u` to `v` and
    the edge from `v` to `u` in `G`.

    `weight` is the edge data key that represents the edge weight.  An
    edge missing from the graph contributes 0; an edge present but
    lacking that key (or when `weight` is None) contributes 1.

    Pre-conditions: `u` and `v` must both be in `G`.
    """

    def one_direction(a, b):
        # Weight of the single edge a->b, or 0 when the edge is absent.
        try:
            return G[a][b].get(weight, 1)
        except KeyError:
            return 0

    return one_direction(u, v) + one_direction(v, u)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
@nx._dispatchable(edge_attrs="weight")
def normalized_mutual_weight(G, u, v, norm=sum, weight=None):
    """Returns the normalized mutual weight of the edges from `u` to `v`
    with respect to the mutual weights of the neighbors of `u` in `G`.

    `norm` aggregates the mutual weights between `u` and each of its
    (in- and out-)neighbors into the normalization factor; common choices
    are ``sum`` and ``max``.  The result is ``mutual_weight(G, u, v)``
    divided by that factor, or 0 when the factor is 0.

    `weight` can be None (all edges weigh 1) or the name of the edge
    attribute used as weight.
    """
    neighborhood = set(nx.all_neighbors(G, u))
    scale = norm(mutual_weight(G, u, w, weight) for w in neighborhood)
    if scale == 0:
        # Avoid division by zero for nodes with no weighted ties.
        return 0
    return mutual_weight(G, u, v, weight) / scale
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 53 |
+
def effective_size(G, nodes=None, weight=None):
|
| 54 |
+
r"""Returns the effective size of all nodes in the graph ``G``.
|
| 55 |
+
|
| 56 |
+
The *effective size* of a node's ego network is based on the concept
|
| 57 |
+
of redundancy. A person's ego network has redundancy to the extent
|
| 58 |
+
that her contacts are connected to each other as well. The
|
| 59 |
+
nonredundant part of a person's relationships is the effective
|
| 60 |
+
size of her ego network [1]_. Formally, the effective size of a
|
| 61 |
+
node $u$, denoted $e(u)$, is defined by
|
| 62 |
+
|
| 63 |
+
.. math::
|
| 64 |
+
|
| 65 |
+
e(u) = \sum_{v \in N(u) \setminus \{u\}}
|
| 66 |
+
\left(1 - \sum_{w \in N(v)} p_{uw} m_{vw}\right)
|
| 67 |
+
|
| 68 |
+
where $N(u)$ is the set of neighbors of $u$ and $p_{uw}$ is the
|
| 69 |
+
normalized mutual weight of the (directed or undirected) edges
|
| 70 |
+
joining $u$ and $v$, for each vertex $u$ and $v$ [1]_. And $m_{vw}$
|
| 71 |
+
is the mutual weight of $v$ and $w$ divided by $v$ highest mutual
|
| 72 |
+
weight with any of its neighbors. The *mutual weight* of $u$ and $v$
|
| 73 |
+
is the sum of the weights of edges joining them (edge weights are
|
| 74 |
+
assumed to be one if the graph is unweighted).
|
| 75 |
+
|
| 76 |
+
For the case of unweighted and undirected graphs, Borgatti proposed
|
| 77 |
+
a simplified formula to compute effective size [2]_
|
| 78 |
+
|
| 79 |
+
.. math::
|
| 80 |
+
|
| 81 |
+
e(u) = n - \frac{2t}{n}
|
| 82 |
+
|
| 83 |
+
where `t` is the number of ties in the ego network (not including
|
| 84 |
+
ties to ego) and `n` is the number of nodes (excluding ego).
|
| 85 |
+
|
| 86 |
+
Parameters
|
| 87 |
+
----------
|
| 88 |
+
G : NetworkX graph
|
| 89 |
+
The graph containing ``v``. Directed graphs are treated like
|
| 90 |
+
undirected graphs when computing neighbors of ``v``.
|
| 91 |
+
|
| 92 |
+
nodes : container, optional
|
| 93 |
+
Container of nodes in the graph ``G`` to compute the effective size.
|
| 94 |
+
If None, the effective size of every node is computed.
|
| 95 |
+
|
| 96 |
+
weight : None or string, optional
|
| 97 |
+
If None, all edge weights are considered equal.
|
| 98 |
+
Otherwise holds the name of the edge attribute used as weight.
|
| 99 |
+
|
| 100 |
+
Returns
|
| 101 |
+
-------
|
| 102 |
+
dict
|
| 103 |
+
Dictionary with nodes as keys and the effective size of the node as values.
|
| 104 |
+
|
| 105 |
+
Notes
|
| 106 |
+
-----
|
| 107 |
+
Burt also defined the related concept of *efficiency* of a node's ego
|
| 108 |
+
network, which is its effective size divided by the degree of that
|
| 109 |
+
node [1]_. So you can easily compute efficiency:
|
| 110 |
+
|
| 111 |
+
>>> G = nx.DiGraph()
|
| 112 |
+
>>> G.add_edges_from([(0, 1), (0, 2), (1, 0), (2, 1)])
|
| 113 |
+
>>> esize = nx.effective_size(G)
|
| 114 |
+
>>> efficiency = {n: v / G.degree(n) for n, v in esize.items()}
|
| 115 |
+
|
| 116 |
+
See also
|
| 117 |
+
--------
|
| 118 |
+
constraint
|
| 119 |
+
|
| 120 |
+
References
|
| 121 |
+
----------
|
| 122 |
+
.. [1] Burt, Ronald S.
|
| 123 |
+
*Structural Holes: The Social Structure of Competition.*
|
| 124 |
+
Cambridge: Harvard University Press, 1995.
|
| 125 |
+
|
| 126 |
+
.. [2] Borgatti, S.
|
| 127 |
+
"Structural Holes: Unpacking Burt's Redundancy Measures"
|
| 128 |
+
CONNECTIONS 20(1):35-38.
|
| 129 |
+
http://www.analytictech.com/connections/v20(1)/holes.htm
|
| 130 |
+
|
| 131 |
+
"""
|
| 132 |
+
|
| 133 |
+
def redundancy(G, u, v, weight=None):
|
| 134 |
+
nmw = normalized_mutual_weight
|
| 135 |
+
r = sum(
|
| 136 |
+
nmw(G, u, w, weight=weight) * nmw(G, v, w, norm=max, weight=weight)
|
| 137 |
+
for w in set(nx.all_neighbors(G, u))
|
| 138 |
+
)
|
| 139 |
+
return 1 - r
|
| 140 |
+
|
| 141 |
+
effective_size = {}
|
| 142 |
+
if nodes is None:
|
| 143 |
+
nodes = G
|
| 144 |
+
# Use Borgatti's simplified formula for unweighted and undirected graphs
|
| 145 |
+
if not G.is_directed() and weight is None:
|
| 146 |
+
for v in nodes:
|
| 147 |
+
# Effective size is not defined for isolated nodes
|
| 148 |
+
if len(G[v]) == 0:
|
| 149 |
+
effective_size[v] = float("nan")
|
| 150 |
+
continue
|
| 151 |
+
E = nx.ego_graph(G, v, center=False, undirected=True)
|
| 152 |
+
effective_size[v] = len(E) - (2 * E.size()) / len(E)
|
| 153 |
+
else:
|
| 154 |
+
for v in nodes:
|
| 155 |
+
# Effective size is not defined for isolated nodes
|
| 156 |
+
if len(G[v]) == 0:
|
| 157 |
+
effective_size[v] = float("nan")
|
| 158 |
+
continue
|
| 159 |
+
effective_size[v] = sum(
|
| 160 |
+
redundancy(G, v, u, weight) for u in set(nx.all_neighbors(G, v))
|
| 161 |
+
)
|
| 162 |
+
return effective_size
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 166 |
+
def constraint(G, nodes=None, weight=None):
|
| 167 |
+
r"""Returns the constraint on all nodes in the graph ``G``.
|
| 168 |
+
|
| 169 |
+
The *constraint* is a measure of the extent to which a node *v* is
|
| 170 |
+
invested in those nodes that are themselves invested in the
|
| 171 |
+
neighbors of *v*. Formally, the *constraint on v*, denoted `c(v)`,
|
| 172 |
+
is defined by
|
| 173 |
+
|
| 174 |
+
.. math::
|
| 175 |
+
|
| 176 |
+
c(v) = \sum_{w \in N(v) \setminus \{v\}} \ell(v, w)
|
| 177 |
+
|
| 178 |
+
where $N(v)$ is the subset of the neighbors of `v` that are either
|
| 179 |
+
predecessors or successors of `v` and $\ell(v, w)$ is the local
|
| 180 |
+
constraint on `v` with respect to `w` [1]_. For the definition of local
|
| 181 |
+
constraint, see :func:`local_constraint`.
|
| 182 |
+
|
| 183 |
+
Parameters
|
| 184 |
+
----------
|
| 185 |
+
G : NetworkX graph
|
| 186 |
+
The graph containing ``v``. This can be either directed or undirected.
|
| 187 |
+
|
| 188 |
+
nodes : container, optional
|
| 189 |
+
Container of nodes in the graph ``G`` to compute the constraint. If
|
| 190 |
+
None, the constraint of every node is computed.
|
| 191 |
+
|
| 192 |
+
weight : None or string, optional
|
| 193 |
+
If None, all edge weights are considered equal.
|
| 194 |
+
Otherwise holds the name of the edge attribute used as weight.
|
| 195 |
+
|
| 196 |
+
Returns
|
| 197 |
+
-------
|
| 198 |
+
dict
|
| 199 |
+
Dictionary with nodes as keys and the constraint on the node as values.
|
| 200 |
+
|
| 201 |
+
See also
|
| 202 |
+
--------
|
| 203 |
+
local_constraint
|
| 204 |
+
|
| 205 |
+
References
|
| 206 |
+
----------
|
| 207 |
+
.. [1] Burt, Ronald S.
|
| 208 |
+
"Structural holes and good ideas".
|
| 209 |
+
American Journal of Sociology (110): 349–399.
|
| 210 |
+
|
| 211 |
+
"""
|
| 212 |
+
if nodes is None:
|
| 213 |
+
nodes = G
|
| 214 |
+
constraint = {}
|
| 215 |
+
for v in nodes:
|
| 216 |
+
# Constraint is not defined for isolated nodes
|
| 217 |
+
if len(G[v]) == 0:
|
| 218 |
+
constraint[v] = float("nan")
|
| 219 |
+
continue
|
| 220 |
+
constraint[v] = sum(
|
| 221 |
+
local_constraint(G, v, n, weight) for n in set(nx.all_neighbors(G, v))
|
| 222 |
+
)
|
| 223 |
+
return constraint
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 227 |
+
def local_constraint(G, u, v, weight=None):
|
| 228 |
+
r"""Returns the local constraint on the node ``u`` with respect to
|
| 229 |
+
the node ``v`` in the graph ``G``.
|
| 230 |
+
|
| 231 |
+
Formally, the *local constraint on u with respect to v*, denoted
|
| 232 |
+
$\ell(u, v)$, is defined by
|
| 233 |
+
|
| 234 |
+
.. math::
|
| 235 |
+
|
| 236 |
+
\ell(u, v) = \left(p_{uv} + \sum_{w \in N(v)} p_{uw} p_{wv}\right)^2,
|
| 237 |
+
|
| 238 |
+
where $N(v)$ is the set of neighbors of $v$ and $p_{uv}$ is the
|
| 239 |
+
normalized mutual weight of the (directed or undirected) edges
|
| 240 |
+
joining $u$ and $v$, for each vertex $u$ and $v$ [1]_. The *mutual
|
| 241 |
+
weight* of $u$ and $v$ is the sum of the weights of edges joining
|
| 242 |
+
them (edge weights are assumed to be one if the graph is
|
| 243 |
+
unweighted).
|
| 244 |
+
|
| 245 |
+
Parameters
|
| 246 |
+
----------
|
| 247 |
+
G : NetworkX graph
|
| 248 |
+
The graph containing ``u`` and ``v``. This can be either
|
| 249 |
+
directed or undirected.
|
| 250 |
+
|
| 251 |
+
u : node
|
| 252 |
+
A node in the graph ``G``.
|
| 253 |
+
|
| 254 |
+
v : node
|
| 255 |
+
A node in the graph ``G``.
|
| 256 |
+
|
| 257 |
+
weight : None or string, optional
|
| 258 |
+
If None, all edge weights are considered equal.
|
| 259 |
+
Otherwise holds the name of the edge attribute used as weight.
|
| 260 |
+
|
| 261 |
+
Returns
|
| 262 |
+
-------
|
| 263 |
+
float
|
| 264 |
+
The constraint of the node ``v`` in the graph ``G``.
|
| 265 |
+
|
| 266 |
+
See also
|
| 267 |
+
--------
|
| 268 |
+
constraint
|
| 269 |
+
|
| 270 |
+
References
|
| 271 |
+
----------
|
| 272 |
+
.. [1] Burt, Ronald S.
|
| 273 |
+
"Structural holes and good ideas".
|
| 274 |
+
American Journal of Sociology (110): 349–399.
|
| 275 |
+
|
| 276 |
+
"""
|
| 277 |
+
nmw = normalized_mutual_weight
|
| 278 |
+
direct = nmw(G, u, v, weight=weight)
|
| 279 |
+
indirect = sum(
|
| 280 |
+
nmw(G, u, w, weight=weight) * nmw(G, w, v, weight=weight)
|
| 281 |
+
for w in set(nx.all_neighbors(G, u))
|
| 282 |
+
)
|
| 283 |
+
return (direct + indirect) ** 2
|
llava_next/lib/python3.10/site-packages/networkx/algorithms/tournament.py
ADDED
|
@@ -0,0 +1,403 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions concerning tournament graphs.
|
| 2 |
+
|
| 3 |
+
A `tournament graph`_ is a complete oriented graph. In other words, it
|
| 4 |
+
is a directed graph in which there is exactly one directed edge joining
|
| 5 |
+
each pair of distinct nodes. For each function in this module that
|
| 6 |
+
accepts a graph as input, you must provide a tournament graph. The
|
| 7 |
+
responsibility is on the caller to ensure that the graph is a tournament
|
| 8 |
+
graph:
|
| 9 |
+
|
| 10 |
+
>>> G = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
|
| 11 |
+
>>> nx.is_tournament(G)
|
| 12 |
+
True
|
| 13 |
+
|
| 14 |
+
To access the functions in this module, you must access them through the
|
| 15 |
+
:mod:`networkx.tournament` module::
|
| 16 |
+
|
| 17 |
+
>>> nx.tournament.is_reachable(G, 0, 1)
|
| 18 |
+
True
|
| 19 |
+
|
| 20 |
+
.. _tournament graph: https://en.wikipedia.org/wiki/Tournament_%28graph_theory%29
|
| 21 |
+
|
| 22 |
+
"""
|
| 23 |
+
|
| 24 |
+
from itertools import combinations
|
| 25 |
+
|
| 26 |
+
import networkx as nx
|
| 27 |
+
from networkx.algorithms.simple_paths import is_simple_path as is_path
|
| 28 |
+
from networkx.utils import arbitrary_element, not_implemented_for, py_random_state
|
| 29 |
+
|
| 30 |
+
__all__ = [
|
| 31 |
+
"hamiltonian_path",
|
| 32 |
+
"is_reachable",
|
| 33 |
+
"is_strongly_connected",
|
| 34 |
+
"is_tournament",
|
| 35 |
+
"random_tournament",
|
| 36 |
+
"score_sequence",
|
| 37 |
+
]
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def index_satisfying(iterable, condition):
|
| 41 |
+
"""Returns the index of the first element in `iterable` that
|
| 42 |
+
satisfies the given condition.
|
| 43 |
+
|
| 44 |
+
If no such element is found (that is, when the iterable is
|
| 45 |
+
exhausted), this returns the length of the iterable (that is, one
|
| 46 |
+
greater than the last index of the iterable).
|
| 47 |
+
|
| 48 |
+
`iterable` must not be empty. If `iterable` is empty, this
|
| 49 |
+
function raises :exc:`ValueError`.
|
| 50 |
+
|
| 51 |
+
"""
|
| 52 |
+
# Pre-condition: iterable must not be empty.
|
| 53 |
+
for i, x in enumerate(iterable):
|
| 54 |
+
if condition(x):
|
| 55 |
+
return i
|
| 56 |
+
# If we reach the end of the iterable without finding an element
|
| 57 |
+
# that satisfies the condition, return the length of the iterable,
|
| 58 |
+
# which is one greater than the index of its last element. If the
|
| 59 |
+
# iterable was empty, `i` will not be defined, so we raise an
|
| 60 |
+
# exception.
|
| 61 |
+
try:
|
| 62 |
+
return i + 1
|
| 63 |
+
except NameError as err:
|
| 64 |
+
raise ValueError("iterable must be non-empty") from err
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
@not_implemented_for("undirected")
|
| 68 |
+
@not_implemented_for("multigraph")
|
| 69 |
+
@nx._dispatchable
|
| 70 |
+
def is_tournament(G):
|
| 71 |
+
"""Returns True if and only if `G` is a tournament.
|
| 72 |
+
|
| 73 |
+
A tournament is a directed graph, with neither self-loops nor
|
| 74 |
+
multi-edges, in which there is exactly one directed edge joining
|
| 75 |
+
each pair of distinct nodes.
|
| 76 |
+
|
| 77 |
+
Parameters
|
| 78 |
+
----------
|
| 79 |
+
G : NetworkX graph
|
| 80 |
+
A directed graph representing a tournament.
|
| 81 |
+
|
| 82 |
+
Returns
|
| 83 |
+
-------
|
| 84 |
+
bool
|
| 85 |
+
Whether the given graph is a tournament graph.
|
| 86 |
+
|
| 87 |
+
Examples
|
| 88 |
+
--------
|
| 89 |
+
>>> G = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
|
| 90 |
+
>>> nx.is_tournament(G)
|
| 91 |
+
True
|
| 92 |
+
|
| 93 |
+
Notes
|
| 94 |
+
-----
|
| 95 |
+
Some definitions require a self-loop on each node, but that is not
|
| 96 |
+
the convention used here.
|
| 97 |
+
|
| 98 |
+
"""
|
| 99 |
+
# In a tournament, there is exactly one directed edge joining each pair.
|
| 100 |
+
return (
|
| 101 |
+
all((v in G[u]) ^ (u in G[v]) for u, v in combinations(G, 2))
|
| 102 |
+
and nx.number_of_selfloops(G) == 0
|
| 103 |
+
)
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
@not_implemented_for("undirected")
|
| 107 |
+
@not_implemented_for("multigraph")
|
| 108 |
+
@nx._dispatchable
|
| 109 |
+
def hamiltonian_path(G):
|
| 110 |
+
"""Returns a Hamiltonian path in the given tournament graph.
|
| 111 |
+
|
| 112 |
+
Each tournament has a Hamiltonian path. If furthermore, the
|
| 113 |
+
tournament is strongly connected, then the returned Hamiltonian path
|
| 114 |
+
is a Hamiltonian cycle (by joining the endpoints of the path).
|
| 115 |
+
|
| 116 |
+
Parameters
|
| 117 |
+
----------
|
| 118 |
+
G : NetworkX graph
|
| 119 |
+
A directed graph representing a tournament.
|
| 120 |
+
|
| 121 |
+
Returns
|
| 122 |
+
-------
|
| 123 |
+
path : list
|
| 124 |
+
A list of nodes which form a Hamiltonian path in `G`.
|
| 125 |
+
|
| 126 |
+
Examples
|
| 127 |
+
--------
|
| 128 |
+
>>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)])
|
| 129 |
+
>>> nx.is_tournament(G)
|
| 130 |
+
True
|
| 131 |
+
>>> nx.tournament.hamiltonian_path(G)
|
| 132 |
+
[0, 1, 2, 3]
|
| 133 |
+
|
| 134 |
+
Notes
|
| 135 |
+
-----
|
| 136 |
+
This is a recursive implementation with an asymptotic running time
|
| 137 |
+
of $O(n^2)$, ignoring multiplicative polylogarithmic factors, where
|
| 138 |
+
$n$ is the number of nodes in the graph.
|
| 139 |
+
|
| 140 |
+
"""
|
| 141 |
+
if len(G) == 0:
|
| 142 |
+
return []
|
| 143 |
+
if len(G) == 1:
|
| 144 |
+
return [arbitrary_element(G)]
|
| 145 |
+
v = arbitrary_element(G)
|
| 146 |
+
hampath = hamiltonian_path(G.subgraph(set(G) - {v}))
|
| 147 |
+
# Get the index of the first node in the path that does *not* have
|
| 148 |
+
# an edge to `v`, then insert `v` before that node.
|
| 149 |
+
index = index_satisfying(hampath, lambda u: v not in G[u])
|
| 150 |
+
hampath.insert(index, v)
|
| 151 |
+
return hampath
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
@py_random_state(1)
|
| 155 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
|
| 156 |
+
def random_tournament(n, seed=None):
|
| 157 |
+
r"""Returns a random tournament graph on `n` nodes.
|
| 158 |
+
|
| 159 |
+
Parameters
|
| 160 |
+
----------
|
| 161 |
+
n : int
|
| 162 |
+
The number of nodes in the returned graph.
|
| 163 |
+
seed : integer, random_state, or None (default)
|
| 164 |
+
Indicator of random number generation state.
|
| 165 |
+
See :ref:`Randomness<randomness>`.
|
| 166 |
+
|
| 167 |
+
Returns
|
| 168 |
+
-------
|
| 169 |
+
G : DiGraph
|
| 170 |
+
A tournament on `n` nodes, with exactly one directed edge joining
|
| 171 |
+
each pair of distinct nodes.
|
| 172 |
+
|
| 173 |
+
Notes
|
| 174 |
+
-----
|
| 175 |
+
This algorithm adds, for each pair of distinct nodes, an edge with
|
| 176 |
+
uniformly random orientation. In other words, `\binom{n}{2}` flips
|
| 177 |
+
of an unbiased coin decide the orientations of the edges in the
|
| 178 |
+
graph.
|
| 179 |
+
|
| 180 |
+
"""
|
| 181 |
+
# Flip an unbiased coin for each pair of distinct nodes.
|
| 182 |
+
coins = (seed.random() for i in range((n * (n - 1)) // 2))
|
| 183 |
+
pairs = combinations(range(n), 2)
|
| 184 |
+
edges = ((u, v) if r < 0.5 else (v, u) for (u, v), r in zip(pairs, coins))
|
| 185 |
+
return nx.DiGraph(edges)
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
@not_implemented_for("undirected")
|
| 189 |
+
@not_implemented_for("multigraph")
|
| 190 |
+
@nx._dispatchable
|
| 191 |
+
def score_sequence(G):
|
| 192 |
+
"""Returns the score sequence for the given tournament graph.
|
| 193 |
+
|
| 194 |
+
The score sequence is the sorted list of the out-degrees of the
|
| 195 |
+
nodes of the graph.
|
| 196 |
+
|
| 197 |
+
Parameters
|
| 198 |
+
----------
|
| 199 |
+
G : NetworkX graph
|
| 200 |
+
A directed graph representing a tournament.
|
| 201 |
+
|
| 202 |
+
Returns
|
| 203 |
+
-------
|
| 204 |
+
list
|
| 205 |
+
A sorted list of the out-degrees of the nodes of `G`.
|
| 206 |
+
|
| 207 |
+
Examples
|
| 208 |
+
--------
|
| 209 |
+
>>> G = nx.DiGraph([(1, 0), (1, 3), (0, 2), (0, 3), (2, 1), (3, 2)])
|
| 210 |
+
>>> nx.is_tournament(G)
|
| 211 |
+
True
|
| 212 |
+
>>> nx.tournament.score_sequence(G)
|
| 213 |
+
[1, 1, 2, 2]
|
| 214 |
+
|
| 215 |
+
"""
|
| 216 |
+
return sorted(d for v, d in G.out_degree())
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
@not_implemented_for("undirected")
|
| 220 |
+
@not_implemented_for("multigraph")
|
| 221 |
+
@nx._dispatchable(preserve_edge_attrs={"G": {"weight": 1}})
|
| 222 |
+
def tournament_matrix(G):
|
| 223 |
+
r"""Returns the tournament matrix for the given tournament graph.
|
| 224 |
+
|
| 225 |
+
This function requires SciPy.
|
| 226 |
+
|
| 227 |
+
The *tournament matrix* of a tournament graph with edge set *E* is
|
| 228 |
+
the matrix *T* defined by
|
| 229 |
+
|
| 230 |
+
.. math::
|
| 231 |
+
|
| 232 |
+
T_{i j} =
|
| 233 |
+
\begin{cases}
|
| 234 |
+
+1 & \text{if } (i, j) \in E \\
|
| 235 |
+
-1 & \text{if } (j, i) \in E \\
|
| 236 |
+
0 & \text{if } i == j.
|
| 237 |
+
\end{cases}
|
| 238 |
+
|
| 239 |
+
An equivalent definition is `T = A - A^T`, where *A* is the
|
| 240 |
+
adjacency matrix of the graph `G`.
|
| 241 |
+
|
| 242 |
+
Parameters
|
| 243 |
+
----------
|
| 244 |
+
G : NetworkX graph
|
| 245 |
+
A directed graph representing a tournament.
|
| 246 |
+
|
| 247 |
+
Returns
|
| 248 |
+
-------
|
| 249 |
+
SciPy sparse array
|
| 250 |
+
The tournament matrix of the tournament graph `G`.
|
| 251 |
+
|
| 252 |
+
Raises
|
| 253 |
+
------
|
| 254 |
+
ImportError
|
| 255 |
+
If SciPy is not available.
|
| 256 |
+
|
| 257 |
+
"""
|
| 258 |
+
A = nx.adjacency_matrix(G)
|
| 259 |
+
return A - A.T
|
| 260 |
+
|
| 261 |
+
|
| 262 |
+
@not_implemented_for("undirected")
|
| 263 |
+
@not_implemented_for("multigraph")
|
| 264 |
+
@nx._dispatchable
|
| 265 |
+
def is_reachable(G, s, t):
|
| 266 |
+
"""Decides whether there is a path from `s` to `t` in the
|
| 267 |
+
tournament.
|
| 268 |
+
|
| 269 |
+
This function is more theoretically efficient than the reachability
|
| 270 |
+
checks than the shortest path algorithms in
|
| 271 |
+
:mod:`networkx.algorithms.shortest_paths`.
|
| 272 |
+
|
| 273 |
+
The given graph **must** be a tournament, otherwise this function's
|
| 274 |
+
behavior is undefined.
|
| 275 |
+
|
| 276 |
+
Parameters
|
| 277 |
+
----------
|
| 278 |
+
G : NetworkX graph
|
| 279 |
+
A directed graph representing a tournament.
|
| 280 |
+
|
| 281 |
+
s : node
|
| 282 |
+
A node in the graph.
|
| 283 |
+
|
| 284 |
+
t : node
|
| 285 |
+
A node in the graph.
|
| 286 |
+
|
| 287 |
+
Returns
|
| 288 |
+
-------
|
| 289 |
+
bool
|
| 290 |
+
Whether there is a path from `s` to `t` in `G`.
|
| 291 |
+
|
| 292 |
+
Examples
|
| 293 |
+
--------
|
| 294 |
+
>>> G = nx.DiGraph([(1, 0), (1, 3), (1, 2), (2, 3), (2, 0), (3, 0)])
|
| 295 |
+
>>> nx.is_tournament(G)
|
| 296 |
+
True
|
| 297 |
+
>>> nx.tournament.is_reachable(G, 1, 3)
|
| 298 |
+
True
|
| 299 |
+
>>> nx.tournament.is_reachable(G, 3, 2)
|
| 300 |
+
False
|
| 301 |
+
|
| 302 |
+
Notes
|
| 303 |
+
-----
|
| 304 |
+
Although this function is more theoretically efficient than the
|
| 305 |
+
generic shortest path functions, a speedup requires the use of
|
| 306 |
+
parallelism. Though it may in the future, the current implementation
|
| 307 |
+
does not use parallelism, thus you may not see much of a speedup.
|
| 308 |
+
|
| 309 |
+
This algorithm comes from [1].
|
| 310 |
+
|
| 311 |
+
References
|
| 312 |
+
----------
|
| 313 |
+
.. [1] Tantau, Till.
|
| 314 |
+
"A note on the complexity of the reachability problem for
|
| 315 |
+
tournaments."
|
| 316 |
+
*Electronic Colloquium on Computational Complexity*. 2001.
|
| 317 |
+
<http://eccc.hpi-web.de/report/2001/092/>
|
| 318 |
+
"""
|
| 319 |
+
|
| 320 |
+
def two_neighborhood(G, v):
|
| 321 |
+
"""Returns the set of nodes at distance at most two from `v`.
|
| 322 |
+
|
| 323 |
+
`G` must be a graph and `v` a node in that graph.
|
| 324 |
+
|
| 325 |
+
The returned set includes the nodes at distance zero (that is,
|
| 326 |
+
the node `v` itself), the nodes at distance one (that is, the
|
| 327 |
+
out-neighbors of `v`), and the nodes at distance two.
|
| 328 |
+
|
| 329 |
+
"""
|
| 330 |
+
return {
|
| 331 |
+
x for x in G if x == v or x in G[v] or any(is_path(G, [v, z, x]) for z in G)
|
| 332 |
+
}
|
| 333 |
+
|
| 334 |
+
def is_closed(G, nodes):
|
| 335 |
+
"""Decides whether the given set of nodes is closed.
|
| 336 |
+
|
| 337 |
+
A set *S* of nodes is *closed* if for each node *u* in the graph
|
| 338 |
+
not in *S* and for each node *v* in *S*, there is an edge from
|
| 339 |
+
*u* to *v*.
|
| 340 |
+
|
| 341 |
+
"""
|
| 342 |
+
return all(v in G[u] for u in set(G) - nodes for v in nodes)
|
| 343 |
+
|
| 344 |
+
neighborhoods = [two_neighborhood(G, v) for v in G]
|
| 345 |
+
return all(not (is_closed(G, S) and s in S and t not in S) for S in neighborhoods)
|
| 346 |
+
|
| 347 |
+
|
| 348 |
+
@not_implemented_for("undirected")
|
| 349 |
+
@not_implemented_for("multigraph")
|
| 350 |
+
@nx._dispatchable(name="tournament_is_strongly_connected")
|
| 351 |
+
def is_strongly_connected(G):
|
| 352 |
+
"""Decides whether the given tournament is strongly connected.
|
| 353 |
+
|
| 354 |
+
This function is more theoretically efficient than the
|
| 355 |
+
:func:`~networkx.algorithms.components.is_strongly_connected`
|
| 356 |
+
function.
|
| 357 |
+
|
| 358 |
+
The given graph **must** be a tournament, otherwise this function's
|
| 359 |
+
behavior is undefined.
|
| 360 |
+
|
| 361 |
+
Parameters
|
| 362 |
+
----------
|
| 363 |
+
G : NetworkX graph
|
| 364 |
+
A directed graph representing a tournament.
|
| 365 |
+
|
| 366 |
+
Returns
|
| 367 |
+
-------
|
| 368 |
+
bool
|
| 369 |
+
Whether the tournament is strongly connected.
|
| 370 |
+
|
| 371 |
+
Examples
|
| 372 |
+
--------
|
| 373 |
+
>>> G = nx.DiGraph([(0, 1), (0, 2), (1, 2), (1, 3), (2, 3), (3, 0)])
|
| 374 |
+
>>> nx.is_tournament(G)
|
| 375 |
+
True
|
| 376 |
+
>>> nx.tournament.is_strongly_connected(G)
|
| 377 |
+
True
|
| 378 |
+
>>> G.remove_edge(3, 0)
|
| 379 |
+
>>> G.add_edge(0, 3)
|
| 380 |
+
>>> nx.is_tournament(G)
|
| 381 |
+
True
|
| 382 |
+
>>> nx.tournament.is_strongly_connected(G)
|
| 383 |
+
False
|
| 384 |
+
|
| 385 |
+
Notes
|
| 386 |
+
-----
|
| 387 |
+
Although this function is more theoretically efficient than the
|
| 388 |
+
generic strong connectivity function, a speedup requires the use of
|
| 389 |
+
parallelism. Though it may in the future, the current implementation
|
| 390 |
+
does not use parallelism, thus you may not see much of a speedup.
|
| 391 |
+
|
| 392 |
+
This algorithm comes from [1].
|
| 393 |
+
|
| 394 |
+
References
|
| 395 |
+
----------
|
| 396 |
+
.. [1] Tantau, Till.
|
| 397 |
+
"A note on the complexity of the reachability problem for
|
| 398 |
+
tournaments."
|
| 399 |
+
*Electronic Colloquium on Computational Complexity*. 2001.
|
| 400 |
+
<http://eccc.hpi-web.de/report/2001/092/>
|
| 401 |
+
|
| 402 |
+
"""
|
| 403 |
+
return all(is_reachable(G, u, v) for u in G for v in G)
|
llava_next/lib/python3.10/site-packages/networkx/algorithms/triads.py
ADDED
|
@@ -0,0 +1,604 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# See https://github.com/networkx/networkx/pull/1474
|
| 2 |
+
# Copyright 2011 Reya Group <http://www.reyagroup.com>
|
| 3 |
+
# Copyright 2011 Alex Levenson <alex@isnotinvain.com>
|
| 4 |
+
# Copyright 2011 Diederik van Liere <diederik.vanliere@rotman.utoronto.ca>
|
| 5 |
+
"""Functions for analyzing triads of a graph."""
|
| 6 |
+
|
| 7 |
+
from collections import defaultdict
|
| 8 |
+
from itertools import combinations, permutations
|
| 9 |
+
|
| 10 |
+
import networkx as nx
|
| 11 |
+
from networkx.utils import not_implemented_for, py_random_state
|
| 12 |
+
|
| 13 |
+
__all__ = [
    "triadic_census",
    "is_triad",
    "all_triplets",
    "all_triads",
    "triads_by_type",
    "triad_type",
    "random_triad",
]

#: The integer codes representing each type of triad.
#:
#: Triads that are the same up to symmetry have the same code.
#: Indexed by the 6-bit edge code produced by ``_tricode`` (64 entries,
#: one per possible directed-edge configuration among three nodes).
TRICODES = (
    1, 2, 2, 3, 2, 4, 6, 8, 2, 6, 5, 7, 3, 8, 7, 11,
    2, 6, 4, 8, 5, 9, 9, 13, 6, 10, 9, 14, 7, 14, 12, 15,
    2, 5, 6, 7, 6, 9, 10, 14, 4, 9, 9, 12, 8, 13, 14, 15,
    3, 7, 8, 11, 7, 12, 14, 15, 8, 14, 13, 15, 11, 15, 15, 16,
)

#: The names of each type of triad. The order of the elements is
#: important: it corresponds to the tricodes given in :data:`TRICODES`.
TRIAD_NAMES = (
    "003",
    "012",
    "102",
    "021D",
    "021U",
    "021C",
    "111D",
    "111U",
    "030T",
    "030C",
    "201",
    "120D",
    "120U",
    "120C",
    "210",
    "300",
)


#: A dictionary mapping triad code to triad name.
#: Built once at import time so census counting is a plain dict lookup.
TRICODE_TO_NAME = {i: TRIAD_NAMES[code - 1] for i, code in enumerate(TRICODES)}
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def _tricode(G, v, u, w):
|
| 120 |
+
"""Returns the integer code of the given triad.
|
| 121 |
+
|
| 122 |
+
This is some fancy magic that comes from Batagelj and Mrvar's paper. It
|
| 123 |
+
treats each edge joining a pair of `v`, `u`, and `w` as a bit in
|
| 124 |
+
the binary representation of an integer.
|
| 125 |
+
|
| 126 |
+
"""
|
| 127 |
+
combos = ((v, u, 1), (u, v, 2), (v, w, 4), (w, v, 8), (u, w, 16), (w, u, 32))
|
| 128 |
+
return sum(x for u, v, x in combos if v in G[u])
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
@not_implemented_for("undirected")
@nx._dispatchable
def triadic_census(G, nodelist=None):
    """Determines the triadic census of a directed graph.

    The triadic census is a count of how many of the 16 possible types of
    triads are present in a directed graph. If a list of nodes is passed, then
    only those triads are taken into account which have elements of nodelist in them.

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph
    nodelist : list
        List of nodes for which you want to calculate triadic census

    Returns
    -------
    census : dict
       Dictionary with triad type as keys and number of occurrences as values.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1), (3, 4), (4, 1), (4, 2)])
    >>> triadic_census = nx.triadic_census(G)
    >>> for key, value in triadic_census.items():
    ...     print(f"{key}: {value}")
    003: 0
    012: 0
    102: 0
    021D: 0
    021U: 0
    021C: 0
    111D: 0
    111U: 0
    030T: 2
    030C: 2
    201: 0
    120D: 0
    120U: 0
    120C: 0
    210: 0
    300: 0

    Notes
    -----
    This algorithm has complexity $O(m)$ where $m$ is the number of edges in
    the graph.

    For undirected graphs, the triadic census can be computed by first converting
    the graph into a directed graph using the ``G.to_directed()`` method.
    After this conversion, only the triad types 003, 102, 201 and 300 will be
    present in the undirected scenario.

    Raises
    ------
    ValueError
        If `nodelist` contains duplicate nodes or nodes not in `G`.
        If you want to ignore this you can preprocess with `set(nodelist) & G.nodes`

    See also
    --------
    triad_graph

    References
    ----------
    .. [1] Vladimir Batagelj and Andrej Mrvar, A subquadratic triad census
        algorithm for large sparse networks with small maximum degree,
        University of Ljubljana,
        http://vlado.fmf.uni-lj.si/pub/networks/doc/triads/triads.pdf

    """
    nodeset = set(G.nbunch_iter(nodelist))
    if nodelist is not None and len(nodelist) != len(nodeset):
        raise ValueError("nodelist includes duplicate nodes or nodes not in G")

    N = len(G)
    Nnot = N - len(nodeset)  # can signal special counting for subset of nodes

    # create an ordering of nodes with nodeset nodes first
    m = {n: i for i, n in enumerate(nodeset)}
    if Nnot:
        # add non-nodeset nodes later in the ordering
        not_nodeset = G.nodes - nodeset
        m.update((n, i + N) for i, n in enumerate(not_nodeset))

    # build all_neighbor dicts for easy counting
    # After Python 3.8 can leave off these keys(). Speedup also using G._pred
    # nbrs = {n: G._pred[n].keys() | G._succ[n].keys() for n in G}
    nbrs = {n: G.pred[n].keys() | G.succ[n].keys() for n in G}
    dbl_nbrs = {n: G.pred[n].keys() & G.succ[n].keys() for n in G}

    if Nnot:
        # single (one-direction-only) neighbors of nodes outside the nodeset
        sgl_nbrs = {n: G.pred[n].keys() ^ G.succ[n].keys() for n in not_nodeset}
        # find number of edges not incident to nodes in nodeset
        sgl = sum(1 for n in not_nodeset for nbr in sgl_nbrs[n] if nbr not in nodeset)
        sgl_edges_outside = sgl // 2
        dbl = sum(1 for n in not_nodeset for nbr in dbl_nbrs[n] if nbr not in nodeset)
        dbl_edges_outside = dbl // 2

    # Initialize the count for each triad to be zero.
    census = {name: 0 for name in TRIAD_NAMES}
    # Main loop over nodes
    for v in nodeset:
        vnbrs = nbrs[v]
        dbl_vnbrs = dbl_nbrs[v]
        if Nnot:
            # set up counts of edges attached to v.
            sgl_unbrs_bdy = sgl_unbrs_out = dbl_unbrs_bdy = dbl_unbrs_out = 0
        for u in vnbrs:
            # process each connected pair (v, u) only once, in m-order
            if m[u] <= m[v]:
                continue
            unbrs = nbrs[u]
            neighbors = (vnbrs | unbrs) - {u, v}
            # Count connected triads.
            for w in neighbors:
                if m[u] < m[w] or (m[v] < m[w] < m[u] and v not in nbrs[w]):
                    code = _tricode(G, v, u, w)
                    census[TRICODE_TO_NAME[code]] += 1

            # Use a formula for dyadic triads with edge incident to v
            if u in dbl_vnbrs:
                census["102"] += N - len(neighbors) - 2
            else:
                census["012"] += N - len(neighbors) - 2

            # Count edges attached to v. Subtract later to get triads with v isolated
            # _out are (u,unbr) for unbrs outside boundary of nodeset
            # _bdy are (u,unbr) for unbrs on boundary of nodeset (get double counted)
            if Nnot and u not in nodeset:
                sgl_unbrs = sgl_nbrs[u]
                sgl_unbrs_bdy += len(sgl_unbrs & vnbrs - nodeset)
                sgl_unbrs_out += len(sgl_unbrs - vnbrs - nodeset)
                dbl_unbrs = dbl_nbrs[u]
                dbl_unbrs_bdy += len(dbl_unbrs & vnbrs - nodeset)
                dbl_unbrs_out += len(dbl_unbrs - vnbrs - nodeset)
        # if nodeset == G.nodes, skip this b/c we will find the edge later.
        if Nnot:
            # Count edges outside nodeset not connected with v (v isolated triads)
            census["012"] += sgl_edges_outside - (sgl_unbrs_out + sgl_unbrs_bdy // 2)
            census["102"] += dbl_edges_outside - (dbl_unbrs_out + dbl_unbrs_bdy // 2)

    # calculate null triads: "003"
    # null triads = total number of possible triads - all found triads
    total_triangles = (N * (N - 1) * (N - 2)) // 6
    triangles_without_nodeset = (Nnot * (Nnot - 1) * (Nnot - 2)) // 6
    total_census = total_triangles - triangles_without_nodeset
    census["003"] = total_census - sum(census.values())

    return census
|
| 281 |
+
|
| 282 |
+
|
| 283 |
+
@nx._dispatchable
def is_triad(G):
    """Returns True if the graph G is a triad, else False.

    A triad is a directed graph on exactly three nodes with no self-loops.

    Parameters
    ----------
    G : graph
       A NetworkX Graph

    Returns
    -------
    istriad : boolean
       Whether G is a valid triad

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
    >>> nx.is_triad(G)
    True
    >>> G.add_edge(0, 1)
    >>> nx.is_triad(G)
    False
    """
    # Guard clauses: anything that is not a directed order-3 graph fails fast.
    if not isinstance(G, nx.Graph):
        return False
    if G.order() != 3 or not nx.is_directed(G):
        return False
    # A triad may not contain self-loops.
    return all((n, n) not in G.edges() for n in G.nodes())
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
@not_implemented_for("undirected")
@nx._dispatchable
def all_triplets(G):
    """Returns a generator of all possible sets of 3 nodes in a DiGraph.

    .. deprecated:: 3.3

       all_triplets is deprecated and will be removed in NetworkX version 3.5.
       Use `itertools.combinations` instead::

          all_triplets = itertools.combinations(G, 3)

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph

    Returns
    -------
    triplets : generator of 3-tuples
       Generator of tuples of 3 nodes

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 4)])
    >>> list(nx.all_triplets(G))
    [(1, 2, 3), (1, 2, 4), (1, 3, 4), (2, 3, 4)]

    """
    import warnings

    # Deprecation notice; stacklevel points past the dispatch machinery.
    msg = (
        "\n\nall_triplets is deprecated and will be removed in v3.5.\n"
        "Use `itertools.combinations(G, 3)` instead."
    )
    warnings.warn(msg, category=DeprecationWarning, stacklevel=4)
    return combinations(G.nodes(), 3)
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
@not_implemented_for("undirected")
@nx._dispatchable(returns_graph=True)
def all_triads(G):
    """A generator of all possible triads in G.

    Yields the induced subgraph of every 3-node combination, so each
    yielded graph is an independent order-3 DiGraph copy.

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph

    Returns
    -------
    all_triads : generator of DiGraphs
       Generator of triads (order-3 DiGraphs)

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1), (3, 4), (4, 1), (4, 2)])
    >>> for triad in nx.all_triads(G):
    ...     print(triad.edges)
    [(1, 2), (2, 3), (3, 1)]
    [(1, 2), (4, 1), (4, 2)]
    [(3, 1), (3, 4), (4, 1)]
    [(2, 3), (3, 4), (4, 2)]

    """
    for node_trio in combinations(G.nodes(), 3):
        yield G.subgraph(node_trio).copy()
|
| 385 |
+
|
| 386 |
+
|
| 387 |
+
@not_implemented_for("undirected")
@nx._dispatchable
def triads_by_type(G):
    """Returns a list of all triads for each triad type in a directed graph.
    There are exactly 16 different types of triads possible. Suppose 1, 2, 3 are three
    nodes, they will be classified as a particular triad type if their connections
    are as follows:

    - 003: 1, 2, 3
    - 012: 1 -> 2, 3
    - 102: 1 <-> 2, 3
    - 021D: 1 <- 2 -> 3
    - 021U: 1 -> 2 <- 3
    - 021C: 1 -> 2 -> 3
    - 111D: 1 <-> 2 <- 3
    - 111U: 1 <-> 2 -> 3
    - 030T: 1 -> 2 -> 3, 1 -> 3
    - 030C: 1 <- 2 <- 3, 1 -> 3
    - 201: 1 <-> 2 <-> 3
    - 120D: 1 <- 2 -> 3, 1 <-> 3
    - 120U: 1 -> 2 <- 3, 1 <-> 3
    - 120C: 1 -> 2 -> 3, 1 <-> 3
    - 210: 1 -> 2 <-> 3, 1 <-> 3
    - 300: 1 <-> 2 <-> 3, 1 <-> 3

    Refer to the :doc:`example gallery </auto_examples/graph/plot_triad_types>`
    for visual examples of the triad types.

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph

    Returns
    -------
    tri_by_type : dict
       Dictionary with triad types as keys and lists of triads as values.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 1), (5, 6), (5, 4), (6, 7)])
    >>> dict = nx.triads_by_type(G)
    >>> dict["120C"][0].edges()
    OutEdgeView([(1, 2), (1, 3), (2, 3), (3, 1)])
    >>> dict["012"][0].edges()
    OutEdgeView([(1, 2)])

    References
    ----------
    .. [1] Snijders, T. (2012). "Transitivity and triads." University of
        Oxford.
        https://web.archive.org/web/20170830032057/http://www.stats.ox.ac.uk/~snijders/Trans_Triads_ha.pdf
    """
    # Classify every induced order-3 subgraph and bucket it by its type name.
    # num_triads = o * (o - 1) * (o - 2) // 6
    # if num_triads > TRIAD_LIMIT: print(WARNING)
    tri_by_type = defaultdict(list)
    for triad in all_triads(G):
        tri_by_type[triad_type(triad)].append(triad)
    return tri_by_type
|
| 448 |
+
|
| 449 |
+
|
| 450 |
+
@not_implemented_for("undirected")
@nx._dispatchable
def triad_type(G):
    """Returns the sociological triad type for a triad.

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph with 3 nodes

    Returns
    -------
    triad_type : str
       A string identifying the triad type

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
    >>> nx.triad_type(G)
    '030C'
    >>> G.add_edge(1, 3)
    >>> nx.triad_type(G)
    '120C'

    Notes
    -----
    There can be 6 unique edges in a triad (order-3 DiGraph) (so 2^^6=64 unique
    triads given 3 nodes). These 64 triads each display exactly 1 of 16
    topologies of triads (topologies can be permuted). These topologies are
    identified by the following notation:

    {m}{a}{n}{type} (for example: 111D, 210, 102)

    Here:

    {m}     = number of mutual ties (takes 0, 1, 2, 3); a mutual tie is (0,1)
              AND (1,0)
    {a}     = number of asymmetric ties (takes 0, 1, 2, 3); an asymmetric tie
              is (0,1) BUT NOT (1,0) or vice versa
    {n}     = number of null ties (takes 0, 1, 2, 3); a null tie is NEITHER
              (0,1) NOR (1,0)
    {type}  = a letter (takes U, D, C, T) corresponding to up, down, cyclical
              and transitive. This is only used for topologies that can have
              more than one form (eg: 021D and 021U).

    Raises
    ------
    NetworkXAlgorithmError
        If `G` is not a triad (an order-3 DiGraph without self-loops).

    References
    ----------
    .. [1] Snijders, T. (2012). "Transitivity and triads." University of
        Oxford.
        https://web.archive.org/web/20170830032057/http://www.stats.ox.ac.uk/~snijders/Trans_Triads_ha.pdf
    """
    if not is_triad(G):
        raise nx.NetworkXAlgorithmError("G is not a triad (order-3 DiGraph)")
    num_edges = len(G.edges())
    if num_edges == 0:
        return "003"
    elif num_edges == 1:
        return "012"
    elif num_edges == 2:
        e1, e2 = G.edges()
        if set(e1) == set(e2):
            # the two arcs join the same pair of nodes: one mutual dyad
            return "102"
        elif e1[0] == e2[0]:
            return "021D"
        elif e1[1] == e2[1]:
            return "021U"
        elif e1[1] == e2[0] or e2[1] == e1[0]:
            return "021C"
    elif num_edges == 3:
        for e1, e2, e3 in permutations(G.edges(), 3):
            if set(e1) == set(e2):
                # one mutual dyad plus a single arc
                if e3[0] in e1:
                    return "111U"
                # e3[1] in e1:
                return "111D"
            elif set(e1).symmetric_difference(set(e2)) == set(e3):
                # three asymmetric arcs; cyclic iff every node is a tail
                # (BUGFIX: removed tautological duplicate operand
                # `{e1[0], e2[0], e3[0]} == {e1[0], e2[0], e3[0]}` from the
                # original chained comparison -- a set always equals itself,
                # so only the comparison with the node set matters)
                if {e1[0], e2[0], e3[0]} == set(G.nodes()):
                    return "030C"
                # e3 == (e1[0], e2[1]) and e2 == (e1[1], e3[1]):
                return "030T"
    elif num_edges == 4:
        for e1, e2, e3, e4 in permutations(G.edges(), 4):
            if set(e1) == set(e2):
                # identify pair of symmetric edges (which necessarily exists)
                if set(e3) == set(e4):
                    return "201"
                if {e3[0]} == {e4[0]} == set(e3).intersection(set(e4)):
                    return "120D"
                if {e3[1]} == {e4[1]} == set(e3).intersection(set(e4)):
                    return "120U"
                if e3[1] == e4[0]:
                    return "120C"
    elif num_edges == 5:
        return "210"
    elif num_edges == 6:
        return "300"
|
| 546 |
+
|
| 547 |
+
|
| 548 |
+
@not_implemented_for("undirected")
@py_random_state(1)
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def random_triad(G, seed=None):
    """Returns a random triad from a directed graph.

    .. deprecated:: 3.3

       random_triad is deprecated and will be removed in version 3.5.
       Use random sampling directly instead::

          G.subgraph(random.sample(list(G), 3))

    Parameters
    ----------
    G : digraph
       A NetworkX DiGraph
    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    G2 : subgraph
       A randomly selected triad (order-3 NetworkX DiGraph)

    Raises
    ------
    NetworkXError
        If the input Graph has less than 3 nodes.

    Examples
    --------
    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 1), (5, 6), (5, 4), (6, 7)])
    >>> triad = nx.random_triad(G, seed=1)
    >>> triad.edges
    OutEdgeView([(1, 2)])

    """
    import warnings

    # Emit the deprecation notice before validating, matching historical
    # behavior (callers see the warning even when the graph is too small).
    msg = (
        "\n\nrandom_triad is deprecated and will be removed in NetworkX v3.5.\n"
        "Use random.sample instead, e.g.::\n\n"
        "\tG.subgraph(random.sample(list(G), 3))\n"
    )
    warnings.warn(msg, category=DeprecationWarning, stacklevel=5)
    if len(G) < 3:
        raise nx.NetworkXError(
            f"G needs at least 3 nodes to form a triad; (it has {len(G)} nodes)"
        )
    chosen = seed.sample(list(G.nodes()), 3)
    return G.subgraph(chosen)
|
llava_next/lib/python3.10/site-packages/networkx/conftest.py
ADDED
|
@@ -0,0 +1,284 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Testing
|
| 3 |
+
=======
|
| 4 |
+
|
| 5 |
+
General guidelines for writing good tests:
|
| 6 |
+
|
| 7 |
+
- doctests always assume ``import networkx as nx`` so don't add that
|
| 8 |
+
- prefer pytest fixtures over classes with setup methods.
|
| 9 |
+
- use the ``@pytest.mark.parametrize`` decorator
|
| 10 |
+
- use ``pytest.importorskip`` for numpy, scipy, pandas, and matplotlib b/c of PyPy.
|
| 11 |
+
and add the module to the relevant entries below.
|
| 12 |
+
|
| 13 |
+
"""
|
| 14 |
+
|
| 15 |
+
import os
|
| 16 |
+
import sys
|
| 17 |
+
import warnings
|
| 18 |
+
from importlib.metadata import entry_points
|
| 19 |
+
|
| 20 |
+
import pytest
|
| 21 |
+
|
| 22 |
+
import networkx
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def pytest_addoption(parser):
    """Register networkx's custom command-line flags with pytest.

    Adds ``--runslow`` (include slow-marked tests), ``--backend`` (run the
    suite through a dispatch backend), and ``--fallback-to-nx`` (fall back
    to the pure-networkx implementation when the backend lacks a function).
    """
    # Table of (flag, keyword arguments) pairs; registered in order.
    option_specs = (
        (
            "--runslow",
            {"action": "store_true", "default": False, "help": "run slow tests"},
        ),
        (
            "--backend",
            {
                "action": "store",
                "default": None,
                "help": "Run tests with a backend by auto-converting nx graphs to backend graphs",
            },
        ),
        (
            "--fallback-to-nx",
            {
                "action": "store_true",
                "default": False,
                "help": "Run nx function if a backend doesn't implement a dispatchable function"
                " (use with --backend)",
            },
        ),
    )
    for flag, kwargs in option_specs:
        parser.addoption(flag, **kwargs)
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def pytest_configure(config):
    """Configure the test session: register markers and set up backend dispatch.

    Reads the backend choice from ``--backend`` or the
    ``NETWORKX_TEST_BACKEND`` environment variable, wires up the
    ``nx_loopback`` test backend, and records the fallback setting on the
    global networkx config.
    """
    config.addinivalue_line("markers", "slow: mark test as slow to run")
    backend = config.getoption("--backend")
    if backend is None:
        # CLI flag takes precedence; fall back to the environment variable.
        backend = os.environ.get("NETWORKX_TEST_BACKEND")
    # nx_loopback backend is only available when testing with a backend
    loopback_ep = entry_points(name="nx_loopback", group="networkx.backends")
    if not loopback_ep:
        # Missing entry point means the installed networkx does not match
        # the source tree being tested -- warn loudly but keep going.
        warnings.warn(
            "\n\n WARNING: Mixed NetworkX configuration! \n\n"
            " This environment has mixed configuration for networkx.\n"
            " The test object nx_loopback is not configured correctly.\n"
            " You should not be seeing this message.\n"
            " Try `pip install -e .`, or change your PYTHONPATH\n"
            " Make sure python finds the networkx repo you are testing\n\n"
        )
    config.backend = backend
    if backend:
        # We will update `networkx.config.backend_priority` below in `*_modify_items`
        # to allow tests to get set up with normal networkx graphs.
        networkx.utils.backends.backends["nx_loopback"] = loopback_ep["nx_loopback"]
        networkx.utils.backends.backend_info["nx_loopback"] = {}
        networkx.config.backends = networkx.utils.Config(
            nx_loopback=networkx.utils.Config(),
            **networkx.config.backends,
        )
    fallback_to_nx = config.getoption("--fallback-to-nx")
    if not fallback_to_nx:
        # CLI flag takes precedence; fall back to the environment variable.
        fallback_to_nx = os.environ.get("NETWORKX_FALLBACK_TO_NX")
    networkx.config.fallback_to_nx = bool(fallback_to_nx)
|
| 75 |
+
|
| 76 |
+
def pytest_collection_modifyitems(config, items):
    """Adjust collected tests: enable backend dispatch and gate slow tests.

    When a backend was selected via ``pytest_configure``, flip the global
    testing switch and point the dispatch priority at it; then skip tests
    marked ``slow`` unless ``--runslow`` was given.
    """
    # Setting this to True here allows tests to be set up before dispatching
    # any function call to a backend.
    if config.backend:
        # Allow pluggable backends to add markers to tests (such as skip or xfail)
        # when running in auto-conversion test mode
        backend_name = config.backend
        if backend_name != "networkx":
            networkx.utils.backends._dispatchable._is_testing = True
            networkx.config.backend_priority.algos = [backend_name]
            networkx.config.backend_priority.generators = [backend_name]
            backend = networkx.utils.backends.backends[backend_name].load()
            if hasattr(backend, "on_start_tests"):
                # Give the backend a chance to mark/skip individual tests.
                getattr(backend, "on_start_tests")(items)

    if config.getoption("--runslow"):
        # --runslow given in cli: do not skip slow tests
        return
    skip_slow = pytest.mark.skip(reason="need --runslow option to run")
    for item in items:
        if "slow" in item.keywords:
            item.add_marker(skip_slow)
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
# TODO: The warnings below need to be dealt with, but for now we silence them.
|
| 101 |
+
# TODO: The warnings below need to be dealt with, but for now we silence them.
@pytest.fixture(autouse=True)
def set_warnings():
    """Autouse fixture that silences known deprecation/future warnings."""
    # (category, message-prefix) pairs for every warning we deliberately ignore.
    ignored = (
        (FutureWarning, "\n\nsingle_target_shortest_path_length"),
        (FutureWarning, "\n\nshortest_path"),
        (DeprecationWarning, "\n\nThe `normalized`"),
        (DeprecationWarning, "\n\nall_triplets"),
        (DeprecationWarning, "\n\nrandom_triad"),
        (DeprecationWarning, "minimal_d_separator"),
        (DeprecationWarning, "d_separated"),
        (DeprecationWarning, "\n\nk_core"),
        (DeprecationWarning, "\n\nk_shell"),
        (DeprecationWarning, "\n\nk_crust"),
        (DeprecationWarning, "\n\nk_corona"),
        (DeprecationWarning, "\n\ntotal_spanning_tree_weight"),
        (DeprecationWarning, r"\n\nThe 'create=matrix'"),
        (DeprecationWarning, "\n\n`compute_v_structures"),
        (DeprecationWarning, "Keyword argument 'link'"),
    )
    for category, message in ignored:
        warnings.filterwarnings("ignore", category=category, message=message)
|
| 151 |
+
|
| 152 |
+
@pytest.fixture(autouse=True)
def add_nx(doctest_namespace):
    """Expose ``nx`` in every doctest namespace, so examples can assume it."""
    doctest_namespace.update(nx=networkx)
|
| 156 |
+
|
| 157 |
+
# What dependencies are installed?
|
| 158 |
+
|
| 159 |
+
# Probe which optional dependencies are installed.  Each flag below guards
# the collection of modules whose doctests require that dependency.
try:
    import numpy

    has_numpy = True
except ImportError:
    has_numpy = False

try:
    import scipy

    has_scipy = True
except ImportError:
    has_scipy = False

try:
    import matplotlib

    has_matplotlib = True
except ImportError:
    has_matplotlib = False

try:
    import pandas

    has_pandas = True
except ImportError:
    has_pandas = False

try:
    import pygraphviz

    has_pygraphviz = True
except ImportError:
    has_pygraphviz = False

try:
    import pydot

    has_pydot = True
except ImportError:
    has_pydot = False

try:
    import sympy

    has_sympy = True
except ImportError:
    has_sympy = False


# List of files that pytest should ignore

collect_ignore = []

# Modules whose doctests require numpy (paths relative to the networkx package).
needs_numpy = [
    "algorithms/approximation/traveling_salesman.py",
    "algorithms/centrality/current_flow_closeness.py",
    "algorithms/centrality/laplacian.py",
    "algorithms/node_classification.py",
    "algorithms/non_randomness.py",
    "algorithms/polynomials.py",
    "algorithms/shortest_paths/dense.py",
    "algorithms/tree/mst.py",
    "drawing/nx_latex.py",
    "generators/expanders.py",
    "linalg/bethehessianmatrix.py",
    "linalg/laplacianmatrix.py",
    "utils/misc.py",
]
# Modules whose doctests require scipy.
needs_scipy = [
    "algorithms/approximation/traveling_salesman.py",
    "algorithms/assortativity/correlation.py",
    "algorithms/assortativity/mixing.py",
    "algorithms/assortativity/pairs.py",
    "algorithms/bipartite/matrix.py",
    "algorithms/bipartite/spectral.py",
    "algorithms/centrality/current_flow_betweenness.py",
    "algorithms/centrality/current_flow_betweenness_subset.py",
    "algorithms/centrality/eigenvector.py",
    "algorithms/centrality/katz.py",
    "algorithms/centrality/laplacian.py",
    "algorithms/centrality/second_order.py",
    "algorithms/centrality/subgraph_alg.py",
    "algorithms/communicability_alg.py",
    "algorithms/community/divisive.py",
    "algorithms/distance_measures.py",
    "algorithms/link_analysis/hits_alg.py",
    "algorithms/link_analysis/pagerank_alg.py",
    "algorithms/node_classification.py",
    "algorithms/similarity.py",
    "algorithms/tree/mst.py",
    "algorithms/walks.py",
    "convert_matrix.py",
    "drawing/layout.py",
    "drawing/nx_pylab.py",
    "generators/spectral_graph_forge.py",
    "generators/expanders.py",
    "linalg/algebraicconnectivity.py",
    "linalg/attrmatrix.py",
    "linalg/bethehessianmatrix.py",
    "linalg/graphmatrix.py",
    "linalg/laplacianmatrix.py",
    "linalg/modularitymatrix.py",
    "linalg/spectrum.py",
    "utils/rcm.py",
]
needs_matplotlib = ["drawing/nx_pylab.py", "generators/classic.py"]
needs_pandas = ["convert_matrix.py"]
needs_pygraphviz = ["drawing/nx_agraph.py"]
needs_pydot = ["drawing/nx_pydot.py"]
needs_sympy = ["algorithms/polynomials.py"]

# Skip collecting any module whose required dependency is missing.
if not has_numpy:
    collect_ignore += needs_numpy
if not has_scipy:
    collect_ignore += needs_scipy
if not has_matplotlib:
    collect_ignore += needs_matplotlib
if not has_pandas:
    collect_ignore += needs_pandas
if not has_pygraphviz:
    collect_ignore += needs_pygraphviz
if not has_pydot:
    collect_ignore += needs_pydot
if not has_sympy:
    collect_ignore += needs_sympy
|
llava_next/lib/python3.10/site-packages/networkx/convert.py
ADDED
|
@@ -0,0 +1,502 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions to convert NetworkX graphs to and from other formats.
|
| 2 |
+
|
| 3 |
+
The preferred way of converting data to a NetworkX graph is through the
|
| 4 |
+
graph constructor. The constructor calls the to_networkx_graph() function
|
| 5 |
+
which attempts to guess the input type and convert it automatically.
|
| 6 |
+
|
| 7 |
+
Examples
|
| 8 |
+
--------
|
| 9 |
+
Create a graph with a single edge from a dictionary of dictionaries
|
| 10 |
+
|
| 11 |
+
>>> d = {0: {1: 1}} # dict-of-dicts single edge (0,1)
|
| 12 |
+
>>> G = nx.Graph(d)
|
| 13 |
+
|
| 14 |
+
See Also
|
| 15 |
+
--------
|
| 16 |
+
nx_agraph, nx_pydot
|
| 17 |
+
"""
|
| 18 |
+
|
| 19 |
+
import warnings
|
| 20 |
+
from collections.abc import Collection, Generator, Iterator
|
| 21 |
+
|
| 22 |
+
import networkx as nx
|
| 23 |
+
|
| 24 |
+
__all__ = [
|
| 25 |
+
"to_networkx_graph",
|
| 26 |
+
"from_dict_of_dicts",
|
| 27 |
+
"to_dict_of_dicts",
|
| 28 |
+
"from_dict_of_lists",
|
| 29 |
+
"to_dict_of_lists",
|
| 30 |
+
"from_edgelist",
|
| 31 |
+
"to_edgelist",
|
| 32 |
+
]
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def to_networkx_graph(data, create_using=None, multigraph_input=False):
    """Make a NetworkX graph from a known data structure.

    The preferred way to call this is automatically
    from the class constructor

    >>> d = {0: {1: {"weight": 1}}}  # dict-of-dicts single edge (0,1)
    >>> G = nx.Graph(d)

    instead of the equivalent

    >>> G = nx.from_dict_of_dicts(d)

    Parameters
    ----------
    data : object to be converted

        Current known types are:
         any NetworkX graph
         dict-of-dicts
         dict-of-lists
         container (e.g. set, list, tuple) of edges
         iterator (e.g. itertools.chain) that produces edges
         generator of edges
         Pandas DataFrame (row per edge)
         2D numpy array
         scipy sparse array
         pygraphviz agraph

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    multigraph_input : bool (default False)
        If True and  data is a dict_of_dicts,
        try to create a multigraph assuming dict_of_dict_of_lists.
        If data and create_using are both multigraphs then create
        a multigraph from a multigraph.

    Raises
    ------
    nx.NetworkXError
        If `data` matches a known type but cannot be converted, or matches
        no known type at all.
    """
    # NX graph: anything exposing an ``adj`` attribute is treated as a graph.
    if hasattr(data, "adj"):
        try:
            result = from_dict_of_dicts(
                data.adj,
                create_using=create_using,
                multigraph_input=data.is_multigraph(),
            )
            # data.graph should be dict-like
            result.graph.update(data.graph)
            # data.nodes should be dict-like
            # result.add_node_from(data.nodes.items()) possible but
            # for custom node_attr_dict_factory which may be hashable
            # will be unexpected behavior
            for n, dd in data.nodes.items():
                result._node[n].update(dd)
            return result
        except Exception as err:
            raise nx.NetworkXError("Input is not a correct NetworkX graph.") from err

    # dict of dicts/lists
    if isinstance(data, dict):
        try:
            return from_dict_of_dicts(
                data, create_using=create_using, multigraph_input=multigraph_input
            )
        except Exception as err1:
            if multigraph_input is True:
                # Chain the original failure so callers can see why the
                # multigraph interpretation was rejected.
                raise nx.NetworkXError(
                    f"converting multigraph_input raised:\n{type(err1)}: {err1}"
                ) from err1
            try:
                return from_dict_of_lists(data, create_using=create_using)
            except Exception as err2:
                raise TypeError("Input is not known type.") from err2

    # edgelists
    if isinstance(data, list | tuple | nx.reportviews.EdgeViewABC | Iterator):
        try:
            return from_edgelist(data, create_using=create_using)
        except Exception:
            # Not a valid edgelist after all; fall through and try the
            # remaining known formats below.
            pass

    # pygraphviz agraph
    if hasattr(data, "is_strict"):
        try:
            return nx.nx_agraph.from_agraph(data, create_using=create_using)
        except Exception as err:
            raise nx.NetworkXError("Input is not a correct pygraphviz graph.") from err

    # Pandas DataFrame
    try:
        import pandas as pd

        if isinstance(data, pd.DataFrame):
            # A square frame is interpreted as an adjacency matrix,
            # anything else as a row-per-edge edge list.
            if data.shape[0] == data.shape[1]:
                try:
                    return nx.from_pandas_adjacency(data, create_using=create_using)
                except Exception as err:
                    msg = "Input is not a correct Pandas DataFrame adjacency matrix."
                    raise nx.NetworkXError(msg) from err
            else:
                try:
                    return nx.from_pandas_edgelist(
                        data, edge_attr=True, create_using=create_using
                    )
                except Exception as err:
                    msg = "Input is not a correct Pandas DataFrame edge-list."
                    raise nx.NetworkXError(msg) from err
    except ImportError:
        pass

    # numpy array
    try:
        import numpy as np

        if isinstance(data, np.ndarray):
            try:
                return nx.from_numpy_array(data, create_using=create_using)
            except Exception as err:
                raise nx.NetworkXError(
                    "Failed to interpret array as an adjacency matrix."
                ) from err
    except ImportError:
        pass

    # scipy sparse array - any format
    try:
        import scipy

        if hasattr(data, "format"):
            try:
                return nx.from_scipy_sparse_array(data, create_using=create_using)
            except Exception as err:
                raise nx.NetworkXError(
                    "Input is not a correct scipy sparse array type."
                ) from err
    except ImportError:
        pass

    # Note: most general check - should remain last in order of execution
    # Includes containers (e.g. list, set, dict, etc.), generators, and
    # iterators (e.g. itertools.chain) of edges

    if isinstance(data, Collection | Generator | Iterator):
        try:
            return from_edgelist(data, create_using=create_using)
        except Exception as err:
            raise nx.NetworkXError("Input is not a valid edge list") from err

    raise nx.NetworkXError("Input is not a known data type for conversion.")
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
@nx._dispatchable
def to_dict_of_lists(G, nodelist=None):
    """Returns adjacency representation of graph as a dictionary of lists.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    nodelist : list
        Use only nodes specified in nodelist

    Returns
    -------
    dict
        Maps each node in `nodelist` to the list of its neighbors that are
        also in `nodelist`.

    Notes
    -----
    Completely ignores edge data for MultiGraph and MultiDiGraph.
    """
    if nodelist is None:
        # Iterating a graph yields its nodes; membership tests also work.
        nodelist = G
    return {
        node: [nbr for nbr in G.neighbors(node) if nbr in nodelist]
        for node in nodelist
    }
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def from_dict_of_lists(d, create_using=None):
    """Returns a graph from a dictionary of lists.

    Parameters
    ----------
    d : dictionary of lists
        A dictionary of lists adjacency representation.

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Examples
    --------
    >>> dol = {0: [1]}  # single edge (0,1)
    >>> G = nx.from_dict_of_lists(dol)

    or

    >>> G = nx.Graph(dol)  # use Graph constructor
    """
    G = nx.empty_graph(0, create_using)
    G.add_nodes_from(d)
    if G.is_multigraph() and not G.is_directed():
        # A dict-of-lists cannot express multiedges, yet in the undirected
        # case every edge is listed twice (once per endpoint).  Track which
        # source nodes are already processed so the reverse listing of each
        # edge does not create a spurious parallel edge.
        processed = {}
        for source, neighbors in d.items():
            for target in neighbors:
                if target not in processed:
                    G.add_edge(source, target)
            processed[source] = 1  # don't allow reverse edge to show up
    else:
        G.add_edges_from(
            (source, target)
            for source, neighbors in d.items()
            for target in neighbors
        )
    return G
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
def to_dict_of_dicts(G, nodelist=None, edge_data=None):
    """Returns adjacency representation of graph as a dictionary of dictionaries.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    nodelist : list
        Use only nodes specified in nodelist

    edge_data : scalar, optional
        If provided, the value of the dictionary will be set to `edge_data` for
        all edges. Usual values could be `1` or `True`. If `edge_data` is
        `None` (the default), the edgedata in `G` is used, resulting in a
        dict-of-dict-of-dicts. If `G` is a MultiGraph, the result will be a
        dict-of-dict-of-dict-of-dicts. See Notes for an approach to customize
        handling edge data. `edge_data` should *not* be a container.

    Returns
    -------
    dod : dict
        A nested dictionary representation of `G`. Note that the level of
        nesting depends on the type of `G` and the value of `edge_data`
        (see Examples).

    See Also
    --------
    from_dict_of_dicts, to_dict_of_lists

    Notes
    -----
    For a more custom approach to handling edge data, try::

        dod = {
            n: {nbr: custom(n, nbr, dd) for nbr, dd in nbrdict.items()}
            for n, nbrdict in G.adj.items()
        }

    where `custom` returns the desired edge data for each edge between `n` and
    `nbr`, given existing edge data `dd`.

    Examples
    --------
    >>> G = nx.path_graph(3)
    >>> nx.to_dict_of_dicts(G)
    {0: {1: {}}, 1: {0: {}, 2: {}}, 2: {1: {}}}

    Edge data is preserved by default (``edge_data=None``), resulting
    in dict-of-dict-of-dicts where the innermost dictionary contains the
    edge data:

    >>> G = nx.Graph()
    >>> G.add_edges_from(
    ...     [
    ...         (0, 1, {"weight": 1.0}),
    ...         (1, 2, {"weight": 2.0}),
    ...         (2, 0, {"weight": 1.0}),
    ...     ]
    ... )
    >>> d = nx.to_dict_of_dicts(G)
    >>> d  # doctest: +SKIP
    {0: {1: {'weight': 1.0}, 2: {'weight': 1.0}},
     1: {0: {'weight': 1.0}, 2: {'weight': 2.0}},
     2: {1: {'weight': 2.0}, 0: {'weight': 1.0}}}
    >>> d[1][2]["weight"]
    2.0

    If `edge_data` is not `None`, edge data in the original graph (if any) is
    replaced:

    >>> d = nx.to_dict_of_dicts(G, edge_data=1)
    >>> d
    {0: {1: 1, 2: 1}, 1: {0: 1, 2: 1}, 2: {1: 1, 0: 1}}
    >>> d[1][2]
    1

    This also applies to MultiGraphs: edge data is preserved by default:

    >>> G = nx.MultiGraph()
    >>> G.add_edge(0, 1, key="a", weight=1.0)
    'a'
    >>> G.add_edge(0, 1, key="b", weight=5.0)
    'b'
    >>> d = nx.to_dict_of_dicts(G)
    >>> d  # doctest: +SKIP
    {0: {1: {'a': {'weight': 1.0}, 'b': {'weight': 5.0}}},
     1: {0: {'a': {'weight': 1.0}, 'b': {'weight': 5.0}}}}
    >>> d[0][1]["b"]["weight"]
    5.0

    But multi edge data is lost if `edge_data` is not `None`:

    >>> d = nx.to_dict_of_dicts(G, edge_data=10)
    >>> d
    {0: {1: 10}, 1: {0: 10}}
    """
    # Four cases, keyed on whether a node subset and/or a constant edge
    # value were requested.  Each branch builds the whole mapping in one
    # comprehension.
    if nodelist is None:
        if edge_data is None:
            # Copy each neighbor dict so mutating the result does not
            # touch the graph's own adjacency structure.
            return {u: nbrdict.copy() for u, nbrdict in G.adjacency()}
        # Constant value for every edge.
        return {u: dict.fromkeys(nbrdict, edge_data) for u, nbrdict in G.adjacency()}
    if edge_data is None:
        # Restrict both rows and columns to `nodelist`, keeping edge data.
        return {
            u: {v: dd for v, dd in G[u].items() if v in nodelist} for u in nodelist
        }
    # Restricted node set with a constant edge value.
    return {u: {v: edge_data for v in G[u] if v in nodelist} for u in nodelist}
|
| 371 |
+
|
| 372 |
+
|
| 373 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def from_dict_of_dicts(d, create_using=None, multigraph_input=False):
    """Returns a graph from a dictionary of dictionaries.

    Parameters
    ----------
    d : dictionary of dictionaries
        A dictionary of dictionaries adjacency representation.

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    multigraph_input : bool (default False)
        When True, the dict `d` is assumed
        to be a dict-of-dict-of-dict-of-dict structure keyed by
        node to neighbor to edge keys to edge data for multi-edges.
        Otherwise this routine assumes dict-of-dict-of-dict keyed by
        node to neighbor to edge data.

    Examples
    --------
    >>> dod = {0: {1: {"weight": 1}}}  # single edge (0,1)
    >>> G = nx.from_dict_of_dicts(dod)

    or

    >>> G = nx.Graph(dod)  # use Graph constructor
    """
    G = nx.empty_graph(0, create_using)
    G.add_nodes_from(d)
    if multigraph_input:
        # Innermost dicts are keyed by edge key; whether the target graph
        # keeps those keys depends on its own multigraph-ness.
        keyed = G.is_multigraph()
        if G.is_directed():
            if keyed:
                G.add_edges_from(
                    (u, v, k, dd)
                    for u, nbrs in d.items()
                    for v, keydict in nbrs.items()
                    for k, dd in keydict.items()
                )
            else:
                # Keys are discarded; parallel edges collapse onto one.
                G.add_edges_from(
                    (u, v, dd)
                    for u, nbrs in d.items()
                    for v, keydict in nbrs.items()
                    for dd in keydict.values()
                )
        else:
            # Undirected: d lists every edge from both endpoints, so skip
            # the reverse listing once the forward one was consumed.
            done = set()
            for u, nbrs in d.items():
                for v, keydict in nbrs.items():
                    if (u, v) in done:
                        continue
                    if keyed:
                        G.add_edges_from(
                            (u, v, k, dd) for k, dd in keydict.items()
                        )
                    else:
                        G.add_edges_from((u, v, dd) for dd in keydict.values())
                    done.add((v, u))
    else:  # not a multigraph to multigraph transfer
        if G.is_multigraph() and not G.is_directed():
            # d can hold both u-v and v-u; only materialize one of them.
            # Digraphs want both directions and plain Graphs deduplicate
            # implicitly, so only this combination needs the bookkeeping.
            done = set()
            for u, nbrs in d.items():
                for v, dd in nbrs.items():
                    if (u, v) in done:
                        continue
                    # Force key 0 so repeated data updates merge rather
                    # than create parallel edges.
                    G.add_edge(u, v, key=0)
                    G[u][v][0].update(dd)
                    done.add((v, u))
        else:
            G.add_edges_from(
                (u, v, dd) for u, nbrs in d.items() for v, dd in nbrs.items()
            )
    return G
|
| 458 |
+
|
| 459 |
+
|
| 460 |
+
@nx._dispatchable(preserve_edge_attrs=True)
def to_edgelist(G, nodelist=None):
    """Returns a list of edges in the graph.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    nodelist : list
        Use only nodes specified in nodelist

    Returns
    -------
    EdgeDataView
        Edges of `G` (restricted to `nodelist` when given), each with its
        attribute dict attached.
    """
    # Delegate entirely to the graph's edge view, keeping edge data.
    return G.edges(data=True) if nodelist is None else G.edges(nodelist, data=True)
|
| 476 |
+
|
| 477 |
+
|
| 478 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def from_edgelist(edgelist, create_using=None):
    """Returns a graph from a list of edges.

    Parameters
    ----------
    edgelist : list or iterator
        Edge tuples

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Examples
    --------
    >>> edgelist = [(0, 1)]  # single edge (0,1)
    >>> G = nx.from_edgelist(edgelist)

    or

    >>> G = nx.Graph(edgelist)  # use Graph constructor
    """
    # Start from an empty graph of the requested type, then bulk-insert.
    graph = nx.empty_graph(0, create_using)
    graph.add_edges_from(edgelist)
    return graph
|
llava_next/lib/python3.10/site-packages/networkx/convert_matrix.py
ADDED
|
@@ -0,0 +1,1317 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions to convert NetworkX graphs to and from common data containers
|
| 2 |
+
like numpy arrays, scipy sparse arrays, and pandas DataFrames.
|
| 3 |
+
|
| 4 |
+
The preferred way of converting data to a NetworkX graph is through the
|
| 5 |
+
graph constructor. The constructor calls the `~networkx.convert.to_networkx_graph`
|
| 6 |
+
function which attempts to guess the input type and convert it automatically.
|
| 7 |
+
|
| 8 |
+
Examples
|
| 9 |
+
--------
|
| 10 |
+
Create a 10 node random graph from a numpy array
|
| 11 |
+
|
| 12 |
+
>>> import numpy as np
|
| 13 |
+
>>> rng = np.random.default_rng()
|
| 14 |
+
>>> a = rng.integers(low=0, high=2, size=(10, 10))
|
| 15 |
+
>>> DG = nx.from_numpy_array(a, create_using=nx.DiGraph)
|
| 16 |
+
|
| 17 |
+
or equivalently:
|
| 18 |
+
|
| 19 |
+
>>> DG = nx.DiGraph(a)
|
| 20 |
+
|
| 21 |
+
which calls `from_numpy_array` internally based on the type of ``a``.
|
| 22 |
+
|
| 23 |
+
See Also
|
| 24 |
+
--------
|
| 25 |
+
nx_agraph, nx_pydot
|
| 26 |
+
"""
|
| 27 |
+
|
| 28 |
+
import itertools
|
| 29 |
+
from collections import defaultdict
|
| 30 |
+
|
| 31 |
+
import networkx as nx
|
| 32 |
+
from networkx.utils import not_implemented_for
|
| 33 |
+
|
| 34 |
+
__all__ = [
|
| 35 |
+
"from_pandas_adjacency",
|
| 36 |
+
"to_pandas_adjacency",
|
| 37 |
+
"from_pandas_edgelist",
|
| 38 |
+
"to_pandas_edgelist",
|
| 39 |
+
"from_scipy_sparse_array",
|
| 40 |
+
"to_scipy_sparse_array",
|
| 41 |
+
"from_numpy_array",
|
| 42 |
+
"to_numpy_array",
|
| 43 |
+
]
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 47 |
+
def to_pandas_adjacency(
|
| 48 |
+
G,
|
| 49 |
+
nodelist=None,
|
| 50 |
+
dtype=None,
|
| 51 |
+
order=None,
|
| 52 |
+
multigraph_weight=sum,
|
| 53 |
+
weight="weight",
|
| 54 |
+
nonedge=0.0,
|
| 55 |
+
):
|
| 56 |
+
"""Returns the graph adjacency matrix as a Pandas DataFrame.
|
| 57 |
+
|
| 58 |
+
Parameters
|
| 59 |
+
----------
|
| 60 |
+
G : graph
|
| 61 |
+
The NetworkX graph used to construct the Pandas DataFrame.
|
| 62 |
+
|
| 63 |
+
nodelist : list, optional
|
| 64 |
+
The rows and columns are ordered according to the nodes in `nodelist`.
|
| 65 |
+
If `nodelist` is None, then the ordering is produced by G.nodes().
|
| 66 |
+
|
| 67 |
+
multigraph_weight : {sum, min, max}, optional
|
| 68 |
+
An operator that determines how weights in multigraphs are handled.
|
| 69 |
+
The default is to sum the weights of the multiple edges.
|
| 70 |
+
|
| 71 |
+
weight : string or None, optional
|
| 72 |
+
The edge attribute that holds the numerical value used for
|
| 73 |
+
the edge weight. If an edge does not have that attribute, then the
|
| 74 |
+
value 1 is used instead.
|
| 75 |
+
|
| 76 |
+
nonedge : float, optional
|
| 77 |
+
The matrix values corresponding to nonedges are typically set to zero.
|
| 78 |
+
However, this could be undesirable if there are matrix values
|
| 79 |
+
corresponding to actual edges that also have the value zero. If so,
|
| 80 |
+
one might prefer nonedges to have some other value, such as nan.
|
| 81 |
+
|
| 82 |
+
Returns
|
| 83 |
+
-------
|
| 84 |
+
df : Pandas DataFrame
|
| 85 |
+
Graph adjacency matrix
|
| 86 |
+
|
| 87 |
+
Notes
|
| 88 |
+
-----
|
| 89 |
+
For directed graphs, entry i,j corresponds to an edge from i to j.
|
| 90 |
+
|
| 91 |
+
The DataFrame entries are assigned to the weight edge attribute. When
|
| 92 |
+
an edge does not have a weight attribute, the value of the entry is set to
|
| 93 |
+
the number 1. For multiple (parallel) edges, the values of the entries
|
| 94 |
+
are determined by the 'multigraph_weight' parameter. The default is to
|
| 95 |
+
sum the weight attributes for each of the parallel edges.
|
| 96 |
+
|
| 97 |
+
When `nodelist` does not contain every node in `G`, the matrix is built
|
| 98 |
+
from the subgraph of `G` that is induced by the nodes in `nodelist`.
|
| 99 |
+
|
| 100 |
+
The convention used for self-loop edges in graphs is to assign the
|
| 101 |
+
diagonal matrix entry value to the weight attribute of the edge
|
| 102 |
+
(or the number 1 if the edge has no weight attribute). If the
|
| 103 |
+
alternate convention of doubling the edge weight is desired the
|
| 104 |
+
resulting Pandas DataFrame can be modified as follows::
|
| 105 |
+
|
| 106 |
+
>>> import pandas as pd
|
| 107 |
+
>>> G = nx.Graph([(1, 1), (2, 2)])
|
| 108 |
+
>>> df = nx.to_pandas_adjacency(G)
|
| 109 |
+
>>> df
|
| 110 |
+
1 2
|
| 111 |
+
1 1.0 0.0
|
| 112 |
+
2 0.0 1.0
|
| 113 |
+
>>> diag_idx = list(range(len(df)))
|
| 114 |
+
>>> df.iloc[diag_idx, diag_idx] *= 2
|
| 115 |
+
>>> df
|
| 116 |
+
1 2
|
| 117 |
+
1 2.0 0.0
|
| 118 |
+
2 0.0 2.0
|
| 119 |
+
|
| 120 |
+
Examples
|
| 121 |
+
--------
|
| 122 |
+
>>> G = nx.MultiDiGraph()
|
| 123 |
+
>>> G.add_edge(0, 1, weight=2)
|
| 124 |
+
0
|
| 125 |
+
>>> G.add_edge(1, 0)
|
| 126 |
+
0
|
| 127 |
+
>>> G.add_edge(2, 2, weight=3)
|
| 128 |
+
0
|
| 129 |
+
>>> G.add_edge(2, 2)
|
| 130 |
+
1
|
| 131 |
+
>>> nx.to_pandas_adjacency(G, nodelist=[0, 1, 2], dtype=int)
|
| 132 |
+
0 1 2
|
| 133 |
+
0 0 2 0
|
| 134 |
+
1 1 0 0
|
| 135 |
+
2 0 0 4
|
| 136 |
+
|
| 137 |
+
"""
|
| 138 |
+
import pandas as pd
|
| 139 |
+
|
| 140 |
+
M = to_numpy_array(
|
| 141 |
+
G,
|
| 142 |
+
nodelist=nodelist,
|
| 143 |
+
dtype=dtype,
|
| 144 |
+
order=order,
|
| 145 |
+
multigraph_weight=multigraph_weight,
|
| 146 |
+
weight=weight,
|
| 147 |
+
nonedge=nonedge,
|
| 148 |
+
)
|
| 149 |
+
if nodelist is None:
|
| 150 |
+
nodelist = list(G)
|
| 151 |
+
return pd.DataFrame(data=M, index=nodelist, columns=nodelist)
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def from_pandas_adjacency(df, create_using=None):
    r"""Returns a graph from Pandas DataFrame.

    The Pandas DataFrame is interpreted as an adjacency matrix for the graph:
    row and column labels are the nodes, and each nonzero entry becomes an
    edge whose weight is the entry's value.

    Parameters
    ----------
    df : Pandas DataFrame
        An adjacency matrix representation of a graph

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    Notes
    -----
    For directed graphs, explicitly mention create_using=nx.DiGraph,
    and entry i,j of df corresponds to an edge from i to j.

    If `df` has a single data type for each entry it will be converted to an
    appropriate Python data type.  If `df` has a user-specified compound data
    type, the names of the data fields become attribute keys in the resulting
    NetworkX graph.

    Node attributes stored in a separate dataframe ``df_nodes`` can be loaded
    afterwards with::

        df_nodes = pd.DataFrame({"node_id": [1, 2, 3], "attribute1": ["A", "B", "C"]})
        G.add_nodes_from((n, dict(d)) for n, d in df_nodes.iterrows())

    See Also
    --------
    to_pandas_adjacency

    Examples
    --------
    Simple integer weights on edges:

    >>> import pandas as pd
    >>> pd.options.display.max_columns = 20
    >>> df = pd.DataFrame([[1, 1], [2, 1]])
    >>> df
       0  1
    0  1  1
    1  2  1
    >>> G = nx.from_pandas_adjacency(df)
    >>> G.name = "Graph from pandas adjacency matrix"
    >>> print(G)
    Graph named 'Graph from pandas adjacency matrix' with 2 nodes and 3 edges
    """
    # Reorder the columns to match the index ordering.  A failure here means
    # the frame is not square over a common label set, so report which index
    # labels have no matching column.
    try:
        df = df[df.index]
    except Exception as err:
        missing = list(set(df.index).difference(set(df.columns)))
        msg = f"{missing} not in columns"
        raise nx.NetworkXError("Columns must match Indices.", msg) from err

    return from_numpy_array(df.values, create_using=create_using, nodelist=df.columns)
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
@nx._dispatchable(preserve_edge_attrs=True)
def to_pandas_edgelist(
    G,
    source="source",
    target="target",
    nodelist=None,
    dtype=None,
    edge_key=None,
):
    """Returns the graph edge list as a Pandas DataFrame.

    Parameters
    ----------
    G : graph
        The NetworkX graph used to construct the Pandas DataFrame.

    source : str or int, optional
        A valid column name (string or integer) for the source nodes (for the
        directed case).

    target : str or int, optional
        A valid column name (string or integer) for the target nodes (for the
        directed case).

    nodelist : list, optional
        Use only nodes specified in nodelist

    dtype : dtype, default None
        Use to create the DataFrame. Data type to force.
        Only a single dtype is allowed. If None, infer.

    edge_key : str or int or None, optional (default=None)
        A valid column name (string or integer) for the edge keys (for the
        multigraph case). If None, edge keys are not stored in the DataFrame.

    Returns
    -------
    df : Pandas DataFrame
        Graph edge list

    Raises
    ------
    NetworkXError
        If `source`, `target`, or `edge_key` collides with an existing
        edge-attribute name.

    Examples
    --------
    >>> G = nx.Graph(
    ...     [
    ...         ("A", "B", {"cost": 1, "weight": 7}),
    ...         ("C", "E", {"cost": 9, "weight": 10}),
    ...     ]
    ... )
    >>> df = nx.to_pandas_edgelist(G, nodelist=["A", "C"])
    >>> df[["source", "target", "cost", "weight"]]
      source target  cost  weight
    0      A      B     1       7
    1      C      E     9      10

    >>> G = nx.MultiGraph([("A", "B", {"cost": 1}), ("A", "B", {"cost": 9})])
    >>> df = nx.to_pandas_edgelist(G, nodelist=["A", "C"], edge_key="ekey")
    >>> df[["source", "target", "cost", "ekey"]]
      source target  cost  ekey
    0      A      B     1     0
    1      A      B     9     1

    """
    import pandas as pd

    if nodelist is None:
        edgelist = G.edges(data=True)
    else:
        edgelist = G.edges(nodelist, data=True)
    source_nodes = [s for s, _, _ in edgelist]
    target_nodes = [t for _, t, _ in edgelist]

    all_attrs = set().union(*(d.keys() for _, _, d in edgelist))
    if source in all_attrs:
        raise nx.NetworkXError(f"Source name {source!r} is an edge attr name")
    if target in all_attrs:
        raise nx.NetworkXError(f"Target name {target!r} is an edge attr name")

    nan = float("nan")
    edge_attr = {k: [d.get(k, nan) for _, _, d in edgelist] for k in all_attrs}

    if G.is_multigraph() and edge_key is not None:
        if edge_key in all_attrs:
            raise nx.NetworkXError(f"Edge key name {edge_key!r} is an edge attr name")
        # Bug fix: the keys must come from the *same* edge set as `edgelist`.
        # Previously keys were always taken from all of G's edges, which
        # misaligned (or mismatched in length with) the other columns whenever
        # `nodelist` restricted the edges.
        if nodelist is None:
            edge_keys = [k for _, _, k in G.edges(keys=True)]
        else:
            edge_keys = [k for _, _, k in G.edges(nodelist, keys=True)]
        edgelistdict = {source: source_nodes, target: target_nodes, edge_key: edge_keys}
    else:
        edgelistdict = {source: source_nodes, target: target_nodes}

    edgelistdict.update(edge_attr)
    return pd.DataFrame(edgelistdict, dtype=dtype)
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def from_pandas_edgelist(
    df,
    source="source",
    target="target",
    edge_attr=None,
    create_using=None,
    edge_key=None,
):
    """Returns a graph from Pandas DataFrame containing an edge list.

    Each row of `df` is processed as one edge instance; `source` and `target`
    name the columns holding the endpoint nodes, and zero or more further
    columns may be copied onto the edges as attributes.

    Note: This function iterates over DataFrame.values, which is not
    guaranteed to retain the data type across columns in the row. This is only
    a problem if your row is entirely numeric and a mix of ints and floats. In
    that case, all values will be returned as floats. See the
    DataFrame.iterrows documentation for an example.

    Parameters
    ----------
    df : Pandas DataFrame
        An edge list representation of a graph

    source : str or int
        A valid column name (string or integer) for the source nodes (for the
        directed case).

    target : str or int
        A valid column name (string or integer) for the target nodes (for the
        directed case).

    edge_attr : str or int, iterable, True, or None
        A valid column name (str or int) or iterable of column names that are
        used to retrieve items and add them to the graph as edge attributes.
        If `True`, all columns will be added except `source`, `target` and
        `edge_key`. If `None`, no edge attributes are added to the graph.

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    edge_key : str or None, optional (default=None)
        A valid column name for the edge keys (for a MultiGraph). The values
        in this column are used for the edge keys when adding edges if
        create_using is a multigraph.

    Notes
    -----
    Node attributes stored in a separate dataframe ``df_nodes`` can be loaded
    afterwards with::

        df_nodes = pd.DataFrame({"node_id": [1, 2, 3], "attribute1": ["A", "B", "C"]})
        G.add_nodes_from((n, dict(d)) for n, d in df_nodes.iterrows())

    See Also
    --------
    to_pandas_edgelist

    Examples
    --------
    Simple integer weights on edges:

    >>> import pandas as pd
    >>> edges = pd.DataFrame(
    ...     {
    ...         "source": [0, 1, 2],
    ...         "target": [2, 2, 3],
    ...         "weight": [3, 4, 5],
    ...         "color": ["red", "blue", "blue"],
    ...     }
    ... )
    >>> G = nx.from_pandas_edgelist(edges, edge_attr=True)
    >>> G[0][2]["color"]
    'red'

    Build multigraph with custom keys:

    >>> edges = pd.DataFrame(
    ...     {
    ...         "source": [0, 1, 2, 0],
    ...         "target": [2, 2, 3, 2],
    ...         "my_edge_key": ["A", "B", "C", "D"],
    ...         "weight": [3, 4, 5, 6],
    ...         "color": ["red", "blue", "blue", "blue"],
    ...     }
    ... )
    >>> G = nx.from_pandas_edgelist(
    ...     edges,
    ...     edge_key="my_edge_key",
    ...     edge_attr=["weight", "color"],
    ...     create_using=nx.MultiGraph(),
    ... )
    >>> G[0][2]
    AtlasView({'A': {'weight': 3, 'color': 'red'}, 'D': {'weight': 6, 'color': 'blue'}})
    """
    graph = nx.empty_graph(0, create_using)
    multi_keyed = graph.is_multigraph() and edge_key is not None

    # Fast path: no attribute columns requested, just endpoints (and keys).
    if edge_attr is None:
        if multi_keyed:
            for u, v, k in zip(df[source], df[target], df[edge_key]):
                graph.add_edge(u, v, k)
        else:
            graph.add_edges_from(zip(df[source], df[target]))
        return graph

    reserved = [source, target]
    if multi_keyed:
        reserved.append(edge_key)

    # Resolve the requested attribute columns.
    if edge_attr is True:
        attr_names = [c for c in df.columns if c not in reserved]
    elif isinstance(edge_attr, (list, tuple)):
        attr_names = edge_attr
    else:
        attr_names = [edge_attr]
    if len(attr_names) == 0:
        raise nx.NetworkXError(
            f"Invalid edge_attr argument: No columns found with name: {attr_names}"
        )

    # One tuple of attribute values per row, aligned with the endpoint columns.
    try:
        attr_rows = zip(*[df[col] for col in attr_names])
    except (KeyError, TypeError) as err:
        raise nx.NetworkXError(f"Invalid edge_attr argument: {edge_attr}") from err

    if not graph.is_multigraph():
        for u, v, values in zip(df[source], df[target], attr_rows):
            graph.add_edge(u, v)
            graph[u][v].update(zip(attr_names, values))
        return graph

    # Multigraph: optionally bundle each attribute row with its edge key.
    if edge_key is not None:
        try:
            attr_rows = zip(attr_rows, df[edge_key])
        except (KeyError, TypeError) as err:
            raise nx.NetworkXError(f"Invalid edge_key argument: {edge_key}") from err

    for u, v, values in zip(df[source], df[target], attr_rows):
        if edge_key is not None:
            values, key_value = values
            key = graph.add_edge(u, v, key=key_value)
        else:
            key = graph.add_edge(u, v)
        graph[u][v][key].update(zip(attr_names, values))

    return graph
|
| 490 |
+
|
| 491 |
+
|
| 492 |
+
@nx._dispatchable(edge_attrs="weight")
|
| 493 |
+
def to_scipy_sparse_array(G, nodelist=None, dtype=None, weight="weight", format="csr"):
|
| 494 |
+
"""Returns the graph adjacency matrix as a SciPy sparse array.
|
| 495 |
+
|
| 496 |
+
Parameters
|
| 497 |
+
----------
|
| 498 |
+
G : graph
|
| 499 |
+
The NetworkX graph used to construct the sparse array.
|
| 500 |
+
|
| 501 |
+
nodelist : list, optional
|
| 502 |
+
The rows and columns are ordered according to the nodes in `nodelist`.
|
| 503 |
+
If `nodelist` is None, then the ordering is produced by ``G.nodes()``.
|
| 504 |
+
|
| 505 |
+
dtype : NumPy data-type, optional
|
| 506 |
+
A valid NumPy dtype used to initialize the array. If None, then the
|
| 507 |
+
NumPy default is used.
|
| 508 |
+
|
| 509 |
+
weight : string or None, optional (default='weight')
|
| 510 |
+
The edge attribute that holds the numerical value used for
|
| 511 |
+
the edge weight. If None then all edge weights are 1.
|
| 512 |
+
|
| 513 |
+
format : str in {'bsr', 'csr', 'csc', 'coo', 'lil', 'dia', 'dok'}
|
| 514 |
+
The format of the sparse array to be returned (default 'csr'). For
|
| 515 |
+
some algorithms different implementations of sparse arrays
|
| 516 |
+
can perform better. See [1]_ for details.
|
| 517 |
+
|
| 518 |
+
Returns
|
| 519 |
+
-------
|
| 520 |
+
A : SciPy sparse array
|
| 521 |
+
Graph adjacency matrix.
|
| 522 |
+
|
| 523 |
+
Notes
|
| 524 |
+
-----
|
| 525 |
+
For directed graphs, matrix entry ``i, j`` corresponds to an edge from
|
| 526 |
+
``i`` to ``j``.
|
| 527 |
+
|
| 528 |
+
The values of the adjacency matrix are populated using the edge attribute held in
|
| 529 |
+
parameter `weight`. When an edge does not have that attribute, the
|
| 530 |
+
value of the entry is 1.
|
| 531 |
+
|
| 532 |
+
For multiple edges the matrix values are the sums of the edge weights.
|
| 533 |
+
|
| 534 |
+
When `nodelist` does not contain every node in `G`, the adjacency matrix
|
| 535 |
+
is built from the subgraph of `G` that is induced by the nodes in
|
| 536 |
+
`nodelist`.
|
| 537 |
+
|
| 538 |
+
The convention used for self-loop edges in graphs is to assign the
|
| 539 |
+
diagonal matrix entry value to the weight attribute of the edge
|
| 540 |
+
(or the number 1 if the edge has no weight attribute). If the
|
| 541 |
+
alternate convention of doubling the edge weight is desired the
|
| 542 |
+
resulting array can be modified as follows::
|
| 543 |
+
|
| 544 |
+
>>> G = nx.Graph([(1, 1)])
|
| 545 |
+
>>> A = nx.to_scipy_sparse_array(G)
|
| 546 |
+
>>> A.toarray()
|
| 547 |
+
array([[1]])
|
| 548 |
+
>>> A.setdiag(A.diagonal() * 2)
|
| 549 |
+
>>> A.toarray()
|
| 550 |
+
array([[2]])
|
| 551 |
+
|
| 552 |
+
Examples
|
| 553 |
+
--------
|
| 554 |
+
|
| 555 |
+
Basic usage:
|
| 556 |
+
|
| 557 |
+
>>> G = nx.path_graph(4)
|
| 558 |
+
>>> A = nx.to_scipy_sparse_array(G)
|
| 559 |
+
>>> A # doctest: +SKIP
|
| 560 |
+
<Compressed Sparse Row sparse array of dtype 'int64'
|
| 561 |
+
with 6 stored elements and shape (4, 4)>
|
| 562 |
+
|
| 563 |
+
>>> A.toarray()
|
| 564 |
+
array([[0, 1, 0, 0],
|
| 565 |
+
[1, 0, 1, 0],
|
| 566 |
+
[0, 1, 0, 1],
|
| 567 |
+
[0, 0, 1, 0]])
|
| 568 |
+
|
| 569 |
+
.. note:: The `toarray` method is used in these examples to better visualize
|
| 570 |
+
the adjacancy matrix. For a dense representation of the adjaceny matrix,
|
| 571 |
+
use `to_numpy_array` instead.
|
| 572 |
+
|
| 573 |
+
Directed graphs:
|
| 574 |
+
|
| 575 |
+
>>> G = nx.DiGraph([(0, 1), (1, 2), (2, 3)])
|
| 576 |
+
>>> nx.to_scipy_sparse_array(G).toarray()
|
| 577 |
+
array([[0, 1, 0, 0],
|
| 578 |
+
[0, 0, 1, 0],
|
| 579 |
+
[0, 0, 0, 1],
|
| 580 |
+
[0, 0, 0, 0]])
|
| 581 |
+
|
| 582 |
+
>>> H = G.reverse()
|
| 583 |
+
>>> H.edges
|
| 584 |
+
OutEdgeView([(1, 0), (2, 1), (3, 2)])
|
| 585 |
+
>>> nx.to_scipy_sparse_array(H).toarray()
|
| 586 |
+
array([[0, 0, 0, 0],
|
| 587 |
+
[1, 0, 0, 0],
|
| 588 |
+
[0, 1, 0, 0],
|
| 589 |
+
[0, 0, 1, 0]])
|
| 590 |
+
|
| 591 |
+
By default, the order of the rows/columns of the adjacency matrix is determined
|
| 592 |
+
by the ordering of the nodes in `G`:
|
| 593 |
+
|
| 594 |
+
>>> G = nx.Graph()
|
| 595 |
+
>>> G.add_nodes_from([3, 5, 0, 1])
|
| 596 |
+
>>> G.add_edges_from([(1, 3), (1, 5)])
|
| 597 |
+
>>> nx.to_scipy_sparse_array(G).toarray()
|
| 598 |
+
array([[0, 0, 0, 1],
|
| 599 |
+
[0, 0, 0, 1],
|
| 600 |
+
[0, 0, 0, 0],
|
| 601 |
+
[1, 1, 0, 0]])
|
| 602 |
+
|
| 603 |
+
The ordering of the rows can be changed with `nodelist`:
|
| 604 |
+
|
| 605 |
+
>>> ordered = [0, 1, 3, 5]
|
| 606 |
+
>>> nx.to_scipy_sparse_array(G, nodelist=ordered).toarray()
|
| 607 |
+
array([[0, 0, 0, 0],
|
| 608 |
+
[0, 0, 1, 1],
|
| 609 |
+
[0, 1, 0, 0],
|
| 610 |
+
[0, 1, 0, 0]])
|
| 611 |
+
|
| 612 |
+
If `nodelist` contains a subset of the nodes in `G`, the adjacency matrix
|
| 613 |
+
for the node-induced subgraph is produced:
|
| 614 |
+
|
| 615 |
+
>>> nx.to_scipy_sparse_array(G, nodelist=[1, 3, 5]).toarray()
|
| 616 |
+
array([[0, 1, 1],
|
| 617 |
+
[1, 0, 0],
|
| 618 |
+
[1, 0, 0]])
|
| 619 |
+
|
| 620 |
+
The values of the adjacency matrix are drawn from the edge attribute
|
| 621 |
+
specified by the `weight` parameter:
|
| 622 |
+
|
| 623 |
+
>>> G = nx.path_graph(4)
|
| 624 |
+
>>> nx.set_edge_attributes(
|
| 625 |
+
... G, values={(0, 1): 1, (1, 2): 10, (2, 3): 2}, name="weight"
|
| 626 |
+
... )
|
| 627 |
+
>>> nx.set_edge_attributes(
|
| 628 |
+
... G, values={(0, 1): 50, (1, 2): 35, (2, 3): 10}, name="capacity"
|
| 629 |
+
... )
|
| 630 |
+
>>> nx.to_scipy_sparse_array(G).toarray() # Default weight="weight"
|
| 631 |
+
array([[ 0, 1, 0, 0],
|
| 632 |
+
[ 1, 0, 10, 0],
|
| 633 |
+
[ 0, 10, 0, 2],
|
| 634 |
+
[ 0, 0, 2, 0]])
|
| 635 |
+
>>> nx.to_scipy_sparse_array(G, weight="capacity").toarray()
|
| 636 |
+
array([[ 0, 50, 0, 0],
|
| 637 |
+
[50, 0, 35, 0],
|
| 638 |
+
[ 0, 35, 0, 10],
|
| 639 |
+
[ 0, 0, 10, 0]])
|
| 640 |
+
|
| 641 |
+
Any edges that don't have a `weight` attribute default to 1:
|
| 642 |
+
|
| 643 |
+
>>> G[1][2].pop("capacity")
|
| 644 |
+
35
|
| 645 |
+
>>> nx.to_scipy_sparse_array(G, weight="capacity").toarray()
|
| 646 |
+
array([[ 0, 50, 0, 0],
|
| 647 |
+
[50, 0, 1, 0],
|
| 648 |
+
[ 0, 1, 0, 10],
|
| 649 |
+
[ 0, 0, 10, 0]])
|
| 650 |
+
|
| 651 |
+
When `G` is a multigraph, the values in the adjacency matrix are given by
|
| 652 |
+
the sum of the `weight` edge attribute over each edge key:
|
| 653 |
+
|
| 654 |
+
>>> G = nx.MultiDiGraph([(0, 1), (0, 1), (0, 1), (2, 0)])
|
| 655 |
+
>>> nx.to_scipy_sparse_array(G).toarray()
|
| 656 |
+
array([[0, 3, 0],
|
| 657 |
+
[0, 0, 0],
|
| 658 |
+
[1, 0, 0]])
|
| 659 |
+
|
| 660 |
+
References
|
| 661 |
+
----------
|
| 662 |
+
.. [1] Scipy Dev. References, "Sparse Arrays",
|
| 663 |
+
https://docs.scipy.org/doc/scipy/reference/sparse.html
|
| 664 |
+
"""
|
| 665 |
+
import scipy as sp
|
| 666 |
+
|
| 667 |
+
if len(G) == 0:
|
| 668 |
+
raise nx.NetworkXError("Graph has no nodes or edges")
|
| 669 |
+
|
| 670 |
+
if nodelist is None:
|
| 671 |
+
nodelist = list(G)
|
| 672 |
+
nlen = len(G)
|
| 673 |
+
else:
|
| 674 |
+
nlen = len(nodelist)
|
| 675 |
+
if nlen == 0:
|
| 676 |
+
raise nx.NetworkXError("nodelist has no nodes")
|
| 677 |
+
nodeset = set(G.nbunch_iter(nodelist))
|
| 678 |
+
if nlen != len(nodeset):
|
| 679 |
+
for n in nodelist:
|
| 680 |
+
if n not in G:
|
| 681 |
+
raise nx.NetworkXError(f"Node {n} in nodelist is not in G")
|
| 682 |
+
raise nx.NetworkXError("nodelist contains duplicates.")
|
| 683 |
+
if nlen < len(G):
|
| 684 |
+
G = G.subgraph(nodelist)
|
| 685 |
+
|
| 686 |
+
index = dict(zip(nodelist, range(nlen)))
|
| 687 |
+
coefficients = zip(
|
| 688 |
+
*((index[u], index[v], wt) for u, v, wt in G.edges(data=weight, default=1))
|
| 689 |
+
)
|
| 690 |
+
try:
|
| 691 |
+
row, col, data = coefficients
|
| 692 |
+
except ValueError:
|
| 693 |
+
# there is no edge in the subgraph
|
| 694 |
+
row, col, data = [], [], []
|
| 695 |
+
|
| 696 |
+
if G.is_directed():
|
| 697 |
+
A = sp.sparse.coo_array((data, (row, col)), shape=(nlen, nlen), dtype=dtype)
|
| 698 |
+
else:
|
| 699 |
+
# symmetrize matrix
|
| 700 |
+
d = data + data
|
| 701 |
+
r = row + col
|
| 702 |
+
c = col + row
|
| 703 |
+
# selfloop entries get double counted when symmetrizing
|
| 704 |
+
# so we subtract the data on the diagonal
|
| 705 |
+
selfloops = list(nx.selfloop_edges(G, data=weight, default=1))
|
| 706 |
+
if selfloops:
|
| 707 |
+
diag_index, diag_data = zip(*((index[u], -wt) for u, v, wt in selfloops))
|
| 708 |
+
d += diag_data
|
| 709 |
+
r += diag_index
|
| 710 |
+
c += diag_index
|
| 711 |
+
A = sp.sparse.coo_array((d, (r, c)), shape=(nlen, nlen), dtype=dtype)
|
| 712 |
+
try:
|
| 713 |
+
return A.asformat(format)
|
| 714 |
+
except ValueError as err:
|
| 715 |
+
raise nx.NetworkXError(f"Unknown sparse matrix format: {format}") from err
|
| 716 |
+
|
| 717 |
+
|
| 718 |
+
def _csr_gen_triples(A):
|
| 719 |
+
"""Converts a SciPy sparse array in **Compressed Sparse Row** format to
|
| 720 |
+
an iterable of weighted edge triples.
|
| 721 |
+
|
| 722 |
+
"""
|
| 723 |
+
nrows = A.shape[0]
|
| 724 |
+
indptr, dst_indices, data = A.indptr, A.indices, A.data
|
| 725 |
+
import numpy as np
|
| 726 |
+
|
| 727 |
+
src_indices = np.repeat(np.arange(nrows), np.diff(indptr))
|
| 728 |
+
return zip(src_indices.tolist(), dst_indices.tolist(), A.data.tolist())
|
| 729 |
+
|
| 730 |
+
|
| 731 |
+
def _csc_gen_triples(A):
|
| 732 |
+
"""Converts a SciPy sparse array in **Compressed Sparse Column** format to
|
| 733 |
+
an iterable of weighted edge triples.
|
| 734 |
+
|
| 735 |
+
"""
|
| 736 |
+
ncols = A.shape[1]
|
| 737 |
+
indptr, src_indices, data = A.indptr, A.indices, A.data
|
| 738 |
+
import numpy as np
|
| 739 |
+
|
| 740 |
+
dst_indices = np.repeat(np.arange(ncols), np.diff(indptr))
|
| 741 |
+
return zip(src_indices.tolist(), dst_indices.tolist(), A.data.tolist())
|
| 742 |
+
|
| 743 |
+
|
| 744 |
+
def _coo_gen_triples(A):
|
| 745 |
+
"""Converts a SciPy sparse array in **Coordinate** format to an iterable
|
| 746 |
+
of weighted edge triples.
|
| 747 |
+
|
| 748 |
+
"""
|
| 749 |
+
return zip(A.row.tolist(), A.col.tolist(), A.data.tolist())
|
| 750 |
+
|
| 751 |
+
|
| 752 |
+
def _dok_gen_triples(A):
|
| 753 |
+
"""Converts a SciPy sparse array in **Dictionary of Keys** format to an
|
| 754 |
+
iterable of weighted edge triples.
|
| 755 |
+
|
| 756 |
+
"""
|
| 757 |
+
for (r, c), v in A.items():
|
| 758 |
+
# Use `v.item()` to convert a NumPy scalar to the appropriate Python scalar
|
| 759 |
+
yield int(r), int(c), v.item()
|
| 760 |
+
|
| 761 |
+
|
| 762 |
+
def _generate_weighted_edges(A):
    """Returns an iterable over (u, v, w) triples, where u and v are adjacent
    vertices and w is the weight of the edge joining u and v.

    `A` is a SciPy sparse array (in any format).
    """
    # Dispatch on the formats that have a dedicated triple generator.
    handlers = {
        "csr": _csr_gen_triples,
        "csc": _csc_gen_triples,
        "dok": _dok_gen_triples,
    }
    handler = handlers.get(A.format)
    if handler is not None:
        return handler(A)
    # Any other format (including COO itself) goes through a COO conversion.
    return _coo_gen_triples(A.tocoo())
|
| 777 |
+
|
| 778 |
+
|
| 779 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def from_scipy_sparse_array(
    A, parallel_edges=False, create_using=None, edge_attribute="weight"
):
    """Creates a new graph from an adjacency matrix given as a SciPy sparse
    array.

    Parameters
    ----------
    A: scipy.sparse array
        An adjacency matrix representation of a graph

    parallel_edges : Boolean
        If this is True, `create_using` is a multigraph, and `A` is an
        integer matrix, then entry *(i, j)* in the matrix is interpreted as the
        number of parallel edges joining vertices *i* and *j* in the graph.
        If it is False, then the entries in the matrix are interpreted as
        the weight of a single edge joining the vertices.

    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.

    edge_attribute: string
        Name of edge attribute to store matrix numeric value. The data will
        have the same type as the matrix entry (int, float, (real,imag)).
        Ignored when `parallel_edges` expansion applies (weights are then 1).

    Raises
    ------
    NetworkXError
        If `A` is not square.

    Notes
    -----
    For directed graphs, explicitly mention create_using=nx.DiGraph,
    and entry i,j of A corresponds to an edge from i to j.

    If `create_using` indicates an undirected multigraph, then only the edges
    indicated by the upper triangle of the matrix `A` will be added to the
    graph.

    Examples
    --------
    >>> import scipy as sp
    >>> A = sp.sparse.csr_array([[1, 1], [1, 2]])
    >>> G = nx.from_scipy_sparse_array(A, create_using=nx.MultiGraph)
    >>> G[1][1]
    AtlasView({0: {'weight': 2}})

    >>> G = nx.from_scipy_sparse_array(
    ...     A, parallel_edges=True, create_using=nx.MultiGraph
    ... )
    >>> G[1][1]
    AtlasView({0: {'weight': 1}, 1: {'weight': 1}})
    """
    G = nx.empty_graph(0, create_using)
    n, m = A.shape
    if n != m:
        raise nx.NetworkXError(f"Adjacency matrix not square: nx,ny={A.shape}")
    # Isolated nodes have no stored entries, so register every node up front.
    G.add_nodes_from(range(n))

    # Lazily stream (u, v, w) triples out of whatever sparse format A uses.
    weighted_edges = _generate_weighted_edges(A)

    if A.dtype.kind in ("i", "u") and G.is_multigraph() and parallel_edges:
        # Integer entries + multigraph + parallel_edges: entry (u, v) == w
        # means w parallel edges of weight 1, not one edge of weight w.
        expand = itertools.chain.from_iterable
        weighted_edges = expand(
            ((u, v, 1) for _ in range(w)) for (u, v, w) in weighted_edges
        )
    if G.is_multigraph() and not G.is_directed():
        # For an undirected multigraph keep only the upper triangle; the
        # generated triples cover both (u, v) and (v, u), and without this
        # filter every edge would be inserted twice.
        weighted_edges = ((u, v, d) for u, v, d in weighted_edges if u <= v)
    G.add_weighted_edges_from(weighted_edges, weight=edge_attribute)
    return G
|
| 882 |
+
|
| 883 |
+
|
| 884 |
+
@nx._dispatchable(edge_attrs="weight")  # edge attrs may also be obtained from `dtype`
def to_numpy_array(
    G,
    nodelist=None,
    dtype=None,
    order=None,
    multigraph_weight=sum,
    weight="weight",
    nonedge=0.0,
):
    """Return the adjacency matrix of `G` as a NumPy ndarray.

    Parameters
    ----------
    G : graph
        The NetworkX graph used to construct the NumPy array.
    nodelist : list, optional
        Rows and columns are ordered according to the nodes in `nodelist`.
        If ``None`` (the default), the ordering of ``G.nodes()`` is used.
    dtype : NumPy data type, optional
        Data type used to initialize the array (NumPy default when ``None``).
        May be a structured dtype if ``weight=None``, in which case each
        named field of the result holds the adjacency for the edge
        attribute of the same name.
    order : {'C', 'F'}, optional
        Whether to store the data in C- or Fortran-contiguous (row- or
        column-wise) order in memory (NumPy default when ``None``).
    multigraph_weight : callable, optional
        Reduction applied to the weights of parallel edges in a multigraph.
        It receives a sequence of weights and returns a single value; the
        default sums them.
    weight : string or None, optional (default = 'weight')
        The edge attribute holding the numerical value used for the edge
        weight; edges missing the attribute count as 1. Must be ``None``
        when a structured `dtype` is used.
    nonedge : array_like (default = 0.0)
        The value used to represent non-edges in the matrix. Choosing a
        value such as ``nan`` avoids ambiguity when real edges may carry
        weight 0.

    Returns
    -------
    A : NumPy ndarray
        Graph adjacency matrix.

    Raises
    ------
    NetworkXError
        If `nodelist` contains nodes not in `G` or duplicate nodes, or if
        a structured `dtype` is combined with a multigraph.
    ValueError
        If `dtype` is a structured dtype and `weight` is not ``None``.

    See Also
    --------
    from_numpy_array

    Notes
    -----
    For directed graphs, entry ``i, j`` corresponds to an edge from ``i``
    to ``j``. A self-loop contributes its weight once, on the diagonal.
    When `nodelist` does not contain every node in `G`, the matrix is
    built from the subgraph induced by `nodelist`.
    """
    import numpy as np

    if nodelist is None:
        nodelist = list(G)
    nlen = len(nodelist)

    # Validate nodelist: every entry must exist in G, with no repeats.
    nodeset = set(nodelist)
    if nodeset - set(G):
        raise nx.NetworkXError(f"Nodes {nodeset - set(G)} in nodelist is not in G")
    if len(nodeset) < nlen:
        raise nx.NetworkXError("nodelist contains duplicates.")

    # Start from an all-nonedge matrix; edges are filled in below.
    A = np.full((nlen, nlen), fill_value=nonedge, dtype=dtype, order=order)

    # Corner cases: empty nodelist or graph without any edges.
    if nlen == 0 or G.number_of_edges() == 0:
        return A

    # A structured dtype (only valid with weight=None) requests one
    # adjacency per named field; otherwise a single edge attribute is used.
    edge_attrs = None  # Only single edge attribute by default
    if A.dtype.names:
        if weight is not None:
            raise ValueError(
                "Specifying `weight` not supported for structured dtypes\n."
                "To create adjacency matrices from structured dtypes, use `weight=None`."
            )
        edge_attrs = dtype.names

    # Translate nodes to row/col positions; restrict G to requested nodes.
    index_of = dict(zip(nodelist, range(nlen)))
    if len(nodelist) < len(G):
        G = G.subgraph(nodelist).copy()

    if G.is_multigraph():
        if edge_attrs:
            raise nx.NetworkXError(
                "Structured arrays are not supported for MultiGraphs"
            )
        # Group the weights of parallel edges per (row, col) position and
        # reduce each group with `multigraph_weight`.
        grouped = defaultdict(list)
        for u, v, w in G.edges(data=weight, default=1.0):
            grouped[(index_of[u], index_of[v])].append(w)
        rows, cols = np.array(list(grouped.keys())).T  # indices
        vals = [multigraph_weight(ws) for ws in grouped.values()]  # reduced weights
    else:
        rows, cols, vals = [], [], []

        # Special branch: multi-attribute adjacency from structured dtypes.
        if edge_attrs:
            # Record the full data dict per edge, then fill one named
            # field of the structured array per requested attribute.
            for u, v, data in G.edges(data=True):
                rows.append(index_of[u])
                cols.append(index_of[v])
                vals.append(data)
            for attr in edge_attrs:
                column = [data.get(attr, 1.0) for data in vals]
                A[attr][rows, cols] = column
                if not G.is_directed():
                    A[attr][cols, rows] = column
            return A

        for u, v, w in G.edges(data=weight, default=1.0):
            rows.append(index_of[u])
            cols.append(index_of[v])
            vals.append(w)

    # Scatter the collected weights with advanced indexing; mirror them
    # across the diagonal for undirected graphs.
    A[rows, cols] = vals
    if not G.is_directed():
        A[cols, rows] = vals

    return A
|
| 1121 |
+
|
| 1122 |
+
|
| 1123 |
+
@nx._dispatchable(graphs=None, returns_graph=True)
def from_numpy_array(
    A, parallel_edges=False, create_using=None, edge_attr="weight", *, nodelist=None
):
    """Return a graph from a 2D NumPy array interpreted as an adjacency matrix.

    Parameters
    ----------
    A : a 2D numpy.ndarray
        An adjacency matrix representation of a graph.
    parallel_edges : Boolean
        If True, `create_using` is a multigraph, and `A` is an integer
        array, entry *(i, j)* is interpreted as the number of parallel
        edges joining vertices *i* and *j*. If False, entries are
        interpreted as the weight of a single edge joining the vertices.
    create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
    edge_attr : String, optional (default="weight")
        The attribute to which the array values are assigned on each edge.
        If Falsy (False or None), no edge attributes are assigned and the
        array is treated as a binary mask of edge presence.
    nodelist : sequence of nodes, optional
        Node labels to use, one per row of `A`; must have length
        ``A.shape[0]``. Defaults to ``range(n)``.

    Returns
    -------
    G : graph

    Raises
    ------
    NetworkXError
        If `A` is not 2D or not square.
    TypeError
        If the array dtype kind is not understood.
    ValueError
        If `nodelist` length differs from ``A.shape[0]``.

    See Also
    --------
    to_numpy_array

    Notes
    -----
    For directed graphs pass ``create_using=nx.DiGraph``; entry ``i, j``
    of `A` then corresponds to an edge from ``i`` to ``j``. For an
    undirected multigraph only the upper triangle of `A` is read, so each
    edge is added exactly once. A user-specified compound (structured)
    dtype maps each field name to an edge attribute of the same name.
    """
    # Map NumPy dtype "kind" codes to the Python types used for edge
    # attribute values. "V" (void) flags structured dtypes, handled below.
    kind_to_python_type = {
        "f": float,
        "i": int,
        "u": int,
        "b": bool,
        "c": complex,
        "S": str,
        "U": str,
        "V": "void",
    }
    G = nx.empty_graph(0, create_using)

    if A.ndim != 2:
        raise nx.NetworkXError(f"Input array must be 2D, not {A.ndim}")
    n, m = A.shape
    if n != m:
        raise nx.NetworkXError(f"Adjacency matrix not square: nx,ny={A.shape}")

    dt = A.dtype
    try:
        python_type = kind_to_python_type[dt.kind]
    except Exception as err:
        raise TypeError(f"Unknown numpy data type: {dt}") from err

    _default_nodes = nodelist is None
    if _default_nodes:
        nodelist = range(n)
    elif len(nodelist) != n:
        raise ValueError("nodelist must have the same length as A.shape[0]")

    # Make sure we get even the isolated nodes of the graph.
    G.add_nodes_from(nodelist)

    # Nonzero coordinates become edges (cast np.int64 indices to int).
    edges = ((int(r), int(c)) for r, c in zip(*A.nonzero()))

    no_attrs = edge_attr in [False, None]
    if python_type == "void":
        # Structured dtype: sort the fields by offset (then dtype, then
        # name) so the values in A[u, v] line up with their field names.
        fields = sorted(
            (offset, dtype, name) for name, (dtype, offset) in A.dtype.fields.items()
        )
        triples = (
            (
                u,
                v,
                {}
                if no_attrs
                else {
                    name: kind_to_python_type[dtype.kind](val)
                    for (_, dtype, name), val in zip(fields, A[u, v])
                },
            )
            for u, v in edges
        )
    elif python_type is int and G.is_multigraph() and parallel_edges:
        # Integer entries on a multigraph with parallel_edges=True count
        # parallel edges: expand entry A[u, v] into that many edges of
        # weight 1 each.
        chain = itertools.chain.from_iterable
        if no_attrs:
            triples = chain(((u, v, {}) for _ in range(A[u, v])) for (u, v) in edges)
        else:
            triples = chain(
                ((u, v, {edge_attr: 1}) for _ in range(A[u, v])) for (u, v) in edges
            )
    else:
        # Basic data type: one edge per nonzero entry, the entry itself
        # (converted to a Python scalar) becoming the edge attribute.
        if no_attrs:
            triples = ((u, v, {}) for u, v in edges)
        else:
            triples = ((u, v, {edge_attr: python_type(A[u, v])}) for u, v in edges)

    # For an undirected multigraph keep only the upper triangle; without
    # this, every edge would be added twice by `G.add_edges_from` below.
    if G.is_multigraph() and not G.is_directed():
        triples = ((u, v, d) for u, v, d in triples if u <= v)

    # Remap positional indices to the user-provided node labels.
    if not _default_nodes:
        idx_to_node = dict(enumerate(nodelist))
        triples = ((idx_to_node[u], idx_to_node[v], d) for u, v, d in triples)

    G.add_edges_from(triples)
    return G
|
llava_next/lib/python3.10/site-packages/networkx/exception.py
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
**********
|
| 3 |
+
Exceptions
|
| 4 |
+
**********
|
| 5 |
+
|
| 6 |
+
Base exceptions and errors for NetworkX.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
# Public exception names exported via ``from networkx.exception import *``.
__all__ = [
    "HasACycle",
    "NodeNotFound",
    "PowerIterationFailedConvergence",
    "ExceededMaxIterations",
    "AmbiguousSolution",
    "NetworkXAlgorithmError",
    "NetworkXException",
    "NetworkXError",
    "NetworkXNoCycle",
    "NetworkXNoPath",
    "NetworkXNotImplemented",
    "NetworkXPointlessConcept",
    "NetworkXUnbounded",
    "NetworkXUnfeasible",
]
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
# Root of the NetworkX exception hierarchy; catch this to handle any
# NetworkX-specific failure.
class NetworkXException(Exception):
    """Base class for exceptions in NetworkX."""
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
# General-purpose error raised for invalid input or inconsistent state.
class NetworkXError(NetworkXException):
    """Exception for a serious error in NetworkX"""
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class NetworkXPointlessConcept(NetworkXException):
    """Raised when a null graph is provided as input to an algorithm
    that cannot use it.

    The null graph is sometimes considered a pointless concept [1]_,
    thus the name of the exception.

    Notes
    -----
    Null graphs and empty graphs are often used interchangeably but they
    are well defined in NetworkX. An ``empty_graph`` is a graph with ``n`` nodes
    and 0 edges, and a ``null_graph`` is a graph with 0 nodes and 0 edges.

    References
    ----------
    .. [1] Harary, F. and Read, R. "Is the Null Graph a Pointless
       Concept?" In Graphs and Combinatorics Conference, George
       Washington University. New York: Springer-Verlag, 1973.

    """
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
# Base for errors that arise while an algorithm is running (as opposed to
# invalid input detected up front).
class NetworkXAlgorithmError(NetworkXException):
    """Exception for unexpected termination of algorithms."""
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class NetworkXUnfeasible(NetworkXAlgorithmError):
    """Exception raised by algorithms trying to solve a problem
    instance that has no feasible solution."""
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
# Specialization of NetworkXUnfeasible for path-finding algorithms.
class NetworkXNoPath(NetworkXUnfeasible):
    """Exception for algorithms that should return a path when running
    on graphs where such a path does not exist."""
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
# Specialization of NetworkXUnfeasible for cycle-finding algorithms.
class NetworkXNoCycle(NetworkXUnfeasible):
    """Exception for algorithms that should return a cycle when running
    on graphs where such a cycle does not exist."""
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class HasACycle(NetworkXException):
    """Raised if a graph has a cycle when an algorithm expects that it
    will have no cycles.

    """
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
class NetworkXUnbounded(NetworkXAlgorithmError):
    """Exception raised by algorithms trying to solve a maximization
    or a minimization problem instance that is unbounded."""
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
# Raised e.g. when an algorithm does not support directed or multigraph input.
class NetworkXNotImplemented(NetworkXException):
    """Exception raised by algorithms not implemented for a type of graph."""
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
class NodeNotFound(NetworkXException):
    """Exception raised if requested node is not present in the graph"""
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
class AmbiguousSolution(NetworkXException):
    """Raised if more than one valid solution exists for an intermediary step
    of an algorithm.

    In the face of ambiguity, refuse the temptation to guess.
    This may occur, for example, when trying to determine the
    bipartite node sets in a disconnected bipartite graph when
    computing bipartite matchings.

    """
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
class ExceededMaxIterations(NetworkXException):
    """Raised if a loop iterates too many times without breaking.

    This may occur, for example, in an algorithm that computes
    progressively better approximations to a value but exceeds an
    iteration bound specified by the user.

    """
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
class PowerIterationFailedConvergence(ExceededMaxIterations):
    """Raised when the power iteration method fails to converge within a
    specified iteration limit.

    `num_iterations` is the number of iterations that have been
    completed when this exception was raised.

    """

    def __init__(self, num_iterations, *args, **kw):
        msg = f"power iteration failed to converge within {num_iterations} iterations"
        # NOTE: the previous implementation bound ``super().__init__`` and
        # then passed ``self`` again as the first positional argument, so
        # ``e.args`` became ``(self, msg)`` and ``str(e)`` printed a tuple
        # containing the exception object itself. Calling the bound
        # ``super().__init__`` directly fixes that.
        super().__init__(msg, *args, **kw)
|
llava_next/lib/python3.10/site-packages/networkx/lazy_imports.py
ADDED
|
@@ -0,0 +1,188 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import importlib
|
| 2 |
+
import importlib.util
|
| 3 |
+
import inspect
|
| 4 |
+
import os
|
| 5 |
+
import sys
|
| 6 |
+
import types
|
| 7 |
+
|
| 8 |
+
# Public API of the lazy-import helpers.
__all__ = ["attach", "_lazy_import"]
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def attach(module_name, submodules=None, submod_attrs=None):
    """Attach lazily loaded submodules, and functions or other attributes.

    Replaces the eager imports of an ``__init__.py``::

        import mysubmodule
        import anothersubmodule

        from .foo import someattr

    with module-level ``__getattr__``, ``__dir__``, and ``__all__`` objects
    that behave identically but delay each actual import until the name is
    first used::

        __getattr__, __lazy_dir__, __all__ = lazy.attach(
            __name__, ["mysubmodule", "anothersubmodule"], {"foo": "someattr"}
        )

    Requires Python 3.7 or higher (module-level ``__getattr__``, PEP 562).

    Parameters
    ----------
    module_name : str
        Typically use ``__name__``.
    submodules : set
        List of submodules to lazily import.
    submod_attrs : dict
        Dictionary of submodule -> list of attributes / functions.
        These attributes are imported as they are used.

    Returns
    -------
    __getattr__, __dir__, __all__

    """
    submodules = set() if submodules is None else set(submodules)
    if submod_attrs is None:
        submod_attrs = {}

    # Invert submodule -> attrs into attr -> owning submodule for lookup.
    attr_to_modules = {
        attr: mod for mod, attrs in submod_attrs.items() for attr in attrs
    }

    __all__ = list(submodules | attr_to_modules.keys())

    def __getattr__(name):
        # Submodule access imports the submodule itself; attribute access
        # imports the owning submodule and pulls the attribute from it.
        if name in submodules:
            return importlib.import_module(f"{module_name}.{name}")
        if name in attr_to_modules:
            owner = importlib.import_module(f"{module_name}.{attr_to_modules[name]}")
            return getattr(owner, name)
        raise AttributeError(f"No {module_name} attribute {name}")

    def __dir__():
        return __all__

    # Escape hatch: force all imports up front (useful for debugging).
    if os.environ.get("EAGER_IMPORT", ""):
        for name in submodules | set(attr_to_modules):
            __getattr__(name)

    return __getattr__, __dir__, list(__all__)
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
class DelayedImportErrorModule(types.ModuleType):
    """Stand-in module returned by `_lazy_import` when the requested module
    cannot be found.

    It defers the failure: the first attribute access raises
    ``ModuleNotFoundError`` pointing at the original call site recorded in
    ``frame_data``.
    """

    def __init__(self, frame_data, *args, **kwargs):
        # frame_data: dict with keys "spec", "filename", "lineno",
        # "function", "code_context" describing where the failed import
        # was originally requested.
        self.__frame_data = frame_data
        super().__init__(*args, **kwargs)

    def __getattr__(self, x):
        # Let module bookkeeping attributes fall through to the normal
        # lookup chain; any other access means user code actually touched
        # the missing module, so report the deferred import error.
        if x in ("__class__", "__file__", "__frame_data"):
            super().__getattr__(x)
        else:
            fd = self.__frame_data
            raise ModuleNotFoundError(
                f"No module named '{fd['spec']}'\n\n"
                "This error is lazily reported, having originally occurred in\n"
                f'  File {fd["filename"]}, line {fd["lineno"]}, in {fd["function"]}\n\n'
                f'----> {"".join(fd["code_context"] or "").strip()}'
            )
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
def _lazy_import(fullname):
    """Return a lazily imported proxy for a module or library.

    Warning
    -------
    Importing using this function can currently cause trouble
    when the user tries to import from a subpackage of a module before
    the package is fully imported. In particular, this idiom may not work:

      np = lazy_import("numpy")
      from numpy.lib import recfunctions

    This is due to a difference in the way Python's LazyLoader handles
    subpackage imports compared to the normal import process. The
    workaround is to import the package before importing from the
    subpackage.

    Notes
    -----
    Instead of importing a slow library inside every function that needs
    it, create the proxy once at module level::

        sp = lazy.load("scipy")

        def myfunc():
            sp.argmin(...)

    The actual import is delayed until the first attribute is requested,
    so the initial import time stays fast.

    Parameters
    ----------
    fullname : str
        The full name of the package or subpackage to import. For example::

          sp = lazy.load("scipy")  # import scipy as sp
          spla = lazy.load("scipy.linalg")  # import scipy.linalg as spla

    Returns
    -------
    pm : importlib.util._LazyModule
        Proxy module. Can be used like any regularly imported module.
        Actual loading of the module occurs upon first attribute request.

    """
    # Fast path: the module is already loaded. Catch only the KeyError
    # from the dict lookup -- the previous bare ``except:`` also swallowed
    # KeyboardInterrupt/SystemExit.
    try:
        return sys.modules[fullname]
    except KeyError:
        pass

    # Not previously loaded -- look it up
    spec = importlib.util.find_spec(fullname)

    if spec is None:
        # Module does not exist: return a stand-in that raises a
        # ModuleNotFoundError (pointing at this call site) on first use.
        try:
            parent = inspect.stack()[1]
            frame_data = {
                "spec": fullname,
                "filename": parent.filename,
                "lineno": parent.lineno,
                "function": parent.function,
                "code_context": parent.code_context,
            }
            return DelayedImportErrorModule(frame_data, "DelayedImportErrorModule")
        finally:
            # Drop the frame record promptly to avoid reference cycles.
            del parent

    module = importlib.util.module_from_spec(spec)
    sys.modules[fullname] = module

    # LazyLoader defers spec.loader.exec_module until first attribute access.
    loader = importlib.util.LazyLoader(spec.loader)
    loader.exec_module(module)

    return module
|
llava_next/lib/python3.10/site-packages/networkx/relabel.py
ADDED
|
@@ -0,0 +1,285 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import networkx as nx
|
| 2 |
+
|
| 3 |
+
__all__ = ["convert_node_labels_to_integers", "relabel_nodes"]
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
@nx._dispatchable(
    preserve_all_attrs=True, mutates_input={"not copy": 2}, returns_graph=True
)
def relabel_nodes(G, mapping, copy=True):
    """Relabel the nodes of the graph G according to a given mapping.

    The original node ordering may not be preserved if `copy` is `False`
    and the mapping includes overlap between old and new labels.

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    mapping : dictionary or callable
        A dictionary with old labels as keys and new labels as values, or
        a callable returning the new label for each old label.  A partial
        mapping is allowed; non-node keys are ignored, and mapping several
        old nodes to one new node merges them.

    copy : bool (optional, default=True)
        If True return a relabeled copy; if False relabel the nodes in
        place.

    Examples
    --------
    >>> G = nx.path_graph(3)
    >>> sorted(nx.relabel_nodes(G, {0: "a", 1: "b", 2: "c"}))
    ['a', 'b', 'c']

    A mapping can also be given as a function:

    >>> G = nx.path_graph(3)
    >>> list(nx.relabel_nodes(G, lambda x: x**2))
    [0, 1, 4]

    Notes
    -----
    Only the nodes specified in the mapping will be relabeled; any
    non-node keys in the mapping are ignored.

    With ``copy=False`` the graph is modified in place.  Naming
    collisions (e.g. a->b, b->c) are resolved by ordering the
    relabelings via a directed graph built from ``mapping``; circular
    mappings (e.g. a->b, b->a) cannot be resolved in place and raise
    ``NetworkXUnfeasible`` — use ``copy=True`` for those.

    If a relabel operation on a multigraph would cause two or more edges
    to share source, target and key, the later edge is assigned the
    lowest non-negative integer key not already in use between those two
    nodes; non-numeric keys may therefore be replaced by numeric keys.

    See Also
    --------
    convert_node_labels_to_integers
    """
    # A callable mapping is normalized into a plain dict up front so both
    # relabeling strategies can treat it uniformly.
    if callable(mapping):
        mapping = {node: mapping(node) for node in G}
    return _relabel_copy(G, mapping) if copy else _relabel_inplace(G, mapping)
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def _relabel_inplace(G, mapping):
    """Relabel the nodes of ``G`` in place according to ``mapping``.

    When the old and new label sets overlap (e.g. a->b, b->c), the
    individual renames are ordered by topologically sorting the mapping
    so each target label is vacated before it is assigned.  Circular
    mappings cannot be resolved in place and raise
    ``nx.NetworkXUnfeasible``.

    Returns ``G`` (the same object, modified in place).
    """
    if len(mapping.keys() & mapping.values()) > 0:
        # labels sets overlap
        # can we topological sort and still do the relabeling?
        # Build a digraph of old->new label dependencies; self-loops
        # (identity renames) impose no ordering constraint, so drop them.
        D = nx.DiGraph(list(mapping.items()))
        D.remove_edges_from(nx.selfloop_edges(D))
        try:
            # Reversed topological order renames downstream labels first,
            # freeing each target label before anything is renamed to it.
            nodes = reversed(list(nx.topological_sort(D)))
        except nx.NetworkXUnfeasible as err:
            raise nx.NetworkXUnfeasible(
                "The node label sets are overlapping and no ordering can "
                "resolve the mapping. Use copy=True."
            ) from err
    else:
        # non-overlapping label sets, sort them in the order of G nodes
        nodes = [n for n in G if n in mapping]

    multigraph = G.is_multigraph()
    directed = G.is_directed()

    for old in nodes:
        # Test that old is in both mapping and G, otherwise ignore.
        try:
            new = mapping[old]
            # Adding first merges `old`'s attributes into `new` (which may
            # already exist when several nodes collapse onto one label).
            G.add_node(new, **G.nodes[old])
        except KeyError:
            continue
        if new == old:
            continue
        if multigraph:
            # Rebuild every edge incident to `old`, redirecting self-loop
            # endpoints onto the new label as well.
            new_edges = [
                (new, new if old == target else target, key, data)
                for (_, target, key, data) in G.edges(old, data=True, keys=True)
            ]
            if directed:
                new_edges += [
                    (new if old == source else source, new, key, data)
                    for (source, _, key, data) in G.in_edges(old, data=True, keys=True)
                ]
            # Ensure new edges won't overwrite existing ones: on a key
            # collision pick the lowest unused non-negative integer key
            # (non-numeric keys are replaced by integers); `seen` guards
            # against two rebuilt edges claiming the same fresh key.
            seen = set()
            for i, (source, target, key, data) in enumerate(new_edges):
                if target in G[source] and key in G[source][target]:
                    new_key = 0 if not isinstance(key, int | float) else key
                    while new_key in G[source][target] or (target, new_key) in seen:
                        new_key += 1
                    new_edges[i] = (source, target, new_key, data)
                    seen.add((target, new_key))
        else:
            new_edges = [
                (new, new if old == target else target, data)
                for (_, target, data) in G.edges(old, data=True)
            ]
            if directed:
                new_edges += [
                    (new if old == source else source, new, data)
                    for (source, _, data) in G.in_edges(old, data=True)
                ]
        # Removing `old` drops its incident edges; re-add them under the
        # new label.
        G.remove_node(old)
        G.add_edges_from(new_edges)
    return G
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
def _relabel_copy(G, mapping):
    """Return a relabeled copy of ``G`` according to ``mapping``.

    Nodes absent from the mapping keep their original label.  Node, edge
    and graph attributes are copied.  On multigraphs, edge keys that
    collide after relabeling are replaced by fresh integer keys.
    """

    def rename(node):
        # A partial mapping is allowed: unmapped nodes keep their label.
        return mapping.get(node, node)

    relabeled = G.__class__()
    relabeled.add_nodes_from(rename(node) for node in G)
    # Write the copied attribute dicts straight into the internal node
    # store, mirroring the original implementation's direct update.
    relabeled._node.update(
        (rename(node), attrs.copy()) for node, attrs in G.nodes.items()
    )
    if G.is_multigraph():
        edges = [
            (rename(u), rename(v), k, attrs.copy())
            for u, v, k, attrs in G.edges(keys=True, data=True)
        ]

        # Resolve edge-key conflicts introduced by merged endpoints.
        symmetric = not G.is_directed()
        claimed = set()
        for idx, (u, v, k, attrs) in enumerate(edges):
            while (u, v, k) in claimed:
                # Non-numeric keys restart from 0; numeric keys count up.
                if not isinstance(k, (int, float)):
                    k = 0
                k += 1
            claimed.add((u, v, k))
            if symmetric:
                # Undirected edges occupy the key in both directions.
                claimed.add((v, u, k))
            edges[idx] = (u, v, k, attrs)

        relabeled.add_edges_from(edges)
    else:
        relabeled.add_edges_from(
            (rename(u), rename(v), attrs.copy()) for u, v, attrs in G.edges(data=True)
        )
    relabeled.graph.update(G.graph)
    return relabeled
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
def convert_node_labels_to_integers(
    G, first_label=0, ordering="default", label_attribute=None
):
    """Returns a copy of the graph G with the nodes relabeled using
    consecutive integers.

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    first_label : int, optional (default=0)
        An integer specifying the starting offset in numbering nodes.
        The new integer labels are numbered first_label, ..., n-1+first_label.

    ordering : string
        "default" : inherit node ordering from G.nodes()
        "sorted" : inherit node ordering from sorted(G.nodes())
        "increasing degree" : nodes are sorted by increasing degree
        "decreasing degree" : nodes are sorted by decreasing degree

    label_attribute : string, optional (default=None)
        Name of node attribute to store old label. If None no attribute
        is created.

    Raises
    ------
    NetworkXError
        If `ordering` is not one of the recognized values.

    Notes
    -----
    Node and edge attribute data are copied to the new (relabeled) graph.

    There is no guarantee that two graphs (even identical ones) are
    assigned the same integer labels; pass `ordering` to make the
    numbering more predictable.

    See Also
    --------
    relabel_nodes
    """
    # Determine the node order to number, then zip with consecutive ints.
    if ordering == "default":
        ordered_nodes = list(G.nodes())
    elif ordering == "sorted":
        ordered_nodes = sorted(G.nodes())
    elif ordering == "increasing degree":
        # Sort (degree, node) pairs ascending; ties broken by node.
        ordered_nodes = [n for _, n in sorted((d, n) for n, d in G.degree())]
    elif ordering == "decreasing degree":
        ordered_nodes = [
            n for _, n in sorted(((d, n) for n, d in G.degree()), reverse=True)
        ]
    else:
        raise nx.NetworkXError(f"Unknown node ordering: {ordering}")
    mapping = dict(
        zip(ordered_nodes, range(first_label, first_label + len(ordered_nodes)))
    )
    H = relabel_nodes(G, mapping)
    # Optionally record each node's original label as a node attribute.
    if label_attribute is not None:
        nx.set_node_attributes(
            H, {new: old for old, new in mapping.items()}, label_attribute
        )
    return H
|
llava_next/lib/python3.10/site-packages/pillow.libs/libXau-154567c4.so.6.0.0
ADDED
|
Binary file (22.1 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pillow.libs/libbrotlicommon-3ecfe81c.so.1
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:024bc7605502cffd45b3f7c3f37fe043694cc3b4b4cb7f39af3b9a72793e4c2e
|
| 3 |
+
size 144425
|
llava_next/lib/python3.10/site-packages/pillow.libs/libbrotlidec-ba690955.so.1
ADDED
|
Binary file (58.2 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pillow.libs/libfreetype-be14bf51.so.6.20.1
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:2a2178a93a108ae14521328258296ab896a77f5f520ea2a5cb66d75838cc2f5f
|
| 3 |
+
size 1422625
|
llava_next/lib/python3.10/site-packages/pillow.libs/libsharpyuv-898c0cb5.so.0.1.0
ADDED
|
Binary file (42 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pillow.libs/libwebpdemux-f2642bcc.so.2.0.15
ADDED
|
Binary file (26.1 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pillow.libs/libwebpmux-d524b4d5.so.3.1.0
ADDED
|
Binary file (54.5 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/pillow.libs/libxcb-b8a56d01.so.1.1.0
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:633eef394415421c697c6b986ba99ade7f7c0364d5c8a90e3de01a44d8247f1b
|
| 3 |
+
size 251425
|
llava_next/lib/python3.10/site-packages/rpds/__init__.py
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Re-export the public API of the `rpds.rpds` submodule (presumably the
# compiled extension that implements this package — confirm against the
# distribution contents) so it is available directly from the package.
from .rpds import *

# Importing `.rpds` above also sets it as an attribute of this package,
# so the bare name `rpds` is bound in this module's namespace.
__doc__ = rpds.__doc__
# Mirror the submodule's export list when it declares one.
if hasattr(rpds, "__all__"):
    __all__ = rpds.__all__
|
llava_next/lib/python3.10/site-packages/rpds/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (247 Bytes). View file
|
|
|
llava_next/lib/python3.10/site-packages/rpds/py.typed
ADDED
|
File without changes
|
llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/PKG-INFO
ADDED
|
@@ -0,0 +1,142 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.2
|
| 2 |
+
Name: setuptools
|
| 3 |
+
Version: 75.8.0
|
| 4 |
+
Summary: Easily download, build, install, upgrade, and uninstall Python packages
|
| 5 |
+
Author-email: Python Packaging Authority <distutils-sig@python.org>
|
| 6 |
+
Project-URL: Source, https://github.com/pypa/setuptools
|
| 7 |
+
Project-URL: Documentation, https://setuptools.pypa.io/
|
| 8 |
+
Project-URL: Changelog, https://setuptools.pypa.io/en/stable/history.html
|
| 9 |
+
Keywords: CPAN PyPI distutils eggs package management
|
| 10 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 11 |
+
Classifier: Intended Audience :: Developers
|
| 12 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 13 |
+
Classifier: Programming Language :: Python :: 3
|
| 14 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 15 |
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
| 16 |
+
Classifier: Topic :: System :: Archiving :: Packaging
|
| 17 |
+
Classifier: Topic :: System :: Systems Administration
|
| 18 |
+
Classifier: Topic :: Utilities
|
| 19 |
+
Requires-Python: >=3.9
|
| 20 |
+
Description-Content-Type: text/x-rst
|
| 21 |
+
License-File: LICENSE
|
| 22 |
+
Provides-Extra: test
|
| 23 |
+
Requires-Dist: pytest!=8.1.*,>=6; extra == "test"
|
| 24 |
+
Requires-Dist: virtualenv>=13.0.0; extra == "test"
|
| 25 |
+
Requires-Dist: wheel>=0.44.0; extra == "test"
|
| 26 |
+
Requires-Dist: pip>=19.1; extra == "test"
|
| 27 |
+
Requires-Dist: packaging>=24.2; extra == "test"
|
| 28 |
+
Requires-Dist: jaraco.envs>=2.2; extra == "test"
|
| 29 |
+
Requires-Dist: pytest-xdist>=3; extra == "test"
|
| 30 |
+
Requires-Dist: jaraco.path>=3.7.2; extra == "test"
|
| 31 |
+
Requires-Dist: build[virtualenv]>=1.0.3; extra == "test"
|
| 32 |
+
Requires-Dist: filelock>=3.4.0; extra == "test"
|
| 33 |
+
Requires-Dist: ini2toml[lite]>=0.14; extra == "test"
|
| 34 |
+
Requires-Dist: tomli-w>=1.0.0; extra == "test"
|
| 35 |
+
Requires-Dist: pytest-timeout; extra == "test"
|
| 36 |
+
Requires-Dist: pytest-perf; sys_platform != "cygwin" and extra == "test"
|
| 37 |
+
Requires-Dist: jaraco.develop>=7.21; (python_version >= "3.9" and sys_platform != "cygwin") and extra == "test"
|
| 38 |
+
Requires-Dist: pytest-home>=0.5; extra == "test"
|
| 39 |
+
Requires-Dist: pytest-subprocess; extra == "test"
|
| 40 |
+
Requires-Dist: pyproject-hooks!=1.1; extra == "test"
|
| 41 |
+
Requires-Dist: jaraco.test>=5.5; extra == "test"
|
| 42 |
+
Provides-Extra: doc
|
| 43 |
+
Requires-Dist: sphinx>=3.5; extra == "doc"
|
| 44 |
+
Requires-Dist: jaraco.packaging>=9.3; extra == "doc"
|
| 45 |
+
Requires-Dist: rst.linker>=1.9; extra == "doc"
|
| 46 |
+
Requires-Dist: furo; extra == "doc"
|
| 47 |
+
Requires-Dist: sphinx-lint; extra == "doc"
|
| 48 |
+
Requires-Dist: jaraco.tidelift>=1.4; extra == "doc"
|
| 49 |
+
Requires-Dist: pygments-github-lexers==0.0.5; extra == "doc"
|
| 50 |
+
Requires-Dist: sphinx-favicon; extra == "doc"
|
| 51 |
+
Requires-Dist: sphinx-inline-tabs; extra == "doc"
|
| 52 |
+
Requires-Dist: sphinx-reredirects; extra == "doc"
|
| 53 |
+
Requires-Dist: sphinxcontrib-towncrier; extra == "doc"
|
| 54 |
+
Requires-Dist: sphinx-notfound-page<2,>=1; extra == "doc"
|
| 55 |
+
Requires-Dist: pyproject-hooks!=1.1; extra == "doc"
|
| 56 |
+
Requires-Dist: towncrier<24.7; extra == "doc"
|
| 57 |
+
Provides-Extra: ssl
|
| 58 |
+
Provides-Extra: certs
|
| 59 |
+
Provides-Extra: core
|
| 60 |
+
Requires-Dist: packaging>=24.2; extra == "core"
|
| 61 |
+
Requires-Dist: more_itertools>=8.8; extra == "core"
|
| 62 |
+
Requires-Dist: jaraco.text>=3.7; extra == "core"
|
| 63 |
+
Requires-Dist: importlib_metadata>=6; python_version < "3.10" and extra == "core"
|
| 64 |
+
Requires-Dist: tomli>=2.0.1; python_version < "3.11" and extra == "core"
|
| 65 |
+
Requires-Dist: wheel>=0.43.0; extra == "core"
|
| 66 |
+
Requires-Dist: platformdirs>=4.2.2; extra == "core"
|
| 67 |
+
Requires-Dist: jaraco.collections; extra == "core"
|
| 68 |
+
Requires-Dist: jaraco.functools>=4; extra == "core"
|
| 69 |
+
Requires-Dist: packaging; extra == "core"
|
| 70 |
+
Requires-Dist: more_itertools; extra == "core"
|
| 71 |
+
Provides-Extra: check
|
| 72 |
+
Requires-Dist: pytest-checkdocs>=2.4; extra == "check"
|
| 73 |
+
Requires-Dist: pytest-ruff>=0.2.1; sys_platform != "cygwin" and extra == "check"
|
| 74 |
+
Requires-Dist: ruff>=0.8.0; sys_platform != "cygwin" and extra == "check"
|
| 75 |
+
Provides-Extra: cover
|
| 76 |
+
Requires-Dist: pytest-cov; extra == "cover"
|
| 77 |
+
Provides-Extra: enabler
|
| 78 |
+
Requires-Dist: pytest-enabler>=2.2; extra == "enabler"
|
| 79 |
+
Provides-Extra: type
|
| 80 |
+
Requires-Dist: pytest-mypy; extra == "type"
|
| 81 |
+
Requires-Dist: mypy==1.14.*; extra == "type"
|
| 82 |
+
Requires-Dist: importlib_metadata>=7.0.2; python_version < "3.10" and extra == "type"
|
| 83 |
+
Requires-Dist: jaraco.develop>=7.21; sys_platform != "cygwin" and extra == "type"
|
| 84 |
+
|
| 85 |
+
.. |pypi-version| image:: https://img.shields.io/pypi/v/setuptools.svg
|
| 86 |
+
:target: https://pypi.org/project/setuptools
|
| 87 |
+
|
| 88 |
+
.. |py-version| image:: https://img.shields.io/pypi/pyversions/setuptools.svg
|
| 89 |
+
|
| 90 |
+
.. |test-badge| image:: https://github.com/pypa/setuptools/actions/workflows/main.yml/badge.svg
|
| 91 |
+
:target: https://github.com/pypa/setuptools/actions?query=workflow%3A%22tests%22
|
| 92 |
+
:alt: tests
|
| 93 |
+
|
| 94 |
+
.. |ruff-badge| image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
|
| 95 |
+
:target: https://github.com/astral-sh/ruff
|
| 96 |
+
:alt: Ruff
|
| 97 |
+
|
| 98 |
+
.. |docs-badge| image:: https://img.shields.io/readthedocs/setuptools/latest.svg
|
| 99 |
+
:target: https://setuptools.pypa.io
|
| 100 |
+
|
| 101 |
+
.. |skeleton-badge| image:: https://img.shields.io/badge/skeleton-2024-informational
|
| 102 |
+
:target: https://blog.jaraco.com/skeleton
|
| 103 |
+
|
| 104 |
+
.. |codecov-badge| image:: https://img.shields.io/codecov/c/github/pypa/setuptools/master.svg?logo=codecov&logoColor=white
|
| 105 |
+
:target: https://codecov.io/gh/pypa/setuptools
|
| 106 |
+
|
| 107 |
+
.. |tidelift-badge| image:: https://tidelift.com/badges/github/pypa/setuptools?style=flat
|
| 108 |
+
:target: https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=readme
|
| 109 |
+
|
| 110 |
+
.. |discord-badge| image:: https://img.shields.io/discord/803025117553754132
|
| 111 |
+
:target: https://discord.com/channels/803025117553754132/815945031150993468
|
| 112 |
+
:alt: Discord
|
| 113 |
+
|
| 114 |
+
|pypi-version| |py-version| |test-badge| |ruff-badge| |docs-badge| |skeleton-badge| |codecov-badge| |discord-badge|
|
| 115 |
+
|
| 116 |
+
See the `Quickstart <https://setuptools.pypa.io/en/latest/userguide/quickstart.html>`_
|
| 117 |
+
and the `User's Guide <https://setuptools.pypa.io/en/latest/userguide/>`_ for
|
| 118 |
+
instructions on how to use Setuptools.
|
| 119 |
+
|
| 120 |
+
Questions and comments should be directed to `GitHub Discussions
|
| 121 |
+
<https://github.com/pypa/setuptools/discussions>`_.
|
| 122 |
+
Bug reports and especially tested patches may be
|
| 123 |
+
submitted directly to the `bug tracker
|
| 124 |
+
<https://github.com/pypa/setuptools/issues>`_.
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
Code of Conduct
|
| 128 |
+
===============
|
| 129 |
+
|
| 130 |
+
Everyone interacting in the setuptools project's codebases, issue trackers,
|
| 131 |
+
chat rooms, and fora is expected to follow the
|
| 132 |
+
`PSF Code of Conduct <https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md>`_.
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
For Enterprise
|
| 136 |
+
==============
|
| 137 |
+
|
| 138 |
+
Available as part of the Tidelift Subscription.
|
| 139 |
+
|
| 140 |
+
Setuptools and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
|
| 141 |
+
|
| 142 |
+
`Learn more <https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=referral&utm_campaign=github>`_.
|
llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/SOURCES.txt
ADDED
|
@@ -0,0 +1,571 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
LICENSE
|
| 2 |
+
MANIFEST.in
|
| 3 |
+
NEWS.rst
|
| 4 |
+
README.rst
|
| 5 |
+
conftest.py
|
| 6 |
+
exercises.py
|
| 7 |
+
launcher.c
|
| 8 |
+
mypy.ini
|
| 9 |
+
pyproject.toml
|
| 10 |
+
pytest.ini
|
| 11 |
+
setup.cfg
|
| 12 |
+
setup.py
|
| 13 |
+
tox.ini
|
| 14 |
+
_distutils_hack/__init__.py
|
| 15 |
+
_distutils_hack/override.py
|
| 16 |
+
docs/artwork.rst
|
| 17 |
+
docs/build_meta.rst
|
| 18 |
+
docs/conf.py
|
| 19 |
+
docs/history.rst
|
| 20 |
+
docs/index.rst
|
| 21 |
+
docs/pkg_resources.rst
|
| 22 |
+
docs/python 2 sunset.rst
|
| 23 |
+
docs/roadmap.rst
|
| 24 |
+
docs/setuptools.rst
|
| 25 |
+
docs/deprecated/changed_keywords.rst
|
| 26 |
+
docs/deprecated/commands.rst
|
| 27 |
+
docs/deprecated/dependency_links.rst
|
| 28 |
+
docs/deprecated/distutils-legacy.rst
|
| 29 |
+
docs/deprecated/easy_install.rst
|
| 30 |
+
docs/deprecated/functionalities.rst
|
| 31 |
+
docs/deprecated/index.rst
|
| 32 |
+
docs/deprecated/python_eggs.rst
|
| 33 |
+
docs/deprecated/resource_extraction.rst
|
| 34 |
+
docs/deprecated/zip_safe.rst
|
| 35 |
+
docs/deprecated/distutils/_setuptools_disclaimer.rst
|
| 36 |
+
docs/deprecated/distutils/apiref.rst
|
| 37 |
+
docs/deprecated/distutils/builtdist.rst
|
| 38 |
+
docs/deprecated/distutils/commandref.rst
|
| 39 |
+
docs/deprecated/distutils/configfile.rst
|
| 40 |
+
docs/deprecated/distutils/examples.rst
|
| 41 |
+
docs/deprecated/distutils/extending.rst
|
| 42 |
+
docs/deprecated/distutils/index.rst
|
| 43 |
+
docs/deprecated/distutils/introduction.rst
|
| 44 |
+
docs/deprecated/distutils/packageindex.rst
|
| 45 |
+
docs/deprecated/distutils/setupscript.rst
|
| 46 |
+
docs/deprecated/distutils/sourcedist.rst
|
| 47 |
+
docs/deprecated/distutils/uploading.rst
|
| 48 |
+
docs/development/developer-guide.rst
|
| 49 |
+
docs/development/index.rst
|
| 50 |
+
docs/development/releases.rst
|
| 51 |
+
docs/references/keywords.rst
|
| 52 |
+
docs/userguide/datafiles.rst
|
| 53 |
+
docs/userguide/declarative_config.rst
|
| 54 |
+
docs/userguide/dependency_management.rst
|
| 55 |
+
docs/userguide/development_mode.rst
|
| 56 |
+
docs/userguide/distribution.rst
|
| 57 |
+
docs/userguide/entry_point.rst
|
| 58 |
+
docs/userguide/ext_modules.rst
|
| 59 |
+
docs/userguide/extension.rst
|
| 60 |
+
docs/userguide/index.rst
|
| 61 |
+
docs/userguide/miscellaneous.rst
|
| 62 |
+
docs/userguide/package_discovery.rst
|
| 63 |
+
docs/userguide/pyproject_config.rst
|
| 64 |
+
docs/userguide/quickstart.rst
|
| 65 |
+
newsfragments/.gitignore
|
| 66 |
+
newsfragments/README.rst
|
| 67 |
+
pkg_resources/__init__.py
|
| 68 |
+
pkg_resources/api_tests.txt
|
| 69 |
+
pkg_resources/py.typed
|
| 70 |
+
pkg_resources/tests/__init__.py
|
| 71 |
+
pkg_resources/tests/test_find_distributions.py
|
| 72 |
+
pkg_resources/tests/test_integration_zope_interface.py
|
| 73 |
+
pkg_resources/tests/test_markers.py
|
| 74 |
+
pkg_resources/tests/test_pkg_resources.py
|
| 75 |
+
pkg_resources/tests/test_resources.py
|
| 76 |
+
pkg_resources/tests/test_working_set.py
|
| 77 |
+
pkg_resources/tests/data/my-test-package-source/setup.cfg
|
| 78 |
+
pkg_resources/tests/data/my-test-package-source/setup.py
|
| 79 |
+
pkg_resources/tests/data/my-test-package-zip/my-test-package.zip
|
| 80 |
+
pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/PKG-INFO
|
| 81 |
+
pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/SOURCES.txt
|
| 82 |
+
pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/dependency_links.txt
|
| 83 |
+
pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/top_level.txt
|
| 84 |
+
pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/zip-safe
|
| 85 |
+
pkg_resources/tests/data/my-test-package_zipped-egg/my_test_package-1.0-py3.7.egg
|
| 86 |
+
setuptools/__init__.py
|
| 87 |
+
setuptools/_core_metadata.py
|
| 88 |
+
setuptools/_entry_points.py
|
| 89 |
+
setuptools/_imp.py
|
| 90 |
+
setuptools/_importlib.py
|
| 91 |
+
setuptools/_itertools.py
|
| 92 |
+
setuptools/_normalization.py
|
| 93 |
+
setuptools/_path.py
|
| 94 |
+
setuptools/_reqs.py
|
| 95 |
+
setuptools/_shutil.py
|
| 96 |
+
setuptools/_static.py
|
| 97 |
+
setuptools/archive_util.py
|
| 98 |
+
setuptools/build_meta.py
|
| 99 |
+
setuptools/cli-32.exe
|
| 100 |
+
setuptools/cli-64.exe
|
| 101 |
+
setuptools/cli-arm64.exe
|
| 102 |
+
setuptools/cli.exe
|
| 103 |
+
setuptools/depends.py
|
| 104 |
+
setuptools/discovery.py
|
| 105 |
+
setuptools/dist.py
|
| 106 |
+
setuptools/errors.py
|
| 107 |
+
setuptools/extension.py
|
| 108 |
+
setuptools/glob.py
|
| 109 |
+
setuptools/gui-32.exe
|
| 110 |
+
setuptools/gui-64.exe
|
| 111 |
+
setuptools/gui-arm64.exe
|
| 112 |
+
setuptools/gui.exe
|
| 113 |
+
setuptools/installer.py
|
| 114 |
+
setuptools/launch.py
|
| 115 |
+
setuptools/logging.py
|
| 116 |
+
setuptools/modified.py
|
| 117 |
+
setuptools/monkey.py
|
| 118 |
+
setuptools/msvc.py
|
| 119 |
+
setuptools/namespaces.py
|
| 120 |
+
setuptools/package_index.py
|
| 121 |
+
setuptools/sandbox.py
|
| 122 |
+
setuptools/script (dev).tmpl
|
| 123 |
+
setuptools/script.tmpl
|
| 124 |
+
setuptools/unicode_utils.py
|
| 125 |
+
setuptools/version.py
|
| 126 |
+
setuptools/warnings.py
|
| 127 |
+
setuptools/wheel.py
|
| 128 |
+
setuptools/windows_support.py
|
| 129 |
+
setuptools.egg-info/PKG-INFO
|
| 130 |
+
setuptools.egg-info/SOURCES.txt
|
| 131 |
+
setuptools.egg-info/dependency_links.txt
|
| 132 |
+
setuptools.egg-info/entry_points.txt
|
| 133 |
+
setuptools.egg-info/requires.txt
|
| 134 |
+
setuptools.egg-info/top_level.txt
|
| 135 |
+
setuptools/_distutils/__init__.py
|
| 136 |
+
setuptools/_distutils/_log.py
|
| 137 |
+
setuptools/_distutils/_macos_compat.py
|
| 138 |
+
setuptools/_distutils/_modified.py
|
| 139 |
+
setuptools/_distutils/_msvccompiler.py
|
| 140 |
+
setuptools/_distutils/archive_util.py
|
| 141 |
+
setuptools/_distutils/ccompiler.py
|
| 142 |
+
setuptools/_distutils/cmd.py
|
| 143 |
+
setuptools/_distutils/core.py
|
| 144 |
+
setuptools/_distutils/cygwinccompiler.py
|
| 145 |
+
setuptools/_distutils/debug.py
|
| 146 |
+
setuptools/_distutils/dep_util.py
|
| 147 |
+
setuptools/_distutils/dir_util.py
|
| 148 |
+
setuptools/_distutils/dist.py
|
| 149 |
+
setuptools/_distutils/errors.py
|
| 150 |
+
setuptools/_distutils/extension.py
|
| 151 |
+
setuptools/_distutils/fancy_getopt.py
|
| 152 |
+
setuptools/_distutils/file_util.py
|
| 153 |
+
setuptools/_distutils/filelist.py
|
| 154 |
+
setuptools/_distutils/log.py
|
| 155 |
+
setuptools/_distutils/spawn.py
|
| 156 |
+
setuptools/_distutils/sysconfig.py
|
| 157 |
+
setuptools/_distutils/text_file.py
|
| 158 |
+
setuptools/_distutils/unixccompiler.py
|
| 159 |
+
setuptools/_distutils/util.py
|
| 160 |
+
setuptools/_distutils/version.py
|
| 161 |
+
setuptools/_distutils/versionpredicate.py
|
| 162 |
+
setuptools/_distutils/zosccompiler.py
|
| 163 |
+
setuptools/_distutils/command/__init__.py
|
| 164 |
+
setuptools/_distutils/command/_framework_compat.py
|
| 165 |
+
setuptools/_distutils/command/bdist.py
|
| 166 |
+
setuptools/_distutils/command/bdist_dumb.py
|
| 167 |
+
setuptools/_distutils/command/bdist_rpm.py
|
| 168 |
+
setuptools/_distutils/command/build.py
|
| 169 |
+
setuptools/_distutils/command/build_clib.py
|
| 170 |
+
setuptools/_distutils/command/build_ext.py
|
| 171 |
+
setuptools/_distutils/command/build_py.py
|
| 172 |
+
setuptools/_distutils/command/build_scripts.py
|
| 173 |
+
setuptools/_distutils/command/check.py
|
| 174 |
+
setuptools/_distutils/command/clean.py
|
| 175 |
+
setuptools/_distutils/command/config.py
|
| 176 |
+
setuptools/_distutils/command/install.py
|
| 177 |
+
setuptools/_distutils/command/install_data.py
|
| 178 |
+
setuptools/_distutils/command/install_egg_info.py
|
| 179 |
+
setuptools/_distutils/command/install_headers.py
|
| 180 |
+
setuptools/_distutils/command/install_lib.py
|
| 181 |
+
setuptools/_distutils/command/install_scripts.py
|
| 182 |
+
setuptools/_distutils/command/sdist.py
|
| 183 |
+
setuptools/_distutils/compat/__init__.py
|
| 184 |
+
setuptools/_distutils/compat/py39.py
|
| 185 |
+
setuptools/_distutils/tests/__init__.py
|
| 186 |
+
setuptools/_distutils/tests/support.py
|
| 187 |
+
setuptools/_distutils/tests/test_archive_util.py
|
| 188 |
+
setuptools/_distutils/tests/test_bdist.py
|
| 189 |
+
setuptools/_distutils/tests/test_bdist_dumb.py
|
| 190 |
+
setuptools/_distutils/tests/test_bdist_rpm.py
|
| 191 |
+
setuptools/_distutils/tests/test_build.py
|
| 192 |
+
setuptools/_distutils/tests/test_build_clib.py
|
| 193 |
+
setuptools/_distutils/tests/test_build_ext.py
|
| 194 |
+
setuptools/_distutils/tests/test_build_py.py
|
| 195 |
+
setuptools/_distutils/tests/test_build_scripts.py
|
| 196 |
+
setuptools/_distutils/tests/test_ccompiler.py
|
| 197 |
+
setuptools/_distutils/tests/test_check.py
|
| 198 |
+
setuptools/_distutils/tests/test_clean.py
|
| 199 |
+
setuptools/_distutils/tests/test_cmd.py
|
| 200 |
+
setuptools/_distutils/tests/test_config_cmd.py
|
| 201 |
+
setuptools/_distutils/tests/test_core.py
|
| 202 |
+
setuptools/_distutils/tests/test_cygwinccompiler.py
|
| 203 |
+
setuptools/_distutils/tests/test_dir_util.py
|
| 204 |
+
setuptools/_distutils/tests/test_dist.py
|
| 205 |
+
setuptools/_distutils/tests/test_extension.py
|
| 206 |
+
setuptools/_distutils/tests/test_file_util.py
|
| 207 |
+
setuptools/_distutils/tests/test_filelist.py
|
| 208 |
+
setuptools/_distutils/tests/test_install.py
|
| 209 |
+
setuptools/_distutils/tests/test_install_data.py
|
| 210 |
+
setuptools/_distutils/tests/test_install_headers.py
|
| 211 |
+
setuptools/_distutils/tests/test_install_lib.py
|
| 212 |
+
setuptools/_distutils/tests/test_install_scripts.py
|
| 213 |
+
setuptools/_distutils/tests/test_log.py
|
| 214 |
+
setuptools/_distutils/tests/test_mingwccompiler.py
|
| 215 |
+
setuptools/_distutils/tests/test_modified.py
|
| 216 |
+
setuptools/_distutils/tests/test_msvccompiler.py
|
| 217 |
+
setuptools/_distutils/tests/test_sdist.py
|
| 218 |
+
setuptools/_distutils/tests/test_spawn.py
|
| 219 |
+
setuptools/_distutils/tests/test_sysconfig.py
|
| 220 |
+
setuptools/_distutils/tests/test_text_file.py
|
| 221 |
+
setuptools/_distutils/tests/test_unixccompiler.py
|
| 222 |
+
setuptools/_distutils/tests/test_util.py
|
| 223 |
+
setuptools/_distutils/tests/test_version.py
|
| 224 |
+
setuptools/_distutils/tests/test_versionpredicate.py
|
| 225 |
+
setuptools/_distutils/tests/unix_compat.py
|
| 226 |
+
setuptools/_distutils/tests/compat/__init__.py
|
| 227 |
+
setuptools/_distutils/tests/compat/py39.py
|
| 228 |
+
setuptools/_vendor/ruff.toml
|
| 229 |
+
setuptools/_vendor/typing_extensions.py
|
| 230 |
+
setuptools/_vendor/autocommand/__init__.py
|
| 231 |
+
setuptools/_vendor/autocommand/autoasync.py
|
| 232 |
+
setuptools/_vendor/autocommand/autocommand.py
|
| 233 |
+
setuptools/_vendor/autocommand/automain.py
|
| 234 |
+
setuptools/_vendor/autocommand/autoparse.py
|
| 235 |
+
setuptools/_vendor/autocommand/errors.py
|
| 236 |
+
setuptools/_vendor/autocommand-2.2.2.dist-info/INSTALLER
|
| 237 |
+
setuptools/_vendor/autocommand-2.2.2.dist-info/LICENSE
|
| 238 |
+
setuptools/_vendor/autocommand-2.2.2.dist-info/METADATA
|
| 239 |
+
setuptools/_vendor/autocommand-2.2.2.dist-info/RECORD
|
| 240 |
+
setuptools/_vendor/autocommand-2.2.2.dist-info/WHEEL
|
| 241 |
+
setuptools/_vendor/autocommand-2.2.2.dist-info/top_level.txt
|
| 242 |
+
setuptools/_vendor/backports/__init__.py
|
| 243 |
+
setuptools/_vendor/backports.tarfile-1.2.0.dist-info/INSTALLER
|
| 244 |
+
setuptools/_vendor/backports.tarfile-1.2.0.dist-info/LICENSE
|
| 245 |
+
setuptools/_vendor/backports.tarfile-1.2.0.dist-info/METADATA
|
| 246 |
+
setuptools/_vendor/backports.tarfile-1.2.0.dist-info/RECORD
|
| 247 |
+
setuptools/_vendor/backports.tarfile-1.2.0.dist-info/REQUESTED
|
| 248 |
+
setuptools/_vendor/backports.tarfile-1.2.0.dist-info/WHEEL
|
| 249 |
+
setuptools/_vendor/backports.tarfile-1.2.0.dist-info/top_level.txt
|
| 250 |
+
setuptools/_vendor/backports/tarfile/__init__.py
|
| 251 |
+
setuptools/_vendor/backports/tarfile/__main__.py
|
| 252 |
+
setuptools/_vendor/backports/tarfile/compat/__init__.py
|
| 253 |
+
setuptools/_vendor/backports/tarfile/compat/py38.py
|
| 254 |
+
setuptools/_vendor/importlib_metadata/__init__.py
|
| 255 |
+
setuptools/_vendor/importlib_metadata/_adapters.py
|
| 256 |
+
setuptools/_vendor/importlib_metadata/_collections.py
|
| 257 |
+
setuptools/_vendor/importlib_metadata/_compat.py
|
| 258 |
+
setuptools/_vendor/importlib_metadata/_functools.py
|
| 259 |
+
setuptools/_vendor/importlib_metadata/_itertools.py
|
| 260 |
+
setuptools/_vendor/importlib_metadata/_meta.py
|
| 261 |
+
setuptools/_vendor/importlib_metadata/_text.py
|
| 262 |
+
setuptools/_vendor/importlib_metadata/diagnose.py
|
| 263 |
+
setuptools/_vendor/importlib_metadata/py.typed
|
| 264 |
+
setuptools/_vendor/importlib_metadata-8.0.0.dist-info/INSTALLER
|
| 265 |
+
setuptools/_vendor/importlib_metadata-8.0.0.dist-info/LICENSE
|
| 266 |
+
setuptools/_vendor/importlib_metadata-8.0.0.dist-info/METADATA
|
| 267 |
+
setuptools/_vendor/importlib_metadata-8.0.0.dist-info/RECORD
|
| 268 |
+
setuptools/_vendor/importlib_metadata-8.0.0.dist-info/REQUESTED
|
| 269 |
+
setuptools/_vendor/importlib_metadata-8.0.0.dist-info/WHEEL
|
| 270 |
+
setuptools/_vendor/importlib_metadata-8.0.0.dist-info/top_level.txt
|
| 271 |
+
setuptools/_vendor/importlib_metadata/compat/__init__.py
|
| 272 |
+
setuptools/_vendor/importlib_metadata/compat/py311.py
|
| 273 |
+
setuptools/_vendor/importlib_metadata/compat/py39.py
|
| 274 |
+
setuptools/_vendor/inflect/__init__.py
|
| 275 |
+
setuptools/_vendor/inflect/py.typed
|
| 276 |
+
setuptools/_vendor/inflect-7.3.1.dist-info/INSTALLER
|
| 277 |
+
setuptools/_vendor/inflect-7.3.1.dist-info/LICENSE
|
| 278 |
+
setuptools/_vendor/inflect-7.3.1.dist-info/METADATA
|
| 279 |
+
setuptools/_vendor/inflect-7.3.1.dist-info/RECORD
|
| 280 |
+
setuptools/_vendor/inflect-7.3.1.dist-info/WHEEL
|
| 281 |
+
setuptools/_vendor/inflect-7.3.1.dist-info/top_level.txt
|
| 282 |
+
setuptools/_vendor/inflect/compat/__init__.py
|
| 283 |
+
setuptools/_vendor/inflect/compat/py38.py
|
| 284 |
+
setuptools/_vendor/jaraco/context.py
|
| 285 |
+
setuptools/_vendor/jaraco.collections-5.1.0.dist-info/INSTALLER
|
| 286 |
+
setuptools/_vendor/jaraco.collections-5.1.0.dist-info/LICENSE
|
| 287 |
+
setuptools/_vendor/jaraco.collections-5.1.0.dist-info/METADATA
|
| 288 |
+
setuptools/_vendor/jaraco.collections-5.1.0.dist-info/RECORD
|
| 289 |
+
setuptools/_vendor/jaraco.collections-5.1.0.dist-info/REQUESTED
|
| 290 |
+
setuptools/_vendor/jaraco.collections-5.1.0.dist-info/WHEEL
|
| 291 |
+
setuptools/_vendor/jaraco.collections-5.1.0.dist-info/top_level.txt
|
| 292 |
+
setuptools/_vendor/jaraco.context-5.3.0.dist-info/INSTALLER
|
| 293 |
+
setuptools/_vendor/jaraco.context-5.3.0.dist-info/LICENSE
|
| 294 |
+
setuptools/_vendor/jaraco.context-5.3.0.dist-info/METADATA
|
| 295 |
+
setuptools/_vendor/jaraco.context-5.3.0.dist-info/RECORD
|
| 296 |
+
setuptools/_vendor/jaraco.context-5.3.0.dist-info/WHEEL
|
| 297 |
+
setuptools/_vendor/jaraco.context-5.3.0.dist-info/top_level.txt
|
| 298 |
+
setuptools/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER
|
| 299 |
+
setuptools/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
|
| 300 |
+
setuptools/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
|
| 301 |
+
setuptools/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
|
| 302 |
+
setuptools/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
|
| 303 |
+
setuptools/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt
|
| 304 |
+
setuptools/_vendor/jaraco.text-3.12.1.dist-info/INSTALLER
|
| 305 |
+
setuptools/_vendor/jaraco.text-3.12.1.dist-info/LICENSE
|
| 306 |
+
setuptools/_vendor/jaraco.text-3.12.1.dist-info/METADATA
|
| 307 |
+
setuptools/_vendor/jaraco.text-3.12.1.dist-info/RECORD
|
| 308 |
+
setuptools/_vendor/jaraco.text-3.12.1.dist-info/REQUESTED
|
| 309 |
+
setuptools/_vendor/jaraco.text-3.12.1.dist-info/WHEEL
|
| 310 |
+
setuptools/_vendor/jaraco.text-3.12.1.dist-info/top_level.txt
|
| 311 |
+
setuptools/_vendor/jaraco/collections/__init__.py
|
| 312 |
+
setuptools/_vendor/jaraco/collections/py.typed
|
| 313 |
+
setuptools/_vendor/jaraco/functools/__init__.py
|
| 314 |
+
setuptools/_vendor/jaraco/functools/__init__.pyi
|
| 315 |
+
setuptools/_vendor/jaraco/functools/py.typed
|
| 316 |
+
setuptools/_vendor/jaraco/text/Lorem ipsum.txt
|
| 317 |
+
setuptools/_vendor/jaraco/text/__init__.py
|
| 318 |
+
setuptools/_vendor/jaraco/text/layouts.py
|
| 319 |
+
setuptools/_vendor/jaraco/text/show-newlines.py
|
| 320 |
+
setuptools/_vendor/jaraco/text/strip-prefix.py
|
| 321 |
+
setuptools/_vendor/jaraco/text/to-dvorak.py
|
| 322 |
+
setuptools/_vendor/jaraco/text/to-qwerty.py
|
| 323 |
+
setuptools/_vendor/more_itertools/__init__.py
|
| 324 |
+
setuptools/_vendor/more_itertools/__init__.pyi
|
| 325 |
+
setuptools/_vendor/more_itertools/more.py
|
| 326 |
+
setuptools/_vendor/more_itertools/more.pyi
|
| 327 |
+
setuptools/_vendor/more_itertools/py.typed
|
| 328 |
+
setuptools/_vendor/more_itertools/recipes.py
|
| 329 |
+
setuptools/_vendor/more_itertools/recipes.pyi
|
| 330 |
+
setuptools/_vendor/more_itertools-10.3.0.dist-info/INSTALLER
|
| 331 |
+
setuptools/_vendor/more_itertools-10.3.0.dist-info/LICENSE
|
| 332 |
+
setuptools/_vendor/more_itertools-10.3.0.dist-info/METADATA
|
| 333 |
+
setuptools/_vendor/more_itertools-10.3.0.dist-info/RECORD
|
| 334 |
+
setuptools/_vendor/more_itertools-10.3.0.dist-info/REQUESTED
|
| 335 |
+
setuptools/_vendor/more_itertools-10.3.0.dist-info/WHEEL
|
| 336 |
+
setuptools/_vendor/packaging/__init__.py
|
| 337 |
+
setuptools/_vendor/packaging/_elffile.py
|
| 338 |
+
setuptools/_vendor/packaging/_manylinux.py
|
| 339 |
+
setuptools/_vendor/packaging/_musllinux.py
|
| 340 |
+
setuptools/_vendor/packaging/_parser.py
|
| 341 |
+
setuptools/_vendor/packaging/_structures.py
|
| 342 |
+
setuptools/_vendor/packaging/_tokenizer.py
|
| 343 |
+
setuptools/_vendor/packaging/markers.py
|
| 344 |
+
setuptools/_vendor/packaging/metadata.py
|
| 345 |
+
setuptools/_vendor/packaging/py.typed
|
| 346 |
+
setuptools/_vendor/packaging/requirements.py
|
| 347 |
+
setuptools/_vendor/packaging/specifiers.py
|
| 348 |
+
setuptools/_vendor/packaging/tags.py
|
| 349 |
+
setuptools/_vendor/packaging/utils.py
|
| 350 |
+
setuptools/_vendor/packaging/version.py
|
| 351 |
+
setuptools/_vendor/packaging-24.2.dist-info/INSTALLER
|
| 352 |
+
setuptools/_vendor/packaging-24.2.dist-info/LICENSE
|
| 353 |
+
setuptools/_vendor/packaging-24.2.dist-info/LICENSE.APACHE
|
| 354 |
+
setuptools/_vendor/packaging-24.2.dist-info/LICENSE.BSD
|
| 355 |
+
setuptools/_vendor/packaging-24.2.dist-info/METADATA
|
| 356 |
+
setuptools/_vendor/packaging-24.2.dist-info/RECORD
|
| 357 |
+
setuptools/_vendor/packaging-24.2.dist-info/REQUESTED
|
| 358 |
+
setuptools/_vendor/packaging-24.2.dist-info/WHEEL
|
| 359 |
+
setuptools/_vendor/packaging/licenses/__init__.py
|
| 360 |
+
setuptools/_vendor/packaging/licenses/_spdx.py
|
| 361 |
+
setuptools/_vendor/platformdirs/__init__.py
|
| 362 |
+
setuptools/_vendor/platformdirs/__main__.py
|
| 363 |
+
setuptools/_vendor/platformdirs/android.py
|
| 364 |
+
setuptools/_vendor/platformdirs/api.py
|
| 365 |
+
setuptools/_vendor/platformdirs/macos.py
|
| 366 |
+
setuptools/_vendor/platformdirs/py.typed
|
| 367 |
+
setuptools/_vendor/platformdirs/unix.py
|
| 368 |
+
setuptools/_vendor/platformdirs/version.py
|
| 369 |
+
setuptools/_vendor/platformdirs/windows.py
|
| 370 |
+
setuptools/_vendor/platformdirs-4.2.2.dist-info/INSTALLER
|
| 371 |
+
setuptools/_vendor/platformdirs-4.2.2.dist-info/METADATA
|
| 372 |
+
setuptools/_vendor/platformdirs-4.2.2.dist-info/RECORD
|
| 373 |
+
setuptools/_vendor/platformdirs-4.2.2.dist-info/REQUESTED
|
| 374 |
+
setuptools/_vendor/platformdirs-4.2.2.dist-info/WHEEL
|
| 375 |
+
setuptools/_vendor/platformdirs-4.2.2.dist-info/licenses/LICENSE
|
| 376 |
+
setuptools/_vendor/tomli/__init__.py
|
| 377 |
+
setuptools/_vendor/tomli/_parser.py
|
| 378 |
+
setuptools/_vendor/tomli/_re.py
|
| 379 |
+
setuptools/_vendor/tomli/_types.py
|
| 380 |
+
setuptools/_vendor/tomli/py.typed
|
| 381 |
+
setuptools/_vendor/tomli-2.0.1.dist-info/INSTALLER
|
| 382 |
+
setuptools/_vendor/tomli-2.0.1.dist-info/LICENSE
|
| 383 |
+
setuptools/_vendor/tomli-2.0.1.dist-info/METADATA
|
| 384 |
+
setuptools/_vendor/tomli-2.0.1.dist-info/RECORD
|
| 385 |
+
setuptools/_vendor/tomli-2.0.1.dist-info/REQUESTED
|
| 386 |
+
setuptools/_vendor/tomli-2.0.1.dist-info/WHEEL
|
| 387 |
+
setuptools/_vendor/typeguard/__init__.py
|
| 388 |
+
setuptools/_vendor/typeguard/_checkers.py
|
| 389 |
+
setuptools/_vendor/typeguard/_config.py
|
| 390 |
+
setuptools/_vendor/typeguard/_decorators.py
|
| 391 |
+
setuptools/_vendor/typeguard/_exceptions.py
|
| 392 |
+
setuptools/_vendor/typeguard/_functions.py
|
| 393 |
+
setuptools/_vendor/typeguard/_importhook.py
|
| 394 |
+
setuptools/_vendor/typeguard/_memo.py
|
| 395 |
+
setuptools/_vendor/typeguard/_pytest_plugin.py
|
| 396 |
+
setuptools/_vendor/typeguard/_suppression.py
|
| 397 |
+
setuptools/_vendor/typeguard/_transformer.py
|
| 398 |
+
setuptools/_vendor/typeguard/_union_transformer.py
|
| 399 |
+
setuptools/_vendor/typeguard/_utils.py
|
| 400 |
+
setuptools/_vendor/typeguard/py.typed
|
| 401 |
+
setuptools/_vendor/typeguard-4.3.0.dist-info/INSTALLER
|
| 402 |
+
setuptools/_vendor/typeguard-4.3.0.dist-info/LICENSE
|
| 403 |
+
setuptools/_vendor/typeguard-4.3.0.dist-info/METADATA
|
| 404 |
+
setuptools/_vendor/typeguard-4.3.0.dist-info/RECORD
|
| 405 |
+
setuptools/_vendor/typeguard-4.3.0.dist-info/WHEEL
|
| 406 |
+
setuptools/_vendor/typeguard-4.3.0.dist-info/entry_points.txt
|
| 407 |
+
setuptools/_vendor/typeguard-4.3.0.dist-info/top_level.txt
|
| 408 |
+
setuptools/_vendor/typing_extensions-4.12.2.dist-info/INSTALLER
|
| 409 |
+
setuptools/_vendor/typing_extensions-4.12.2.dist-info/LICENSE
|
| 410 |
+
setuptools/_vendor/typing_extensions-4.12.2.dist-info/METADATA
|
| 411 |
+
setuptools/_vendor/typing_extensions-4.12.2.dist-info/RECORD
|
| 412 |
+
setuptools/_vendor/typing_extensions-4.12.2.dist-info/WHEEL
|
| 413 |
+
setuptools/_vendor/wheel/__init__.py
|
| 414 |
+
setuptools/_vendor/wheel/__main__.py
|
| 415 |
+
setuptools/_vendor/wheel/_setuptools_logging.py
|
| 416 |
+
setuptools/_vendor/wheel/bdist_wheel.py
|
| 417 |
+
setuptools/_vendor/wheel/macosx_libfile.py
|
| 418 |
+
setuptools/_vendor/wheel/metadata.py
|
| 419 |
+
setuptools/_vendor/wheel/util.py
|
| 420 |
+
setuptools/_vendor/wheel/wheelfile.py
|
| 421 |
+
setuptools/_vendor/wheel-0.43.0.dist-info/INSTALLER
|
| 422 |
+
setuptools/_vendor/wheel-0.43.0.dist-info/LICENSE.txt
|
| 423 |
+
setuptools/_vendor/wheel-0.43.0.dist-info/METADATA
|
| 424 |
+
setuptools/_vendor/wheel-0.43.0.dist-info/RECORD
|
| 425 |
+
setuptools/_vendor/wheel-0.43.0.dist-info/REQUESTED
|
| 426 |
+
setuptools/_vendor/wheel-0.43.0.dist-info/WHEEL
|
| 427 |
+
setuptools/_vendor/wheel-0.43.0.dist-info/entry_points.txt
|
| 428 |
+
setuptools/_vendor/wheel/cli/__init__.py
|
| 429 |
+
setuptools/_vendor/wheel/cli/convert.py
|
| 430 |
+
setuptools/_vendor/wheel/cli/pack.py
|
| 431 |
+
setuptools/_vendor/wheel/cli/tags.py
|
| 432 |
+
setuptools/_vendor/wheel/cli/unpack.py
|
| 433 |
+
setuptools/_vendor/wheel/vendored/__init__.py
|
| 434 |
+
setuptools/_vendor/wheel/vendored/vendor.txt
|
| 435 |
+
setuptools/_vendor/wheel/vendored/packaging/__init__.py
|
| 436 |
+
setuptools/_vendor/wheel/vendored/packaging/_elffile.py
|
| 437 |
+
setuptools/_vendor/wheel/vendored/packaging/_manylinux.py
|
| 438 |
+
setuptools/_vendor/wheel/vendored/packaging/_musllinux.py
|
| 439 |
+
setuptools/_vendor/wheel/vendored/packaging/_parser.py
|
| 440 |
+
setuptools/_vendor/wheel/vendored/packaging/_structures.py
|
| 441 |
+
setuptools/_vendor/wheel/vendored/packaging/_tokenizer.py
|
| 442 |
+
setuptools/_vendor/wheel/vendored/packaging/markers.py
|
| 443 |
+
setuptools/_vendor/wheel/vendored/packaging/requirements.py
|
| 444 |
+
setuptools/_vendor/wheel/vendored/packaging/specifiers.py
|
| 445 |
+
setuptools/_vendor/wheel/vendored/packaging/tags.py
|
| 446 |
+
setuptools/_vendor/wheel/vendored/packaging/utils.py
|
| 447 |
+
setuptools/_vendor/wheel/vendored/packaging/version.py
|
| 448 |
+
setuptools/_vendor/zipp/__init__.py
|
| 449 |
+
setuptools/_vendor/zipp/glob.py
|
| 450 |
+
setuptools/_vendor/zipp-3.19.2.dist-info/INSTALLER
|
| 451 |
+
setuptools/_vendor/zipp-3.19.2.dist-info/LICENSE
|
| 452 |
+
setuptools/_vendor/zipp-3.19.2.dist-info/METADATA
|
| 453 |
+
setuptools/_vendor/zipp-3.19.2.dist-info/RECORD
|
| 454 |
+
setuptools/_vendor/zipp-3.19.2.dist-info/REQUESTED
|
| 455 |
+
setuptools/_vendor/zipp-3.19.2.dist-info/WHEEL
|
| 456 |
+
setuptools/_vendor/zipp-3.19.2.dist-info/top_level.txt
|
| 457 |
+
setuptools/_vendor/zipp/compat/__init__.py
|
| 458 |
+
setuptools/_vendor/zipp/compat/py310.py
|
| 459 |
+
setuptools/command/__init__.py
|
| 460 |
+
setuptools/command/_requirestxt.py
|
| 461 |
+
setuptools/command/alias.py
|
| 462 |
+
setuptools/command/bdist_egg.py
|
| 463 |
+
setuptools/command/bdist_rpm.py
|
| 464 |
+
setuptools/command/bdist_wheel.py
|
| 465 |
+
setuptools/command/build.py
|
| 466 |
+
setuptools/command/build_clib.py
|
| 467 |
+
setuptools/command/build_ext.py
|
| 468 |
+
setuptools/command/build_py.py
|
| 469 |
+
setuptools/command/develop.py
|
| 470 |
+
setuptools/command/dist_info.py
|
| 471 |
+
setuptools/command/easy_install.py
|
| 472 |
+
setuptools/command/editable_wheel.py
|
| 473 |
+
setuptools/command/egg_info.py
|
| 474 |
+
setuptools/command/install.py
|
| 475 |
+
setuptools/command/install_egg_info.py
|
| 476 |
+
setuptools/command/install_lib.py
|
| 477 |
+
setuptools/command/install_scripts.py
|
| 478 |
+
setuptools/command/launcher manifest.xml
|
| 479 |
+
setuptools/command/rotate.py
|
| 480 |
+
setuptools/command/saveopts.py
|
| 481 |
+
setuptools/command/sdist.py
|
| 482 |
+
setuptools/command/setopt.py
|
| 483 |
+
setuptools/command/test.py
|
| 484 |
+
setuptools/compat/__init__.py
|
| 485 |
+
setuptools/compat/py310.py
|
| 486 |
+
setuptools/compat/py311.py
|
| 487 |
+
setuptools/compat/py312.py
|
| 488 |
+
setuptools/compat/py39.py
|
| 489 |
+
setuptools/config/NOTICE
|
| 490 |
+
setuptools/config/__init__.py
|
| 491 |
+
setuptools/config/_apply_pyprojecttoml.py
|
| 492 |
+
setuptools/config/distutils.schema.json
|
| 493 |
+
setuptools/config/expand.py
|
| 494 |
+
setuptools/config/pyprojecttoml.py
|
| 495 |
+
setuptools/config/setupcfg.py
|
| 496 |
+
setuptools/config/setuptools.schema.json
|
| 497 |
+
setuptools/config/_validate_pyproject/NOTICE
|
| 498 |
+
setuptools/config/_validate_pyproject/__init__.py
|
| 499 |
+
setuptools/config/_validate_pyproject/error_reporting.py
|
| 500 |
+
setuptools/config/_validate_pyproject/extra_validations.py
|
| 501 |
+
setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py
|
| 502 |
+
setuptools/config/_validate_pyproject/fastjsonschema_validations.py
|
| 503 |
+
setuptools/config/_validate_pyproject/formats.py
|
| 504 |
+
setuptools/tests/__init__.py
|
| 505 |
+
setuptools/tests/contexts.py
|
| 506 |
+
setuptools/tests/environment.py
|
| 507 |
+
setuptools/tests/fixtures.py
|
| 508 |
+
setuptools/tests/mod_with_constant.py
|
| 509 |
+
setuptools/tests/namespaces.py
|
| 510 |
+
setuptools/tests/script-with-bom.py
|
| 511 |
+
setuptools/tests/server.py
|
| 512 |
+
setuptools/tests/test_archive_util.py
|
| 513 |
+
setuptools/tests/test_bdist_deprecations.py
|
| 514 |
+
setuptools/tests/test_bdist_egg.py
|
| 515 |
+
setuptools/tests/test_bdist_wheel.py
|
| 516 |
+
setuptools/tests/test_build.py
|
| 517 |
+
setuptools/tests/test_build_clib.py
|
| 518 |
+
setuptools/tests/test_build_ext.py
|
| 519 |
+
setuptools/tests/test_build_meta.py
|
| 520 |
+
setuptools/tests/test_build_py.py
|
| 521 |
+
setuptools/tests/test_config_discovery.py
|
| 522 |
+
setuptools/tests/test_core_metadata.py
|
| 523 |
+
setuptools/tests/test_depends.py
|
| 524 |
+
setuptools/tests/test_develop.py
|
| 525 |
+
setuptools/tests/test_dist.py
|
| 526 |
+
setuptools/tests/test_dist_info.py
|
| 527 |
+
setuptools/tests/test_distutils_adoption.py
|
| 528 |
+
setuptools/tests/test_easy_install.py
|
| 529 |
+
setuptools/tests/test_editable_install.py
|
| 530 |
+
setuptools/tests/test_egg_info.py
|
| 531 |
+
setuptools/tests/test_extern.py
|
| 532 |
+
setuptools/tests/test_find_packages.py
|
| 533 |
+
setuptools/tests/test_find_py_modules.py
|
| 534 |
+
setuptools/tests/test_glob.py
|
| 535 |
+
setuptools/tests/test_install_scripts.py
|
| 536 |
+
setuptools/tests/test_logging.py
|
| 537 |
+
setuptools/tests/test_manifest.py
|
| 538 |
+
setuptools/tests/test_namespaces.py
|
| 539 |
+
setuptools/tests/test_packageindex.py
|
| 540 |
+
setuptools/tests/test_sandbox.py
|
| 541 |
+
setuptools/tests/test_sdist.py
|
| 542 |
+
setuptools/tests/test_setopt.py
|
| 543 |
+
setuptools/tests/test_setuptools.py
|
| 544 |
+
setuptools/tests/test_shutil_wrapper.py
|
| 545 |
+
setuptools/tests/test_unicode_utils.py
|
| 546 |
+
setuptools/tests/test_virtualenv.py
|
| 547 |
+
setuptools/tests/test_warnings.py
|
| 548 |
+
setuptools/tests/test_wheel.py
|
| 549 |
+
setuptools/tests/test_windows_wrappers.py
|
| 550 |
+
setuptools/tests/text.py
|
| 551 |
+
setuptools/tests/textwrap.py
|
| 552 |
+
setuptools/tests/compat/__init__.py
|
| 553 |
+
setuptools/tests/compat/py39.py
|
| 554 |
+
setuptools/tests/config/__init__.py
|
| 555 |
+
setuptools/tests/config/setupcfg_examples.txt
|
| 556 |
+
setuptools/tests/config/test_apply_pyprojecttoml.py
|
| 557 |
+
setuptools/tests/config/test_expand.py
|
| 558 |
+
setuptools/tests/config/test_pyprojecttoml.py
|
| 559 |
+
setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py
|
| 560 |
+
setuptools/tests/config/test_setupcfg.py
|
| 561 |
+
setuptools/tests/config/downloads/__init__.py
|
| 562 |
+
setuptools/tests/config/downloads/preload.py
|
| 563 |
+
setuptools/tests/indexes/test_links_priority/external.html
|
| 564 |
+
setuptools/tests/indexes/test_links_priority/simple/foobar/index.html
|
| 565 |
+
setuptools/tests/integration/__init__.py
|
| 566 |
+
setuptools/tests/integration/helpers.py
|
| 567 |
+
setuptools/tests/integration/test_pip_install_sdist.py
|
| 568 |
+
tools/build_launchers.py
|
| 569 |
+
tools/finalize.py
|
| 570 |
+
tools/generate_validation_code.py
|
| 571 |
+
tools/vendored.py
|
llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/dependency_links.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
|
llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/entry_points.txt
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[distutils.commands]
|
| 2 |
+
alias = setuptools.command.alias:alias
|
| 3 |
+
bdist_egg = setuptools.command.bdist_egg:bdist_egg
|
| 4 |
+
bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
|
| 5 |
+
bdist_wheel = setuptools.command.bdist_wheel:bdist_wheel
|
| 6 |
+
build = setuptools.command.build:build
|
| 7 |
+
build_clib = setuptools.command.build_clib:build_clib
|
| 8 |
+
build_ext = setuptools.command.build_ext:build_ext
|
| 9 |
+
build_py = setuptools.command.build_py:build_py
|
| 10 |
+
develop = setuptools.command.develop:develop
|
| 11 |
+
dist_info = setuptools.command.dist_info:dist_info
|
| 12 |
+
easy_install = setuptools.command.easy_install:easy_install
|
| 13 |
+
editable_wheel = setuptools.command.editable_wheel:editable_wheel
|
| 14 |
+
egg_info = setuptools.command.egg_info:egg_info
|
| 15 |
+
install = setuptools.command.install:install
|
| 16 |
+
install_egg_info = setuptools.command.install_egg_info:install_egg_info
|
| 17 |
+
install_lib = setuptools.command.install_lib:install_lib
|
| 18 |
+
install_scripts = setuptools.command.install_scripts:install_scripts
|
| 19 |
+
rotate = setuptools.command.rotate:rotate
|
| 20 |
+
saveopts = setuptools.command.saveopts:saveopts
|
| 21 |
+
sdist = setuptools.command.sdist:sdist
|
| 22 |
+
setopt = setuptools.command.setopt:setopt
|
| 23 |
+
|
| 24 |
+
[distutils.setup_keywords]
|
| 25 |
+
dependency_links = setuptools.dist:assert_string_list
|
| 26 |
+
eager_resources = setuptools.dist:assert_string_list
|
| 27 |
+
entry_points = setuptools.dist:check_entry_points
|
| 28 |
+
exclude_package_data = setuptools.dist:check_package_data
|
| 29 |
+
extras_require = setuptools.dist:check_extras
|
| 30 |
+
include_package_data = setuptools.dist:assert_bool
|
| 31 |
+
install_requires = setuptools.dist:check_requirements
|
| 32 |
+
namespace_packages = setuptools.dist:check_nsp
|
| 33 |
+
package_data = setuptools.dist:check_package_data
|
| 34 |
+
packages = setuptools.dist:check_packages
|
| 35 |
+
python_requires = setuptools.dist:check_specifier
|
| 36 |
+
setup_requires = setuptools.dist:check_requirements
|
| 37 |
+
use_2to3 = setuptools.dist:invalid_unless_false
|
| 38 |
+
zip_safe = setuptools.dist:assert_bool
|
| 39 |
+
|
| 40 |
+
[egg_info.writers]
|
| 41 |
+
PKG-INFO = setuptools.command.egg_info:write_pkg_info
|
| 42 |
+
dependency_links.txt = setuptools.command.egg_info:overwrite_arg
|
| 43 |
+
eager_resources.txt = setuptools.command.egg_info:overwrite_arg
|
| 44 |
+
entry_points.txt = setuptools.command.egg_info:write_entries
|
| 45 |
+
namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
|
| 46 |
+
requires.txt = setuptools.command.egg_info:write_requirements
|
| 47 |
+
top_level.txt = setuptools.command.egg_info:write_toplevel_names
|
| 48 |
+
|
| 49 |
+
[setuptools.finalize_distribution_options]
|
| 50 |
+
keywords = setuptools.dist:Distribution._finalize_setup_keywords
|
| 51 |
+
parent_finalize = setuptools.dist:_Distribution.finalize_options
|
llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/requires.txt
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
[certs]
|
| 3 |
+
|
| 4 |
+
[check]
|
| 5 |
+
pytest-checkdocs>=2.4
|
| 6 |
+
|
| 7 |
+
[check:sys_platform != "cygwin"]
|
| 8 |
+
pytest-ruff>=0.2.1
|
| 9 |
+
ruff>=0.8.0
|
| 10 |
+
|
| 11 |
+
[core]
|
| 12 |
+
packaging>=24.2
|
| 13 |
+
more_itertools>=8.8
|
| 14 |
+
jaraco.text>=3.7
|
| 15 |
+
wheel>=0.43.0
|
| 16 |
+
platformdirs>=4.2.2
|
| 17 |
+
jaraco.collections
|
| 18 |
+
jaraco.functools>=4
|
| 19 |
+
packaging
|
| 20 |
+
more_itertools
|
| 21 |
+
|
| 22 |
+
[core:python_version < "3.10"]
|
| 23 |
+
importlib_metadata>=6
|
| 24 |
+
|
| 25 |
+
[core:python_version < "3.11"]
|
| 26 |
+
tomli>=2.0.1
|
| 27 |
+
|
| 28 |
+
[cover]
|
| 29 |
+
pytest-cov
|
| 30 |
+
|
| 31 |
+
[doc]
|
| 32 |
+
sphinx>=3.5
|
| 33 |
+
jaraco.packaging>=9.3
|
| 34 |
+
rst.linker>=1.9
|
| 35 |
+
furo
|
| 36 |
+
sphinx-lint
|
| 37 |
+
jaraco.tidelift>=1.4
|
| 38 |
+
pygments-github-lexers==0.0.5
|
| 39 |
+
sphinx-favicon
|
| 40 |
+
sphinx-inline-tabs
|
| 41 |
+
sphinx-reredirects
|
| 42 |
+
sphinxcontrib-towncrier
|
| 43 |
+
sphinx-notfound-page<2,>=1
|
| 44 |
+
pyproject-hooks!=1.1
|
| 45 |
+
towncrier<24.7
|
| 46 |
+
|
| 47 |
+
[enabler]
|
| 48 |
+
pytest-enabler>=2.2
|
| 49 |
+
|
| 50 |
+
[ssl]
|
| 51 |
+
|
| 52 |
+
[test]
|
| 53 |
+
pytest!=8.1.*,>=6
|
| 54 |
+
virtualenv>=13.0.0
|
| 55 |
+
wheel>=0.44.0
|
| 56 |
+
pip>=19.1
|
| 57 |
+
packaging>=24.2
|
| 58 |
+
jaraco.envs>=2.2
|
| 59 |
+
pytest-xdist>=3
|
| 60 |
+
jaraco.path>=3.7.2
|
| 61 |
+
build[virtualenv]>=1.0.3
|
| 62 |
+
filelock>=3.4.0
|
| 63 |
+
ini2toml[lite]>=0.14
|
| 64 |
+
tomli-w>=1.0.0
|
| 65 |
+
pytest-timeout
|
| 66 |
+
pytest-home>=0.5
|
| 67 |
+
pytest-subprocess
|
| 68 |
+
pyproject-hooks!=1.1
|
| 69 |
+
jaraco.test>=5.5
|
| 70 |
+
|
| 71 |
+
[test:python_version >= "3.9" and sys_platform != "cygwin"]
|
| 72 |
+
jaraco.develop>=7.21
|
| 73 |
+
|
| 74 |
+
[test:sys_platform != "cygwin"]
|
| 75 |
+
pytest-perf
|
| 76 |
+
|
| 77 |
+
[type]
|
| 78 |
+
pytest-mypy
|
| 79 |
+
mypy==1.14.*
|
| 80 |
+
|
| 81 |
+
[type:python_version < "3.10"]
|
| 82 |
+
importlib_metadata>=7.0.2
|
| 83 |
+
|
| 84 |
+
[type:sys_platform != "cygwin"]
|
| 85 |
+
jaraco.develop>=7.21
|
llava_next/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/top_level.txt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
_distutils_hack
|
| 2 |
+
pkg_resources
|
| 3 |
+
setuptools
|
llava_next/lib/python3.10/site-packages/shellingham/__init__.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import importlib
import os

from ._core import ShellDetectionFailure

__version__ = "1.5.4"


def detect_shell(pid=None, max_depth=10):
    """Detect the shell the given process is running in.

    Looks up a platform-specific implementation submodule (named after
    ``os.name``) and delegates to its ``get_shell`` function.

    :param pid: Process id to start the lookup from; defaults to this process.
    :param max_depth: Maximum number of parent processes to inspect.
    :raises RuntimeError: If no implementation exists for this platform.
    :raises ShellDetectionFailure: If no shell could be detected.
    """
    os_name = os.name
    try:
        impl = importlib.import_module(".{}".format(os_name), __name__)
    except ImportError:
        raise RuntimeError(
            "Shell detection not implemented for {0!r}".format(os_name)
        )
    get_shell = getattr(impl, "get_shell", None)
    if get_shell is None:
        raise RuntimeError("get_shell not implemented for {0!r}".format(os_name))
    result = get_shell(pid, max_depth=max_depth)
    if not result:
        raise ShellDetectionFailure()
    return result
|
llava_next/lib/python3.10/site-packages/shellingham/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (802 Bytes). View file
|
|
|
llava_next/lib/python3.10/site-packages/shellingham/posix/__init__.py
ADDED
|
@@ -0,0 +1,112 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import re
|
| 3 |
+
|
| 4 |
+
from .._core import SHELL_NAMES, ShellDetectionFailure
|
| 5 |
+
from . import proc, ps
|
| 6 |
+
|
| 7 |
+
# Based on QEMU docs: https://www.qemu.org/docs/master/user/main.html
# Architectures that QEMU user-mode emulation binaries are named after,
# e.g. ``qemu-x86_64``. The order matches the alternation order of the
# original pattern (prefixes such as "arm"/"armeb" rely on backtracking).
_QEMU_ARCHS = (
    "alpha",
    "armeb",
    "arm",
    "m68k",
    "cris",
    "i386",
    "x86_64",
    "microblaze",
    "mips",
    "mipsel",
    "mips64",
    "mips64el",
    "mipsn32",
    "mipsn32el",
    "nios2",
    "ppc64",
    "ppc",
    "sh4eb",
    "sh4",
    "sparc",
    "sparc32plus",
    "sparc64",
)

QEMU_BIN_REGEX = re.compile("qemu-({})".format("|".join(_QEMU_ARCHS)))
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def _iter_process_parents(pid, max_depth=10):
    """Select a way to obtain process information from the system.

    * `/proc` is used if supported.
    * The system `ps` utility is used as a fallback option.
    """
    for backend in (proc, ps):
        try:
            return backend.iter_process_parents(pid, max_depth)
        except EnvironmentError:
            # This backend is unavailable here; try the next one.
            pass
    raise ShellDetectionFailure("compatible proc fs or ps utility is required")
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def _get_login_shell(proc_cmd):
    """Form shell information from SHELL environ if possible."""
    # Prefer $SHELL; otherwise drop the leading "-" login-shell marker.
    cmd = os.environ.get("SHELL", "") or proc_cmd[1:]
    return (os.path.basename(cmd).lower(), cmd)
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
# Shells implemented on top of an interpreter: each entry maps a pattern
# matching the interpreter executable's basename to the set of shell names
# that interpreter may be running (e.g. xonsh is a Python script).
_INTERPRETER_SHELL_NAMES = [
    (re.compile(r"^python(\d+(\.\d+)?)?$"), {"xonsh"}),
]
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def _get_interpreter_shell(proc_name, proc_args):
    """Get shell invoked via an interpreter.

    Some shells are implemented on, and invoked with an interpreter, e.g.
    xonsh is commonly executed with an executable Python script. This detects
    what script the interpreter is actually running, and checks whether that
    looks like a shell.

    See sarugaku/shellingham#26 for rationale.
    """
    candidate_sets = (
        shell_names
        for pattern, shell_names in _INTERPRETER_SHELL_NAMES
        if pattern.match(proc_name)
    )
    for shell_names in candidate_sets:
        for arg in proc_args:
            base = os.path.basename(arg).lower()
            # The argument must name a known shell AND exist on disk.
            if base in shell_names and os.path.isfile(arg):
                return (base, arg)
    return None
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def _get_shell(cmd, *args):
    """Classify one process command line; return (name, cmd) or None."""
    # A leading "-" marks a login shell -- trust it outright.
    if cmd.startswith("-"):
        return _get_login_shell(cmd)
    name = os.path.basename(cmd).lower()
    if name == "rosetta" or QEMU_BIN_REGEX.fullmatch(name):
        # Rosetta/QEMU wrap the real (emulated) command; unwrap one level
        # and classify that instead.
        cmd, args = args[0], args[1:]
        name = os.path.basename(cmd).lower()
    if name in SHELL_NAMES:  # Command looks like a shell.
        return (name, cmd)
    return _get_interpreter_shell(name, args) or None
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
def get_shell(pid=None, max_depth=10):
    """Get the shell that the supplied pid or os.getpid() is running in."""
    start_pid = str(pid or os.getpid())
    detections = (
        _get_shell(*proc_args)
        for proc_args, _, _ in _iter_process_parents(start_pid, max_depth)
    )
    # First ancestor that classifies as a shell wins; None if none does.
    return next((found for found in detections if found), None)
|
llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (2.92 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/_core.cpython-310.pyc
ADDED
|
Binary file (248 Bytes). View file
|
|
|
llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/proc.cpython-310.pyc
ADDED
|
Binary file (2.53 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/shellingham/posix/__pycache__/ps.cpython-310.pyc
ADDED
|
Binary file (1.57 kB). View file
|
|
|
llava_next/lib/python3.10/site-packages/shellingham/posix/_core.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
|
| 3 |
+
# Lightweight record describing one process: its argv tuple, its own pid,
# and its parent's pid.
Process = collections.namedtuple("Process", ["args", "pid", "ppid"])
|
llava_next/lib/python3.10/site-packages/shellingham/posix/proc.py
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import os
|
| 3 |
+
import re
|
| 4 |
+
import sys
|
| 5 |
+
|
| 6 |
+
from ._core import Process
|
| 7 |
+
|
| 8 |
+
# Token index of the parent-pid field in BSD-style /proc/{pid}/status.
# FreeBSD: https://www.freebsd.org/cgi/man.cgi?query=procfs
# NetBSD: https://man.netbsd.org/NetBSD-9.3-STABLE/mount_procfs.8
# DragonFlyBSD: https://www.dragonflybsd.org/cgi/web-man?command=procfs
BSD_STAT_PPID = 2

# Token index of the parent-pid field in Linux's /proc/{pid}/stat.
# See https://docs.kernel.org/filesystems/proc.html
LINUX_STAT_PPID = 3

# Tokenizer for a stat line: a parenthesized field (the command name, which
# may itself contain spaces) is one token; otherwise split on whitespace.
STAT_PATTERN = re.compile(r"\(.+\)|\S+")
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def detect_proc():
    """Detect /proc filesystem style.

    This checks the /proc/{pid} directory for possible formats. Returns one of
    the following as str:

    * `stat`: Linux-style, i.e. ``/proc/{pid}/stat``.
    * `status`: BSD-style, i.e. ``/proc/{pid}/status``.
    """
    own_pid = str(os.getpid())
    for candidate in ("stat", "status"):
        if os.path.exists(os.path.join("/proc", own_pid, candidate)):
            return candidate
    raise ProcFormatError("unsupported proc format")
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def _use_bsd_stat_format():
    """Whether this platform uses the BSD-style status field order."""
    try:
        sysname = os.uname().sysname.lower()
    except Exception:
        # Be conservative: anything unexpected means "assume Linux layout".
        return False
    return sysname in ("freebsd", "netbsd", "dragonfly")
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def _get_ppid(pid, name):
    """Read the parent pid (as a string token) from /proc/{pid}/{name}."""
    stat_path = os.path.join("/proc", str(pid), name)
    with io.open(stat_path, encoding="ascii", errors="replace") as f:
        tokens = STAT_PATTERN.findall(f.read())
    # We only care about TTY and PPID -- both are numbers -- but the field
    # index differs between the BSD and Linux proc layouts.
    index = BSD_STAT_PPID if _use_bsd_stat_format() else LINUX_STAT_PPID
    return tokens[index]
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def _get_cmdline(pid):
    """Return the argv of *pid* as a tuple, read from /proc/{pid}/cmdline."""
    cmdline_path = os.path.join("/proc", str(pid), "cmdline")
    fs_encoding = sys.getfilesystemencoding() or "utf-8"
    with io.open(cmdline_path, encoding=fs_encoding, errors="replace") as f:
        raw = f.read()
    # XXX: Command line arguments can be arbitrary byte sequences, not
    # necessarily decodable. For Shellingham's purpose, however, we don't
    # care. (pypa/pipenv#2820)
    # cmdline is NUL-delimited and carries a trailing NUL, hence the [:-1].
    return tuple(raw.split("\0")[:-1])
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class ProcFormatError(EnvironmentError):
    """Raised when /proc exists but matches no supported per-process layout."""
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def iter_process_parents(pid, max_depth=10):
    """Try to look up the process tree via the /proc interface."""
    # Resolve the stat file name eagerly so an unsupported /proc raises
    # here, at the call site, rather than on the first pull from the
    # iterator -- that lets the caller pick the correct implementation.
    stat_name = detect_proc()

    def _walk(current, remaining):
        while remaining > 0:
            ppid = _get_ppid(current, stat_name)
            args = _get_cmdline(current)
            yield Process(args=args, pid=current, ppid=ppid)
            if ppid == "0":
                return
            current = ppid
            remaining -= 1

    return _walk(pid, max_depth)
|