diff --git a/.gitattributes b/.gitattributes index 3c3e369797de7501e1512ce67d1eb96faf2f23f0..eb425346fa032c8c4d6b9fa50f568ec730e451c3 100644 --- a/.gitattributes +++ b/.gitattributes @@ -201,3 +201,4 @@ my_container_sandbox/workspace/anaconda3/pkgs/zstandard-0.19.0-py38h5eee18b_0.co my_container_sandbox/workspace/anaconda3/pkgs/pip-24.0-pyhd8ed1ab_0.conda filter=lfs diff=lfs merge=lfs -text my_container_sandbox/workspace/anaconda3/pkgs/conda-package-handling-1.7.3-py39h27cfd23_1.conda filter=lfs diff=lfs merge=lfs -text my_container_sandbox/workspace/anaconda3/lib/python3.8/__pycache__/_pydecimal.cpython-38.pyc filter=lfs diff=lfs merge=lfs -text +my_container_sandbox/workspace/anaconda3/lib/itcl4.2.2/libitcl4.2.2.so filter=lfs diff=lfs merge=lfs -text diff --git a/my_container_sandbox/workspace/anaconda3/lib/itcl4.2.2/libitcl4.2.2.so b/my_container_sandbox/workspace/anaconda3/lib/itcl4.2.2/libitcl4.2.2.so new file mode 100644 index 0000000000000000000000000000000000000000..0d34a9d44d02087403c386191011e230aa7aa9e0 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/itcl4.2.2/libitcl4.2.2.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:18526db5c4f5aa0356d171cf7249a436864cae36857b3594f7355f097be512b1 +size 315104 diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/INSTALLER b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/LICENSE b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/LICENSE new file mode 100644 
index 0000000000000000000000000000000000000000..0f67833e624e99056e9323fef07cfa3e87b74e42 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2006-2012 Filip Wasilewski +Copyright (c) 2012-2020 The PyWavelets Developers + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/LICENSES_bundled.txt b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/LICENSES_bundled.txt new file mode 100644 index 0000000000000000000000000000000000000000..6b2ab7a023a6e00f2369db6491bc27b510335eef --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/LICENSES_bundled.txt @@ -0,0 +1,10 @@ +The PyWavelets repository and source distributions bundle some code that is +adapted from compatibly licensed projects. 
We list these here. + +Name: NumPy +Files: pywt/_pytesttester.py +License: 3-clause BSD + +Name: SciPy +Files: setup.py, util/* +License: 3-clause BSD diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/METADATA b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..11d5279a23e36b0e5f9bbdf7cab48df682cc28aa --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/METADATA @@ -0,0 +1,47 @@ +Metadata-Version: 2.1 +Name: PyWavelets +Version: 1.3.0 +Summary: PyWavelets, wavelet transform module +Home-page: https://github.com/PyWavelets/pywt +Download-URL: https://github.com/PyWavelets/pywt/releases +Maintainer: The PyWavelets Developers +Maintainer-email: pywavelets@googlegroups.com +License: MIT +Keywords: wavelets,wavelet transform,DWT,SWT,CWT,scientific +Platform: Windows +Platform: Linux +Platform: Solaris +Platform: Mac OS-X +Platform: Unix +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: C +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.7 +License-File: LICENSE +License-File: LICENSES_bundled.txt +Requires-Dist: numpy (>=1.17.3) + + PyWavelets is a Python wavelet transforms module that includes: + + * nD Forward 
and Inverse Discrete Wavelet Transform (DWT and IDWT) + * 1D and 2D Forward and Inverse Stationary Wavelet Transform (Undecimated Wavelet Transform) + * 1D and 2D Wavelet Packet decomposition and reconstruction + * 1D Continuous Wavelet Tranfsorm + * Computing Approximations of wavelet and scaling functions + * Over 100 built-in wavelet filters and support for custom wavelets + * Single and double precision calculations + * Real and complex calculations + * Results compatible with Matlab Wavelet Toolbox (TM) + + diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/RECORD b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..ea8a7ac14615efa68de6384c26ecae9e4e040320 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/RECORD @@ -0,0 +1,110 @@ +PyWavelets-1.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +PyWavelets-1.3.0.dist-info/LICENSE,sha256=Y_0gPB6rQ8xrbYQ1zjk0_odZJvZGFDF4r1it9_PJrpc,1171 +PyWavelets-1.3.0.dist-info/LICENSES_bundled.txt,sha256=2ahHJgofMijcqSeHR3D5M9hIh8fsFcy7Xg_dy_UHH_8,264 +PyWavelets-1.3.0.dist-info/METADATA,sha256=CW6lXVp1a2iNqz3VKWFTuhV_dSKpPEeUhiWe6Zwa0Kw,1920 +PyWavelets-1.3.0.dist-info/RECORD,, +PyWavelets-1.3.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +PyWavelets-1.3.0.dist-info/WHEEL,sha256=-ijGDuALlPxm3HbhKntps0QzHsi-DPlXqgerYTTJkFE,148 +PyWavelets-1.3.0.dist-info/top_level.txt,sha256=j3eDFZWsDkiyrSwsdc1H0AjhXkTzsuJLSx-blZUoOVo,5 +pywt/__init__.py,sha256=DTdMgZJOEj1M-AQIdHa5jC7KOjJ8q-3NOHVEgpmnFNk,1093 +pywt/__pycache__/__init__.cpython-38.pyc,, +pywt/__pycache__/_c99_config.cpython-38.pyc,, +pywt/__pycache__/_cwt.cpython-38.pyc,, +pywt/__pycache__/_doc_utils.cpython-38.pyc,, +pywt/__pycache__/_dwt.cpython-38.pyc,, 
+pywt/__pycache__/_functions.cpython-38.pyc,, +pywt/__pycache__/_mra.cpython-38.pyc,, +pywt/__pycache__/_multidim.cpython-38.pyc,, +pywt/__pycache__/_multilevel.cpython-38.pyc,, +pywt/__pycache__/_pytest.cpython-38.pyc,, +pywt/__pycache__/_pytesttester.cpython-38.pyc,, +pywt/__pycache__/_swt.cpython-38.pyc,, +pywt/__pycache__/_thresholding.cpython-38.pyc,, +pywt/__pycache__/_utils.cpython-38.pyc,, +pywt/__pycache__/_wavelet_packets.cpython-38.pyc,, +pywt/__pycache__/conftest.cpython-38.pyc,, +pywt/__pycache__/version.cpython-38.pyc,, +pywt/_c99_config.py,sha256=LrMSEmGvgjoA5sE-BdpF7pUOXSNCfLfNUy6RG5z-6Bo,80 +pywt/_cwt.py,sha256=O988iZfXOMMpHpdDGIQaMjcUYeMuqCryagVqYY9mXis,7715 +pywt/_doc_utils.py,sha256=sHsQirfmNDWhT1qCWEVcliAx765nRH9g2vBNaGVOgrY,5823 +pywt/_dwt.py,sha256=ToFkJbVRVnqwvUkb7NebO0iG9Kauw5CccoBszqM8-fY,17227 +pywt/_extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pywt/_extensions/__pycache__/__init__.cpython-38.pyc,, +pywt/_extensions/_cwt.cpython-38-x86_64-linux-gnu.so,sha256=IRBlOqn--EWvZiyWtMyOiRx4IY8_w9VUQkSkHny5h74,1996888 +pywt/_extensions/_dwt.cpython-38-x86_64-linux-gnu.so,sha256=JnDoqlIKGlzVBje2ZjooVAZWThO2Csrt5-EeZJvBeX4,3458992 +pywt/_extensions/_pywt.cpython-38-x86_64-linux-gnu.so,sha256=UVDw6Un-4WBJyaUOSuRjO5vxg6AmqEw_CyEtCAlTono,3352432 +pywt/_extensions/_swt.cpython-38-x86_64-linux-gnu.so,sha256=kwmZ4MMtoJlLnIGb1CHRE90yvT4u0rDqNWBS1bm2EsI,2773072 +pywt/_functions.py,sha256=hvOQb4tn01j1s5_lNhbBBoCMeOvF1_3Q9mt0aEA1bI8,6999 +pywt/_mra.py,sha256=kK01G4fImwHxT-rAmtVAqmfiy9Hsuolup5lwS-HL_rs,14364 +pywt/_multidim.py,sha256=mnA6YqwNWPQIrrzbl9_XG7hclJdpNZDR1_tzVtWeha0,11443 +pywt/_multilevel.py,sha256=XgmP0hKTDNVvV0df-Mx69dbrpvResJ0D-8gzEm0M_WI,57164 +pywt/_pytest.py,sha256=yhHxov-VTpcokrHgfzZvYjMwpTQdDdiWTJUrbmJ2IxU,2513 +pywt/_pytesttester.py,sha256=bA2LQRmWoibTXD5ggR7FSFbrKt6IEO6-opG4ONiBCGw,4903 +pywt/_swt.py,sha256=VOYyYeyilaWCJEkKElWyPcrTBG9bQ8d6NC5zNA-00w8,31842 
+pywt/_thresholding.py,sha256=74opJmTHL5ssbBRiQqir3cmrk4ZRg0ouWTcQ5cp1-Sc,8793 +pywt/_utils.py,sha256=gbcji_MoFHOZg94yq7qHU77eyAl6exTeVmWetntkwTM,3484 +pywt/_wavelet_packets.py,sha256=V6GcUezVWBPVZusmweEjtU5wQP-pkjPK1NpmamMnrsI,36737 +pywt/conftest.py,sha256=cfdJT7ucDVr1E_2Zcl2wdjNQC-dyQW2IF6L21CCcN0E,143 +pywt/data/__init__.py,sha256=qoDFVHX0RNi91n3UwC7UwYU3UGzbgdGD2OCH9zGJ8mo,96 +pywt/data/__pycache__/__init__.cpython-38.pyc,, +pywt/data/__pycache__/_readers.cpython-38.pyc,, +pywt/data/__pycache__/_wavelab_signals.cpython-38.pyc,, +pywt/data/__pycache__/create_dat.cpython-38.pyc,, +pywt/data/_readers.py,sha256=I859gMQwAYMdriDtqsFipia88mMEya_uAWWcgrDYUig,4917 +pywt/data/_wavelab_signals.py,sha256=7ay4VKhYkMsdrmrMm_FeBKrewzGzrwuoy6sO_RkR8vY,9476 +pywt/data/aero.npz,sha256=34YmNXmLrJQia4ko8iTajO-LDQBJLB_fSPrG36-XqUs,227784 +pywt/data/ascent.npz,sha256=ptVryOllcdYSTzTO3rpJ8dNZlQf2yJCtm6U4VERU6Pc,170883 +pywt/data/camera.npz,sha256=K-gZXpaAzLPIBP7hnKC2mgl-rpybKrl0dMfxQ4Fx8HQ,169034 +pywt/data/create_dat.py,sha256=8BsF3dCoixafNSi5jxZnHdvK69FTH6dIGNJNIlv6c60,625 +pywt/data/ecg.npy,sha256=iS9GVe4jRwWTxs8c4X8Of0f2ywMBJKkvVQ5bFyrUPTk,4176 +pywt/data/sst_nino3.npz,sha256=-vMX2TEULdISSSkMpmevDecdiZ5_I4Zk3zC2xB0Qz1c,64200 +pywt/tests/__pycache__/test__pywt.cpython-38.pyc,, +pywt/tests/__pycache__/test_concurrent.cpython-38.pyc,, +pywt/tests/__pycache__/test_cwt_wavelets.cpython-38.pyc,, +pywt/tests/__pycache__/test_data.cpython-38.pyc,, +pywt/tests/__pycache__/test_deprecations.cpython-38.pyc,, +pywt/tests/__pycache__/test_doc.cpython-38.pyc,, +pywt/tests/__pycache__/test_dwt_idwt.cpython-38.pyc,, +pywt/tests/__pycache__/test_functions.cpython-38.pyc,, +pywt/tests/__pycache__/test_matlab_compatibility.cpython-38.pyc,, +pywt/tests/__pycache__/test_matlab_compatibility_cwt.cpython-38.pyc,, +pywt/tests/__pycache__/test_modes.cpython-38.pyc,, +pywt/tests/__pycache__/test_mra.cpython-38.pyc,, +pywt/tests/__pycache__/test_multidim.cpython-38.pyc,, 
+pywt/tests/__pycache__/test_multilevel.cpython-38.pyc,, +pywt/tests/__pycache__/test_perfect_reconstruction.cpython-38.pyc,, +pywt/tests/__pycache__/test_swt.cpython-38.pyc,, +pywt/tests/__pycache__/test_thresholding.cpython-38.pyc,, +pywt/tests/__pycache__/test_wavelet.cpython-38.pyc,, +pywt/tests/__pycache__/test_wp.cpython-38.pyc,, +pywt/tests/__pycache__/test_wp2d.cpython-38.pyc,, +pywt/tests/__pycache__/test_wpnd.cpython-38.pyc,, +pywt/tests/data/__pycache__/generate_matlab_data.cpython-38.pyc,, +pywt/tests/data/__pycache__/generate_matlab_data_cwt.cpython-38.pyc,, +pywt/tests/data/cwt_matlabR2015b_result.npz,sha256=FA1Tx-q_1k74bb7yERH_lq4hgZEVwdNP3RVm8lu-_Zw,1819506 +pywt/tests/data/dwt_matlabR2012a_result.npz,sha256=H3zj71AxK1VbM7HZArX5El_qnfH_LLHAlVWj9eLhMmM,3590870 +pywt/tests/data/generate_matlab_data.py,sha256=Spz3yi0kYJF9B1-nDTHhRDBwRexvyjoCT3iuNx7T4xc,3957 +pywt/tests/data/generate_matlab_data_cwt.py,sha256=wjHsR_qTcU9WTI1CmSNam-AM-d0Aomb5emDWlAH8XTU,3248 +pywt/tests/data/wavelab_test_signals.npz,sha256=-cx0ne9JdTcq6LiKBacjM_0_En72TAiKvvFUW1yiZYE,184818 +pywt/tests/test__pywt.py,sha256=3xnBENJo0WXpuWikaV7NnFBGBjXyw4rcRRAxXe84b2w,5469 +pywt/tests/test_concurrent.py,sha256=nPh_HttsDnfg4dhgvFaTAqYXLPPHI3XZ7R4BIjG0nKo,3987 +pywt/tests/test_cwt_wavelets.py,sha256=fDyICYgQO6Dzg1opgalcFS1bOVTsup9KhaN28K5VFM4,13589 +pywt/tests/test_data.py,sha256=VPDsh5KNxI-jZ4UCxr6VQjqknjmeQyohLZPfe47pyC4,2266 +pywt/tests/test_deprecations.py,sha256=gmMe6YE9scXBrw7liZAloqtc6woX87_c36jXfgrBAXo,2220 +pywt/tests/test_doc.py,sha256=vgJpPxuFQdeXBwKiiYecQVNB49Rj6yhulHZTa8D8rIE,622 +pywt/tests/test_dwt_idwt.py,sha256=kpCzE8BtzhlFVrSbs_Ywpkdg-C9m9iROqxtvaPwzROI,10352 +pywt/tests/test_functions.py,sha256=Fpbk9V7j4cFBOwNtivGd5jnTw_SJ4OfblmieE_DSIsE,1163 +pywt/tests/test_matlab_compatibility.py,sha256=ldfngu7e2DdJv-vtTkCOhkJ8AbDIGFxXT2y23_g5Odw,5885 +pywt/tests/test_matlab_compatibility_cwt.py,sha256=Js-ZFE8eJUQo0TYKpYi9IR3qazqi1aZiSADSwmBbxA0,6283 
+pywt/tests/test_modes.py,sha256=_cwJtc-vSJAg15hmDxBmq-JRmCndEivcNEom4ZW2W_Y,4848 +pywt/tests/test_mra.py,sha256=-h8o8VGxHR7cC0jx5lLISfYhd0jki1_ic45bAf5b8fg,8928 +pywt/tests/test_multidim.py,sha256=FInykgFTbXZ0mzHm5HfHBRhncb4QAcolfEoYT1pXxpU,14933 +pywt/tests/test_multilevel.py,sha256=R3ApQ1lZCGIOKqw8zuZvP767P0BT9s1KS_tuGe8ba-w,39025 +pywt/tests/test_perfect_reconstruction.py,sha256=QMI18bJicfjhX65eMf1QaZM_UEwCbT6gwISOgS0CRHg,1795 +pywt/tests/test_swt.py,sha256=YZQ7ZeuX-bVGL4uMqqu4_m68Dm35BaVHbEXNr2IBncU,24854 +pywt/tests/test_thresholding.py,sha256=tH8X6uqRvqhK0Z2clfvPCpKIpiLdg8eUhO1I4nwj2SQ,6533 +pywt/tests/test_wavelet.py,sha256=TiuQi2zlRmHTSV8zP6eWr3UdX86_2E8hK_MZQv-xTII,11489 +pywt/tests/test_wp.py,sha256=RzUru1gYXEAzfxEnSdAJUAMx12IMRx6r-tS5Gofnf8E,8015 +pywt/tests/test_wp2d.py,sha256=p71jpd7ux1Sriwb9MZtRfIvWkZXlU0nDc5nbCVjUrRw,9394 +pywt/tests/test_wpnd.py,sha256=sa2F9X365O0FslUDqspoIlGH1HcP2qD4CjlVo4eHFX0,6252 +pywt/version.py,sha256=AaS_cTgGfvBRhbjbJPRJbRPpfQOUKflvOjBIn6I8lY4,233 diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/REQUESTED b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/WHEEL b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..3a48d3480384503bea53d4a7c55a666ace0eb5fc --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: false +Tag: cp38-cp38-manylinux_2_17_x86_64 +Tag: cp38-cp38-manylinux2014_x86_64 + diff --git 
a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/top_level.txt b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..ce9775431102a8cd4a74674dad38148fc15da142 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/PyWavelets-1.3.0.dist-info/top_level.txt @@ -0,0 +1 @@ +pywt diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiosignal/__init__.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiosignal/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f3b9085b5fc5b25aa2fb7d51fe2bea8f2bda99a9 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiosignal/__init__.py @@ -0,0 +1,36 @@ +from frozenlist import FrozenList + +__version__ = "1.2.0" + +__all__ = ("Signal",) + + +class Signal(FrozenList): + """Coroutine-based signal implementation. + + To connect a callback to a signal, use any list method. + + Signals are fired using the send() coroutine, which takes named + arguments. + """ + + __slots__ = ("_owner",) + + def __init__(self, owner): + super().__init__() + self._owner = owner + + def __repr__(self): + return "".format( + self._owner, self.frozen, list(self) + ) + + async def send(self, *args, **kwargs): + """ + Sends data to all registered receivers. 
+ """ + if not self.frozen: + raise RuntimeError("Cannot send non-frozen signal.") + + for receiver in self: + await receiver(*args, **kwargs) # type: ignore diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiosignal/__init__.pyi b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiosignal/__init__.pyi new file mode 100644 index 0000000000000000000000000000000000000000..d4e3416d72246058259061578a82697e2bc0706e --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiosignal/__init__.pyi @@ -0,0 +1,12 @@ +from typing import Any, Generic, TypeVar + +from frozenlist import FrozenList + +__all__ = ("Signal",) + +_T = TypeVar("_T") + +class Signal(FrozenList[_T], Generic[_T]): + def __init__(self, owner: Any) -> None: ... + def __repr__(self) -> str: ... + async def send(self, *args: Any, **kwargs: Any) -> None: ... diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiosignal/py.typed b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiosignal/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/AUTHORS.rst b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/AUTHORS.rst new file mode 100644 index 0000000000000000000000000000000000000000..e2781e4c678c910aa73a8eb11cb75aa02738d0b2 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/AUTHORS.rst @@ -0,0 +1,48 @@ +Main contributors +----------------- +- Hideo Hattori (https://github.com/hhatto) +- Steven Myint (https://github.com/myint) +- Bill Wendling (https://github.com/gwelymernans) + +Patches +------- +- Fraser Tweedale (https://github.com/frasertweedale) +- clach04 (https://github.com/clach04) +- Marc 
Abramowitz (https://github.com/msabramo) +- dellis23 (https://github.com/dellis23) +- Sam Vilain (https://github.com/samv) +- Florent Xicluna (https://github.com/florentx) +- Andras Tim (https://github.com/andras-tim) +- tomscytale (https://github.com/tomscytale) +- Filip Noetzel (https://github.com/peritus) +- Erik Bray (https://github.com/iguananaut) +- Christopher Medrela (https://github.com/chrismedrela) +- 小明 (https://github.com/dongweiming) +- Andy Hayden (https://github.com/hayd) +- Fabio Zadrozny (https://github.com/fabioz) +- Alex Chernetz (https://github.com/achernet) +- Marc Schlaich (https://github.com/schlamar) +- E. M. Bray (https://github.com/embray) +- Thomas Hisch (https://github.com/thisch) +- Florian Best (https://github.com/spaceone) +- Ian Clark (https://github.com/evenicoulddoit) +- Khairi Hafsham (https://github.com/khairihafsham) +- Neil Halelamien (https://github.com/neilsh) +- Hashem Nasarat (https://github.com/Hnasar) +- Hugo van Kemenade (https://github.com/hugovk) +- gmbnomis (https://github.com/gmbnomis) +- Samuel Lelièvre (https://github.com/slel) +- bigredengineer (https://github.com/bigredengineer) +- Kai Chen (https://github.com/kx-chen) +- Anthony Sottile (https://github.com/asottile) +- 秋葉 (https://github.com/Hanaasagi) +- Christian Clauss (https://github.com/cclauss) +- tobixx (https://github.com/tobixx) +- bigredengineer (https://github.com/bigredengineer) +- Bastien Gérard (https://github.com/bagerard) +- nicolasbonifas (https://github.com/nicolasbonifas) +- Andrii Yurchuk (https://github.com/Ch00k) +- José M. 
Guisado (https://github.com/pvxe) +- Dai Truong (https://github.com/NovaDev94) +- jnozsc (https://github.com/jnozsc) +- Edwin Shepherd (https://github.com/shardros) diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/INSTALLER b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/LICENSE b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..df9738f45022feaca31db3bc052524607499c604 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/LICENSE @@ -0,0 +1,23 @@ +Copyright (C) 2010-2011 Hideo Hattori +Copyright (C) 2011-2013 Hideo Hattori, Steven Myint +Copyright (C) 2013-2016 Hideo Hattori, Steven Myint, Bill Wendling + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/METADATA b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..4cbb366bea35c594567431a113e975fdea38fac9 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/METADATA @@ -0,0 +1,458 @@ +Metadata-Version: 2.1 +Name: autopep8 +Version: 1.6.0 +Summary: A tool that automatically formats Python code to conform to the PEP 8 style guide +Home-page: https://github.com/hhatto/autopep8 +Author: Hideo Hattori +Author-email: hhatto.jp@gmail.com +License: Expat License +Keywords: automation,pep8,format,pycodestyle +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: 
Software Development :: Libraries :: Python Modules +Classifier: Topic :: Software Development :: Quality Assurance +Requires-Dist: pycodestyle (>=2.8.0) +Requires-Dist: toml + +======== +autopep8 +======== + +.. image:: https://img.shields.io/pypi/v/autopep8.svg + :target: https://pypi.org/project/autopep8/ + :alt: PyPI Version + +.. image:: https://github.com/hhatto/autopep8/workflows/Python%20package/badge.svg + :target: https://github.com/hhatto/autopep8/actions + :alt: Build status + +.. image:: https://codecov.io/gh/hhatto/autopep8/branch/master/graph/badge.svg + :target: https://codecov.io/gh/hhatto/autopep8 + :alt: Code Coverage + +autopep8 automatically formats Python code to conform to the `PEP 8`_ style +guide. It uses the pycodestyle_ utility to determine what parts of the code +needs to be formatted. autopep8 is capable of fixing most of the formatting +issues_ that can be reported by pycodestyle. + +.. _PEP 8: https://www.python.org/dev/peps/pep-0008/ +.. _issues: https://pycodestyle.readthedocs.org/en/latest/intro.html#error-codes + +.. contents:: + + +Installation +============ + +From pip:: + + $ pip install --upgrade autopep8 + +Consider using the ``--user`` option_. + +.. _option: https://pip.pypa.io/en/latest/user_guide/#user-installs + + +Requirements +============ + +autopep8 requires pycodestyle_. + +.. _pycodestyle: https://github.com/PyCQA/pycodestyle + + +Usage +===== + +To modify a file in place (with aggressive level 2):: + + $ autopep8 --in-place --aggressive --aggressive + +Before running autopep8. + +.. code-block:: python + + import math, sys; + + def example1(): + ####This is a long comment. This should be wrapped to fit within 72 characters. 
+ some_tuple=( 1,2, 3,'a' ); + some_variable={'long':'Long code lines should be wrapped within 79 characters.', + 'other':[math.pi, 100,200,300,9876543210,'This is a long string that goes on'], + 'more':{'inner':'This whole logical line should be wrapped.',some_tuple:[1, + 20,300,40000,500000000,60000000000000000]}} + return (some_tuple, some_variable) + def example2(): return {'has_key() is deprecated':True}.has_key({'f':2}.has_key('')); + class Example3( object ): + def __init__ ( self, bar ): + #Comments should have a space after the hash. + if bar : bar+=1; bar=bar* bar ; return bar + else: + some_string = """ + Indentation in multiline strings should not be touched. + Only actual code should be reindented. + """ + return (sys.path, some_string) + +After running autopep8. + +.. code-block:: python + + import math + import sys + + + def example1(): + # This is a long comment. This should be wrapped to fit within 72 + # characters. + some_tuple = (1, 2, 3, 'a') + some_variable = { + 'long': 'Long code lines should be wrapped within 79 characters.', + 'other': [ + math.pi, + 100, + 200, + 300, + 9876543210, + 'This is a long string that goes on'], + 'more': { + 'inner': 'This whole logical line should be wrapped.', + some_tuple: [ + 1, + 20, + 300, + 40000, + 500000000, + 60000000000000000]}} + return (some_tuple, some_variable) + + + def example2(): return ('' in {'f': 2}) in {'has_key() is deprecated': True} + + + class Example3(object): + def __init__(self, bar): + # Comments should have a space after the hash. + if bar: + bar += 1 + bar = bar * bar + return bar + else: + some_string = """ + Indentation in multiline strings should not be touched. + Only actual code should be reindented. 
+ """ + return (sys.path, some_string) + +Options:: + + usage: autopep8 [-h] [--version] [-v] [-d] [-i] [--global-config filename] + [--ignore-local-config] [-r] [-j n] [-p n] [-a] + [--experimental] [--exclude globs] [--list-fixes] + [--ignore errors] [--select errors] [--max-line-length n] + [--line-range line line] [--hang-closing] [--exit-code] + [files [files ...]] + + Automatically formats Python code to conform to the PEP 8 style guide. + + positional arguments: + files files to format or '-' for standard in + + optional arguments: + -h, --help show this help message and exit + --version show program's version number and exit + -v, --verbose print verbose messages; multiple -v result in more + verbose messages + -d, --diff print the diff for the fixed source + -i, --in-place make changes to files in place + --global-config filename + path to a global pep8 config file; if this file does + not exist then this is ignored (default: + ~/.config/pep8) + --ignore-local-config + don't look for and apply local config files; if not + passed, defaults are updated with any config files in + the project's root directory + -r, --recursive run recursively over directories; must be used with + --in-place or --diff + -j n, --jobs n number of parallel jobs; match CPU count if value is + less than 1 + -p n, --pep8-passes n + maximum number of additional pep8 passes (default: + infinite) + -a, --aggressive enable non-whitespace changes; multiple -a result in + more aggressive changes + --experimental enable experimental fixes + --exclude globs exclude file/directory names that match these comma- + separated globs + --list-fixes list codes for fixes; used by --ignore and --select + --ignore errors do not fix these errors/warnings (default: + E226,E24,W50,W690) + --select errors fix only these errors/warnings (e.g. 
E4,W) + --max-line-length n set maximum allowed line length (default: 79) + --line-range line line, --range line line + only fix errors found within this inclusive range of + line numbers (e.g. 1 99); line numbers are indexed at + 1 + --hang-closing hang-closing option passed to pycodestyle + --exit-code change to behavior of exit code. default behavior of + return value, 0 is no differences, 1 is error exit. + return 2 when add this option. 2 is exists + differences. + + +Features +======== + +autopep8 fixes the following issues_ reported by pycodestyle_:: + + E101 - Reindent all lines. + E11 - Fix indentation. + E121 - Fix indentation to be a multiple of four. + E122 - Add absent indentation for hanging indentation. + E123 - Align closing bracket to match opening bracket. + E124 - Align closing bracket to match visual indentation. + E125 - Indent to distinguish line from next logical line. + E126 - Fix over-indented hanging indentation. + E127 - Fix visual indentation. + E128 - Fix visual indentation. + E129 - Fix visual indentation. + E131 - Fix hanging indent for unaligned continuation line. + E133 - Fix missing indentation for closing bracket. + E20 - Remove extraneous whitespace. + E211 - Remove extraneous whitespace. + E22 - Fix extraneous whitespace around keywords. + E224 - Remove extraneous whitespace around operator. + E225 - Fix missing whitespace around operator. + E226 - Fix missing whitespace around arithmetic operator. + E227 - Fix missing whitespace around bitwise/shift operator. + E228 - Fix missing whitespace around modulo operator. + E231 - Add missing whitespace. + E241 - Fix extraneous whitespace around keywords. + E242 - Remove extraneous whitespace around operator. + E251 - Remove whitespace around parameter '=' sign. + E252 - Missing whitespace around parameter equals. + E26 - Fix spacing after comment hash for inline comments. + E265 - Fix spacing after comment hash for block comments. + E266 - Fix too many leading '#' for block comments. 
+ E27 - Fix extraneous whitespace around keywords. + E301 - Add missing blank line. + E302 - Add missing 2 blank lines. + E303 - Remove extra blank lines. + E304 - Remove blank line following function decorator. + E305 - Expected 2 blank lines after end of function or class. + E306 - Expected 1 blank line before a nested definition. + E401 - Put imports on separate lines. + E402 - Fix module level import not at top of file + E501 - Try to make lines fit within --max-line-length characters. + E502 - Remove extraneous escape of newline. + E701 - Put colon-separated compound statement on separate lines. + E70 - Put semicolon-separated compound statement on separate lines. + E711 - Fix comparison with None. + E712 - Fix comparison with boolean. + E713 - Use 'not in' for test for membership. + E714 - Use 'is not' test for object identity. + E721 - Use "isinstance()" instead of comparing types directly. + E722 - Fix bare except. + E731 - Use a def when use do not assign a lambda expression. + W291 - Remove trailing whitespace. + W292 - Add a single newline at the end of the file. + W293 - Remove trailing whitespace on blank line. + W391 - Remove trailing blank lines. + W503 - Fix line break before binary operator. + W504 - Fix line break after binary operator. + W601 - Use "in" rather than "has_key()". + W602 - Fix deprecated form of raising exception. + W603 - Use "!=" instead of "<>" + W604 - Use "repr()" instead of backticks. + W605 - Fix invalid escape sequence 'x'. + W690 - Fix various deprecated code (via lib2to3). + +autopep8 also fixes some issues not found by pycodestyle_. + +- Correct deprecated or non-idiomatic Python code (via ``lib2to3``). Use this + for making Python 2.7 code more compatible with Python 3. (This is triggered + if ``W690`` is enabled.) +- Normalize files with mixed line endings. +- Put a blank line between a class docstring and its first method + declaration. (Enabled with ``E301``.) 
+- Remove blank lines between a function declaration and its docstring. (Enabled + with ``E303``.) + +autopep8 avoids fixing some issues found by pycodestyle_. + +- ``E112``/``E113`` for non comments are reports of bad indentation that break + syntax rules. These should not be modified at all. +- ``E265``, which refers to spacing after comment hash, is ignored if the + comment looks like code. autopep8 avoids modifying these since they are not + real comments. If you really want to get rid of the pycodestyle_ warning, + consider just removing the commented-out code. (This can be automated via + eradicate_.) + +.. _eradicate: https://github.com/myint/eradicate + + +More advanced usage +=================== + +By default autopep8 only makes whitespace changes. Thus, by default, it does +not fix ``E711`` and ``E712``. (Changing ``x == None`` to ``x is None`` may +change the meaning of the program if ``x`` has its ``__eq__`` method +overridden.) Nor does it correct deprecated code ``W6``. To enable these +more aggressive fixes, use the ``--aggressive`` option:: + + $ autopep8 --aggressive + +Use multiple ``--aggressive`` to increase the aggressiveness level. For +example, ``E712`` requires aggressiveness level 2 (since ``x == True`` could be +changed to either ``x`` or ``x is True``, but autopep8 chooses the former). + +``--aggressive`` will also shorten lines more aggressively. It will also remove +trailing whitespace more aggressively. (Usually, we don't touch trailing +whitespace in docstrings and other multiline strings. And to do even more +aggressive changes to docstrings, use docformatter_.) + +.. _docformatter: https://github.com/myint/docformatter + +To enable only a subset of the fixes, use the ``--select`` option. 
For example, +to fix various types of indentation issues:: + + $ autopep8 --select=E1,W1 + +Similarly, to just fix deprecated code:: + + $ autopep8 --aggressive --select=W6 + +The above is useful when trying to port a single code base to work with both +Python 2 and Python 3 at the same time. + +If the file being fixed is large, you may want to enable verbose progress +messages:: + + $ autopep8 -v + +Passing in ``--experimental`` enables the following functionality: + +- Shortens code lines by taking its length into account + +:: + +$ autopep8 --experimental + +Use as a module +=============== + +The simplest way of using autopep8 as a module is via the ``fix_code()`` +function: + + >>> import autopep8 + >>> autopep8.fix_code('x= 123\n') + 'x = 123\n' + +Or with options: + + >>> import autopep8 + >>> autopep8.fix_code('x.has_key(y)\n', + ... options={'aggressive': 1}) + 'y in x\n' + >>> autopep8.fix_code('print( 123 )\n', + ... options={'ignore': ['E']}) + 'print( 123 )\n' + + +Configuration +============= + +By default, if ``$HOME/.config/pycodestyle`` (``~\.pycodestyle`` in Windows +environment) exists, it will be used as global configuration file. +Alternatively, you can specify the global configuration file with the +``--global-config`` option. + +Also, if ``setup.cfg``, ``tox.ini``, ``.pep8`` and ``.flake8`` files exist +in the directory where the target file exists, it will be used as the +configuration file. + +``pep8``, ``pycodestyle``, and ``flake8`` can be used as a section. + +configuration file example:: + + [pycodestyle] + max_line_length = 120 + ignore = E501 + +pyproject.toml +-------------- + +autopep8 can also use ``pyproject.toml``. +The section must be ``[tool.autopep8]``, and ``pyproject.toml`` takes precedence +over any other configuration files. 
+ +configuration file example:: + + [tool.autopep8] + max_line_length = 120 + ignore = "E501,W6" # or ["E501", "W6"] + in-place = true + recursive = true + aggressive = 3 + + +Testing +======= + +Test cases are in ``test/test_autopep8.py``. They can be run directly via +``python test/test_autopep8.py`` or via tox_. The latter is useful for +testing against multiple Python interpreters. (We currently test against +CPython versions 2.7, 3.6 3.7 and 3.8. We also test against PyPy.) + +.. _`tox`: https://pypi.org/project/tox/ + +Broad spectrum testing is available via ``test/acid.py``. This script runs +autopep8 against Python code and checks for correctness and completeness of the +code fixes. It can check that the bytecode remains identical. +``test/acid_pypi.py`` makes use of ``acid.py`` to test against the latest +released packages on PyPI. + + +Troubleshooting +=============== + +``pkg_resources.DistributionNotFound`` +-------------------------------------- + +If you are using an ancient version of ``setuptools``, you might encounter +``pkg_resources.DistributionNotFound`` when trying to run ``autopep8``. Try +upgrading ``setuptools`` to workaround this ``setuptools`` problem:: + + $ pip install --upgrade setuptools + +Use ``sudo`` if you are installing to the system. + + +Links +===== + +* PyPI_ +* GitHub_ +* `Travis CI`_ +* Coveralls_ + +.. _PyPI: https://pypi.org/project/autopep8/ +.. _GitHub: https://github.com/hhatto/autopep8 +.. _`Travis CI`: https://travis-ci.org/hhatto/autopep8 +.. 
_`Coveralls`: https://coveralls.io/r/hhatto/autopep8 + + diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/RECORD b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..c700f370bf1f16c54285b5ff2c94f14ebfb82c86 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/RECORD @@ -0,0 +1,12 @@ +../../../bin/autopep8,sha256=AvYbAVqgkY4XonKVKtO9nsNdFA8jHoBqk1rEB2XeJNI,225 +__pycache__/autopep8.cpython-38.pyc,, +autopep8-1.6.0.dist-info/AUTHORS.rst,sha256=tiTPsbzGl9dtXCMEWXbWSV1zan1M-BoWtiixs46GIWk,2003 +autopep8-1.6.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +autopep8-1.6.0.dist-info/LICENSE,sha256=jR0COOSFQ0QZFMqwdB1N4-Bwobg2f3h69fIJr7YLCWo,1181 +autopep8-1.6.0.dist-info/METADATA,sha256=Alsm0GBDTMoXAhA8ZTyu5XatFZPRi69nGHS0Np9hAt4,16825 +autopep8-1.6.0.dist-info/RECORD,, +autopep8-1.6.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +autopep8-1.6.0.dist-info/WHEEL,sha256=WzZ8cwjh8l0jtULNjYq1Hpr-WCqCRgPr--TX4P5I1Wo,110 +autopep8-1.6.0.dist-info/entry_points.txt,sha256=iHNa5_cSXw2ablVbRmfiFGMG1CNrpEPRCEjn3nspaJ8,44 +autopep8-1.6.0.dist-info/top_level.txt,sha256=s2x-di3QBwGxr7kd5xErt2pom8dsFRdINbmwsOEgLfU,9 +autopep8.py,sha256=_6TpkLfhzCDrsU0jk_BNbPLa_sKMnx1ZtmraABtnAXU,155367 diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/REQUESTED b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/WHEEL 
b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..b733a60d379c60aef4921d7e42a113fdc28300dc --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/entry_points.txt b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..e3b2c4f8eeddc694526090da28731bdb438e5deb --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +autopep8 = autopep8:main + diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/top_level.txt b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..d81c0c257ee5e43679360afab28fb8a46ebc630e --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/autopep8-1.6.0.dist-info/top_level.txt @@ -0,0 +1 @@ +autopep8 diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/__init__.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..90dedf43339ebd579902c9f50b0e6b0f73267122 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/__init__.py @@ -0,0 +1,14 @@ +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', + 
'FFIError'] + +from .api import FFI +from .error import CDefError, FFIError, VerificationError, VerificationMissing +from .error import PkgConfigError + +__version__ = "1.16.0" +__version_info__ = (1, 16, 0) + +# The verifier module file names are based on the CRC32 of a string that +# contains the following version number. It may be older than __version__ +# if nothing is clearly incompatible. +__version_verifier_modules__ = "0.8.6" diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/_cffi_errors.h b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/_cffi_errors.h new file mode 100644 index 0000000000000000000000000000000000000000..158e0590346a9a8b2ab047ac1bd23bcb3af21398 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/_cffi_errors.h @@ -0,0 +1,149 @@ +#ifndef CFFI_MESSAGEBOX +# ifdef _MSC_VER +# define CFFI_MESSAGEBOX 1 +# else +# define CFFI_MESSAGEBOX 0 +# endif +#endif + + +#if CFFI_MESSAGEBOX +/* Windows only: logic to take the Python-CFFI embedding logic + initialization errors and display them in a background thread + with MessageBox. The idea is that if the whole program closes + as a result of this problem, then likely it is already a console + program and you can read the stderr output in the console too. + If it is not a console program, then it will likely show its own + dialog to complain, or generally not abruptly close, and for this + case the background thread should stay alive. 
+*/ +static void *volatile _cffi_bootstrap_text; + +static PyObject *_cffi_start_error_capture(void) +{ + PyObject *result = NULL; + PyObject *x, *m, *bi; + + if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text, + (void *)1, NULL) != NULL) + return (PyObject *)1; + + m = PyImport_AddModule("_cffi_error_capture"); + if (m == NULL) + goto error; + + result = PyModule_GetDict(m); + if (result == NULL) + goto error; + +#if PY_MAJOR_VERSION >= 3 + bi = PyImport_ImportModule("builtins"); +#else + bi = PyImport_ImportModule("__builtin__"); +#endif + if (bi == NULL) + goto error; + PyDict_SetItemString(result, "__builtins__", bi); + Py_DECREF(bi); + + x = PyRun_String( + "import sys\n" + "class FileLike:\n" + " def write(self, x):\n" + " try:\n" + " of.write(x)\n" + " except: pass\n" + " self.buf += x\n" + " def flush(self):\n" + " pass\n" + "fl = FileLike()\n" + "fl.buf = ''\n" + "of = sys.stderr\n" + "sys.stderr = fl\n" + "def done():\n" + " sys.stderr = of\n" + " return fl.buf\n", /* make sure the returned value stays alive */ + Py_file_input, + result, result); + Py_XDECREF(x); + + error: + if (PyErr_Occurred()) + { + PyErr_WriteUnraisable(Py_None); + PyErr_Clear(); + } + return result; +} + +#pragma comment(lib, "user32.lib") + +static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored) +{ + Sleep(666); /* may be interrupted if the whole process is closing */ +#if PY_MAJOR_VERSION >= 3 + MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text, + L"Python-CFFI error", + MB_OK | MB_ICONERROR); +#else + MessageBoxA(NULL, (char *)_cffi_bootstrap_text, + "Python-CFFI error", + MB_OK | MB_ICONERROR); +#endif + _cffi_bootstrap_text = NULL; + return 0; +} + +static void _cffi_stop_error_capture(PyObject *ecap) +{ + PyObject *s; + void *text; + + if (ecap == (PyObject *)1) + return; + + if (ecap == NULL) + goto error; + + s = PyRun_String("done()", Py_eval_input, ecap, ecap); + if (s == NULL) + goto error; + + /* Show a dialog box, but in a background thread, and + never 
show multiple dialog boxes at once. */ +#if PY_MAJOR_VERSION >= 3 + text = PyUnicode_AsWideCharString(s, NULL); +#else + text = PyString_AsString(s); +#endif + + _cffi_bootstrap_text = text; + + if (text != NULL) + { + HANDLE h; + h = CreateThread(NULL, 0, _cffi_bootstrap_dialog, + NULL, 0, NULL); + if (h != NULL) + CloseHandle(h); + } + /* decref the string, but it should stay alive as 'fl.buf' + in the small module above. It will really be freed only if + we later get another similar error. So it's a leak of at + most one copy of the small module. That's fine for this + situation which is usually a "fatal error" anyway. */ + Py_DECREF(s); + PyErr_Clear(); + return; + + error: + _cffi_bootstrap_text = NULL; + PyErr_Clear(); +} + +#else + +static PyObject *_cffi_start_error_capture(void) { return NULL; } +static void _cffi_stop_error_capture(PyObject *ecap) { } + +#endif diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/_cffi_include.h b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/_cffi_include.h new file mode 100644 index 0000000000000000000000000000000000000000..e4c0a672405298ddb3dcb2e2ca6da9eea3d2e162 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/_cffi_include.h @@ -0,0 +1,385 @@ +#define _CFFI_ + +/* We try to define Py_LIMITED_API before including Python.h. + + Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and + Py_REF_DEBUG are not defined. This is a best-effort approximation: + we can learn about Py_DEBUG from pyconfig.h, but it is unclear if + the same works for the other two macros. Py_DEBUG implies them, + but not the other way around. + + The implementation is messy (issue #350): on Windows, with _MSC_VER, + we have to define Py_LIMITED_API even before including pyconfig.h. + In that case, we guess what pyconfig.h will do to the macros above, + and check our guess after the #include. 
+ + Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv + version >= 16.0.0. With older versions of either, you don't get a + copy of PYTHON3.DLL in the virtualenv. We can't check the version of + CPython *before* we even include pyconfig.h. ffi.set_source() puts + a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is + running on Windows < 3.5, as an attempt at fixing it, but that's + arguably wrong because it may not be the target version of Python. + Still better than nothing I guess. As another workaround, you can + remove the definition of Py_LIMITED_API here. + + See also 'py_limited_api' in cffi/setuptools_ext.py. +*/ +#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API) +# ifdef _MSC_VER +# if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) +# define Py_LIMITED_API +# endif +# include + /* sanity-check: Py_LIMITED_API will cause crashes if any of these + are also defined. Normally, the Python file PC/pyconfig.h does not + cause any of these to be defined, with the exception that _DEBUG + causes Py_DEBUG. Double-check that. 
*/ +# ifdef Py_LIMITED_API +# if defined(Py_DEBUG) +# error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set" +# endif +# if defined(Py_TRACE_REFS) +# error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set" +# endif +# if defined(Py_REF_DEBUG) +# error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set" +# endif +# endif +# else +# include +# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) +# define Py_LIMITED_API +# endif +# endif +#endif + +#include +#ifdef __cplusplus +extern "C" { +#endif +#include +#include "parse_c_type.h" + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined 
(__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif + +#ifdef __GNUC__ +# define _CFFI_UNUSED_FN __attribute__((unused)) +#else +# define _CFFI_UNUSED_FN /* nothing */ +#endif + +#ifdef __cplusplus +# ifndef _Bool + typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */ +# endif +#endif + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? 
(type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + not used any more +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \ + PyObject *, char **))_cffi_exports[23]) +#define 
_cffi_convert_array_from_object \ + ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 +#define _cffi_call_python \ + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) +#define _cffi_to_c_wchar3216_t \ + ((int(*)(PyObject *))_cffi_exports[26]) +#define _cffi_from_c_wchar3216_t \ + ((PyObject *(*)(int))_cffi_exports[27]) +#define _CFFI_NUM_EXPORTS 28 + +struct _cffi_ctypedescr; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; + +#define _cffi_type(index) ( \ + assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ + (struct _cffi_ctypedescr *)_cffi_types[index]) + +static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, + const struct _cffi_type_context_s *ctx) +{ + PyObject *module, *o_arg, *new_module; + void *raw[] = { + (void *)module_name, + (void *)version, + (void *)_cffi_exports, + (void *)ctx, + }; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + o_arg = PyLong_FromVoidPtr((void *)raw); + if (o_arg == NULL) + goto failure; + + new_module = PyObject_CallMethod( + module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); + + Py_DECREF(o_arg); + Py_DECREF(module); + return new_module; + + failure: + Py_XDECREF(module); + return NULL; +} + + +#ifdef HAVE_WCHAR_H +typedef wchar_t _cffi_wchar_t; +#else +typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */ +#endif + +_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 2) + return (uint16_t)_cffi_to_c_wchar_t(o); + else + return (uint16_t)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x) +{ + if (sizeof(_cffi_wchar_t) == 2) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o) +{ + if (sizeof(_cffi_wchar_t) == 4) + return 
(int)_cffi_to_c_wchar_t(o); + else + return (int)_cffi_to_c_wchar3216_t(o); +} + +_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x) +{ + if (sizeof(_cffi_wchar_t) == 4) + return _cffi_from_c_wchar_t((_cffi_wchar_t)x); + else + return _cffi_from_c_wchar3216_t((int)x); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +_CFFI_UNUSED_FN static int +_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + p = *output_data; + if (p == NULL) { + struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +_CFFI_UNUSED_FN static void +_cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +/********** end CPython-specific section **********/ +#else +_CFFI_UNUSED_FN +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org +#endif + + +#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) + +#define _cffi_prim_int(size, sign) \ + ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ + (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ + (size) == 4 ? ((sign) ? 
_CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ + (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ + _CFFI__UNKNOWN_PRIM) + +#define _cffi_prim_float(size) \ + ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ + (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ + (size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE : \ + _CFFI__UNKNOWN_FLOAT_PRIM) + +#define _cffi_check_int(got, got_nonpos, expected) \ + ((got_nonpos) == (expected <= 0) && \ + (got) == (unsigned long long)expected) + +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + +#ifdef __cplusplus +} +#endif diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/_embedding.h b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/_embedding.h new file mode 100644 index 0000000000000000000000000000000000000000..1cb66f2352cd0ac9dc506bcdb7b333f3d70b92ed --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/_embedding.h @@ -0,0 +1,550 @@ + +/***** Support code for embedding *****/ + +#ifdef __cplusplus +extern "C" { +#endif + + +#if defined(_WIN32) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. If _cffi_start_python() fails, then this is set + to NULL; otherwise, it should never be NULL. + + After initialization is complete, both are equal. 
However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. +*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + +#include "_cffi_errors.h" + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + PyObject *builtins; + + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. 
It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + builtins = PyEval_GetBuiltins(); + if (builtins == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. 
+ Debugging load-time failures with embedding is not fun + */ + PyObject *ecap; + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + ecap = _cffi_start_error_capture(); + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString( + "Failed to initialize the Python-CFFI embedding logic:\n\n", f); + } + + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.16.0" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + _cffi_stop_error_capture(ecap); + } + result = -1; + goto done; +} + +#if PY_VERSION_HEX < 0x03080000 +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ +#endif + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + (What it really does used to be completely different in Python 2 + and Python 3, with the Python 2 solution avoiding the spin-lock + around the Py_InitializeEx() call. However, after recent changes + to CPython 2.7 (issue #358) it no longer works. So we use the + Python 3 solution everywhere.) + + This initializes Python by calling Py_InitializeEx(). + Important: this must not be called concurrently at all. 
+ So we use a global variable as a simple spin lock. This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. + + In Python < 3.8, we choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + In Python >= 3.8, this string array is no longer writable, so + instead we pick PyCapsuleType.tp_version_tag. We can't change + Python < 3.8 because someone might use a mixture of cffi + embedded modules, some of which were compiled before this file + changed. + + In Python >= 3.12, this stopped working because that particular + tp_version_tag gets modified during interpreter startup. It's + arguably a bad idea before 3.12 too, but again we can't change + that because someone might use a mixture of cffi embedded + modules, and no-one reported a bug so far. In Python >= 3.12 + we go instead for PyCapsuleType.tp_as_buffer, which is supposed + to always be NULL. We write to it temporarily a pointer to + a struct full of NULLs, which is semantically the same. + */ + +#ifdef WITH_THREAD +# if PY_VERSION_HEX < 0x03080000 + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value, *locked_value; + + while (1) { /* spin loop */ + old_value = *lock; + locked_value = old_value + 1; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
*/ + } + } +# else +# if PY_VERSION_HEX < 0x030C0000 + int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag; + int old_value, locked_value = -42; + assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG)); +# else + static struct ebp_s { PyBufferProcs buf; int mark; } empty_buffer_procs; + empty_buffer_procs.mark = -42; + PyBufferProcs *volatile *lock = (PyBufferProcs *volatile *) + &PyCapsule_Type.tp_as_buffer; + PyBufferProcs *old_value, *locked_value = &empty_buffer_procs.buf; +# endif + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value == 0) { + if (cffi_compare_and_swap(lock, old_value, locked_value)) + break; + } + else { +# if PY_VERSION_HEX < 0x030C0000 + assert(old_value == locked_value); +# else + /* The pointer should point to a possibly different + empty_buffer_procs from another C extension module */ + assert(((struct ebp_s *)old_value)->mark == -42); +# endif + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +# endif +#endif + + /* call Py_InitializeEx() */ + if (!Py_IsInitialized()) { + _cffi_py_initialize(); +#if PY_VERSION_HEX < 0x03070000 + PyEval_InitThreads(); +#endif + PyEval_SaveThread(); /* release the GIL */ + /* the returned tstate must be the one that has been stored into the + autoTLSkey by _PyGILState_Init() called from Py_Initialize(). 
*/ + } + else { +#if PY_VERSION_HEX < 0x03070000 + /* PyEval_InitThreads() is always a no-op from CPython 3.7 */ + PyGILState_STATE state = PyGILState_Ensure(); + PyEval_InitThreads(); + PyGILState_Release(state); +#endif + } + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, locked_value, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void *func; /* function pointer */ + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). 
+ + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. 
Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. 
*/ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier + +#ifdef __cplusplus +} +#endif diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/_imp_emulation.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/_imp_emulation.py new file mode 100644 index 0000000000000000000000000000000000000000..136abdddf9d1276095e6f6724298ac19811c136a --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/_imp_emulation.py @@ -0,0 +1,83 @@ + +try: + # this works on Python < 3.12 + from imp import * + +except ImportError: + # this is a limited emulation for Python >= 3.12. + # Note that this is used only for tests or for the old ffi.verify(). + # This is copied from the source code of Python 3.11. 
+ + from _imp import (acquire_lock, release_lock, + is_builtin, is_frozen) + + from importlib._bootstrap import _load + + from importlib import machinery + import os + import sys + import tokenize + + SEARCH_ERROR = 0 + PY_SOURCE = 1 + PY_COMPILED = 2 + C_EXTENSION = 3 + PY_RESOURCE = 4 + PKG_DIRECTORY = 5 + C_BUILTIN = 6 + PY_FROZEN = 7 + PY_CODERESOURCE = 8 + IMP_HOOK = 9 + + def get_suffixes(): + extensions = [(s, 'rb', C_EXTENSION) + for s in machinery.EXTENSION_SUFFIXES] + source = [(s, 'r', PY_SOURCE) for s in machinery.SOURCE_SUFFIXES] + bytecode = [(s, 'rb', PY_COMPILED) for s in machinery.BYTECODE_SUFFIXES] + return extensions + source + bytecode + + def find_module(name, path=None): + if not isinstance(name, str): + raise TypeError("'name' must be a str, not {}".format(type(name))) + elif not isinstance(path, (type(None), list)): + # Backwards-compatibility + raise RuntimeError("'path' must be None or a list, " + "not {}".format(type(path))) + + if path is None: + if is_builtin(name): + return None, None, ('', '', C_BUILTIN) + elif is_frozen(name): + return None, None, ('', '', PY_FROZEN) + else: + path = sys.path + + for entry in path: + package_directory = os.path.join(entry, name) + for suffix in ['.py', machinery.BYTECODE_SUFFIXES[0]]: + package_file_name = '__init__' + suffix + file_path = os.path.join(package_directory, package_file_name) + if os.path.isfile(file_path): + return None, package_directory, ('', '', PKG_DIRECTORY) + for suffix, mode, type_ in get_suffixes(): + file_name = name + suffix + file_path = os.path.join(entry, file_name) + if os.path.isfile(file_path): + break + else: + continue + break # Break out of outer loop when breaking out of inner loop. 
+ else: + raise ImportError(name, name=name) + + encoding = None + if 'b' not in mode: + with open(file_path, 'rb') as file: + encoding = tokenize.detect_encoding(file.readline)[0] + file = open(file_path, mode, encoding=encoding) + return file, file_path, (suffix, mode, type_) + + def load_dynamic(name, path, file=None): + loader = machinery.ExtensionFileLoader(name, path) + spec = machinery.ModuleSpec(name=name, loader=loader, origin=path) + return _load(spec) diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/_shimmed_dist_utils.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/_shimmed_dist_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..611bf40f40ffbda6d9514ef40996947dd6f5097c --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/_shimmed_dist_utils.py @@ -0,0 +1,41 @@ +""" +Temporary shim module to indirect the bits of distutils we need from setuptools/distutils while providing useful +error messages beyond `No module named 'distutils' on Python >= 3.12, or when setuptools' vendored distutils is broken. + +This is a compromise to avoid a hard-dep on setuptools for Python >= 3.12, since many users don't need runtime compilation support from CFFI. +""" +import sys + +try: + # import setuptools first; this is the most robust way to ensure its embedded distutils is available + # (the .pth shim should usually work, but this is even more robust) + import setuptools +except Exception as ex: + if sys.version_info >= (3, 12): + # Python 3.12 has no built-in distutils to fall back on, so any import problem is fatal + raise Exception("This CFFI feature requires setuptools on Python >= 3.12. 
The setuptools module is missing or non-functional.") from ex + + # silently ignore on older Pythons (support fallback to stdlib distutils where available) +else: + del setuptools + +try: + # bring in just the bits of distutils we need, whether they really came from setuptools or stdlib-embedded distutils + from distutils import log, sysconfig + from distutils.ccompiler import CCompiler + from distutils.command.build_ext import build_ext + from distutils.core import Distribution, Extension + from distutils.dir_util import mkpath + from distutils.errors import DistutilsSetupError, CompileError, LinkError + from distutils.log import set_threshold, set_verbosity + + if sys.platform == 'win32': + from distutils.msvc9compiler import MSVCCompiler +except Exception as ex: + if sys.version_info >= (3, 12): + raise Exception("This CFFI feature requires setuptools on Python >= 3.12. Please install the setuptools package.") from ex + + # anything older, just let the underlying distutils import error fly + raise Exception("This CFFI feature requires distutils. Please install the distutils or setuptools package.") from ex + +del sys diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/api.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/api.py new file mode 100644 index 0000000000000000000000000000000000000000..edeb7928107c7f8bc56411f67013f4ea08403860 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/api.py @@ -0,0 +1,965 @@ +import sys, types +from .lock import allocate_lock +from .error import CDefError +from . import model + +try: + callable +except NameError: + # Python 3.1 + from collections import Callable + callable = lambda x: isinstance(x, Callable) + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +_unspecified = object() + + + +class FFI(object): + r''' + The main top-level class that you instantiate once, or once per module. 
+ + Example usage: + + ffi = FFI() + ffi.cdef(""" + int printf(const char *, ...); + """) + + C = ffi.dlopen(None) # standard library + -or- + C = ffi.verify() # use a C compiler: verify the decl above is right + + C.printf("hello, %s!\n", ffi.new("char[]", "world")) + ''' + + def __init__(self, backend=None): + """Create an FFI instance. The 'backend' argument is used to + select a non-default backend, mostly for tests. + """ + if backend is None: + # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with + # _cffi_backend.so compiled. + import _cffi_backend as backend + from . import __version__ + if backend.__version__ != __version__: + # bad version! Try to be as explicit as possible. + if hasattr(backend, '__file__'): + # CPython + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % ( + __version__, __file__, + backend.__version__, backend.__file__)) + else: + # PyPy + raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % ( + __version__, __file__, backend.__version__)) + # (If you insist you can also try to pass the option + # 'backend=backend_ctypes.CTypesBackend()', but don't + # rely on it! It's probably not going to work well.) + + from . 
import cparser + self._backend = backend + self._lock = allocate_lock() + self._parser = cparser.Parser() + self._cached_btypes = {} + self._parsed_types = types.ModuleType('parsed_types').__dict__ + self._new_types = types.ModuleType('new_types').__dict__ + self._function_caches = [] + self._libraries = [] + self._cdefsources = [] + self._included_ffis = [] + self._windows_unicode = None + self._init_once_cache = {} + self._cdef_version = None + self._embedding = None + self._typecache = model.get_typecache(backend) + if hasattr(backend, 'set_ffi'): + backend.set_ffi(self) + for name in list(backend.__dict__): + if name.startswith('RTLD_'): + setattr(self, name, getattr(backend, name)) + # + with self._lock: + self.BVoidP = self._get_cached_btype(model.voidp_type) + self.BCharA = self._get_cached_btype(model.char_array_type) + if isinstance(backend, types.ModuleType): + # _cffi_backend: attach these constants to the class + if not hasattr(FFI, 'NULL'): + FFI.NULL = self.cast(self.BVoidP, 0) + FFI.CData, FFI.CType = backend._get_types() + else: + # ctypes backend: attach these constants to the instance + self.NULL = self.cast(self.BVoidP, 0) + self.CData, self.CType = backend._get_types() + self.buffer = backend.buffer + + def cdef(self, csource, override=False, packed=False, pack=None): + """Parse the given C source. This registers all declared functions, + types, and global variables. The functions and global variables can + then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. + The types can be used in 'ffi.new()' and other functions. + If 'packed' is specified as True, all structs declared inside this + cdef are packed, i.e. laid out without any field alignment at all. + Alternatively, 'pack' can be a small integer, and requests for + alignment greater than that are ignored (pack=1 is equivalent to + packed=True). 
+ """ + self._cdef(csource, override=override, packed=packed, pack=pack) + + def embedding_api(self, csource, packed=False, pack=None): + self._cdef(csource, packed=packed, pack=pack, dllexport=True) + if self._embedding is None: + self._embedding = '' + + def _cdef(self, csource, override=False, **options): + if not isinstance(csource, str): # unicode, on Python 2 + if not isinstance(csource, basestring): + raise TypeError("cdef() argument must be a string") + csource = csource.encode('ascii') + with self._lock: + self._cdef_version = object() + self._parser.parse(csource, override=override, **options) + self._cdefsources.append(csource) + if override: + for cache in self._function_caches: + cache.clear() + finishlist = self._parser._recomplete + if finishlist: + self._parser._recomplete = [] + for tp in finishlist: + tp.finish_backend_type(self, finishlist) + + def dlopen(self, name, flags=0): + """Load and return a dynamic library identified by 'name'. + The standard C library can be loaded by passing None. + Note that functions and types declared by 'ffi.cdef()' are not + linked to a particular library, just like C headers; in the + library we only look for the actual (untyped) symbols. + """ + if not (isinstance(name, basestring) or + name is None or + isinstance(name, self.CData)): + raise TypeError("dlopen(name): name must be a file name, None, " + "or an already-opened 'void *' handle") + with self._lock: + lib, function_cache = _make_ffi_library(self, name, flags) + self._function_caches.append(function_cache) + self._libraries.append(lib) + return lib + + def dlclose(self, lib): + """Close a library obtained with ffi.dlopen(). After this call, + access to functions or variables from the library will fail + (possibly with a segmentation fault). + """ + type(lib).__cffi_close__(lib) + + def _typeof_locked(self, cdecl): + # call me with the lock! 
+        key = cdecl
+        if key in self._parsed_types:
+            return self._parsed_types[key]
+        #
+        if not isinstance(cdecl, str): # unicode, on Python 2
+            cdecl = cdecl.encode('ascii')
+        #
+        type = self._parser.parse_type(cdecl)
+        really_a_function_type = type.is_raw_function
+        if really_a_function_type:
+            type = type.as_function_pointer()
+        btype = self._get_cached_btype(type)
+        result = btype, really_a_function_type
+        self._parsed_types[key] = result
+        return result
+
+    def _typeof(self, cdecl, consider_function_as_funcptr=False):
+        # string -> ctype object
+        try:
+            result = self._parsed_types[cdecl]
+        except KeyError:
+            with self._lock:
+                result = self._typeof_locked(cdecl)
+        #
+        btype, really_a_function_type = result
+        if really_a_function_type and not consider_function_as_funcptr:
+            raise CDefError("the type %r is a function type, not a "
+                            "pointer-to-function type" % (cdecl,))
+        return btype
+
+    def typeof(self, cdecl):
+        """Parse the C type given as a string and return the
+        corresponding <ctype> object.
+        It can also be used on 'cdata' instance to get its C type.
+        """
+        if isinstance(cdecl, basestring):
+            return self._typeof(cdecl)
+        if isinstance(cdecl, self.CData):
+            return self._backend.typeof(cdecl)
+        if isinstance(cdecl, types.BuiltinFunctionType):
+            res = _builtin_function_type(cdecl)
+            if res is not None:
+                return res
+        if (isinstance(cdecl, types.FunctionType)
+                and hasattr(cdecl, '_cffi_base_type')):
+            with self._lock:
+                return self._get_cached_btype(cdecl._cffi_base_type)
+        raise TypeError(type(cdecl))
+
+    def sizeof(self, cdecl):
+        """Return the size in bytes of the argument. It can be a
+        string naming a C type, or a 'cdata' instance.
+        """
+        if isinstance(cdecl, basestring):
+            BType = self._typeof(cdecl)
+            return self._backend.sizeof(BType)
+        else:
+            return self._backend.sizeof(cdecl)
+
+    def alignof(self, cdecl):
+        """Return the natural alignment size in bytes of the C type
+        given as a string.
+ """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.alignof(cdecl) + + def offsetof(self, cdecl, *fields_or_indexes): + """Return the offset of the named field inside the given + structure or array, which must be given as a C type name. + You can give several field names in case of nested structures. + You can also give numeric values which correspond to array + items, in case of an array type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._typeoffsetof(cdecl, *fields_or_indexes)[1] + + def new(self, cdecl, init=None): + """Allocate an instance according to the specified C type and + return a pointer to it. The specified C type must be either a + pointer or an array: ``new('X *')`` allocates an X and returns + a pointer to it, whereas ``new('X[n]')`` allocates an array of + n X'es and returns an array referencing it (which works + mostly like a pointer, like in C). You can also use + ``new('X[]', n)`` to allocate an array of a non-constant + length n. + + The memory is initialized following the rules of declaring a + global variable in C: by default it is zero-initialized, but + an explicit initializer can be given which can be used to + fill all or part of the memory. + + When the returned object goes out of scope, the memory + is freed. In other words the returned object has + ownership of the value of type 'cdecl' that it points to. This + means that the raw data can be used as long as this object is + kept alive, but must not be used for a longer time. Be careful + about that when copying the pointer to the memory somewhere + else, e.g. into another structure. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.newp(cdecl, init) + + def new_allocator(self, alloc=None, free=None, + should_clear_after_alloc=True): + """Return a new allocator, i.e. 
a function that behaves like ffi.new() + but uses the provided low-level 'alloc' and 'free' functions. + + 'alloc' is called with the size as argument. If it returns NULL, a + MemoryError is raised. 'free' is called with the result of 'alloc' + as argument. Both can be either Python function or directly C + functions. If 'free' is None, then no free function is called. + If both 'alloc' and 'free' are None, the default is used. + + If 'should_clear_after_alloc' is set to False, then the memory + returned by 'alloc' is assumed to be already cleared (or you are + fine with garbage); otherwise CFFI will clear it. + """ + compiled_ffi = self._backend.FFI() + allocator = compiled_ffi.new_allocator(alloc, free, + should_clear_after_alloc) + def allocate(cdecl, init=None): + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return allocator(cdecl, init) + return allocate + + def cast(self, cdecl, source): + """Similar to a C cast: returns an instance of the named C + type initialized with the given 'source'. The source is + casted between integers or pointers of any type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.cast(cdecl, source) + + def string(self, cdata, maxlen=-1): + """Return a Python string (or unicode string) from the 'cdata'. + If 'cdata' is a pointer or array of characters or bytes, returns + the null-terminated string. The returned string extends until + the first null character, or at most 'maxlen' characters. If + 'cdata' is an array then 'maxlen' defaults to its length. + + If 'cdata' is a pointer or array of wchar_t, returns a unicode + string following the same rules. + + If 'cdata' is a single character or byte or a wchar_t, returns + it as a string or unicode string. + + If 'cdata' is an enum, returns the value of the enumerator as a + string, or 'NUMBER' if the value is out of range. 
+ """ + return self._backend.string(cdata, maxlen) + + def unpack(self, cdata, length): + """Unpack an array of C data of the given length, + returning a Python string/unicode/list. + + If 'cdata' is a pointer to 'char', returns a byte string. + It does not stop at the first null. This is equivalent to: + ffi.buffer(cdata, length)[:] + + If 'cdata' is a pointer to 'wchar_t', returns a unicode string. + 'length' is measured in wchar_t's; it is not the size in bytes. + + If 'cdata' is a pointer to anything else, returns a list of + 'length' items. This is a faster equivalent to: + [cdata[i] for i in range(length)] + """ + return self._backend.unpack(cdata, length) + + #def buffer(self, cdata, size=-1): + # """Return a read-write buffer object that references the raw C data + # pointed to by the given 'cdata'. The 'cdata' must be a pointer or + # an array. Can be passed to functions expecting a buffer, or directly + # manipulated with: + # + # buf[:] get a copy of it in a regular string, or + # buf[idx] as a single character + # buf[:] = ... + # buf[idx] = ... change the content + # """ + # note that 'buffer' is a type, set on this instance by __init__ + + def from_buffer(self, cdecl, python_buffer=_unspecified, + require_writable=False): + """Return a cdata of the given type pointing to the data of the + given Python object, which must support the buffer interface. + Note that this is not meant to be used on the built-in types + str or unicode (you can build 'char[]' arrays explicitly) + but only on objects containing large quantities of raw data + in some other format, like 'array.array' or numpy arrays. + + The first argument is optional and default to 'char[]'. 
+ """ + if python_buffer is _unspecified: + cdecl, python_buffer = self.BCharA, cdecl + elif isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.from_buffer(cdecl, python_buffer, + require_writable) + + def memmove(self, dest, src, n): + """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest. + + Like the C function memmove(), the memory areas may overlap; + apart from that it behaves like the C function memcpy(). + + 'src' can be any cdata ptr or array, or any Python buffer object. + 'dest' can be any cdata ptr or array, or a writable Python buffer + object. The size to copy, 'n', is always measured in bytes. + + Unlike other methods, this one supports all Python buffer including + byte strings and bytearrays---but it still does not support + non-contiguous buffers. + """ + return self._backend.memmove(dest, src, n) + + def callback(self, cdecl, python_callable=None, error=None, onerror=None): + """Return a callback object or a decorator making such a + callback object. 'cdecl' must name a C function pointer type. + The callback invokes the specified 'python_callable' (which may + be provided either directly or via a decorator). Important: the + callback object must be manually kept alive for as long as the + callback may be invoked from the C level. + """ + def callback_decorator_wrap(python_callable): + if not callable(python_callable): + raise TypeError("the 'python_callable' argument " + "is not callable") + return self._backend.callback(cdecl, python_callable, + error, onerror) + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) + if python_callable is None: + return callback_decorator_wrap # decorator mode + else: + return callback_decorator_wrap(python_callable) # direct mode + + def getctype(self, cdecl, replace_with=''): + """Return a string giving the C type 'cdecl', which may be itself + a string or a object. 
If 'replace_with' is given, it gives + extra text to append (or insert for more complicated C types), like + a variable name, or '*' to get actually the C type 'pointer-to-cdecl'. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + replace_with = replace_with.strip() + if (replace_with.startswith('*') + and '&[' in self._backend.getcname(cdecl, '&')): + replace_with = '(%s)' % replace_with + elif replace_with and not replace_with[0] in '[(': + replace_with = ' ' + replace_with + return self._backend.getcname(cdecl, replace_with) + + def gc(self, cdata, destructor, size=0): + """Return a new cdata object that points to the same + data. Later, when this new cdata object is garbage-collected, + 'destructor(old_cdata_object)' will be called. + + The optional 'size' gives an estimate of the size, used to + trigger the garbage collection more eagerly. So far only used + on PyPy. It tells the GC that the returned object keeps alive + roughly 'size' bytes of external memory. + """ + return self._backend.gcp(cdata, destructor, size) + + def _get_cached_btype(self, type): + assert self._lock.acquire(False) is False + # call me with the lock! + try: + BType = self._cached_btypes[type] + except KeyError: + finishlist = [] + BType = type.get_cached_btype(self, finishlist) + for type in finishlist: + type.finish_backend_type(self, finishlist) + return BType + + def verify(self, source='', tmpdir=None, **kwargs): + """Verify that the current ffi signatures compile on this + machine, and return a dynamic library object. The dynamic + library can be used to call functions and access global + variables declared in this 'ffi'. The library is compiled + by the C compiler: it gives you C-level API compatibility + (including calling macros). This is unlike 'ffi.dlopen()', + which requires binary compatibility in the signatures. 
+ """ + from .verifier import Verifier, _caller_dir_pycache + # + # If set_unicode(True) was called, insert the UNICODE and + # _UNICODE macro declarations + if self._windows_unicode: + self._apply_windows_unicode(kwargs) + # + # Set the tmpdir here, and not in Verifier.__init__: it picks + # up the caller's directory, which we want to be the caller of + # ffi.verify(), as opposed to the caller of Veritier(). + tmpdir = tmpdir or _caller_dir_pycache() + # + # Make a Verifier() and use it to load the library. + self.verifier = Verifier(self, source, tmpdir, **kwargs) + lib = self.verifier.load_library() + # + # Save the loaded library for keep-alive purposes, even + # if the caller doesn't keep it alive itself (it should). + self._libraries.append(lib) + return lib + + def _get_errno(self): + return self._backend.get_errno() + def _set_errno(self, errno): + self._backend.set_errno(errno) + errno = property(_get_errno, _set_errno, None, + "the value of 'errno' from/to the C calls") + + def getwinerror(self, code=-1): + return self._backend.getwinerror(code) + + def _pointer_to(self, ctype): + with self._lock: + return model.pointer_cache(self, ctype) + + def addressof(self, cdata, *fields_or_indexes): + """Return the address of a . + If 'fields_or_indexes' are given, returns the address of that + field or array item in the structure or array, recursively in + case of nested structures. 
+ """ + try: + ctype = self._backend.typeof(cdata) + except TypeError: + if '__addressof__' in type(cdata).__dict__: + return type(cdata).__addressof__(cdata, *fields_or_indexes) + raise + if fields_or_indexes: + ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes) + else: + if ctype.kind == "pointer": + raise TypeError("addressof(pointer)") + offset = 0 + ctypeptr = self._pointer_to(ctype) + return self._backend.rawaddressof(ctypeptr, cdata, offset) + + def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes): + ctype, offset = self._backend.typeoffsetof(ctype, field_or_index) + for field1 in fields_or_indexes: + ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1) + offset += offset1 + return ctype, offset + + def include(self, ffi_to_include): + """Includes the typedefs, structs, unions and enums defined + in another FFI instance. Usage is similar to a #include in C, + where a part of the program might include types defined in + another part for its own usage. Note that the include() + method has no effect on functions, constants and global + variables, which must anyway be accessed directly from the + lib object returned by the original FFI instance. 
+ """ + if not isinstance(ffi_to_include, FFI): + raise TypeError("ffi.include() expects an argument that is also of" + " type cffi.FFI, not %r" % ( + type(ffi_to_include).__name__,)) + if ffi_to_include is self: + raise ValueError("self.include(self)") + with ffi_to_include._lock: + with self._lock: + self._parser.include(ffi_to_include._parser) + self._cdefsources.append('[') + self._cdefsources.extend(ffi_to_include._cdefsources) + self._cdefsources.append(']') + self._included_ffis.append(ffi_to_include) + + def new_handle(self, x): + return self._backend.newp_handle(self.BVoidP, x) + + def from_handle(self, x): + return self._backend.from_handle(x) + + def release(self, x): + self._backend.release(x) + + def set_unicode(self, enabled_flag): + """Windows: if 'enabled_flag' is True, enable the UNICODE and + _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR + to be (pointers to) wchar_t. If 'enabled_flag' is False, + declare these types to be (pointers to) plain 8-bit characters. + This is mostly for backward compatibility; you usually want True. 
+ """ + if self._windows_unicode is not None: + raise ValueError("set_unicode() can only be called once") + enabled_flag = bool(enabled_flag) + if enabled_flag: + self.cdef("typedef wchar_t TBYTE;" + "typedef wchar_t TCHAR;" + "typedef const wchar_t *LPCTSTR;" + "typedef const wchar_t *PCTSTR;" + "typedef wchar_t *LPTSTR;" + "typedef wchar_t *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + else: + self.cdef("typedef char TBYTE;" + "typedef char TCHAR;" + "typedef const char *LPCTSTR;" + "typedef const char *PCTSTR;" + "typedef char *LPTSTR;" + "typedef char *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + self._windows_unicode = enabled_flag + + def _apply_windows_unicode(self, kwds): + defmacros = kwds.get('define_macros', ()) + if not isinstance(defmacros, (list, tuple)): + raise TypeError("'define_macros' must be a list or tuple") + defmacros = list(defmacros) + [('UNICODE', '1'), + ('_UNICODE', '1')] + kwds['define_macros'] = defmacros + + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # + if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python{0[0]}{0[1]}".format(sys.version_info) + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + if sys.version_info < (3,): + pythonlib = "pypy-c" + else: + pythonlib = "pypy3-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. 
+ if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) + else: + if sys.platform == "win32": + template = "python%d%d" + if hasattr(sys, 'gettotalrefcount'): + template += '_d' + else: + try: + import sysconfig + except ImportError: # 2.6 + from cffi._shimmed_dist_utils import sysconfig + template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') + + def set_source(self, module_name, source, source_extension='.c', **kwds): + import os + if hasattr(self, '_assigned_source'): + raise ValueError("set_source() cannot be called several times " + "per ffi object") + if not isinstance(module_name, basestring): + raise TypeError("'module_name' must be a string") + if os.sep in module_name or (os.altsep and os.altsep in module_name): + raise ValueError("'module_name' must not contain '/': use a dotted " + "name to make a 'package.module' location") + self._assigned_source = (str(module_name), source, + source_extension, kwds) + + def set_source_pkgconfig(self, module_name, pkgconfig_libs, source, + source_extension='.c', **kwds): + from . 
import pkgconfig + if not isinstance(pkgconfig_libs, list): + raise TypeError("the pkgconfig_libs argument must be a list " + "of package names") + kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs) + pkgconfig.merge_flags(kwds, kwds2) + self.set_source(module_name, source, source_extension, **kwds) + + def distutils_extension(self, tmpdir='build', verbose=True): + from cffi._shimmed_dist_utils import mkpath + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored + return self.verifier.get_extension() + raise ValueError("set_source() must be called before" + " distutils_extension()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("distutils_extension() is only for C extension " + "modules, not for dlopen()-style pure Python " + "modules") + mkpath(tmpdir) + ext, updated = recompile(self, module_name, + source, tmpdir=tmpdir, extradir=tmpdir, + source_extension=source_extension, + call_c_compiler=False, **kwds) + if verbose: + if updated: + sys.stderr.write("regenerated: %r\n" % (ext.sources[0],)) + else: + sys.stderr.write("not modified: %r\n" % (ext.sources[0],)) + return ext + + def emit_c_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("emit_c_code() is only for C extension modules, " + "not for dlopen()-style pure Python modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def emit_python_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = 
self._assigned_source + if source is not None: + raise TypeError("emit_python_code() is only for dlopen()-style " + "pure Python modules, not for C extension modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def compile(self, tmpdir='.', verbose=0, target=None, debug=None): + """The 'target' argument gives the final file name of the + compiled DLL. Use '*' to force distutils' choice, suitable for + regular CPython C API modules. Use a file name ending in '.*' + to ask for the system's default extension for dynamic libraries + (.so/.dll/.dylib). + + The default is '*' when building a non-embedded C API extension, + and (module_name + '.*') when building an embedded library. + """ + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before compile()") + module_name, source, source_extension, kwds = self._assigned_source + return recompile(self, module_name, source, tmpdir=tmpdir, + target=target, source_extension=source_extension, + compiler_verbose=verbose, debug=debug, **kwds) + + def init_once(self, func, tag): + # Read _init_once_cache[tag], which is either (False, lock) if + # we're calling the function now in some thread, or (True, result). + # Don't call setdefault() in most cases, to avoid allocating and + # immediately freeing a lock; but still use setdefaut() to avoid + # races. + try: + x = self._init_once_cache[tag] + except KeyError: + x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) + # Common case: we got (True, result), so we return the result. + if x[0]: + return x[1] + # Else, it's a lock. Acquire it to serialize the following tests. + with x[1]: + # Read again from _init_once_cache the current status. + x = self._init_once_cache[tag] + if x[0]: + return x[1] + # Call the function and store the result back. 
+ result = func() + self._init_once_cache[tag] = (True, result) + return result + + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + + def list_types(self): + """Returns the user type names known to this FFI instance. + This returns a tuple containing three lists of names: + (typedef_names, names_of_structs, names_of_unions) + """ + typedefs = [] + structs = [] + unions = [] + for key in self._parser._declarations: + if key.startswith('typedef '): + typedefs.append(key[8:]) + elif key.startswith('struct '): + structs.append(key[7:]) + elif key.startswith('union '): + unions.append(key[6:]) + typedefs.sort() + structs.sort() + unions.sort() + return (typedefs, structs, unions) + + +def _load_backend_lib(backend, name, flags): + import os + if not isinstance(name, basestring): + if sys.platform != "win32" or name is not None: + return backend.load_library(name, flags) + name = "c" # Windows: load_library(None) fails, but this works + # on Python 2 (backward compatibility hack only) + first_error = None + if '.' 
in name or '/' in name or os.sep in name: + try: + return backend.load_library(name, flags) + except OSError as e: + first_error = e + import ctypes.util + path = ctypes.util.find_library(name) + if path is None: + if name == "c" and sys.platform == "win32" and sys.version_info >= (3,): + raise OSError("dlopen(None) cannot work on Windows for Python 3 " + "(see http://bugs.python.org/issue23606)") + msg = ("ctypes.util.find_library() did not manage " + "to locate a library called %r" % (name,)) + if first_error is not None: + msg = "%s. Additionally, %s" % (first_error, msg) + raise OSError(msg) + return backend.load_library(path, flags) + +def _make_ffi_library(ffi, libname, flags): + backend = ffi._backend + backendlib = _load_backend_lib(backend, libname, flags) + # + def accessor_function(name): + key = 'function ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + value = backendlib.load_function(BType, name) + library.__dict__[name] = value + # + def accessor_variable(name): + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + read_variable = backendlib.read_variable + write_variable = backendlib.write_variable + setattr(FFILibrary, name, property( + lambda self: read_variable(BType, name), + lambda self, value: write_variable(BType, name, value))) + # + def addressof_var(name): + try: + return addr_variables[name] + except KeyError: + with ffi._lock: + if name not in addr_variables: + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + if BType.kind != 'array': + BType = model.pointer_cache(ffi, BType) + p = backendlib.load_function(BType, name) + addr_variables[name] = p + return addr_variables[name] + # + def accessor_constant(name): + raise NotImplementedError("non-integer constant '%s' cannot be " + "accessed from a dlopen() library" % (name,)) + # + def accessor_int_constant(name): + library.__dict__[name] = 
ffi._parser._int_constants[name] + # + accessors = {} + accessors_version = [False] + addr_variables = {} + # + def update_accessors(): + if accessors_version[0] is ffi._cdef_version: + return + # + for key, (tp, _) in ffi._parser._declarations.items(): + if not isinstance(tp, model.EnumType): + tag, name = key.split(' ', 1) + if tag == 'function': + accessors[name] = accessor_function + elif tag == 'variable': + accessors[name] = accessor_variable + elif tag == 'constant': + accessors[name] = accessor_constant + else: + for i, enumname in enumerate(tp.enumerators): + def accessor_enum(name, tp=tp, i=i): + tp.check_not_partial() + library.__dict__[name] = tp.enumvalues[i] + accessors[enumname] = accessor_enum + for name in ffi._parser._int_constants: + accessors.setdefault(name, accessor_int_constant) + accessors_version[0] = ffi._cdef_version + # + def make_accessor(name): + with ffi._lock: + if name in library.__dict__ or name in FFILibrary.__dict__: + return # added by another thread while waiting for the lock + if name not in accessors: + update_accessors() + if name not in accessors: + raise AttributeError(name) + accessors[name](name) + # + class FFILibrary(object): + def __getattr__(self, name): + make_accessor(name) + return getattr(self, name) + def __setattr__(self, name, value): + try: + property = getattr(self.__class__, name) + except AttributeError: + make_accessor(name) + setattr(self, name, value) + else: + property.__set__(self, value) + def __dir__(self): + with ffi._lock: + update_accessors() + return accessors.keys() + def __addressof__(self, name): + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + make_accessor(name) + if name in library.__dict__: + return library.__dict__[name] + if name in FFILibrary.__dict__: + return addressof_var(name) + raise AttributeError("cffi library has no function or " + "global variable named '%s'" % (name,)) + def __cffi_close__(self): + 
backendlib.close_lib() + self.__dict__.clear() + # + if isinstance(libname, basestring): + try: + if not isinstance(libname, str): # unicode, on Python 2 + libname = libname.encode('utf-8') + FFILibrary.__name__ = 'FFILibrary_%s' % libname + except UnicodeError: + pass + library = FFILibrary() + return library, library.__dict__ + +def _builtin_function_type(func): + # a hack to make at least ffi.typeof(builtin_function) work, + # if the builtin function was obtained by 'vengine_cpy'. + import sys + try: + module = sys.modules[func.__module__] + ffi = module._cffi_original_ffi + types_of_builtin_funcs = module._cffi_types_of_builtin_funcs + tp = types_of_builtin_funcs[func] + except (KeyError, AttributeError, TypeError): + return None + else: + with ffi._lock: + return ffi._get_cached_btype(tp) diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/backend_ctypes.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/backend_ctypes.py new file mode 100644 index 0000000000000000000000000000000000000000..e7956a79cfb1c3d28a2ad22a40b261ae7dbbbb5f --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/backend_ctypes.py @@ -0,0 +1,1121 @@ +import ctypes, ctypes.util, operator, sys +from . 
import model + +if sys.version_info < (3,): + bytechr = chr +else: + unicode = str + long = int + xrange = range + bytechr = lambda num: bytes([num]) + +class CTypesType(type): + pass + +class CTypesData(object): + __metaclass__ = CTypesType + __slots__ = ['__weakref__'] + __name__ = '' + + def __init__(self, *args): + raise TypeError("cannot instantiate %r" % (self.__class__,)) + + @classmethod + def _newp(cls, init): + raise TypeError("expected a pointer or array ctype, got '%s'" + % (cls._get_c_name(),)) + + @staticmethod + def _to_ctypes(value): + raise TypeError + + @classmethod + def _arg_to_ctypes(cls, *value): + try: + ctype = cls._ctype + except AttributeError: + raise TypeError("cannot create an instance of %r" % (cls,)) + if value: + res = cls._to_ctypes(*value) + if not isinstance(res, ctype): + res = cls._ctype(res) + else: + res = cls._ctype() + return res + + @classmethod + def _create_ctype_obj(cls, init): + if init is None: + return cls._arg_to_ctypes() + else: + return cls._arg_to_ctypes(init) + + @staticmethod + def _from_ctypes(ctypes_value): + raise TypeError + + @classmethod + def _get_c_name(cls, replace_with=''): + return cls._reftypename.replace(' &', replace_with) + + @classmethod + def _fix_class(cls): + cls.__name__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__module__ = 'ffi' + + def _get_own_repr(self): + raise NotImplementedError + + def _addr_repr(self, address): + if address == 0: + return 'NULL' + else: + if address < 0: + address += 1 << (8*ctypes.sizeof(ctypes.c_void_p)) + return '0x%x' % address + + def __repr__(self, c_name=None): + own = self._get_own_repr() + return '' % (c_name or self._get_c_name(), own) + + def _convert_to_address(self, BClass): + if BClass is None: + raise TypeError("cannot convert %r to an address" % ( + self._get_c_name(),)) + else: + raise TypeError("cannot convert %r to %r" % ( + self._get_c_name(), BClass._get_c_name())) + + @classmethod + def 
_get_size(cls): + return ctypes.sizeof(cls._ctype) + + def _get_size_of_instance(self): + return ctypes.sizeof(self._ctype) + + @classmethod + def _cast_from(cls, source): + raise TypeError("cannot cast to %r" % (cls._get_c_name(),)) + + def _cast_to_integer(self): + return self._convert_to_address(None) + + @classmethod + def _alignment(cls): + return ctypes.alignment(cls._ctype) + + def __iter__(self): + raise TypeError("cdata %r does not support iteration" % ( + self._get_c_name()),) + + def _make_cmp(name): + cmpfunc = getattr(operator, name) + def cmp(self, other): + v_is_ptr = not isinstance(self, CTypesGenericPrimitive) + w_is_ptr = (isinstance(other, CTypesData) and + not isinstance(other, CTypesGenericPrimitive)) + if v_is_ptr and w_is_ptr: + return cmpfunc(self._convert_to_address(None), + other._convert_to_address(None)) + elif v_is_ptr or w_is_ptr: + return NotImplemented + else: + if isinstance(self, CTypesGenericPrimitive): + self = self._value + if isinstance(other, CTypesGenericPrimitive): + other = other._value + return cmpfunc(self, other) + cmp.func_name = name + return cmp + + __eq__ = _make_cmp('__eq__') + __ne__ = _make_cmp('__ne__') + __lt__ = _make_cmp('__lt__') + __le__ = _make_cmp('__le__') + __gt__ = _make_cmp('__gt__') + __ge__ = _make_cmp('__ge__') + + def __hash__(self): + return hash(self._convert_to_address(None)) + + def _to_string(self, maxlen): + raise TypeError("string(): %r" % (self,)) + + +class CTypesGenericPrimitive(CTypesData): + __slots__ = [] + + def __hash__(self): + return hash(self._value) + + def _get_own_repr(self): + return repr(self._from_ctypes(self._value)) + + +class CTypesGenericArray(CTypesData): + __slots__ = [] + + @classmethod + def _newp(cls, init): + return cls(init) + + def __iter__(self): + for i in xrange(len(self)): + yield self[i] + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + +class CTypesGenericPtr(CTypesData): + __slots__ = ['_address', '_as_ctype_ptr'] + 
_automatic_casts = False + kind = "pointer" + + @classmethod + def _newp(cls, init): + return cls(init) + + @classmethod + def _cast_from(cls, source): + if source is None: + address = 0 + elif isinstance(source, CTypesData): + address = source._cast_to_integer() + elif isinstance(source, (int, long)): + address = source + else: + raise TypeError("bad type for cast to %r: %r" % + (cls, type(source).__name__)) + return cls._new_pointer_at(address) + + @classmethod + def _new_pointer_at(cls, address): + self = cls.__new__(cls) + self._address = address + self._as_ctype_ptr = ctypes.cast(address, cls._ctype) + return self + + def _get_own_repr(self): + try: + return self._addr_repr(self._address) + except AttributeError: + return '???' + + def _cast_to_integer(self): + return self._address + + def __nonzero__(self): + return bool(self._address) + __bool__ = __nonzero__ + + @classmethod + def _to_ctypes(cls, value): + if not isinstance(value, CTypesData): + raise TypeError("unexpected %s object" % type(value).__name__) + address = value._convert_to_address(cls) + return ctypes.cast(address, cls._ctype) + + @classmethod + def _from_ctypes(cls, ctypes_ptr): + address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0 + return cls._new_pointer_at(address) + + @classmethod + def _initialize(cls, ctypes_ptr, value): + if value: + ctypes_ptr.contents = cls._to_ctypes(value).contents + + def _convert_to_address(self, BClass): + if (BClass in (self.__class__, None) or BClass._automatic_casts + or self._automatic_casts): + return self._address + else: + return CTypesData._convert_to_address(self, BClass) + + +class CTypesBaseStructOrUnion(CTypesData): + __slots__ = ['_blob'] + + @classmethod + def _create_ctype_obj(cls, init): + # may be overridden + raise TypeError("cannot instantiate opaque type %s" % (cls,)) + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + @classmethod + def _offsetof(cls, fieldname): + return getattr(cls._ctype, 
fieldname).offset + + def _convert_to_address(self, BClass): + if getattr(BClass, '_BItem', None) is self.__class__: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @classmethod + def _from_ctypes(cls, ctypes_struct_or_union): + self = cls.__new__(cls) + self._blob = ctypes_struct_or_union + return self + + @classmethod + def _to_ctypes(cls, value): + return value._blob + + def __repr__(self, c_name=None): + return CTypesData.__repr__(self, c_name or self._get_c_name(' &')) + + +class CTypesBackend(object): + + PRIMITIVE_TYPES = { + 'char': ctypes.c_char, + 'short': ctypes.c_short, + 'int': ctypes.c_int, + 'long': ctypes.c_long, + 'long long': ctypes.c_longlong, + 'signed char': ctypes.c_byte, + 'unsigned char': ctypes.c_ubyte, + 'unsigned short': ctypes.c_ushort, + 'unsigned int': ctypes.c_uint, + 'unsigned long': ctypes.c_ulong, + 'unsigned long long': ctypes.c_ulonglong, + 'float': ctypes.c_float, + 'double': ctypes.c_double, + '_Bool': ctypes.c_bool, + } + + for _name in ['unsigned long long', 'unsigned long', + 'unsigned int', 'unsigned short', 'unsigned char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] + + for _name in ['long long', 'long', 'int', 'short', 'signed char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] + PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] + + + def __init__(self): + self.RTLD_LAZY = 0 # not supported anyway by ctypes + self.RTLD_NOW = 0 + 
self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL + self.RTLD_LOCAL = ctypes.RTLD_LOCAL + + def set_ffi(self, ffi): + self.ffi = ffi + + def _get_types(self): + return CTypesData, CTypesType + + def load_library(self, path, flags=0): + cdll = ctypes.CDLL(path, flags) + return CTypesLibrary(self, cdll) + + def new_void_type(self): + class CTypesVoid(CTypesData): + __slots__ = [] + _reftypename = 'void &' + @staticmethod + def _from_ctypes(novalue): + return None + @staticmethod + def _to_ctypes(novalue): + if novalue is not None: + raise TypeError("None expected, got %s object" % + (type(novalue).__name__,)) + return None + CTypesVoid._fix_class() + return CTypesVoid + + def new_primitive_type(self, name): + if name == 'wchar_t': + raise NotImplementedError(name) + ctype = self.PRIMITIVE_TYPES[name] + if name == 'char': + kind = 'char' + elif name in ('float', 'double'): + kind = 'float' + else: + if name in ('signed char', 'unsigned char'): + kind = 'byte' + elif name == '_Bool': + kind = 'bool' + else: + kind = 'int' + is_signed = (ctype(-1).value == -1) + # + def _cast_source_to_int(source): + if isinstance(source, (int, long, float)): + source = int(source) + elif isinstance(source, CTypesData): + source = source._cast_to_integer() + elif isinstance(source, bytes): + source = ord(source) + elif source is None: + source = 0 + else: + raise TypeError("bad type for cast to %r: %r" % + (CTypesPrimitive, type(source).__name__)) + return source + # + kind1 = kind + class CTypesPrimitive(CTypesGenericPrimitive): + __slots__ = ['_value'] + _ctype = ctype + _reftypename = '%s &' % name + kind = kind1 + + def __init__(self, value): + self._value = value + + @staticmethod + def _create_ctype_obj(init): + if init is None: + return ctype() + return ctype(CTypesPrimitive._to_ctypes(init)) + + if kind == 'int' or kind == 'byte': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = ctype(source).value # cast within range + return cls(source) + def 
__int__(self): + return self._value + + if kind == 'bool': + @classmethod + def _cast_from(cls, source): + if not isinstance(source, (int, long, float)): + source = _cast_source_to_int(source) + return cls(bool(source)) + def __int__(self): + return int(self._value) + + if kind == 'char': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = bytechr(source & 0xFF) + return cls(source) + def __int__(self): + return ord(self._value) + + if kind == 'float': + @classmethod + def _cast_from(cls, source): + if isinstance(source, float): + pass + elif isinstance(source, CTypesGenericPrimitive): + if hasattr(source, '__float__'): + source = float(source) + else: + source = int(source) + else: + source = _cast_source_to_int(source) + source = ctype(source).value # fix precision + return cls(source) + def __int__(self): + return int(self._value) + def __float__(self): + return self._value + + _cast_to_integer = __int__ + + if kind == 'int' or kind == 'byte' or kind == 'bool': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long)): + if isinstance(x, CTypesData): + x = int(x) + else: + raise TypeError("integer expected, got %s" % + type(x).__name__) + if ctype(x).value != x: + if not is_signed and x < 0: + raise OverflowError("%s: negative integer" % name) + else: + raise OverflowError("%s: integer out of bounds" + % name) + return x + + if kind == 'char': + @staticmethod + def _to_ctypes(x): + if isinstance(x, bytes) and len(x) == 1: + return x + if isinstance(x, CTypesPrimitive): # > + return x._value + raise TypeError("character expected, got %s" % + type(x).__name__) + def __nonzero__(self): + return ord(self._value) != 0 + else: + def __nonzero__(self): + return self._value != 0 + __bool__ = __nonzero__ + + if kind == 'float': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long, float, CTypesData)): + raise TypeError("float expected, got %s" % + type(x).__name__) + return ctype(x).value + + 
@staticmethod + def _from_ctypes(value): + return getattr(value, 'value', value) + + @staticmethod + def _initialize(blob, init): + blob.value = CTypesPrimitive._to_ctypes(init) + + if kind == 'char': + def _to_string(self, maxlen): + return self._value + if kind == 'byte': + def _to_string(self, maxlen): + return chr(self._value & 0xff) + # + CTypesPrimitive._fix_class() + return CTypesPrimitive + + def new_pointer_type(self, BItem): + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'charp' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'bytep' + elif BItem is getbtype(model.void_type): + kind = 'voidp' + else: + kind = 'generic' + # + class CTypesPtr(CTypesGenericPtr): + __slots__ = ['_own'] + if kind == 'charp': + __slots__ += ['__as_strbuf'] + _BItem = BItem + if hasattr(BItem, '_ctype'): + _ctype = ctypes.POINTER(BItem._ctype) + _bitem_size = ctypes.sizeof(BItem._ctype) + else: + _ctype = ctypes.c_void_p + if issubclass(BItem, CTypesGenericArray): + _reftypename = BItem._get_c_name('(* &)') + else: + _reftypename = BItem._get_c_name(' * &') + + def __init__(self, init): + ctypeobj = BItem._create_ctype_obj(init) + if kind == 'charp': + self.__as_strbuf = ctypes.create_string_buffer( + ctypeobj.value + b'\x00') + self._as_ctype_ptr = ctypes.cast( + self.__as_strbuf, self._ctype) + else: + self._as_ctype_ptr = ctypes.pointer(ctypeobj) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own = True + + def __add__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address + + other * self._bitem_size) + else: + return NotImplemented + + def __sub__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address - + other * self._bitem_size) + elif type(self) is type(other): + return (self._address - other._address) // self._bitem_size + else: 
+ return NotImplemented + + def __getitem__(self, index): + if getattr(self, '_own', False) and index != 0: + raise IndexError + return BItem._from_ctypes(self._as_ctype_ptr[index]) + + def __setitem__(self, index, value): + self._as_ctype_ptr[index] = BItem._to_ctypes(value) + + if kind == 'charp' or kind == 'voidp': + @classmethod + def _arg_to_ctypes(cls, *value): + if value and isinstance(value[0], bytes): + return ctypes.c_char_p(value[0]) + else: + return super(CTypesPtr, cls)._arg_to_ctypes(*value) + + if kind == 'charp' or kind == 'bytep': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = sys.maxsize + p = ctypes.cast(self._as_ctype_ptr, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % ( + ctypes.sizeof(self._as_ctype_ptr.contents),) + return super(CTypesPtr, self)._get_own_repr() + # + if (BItem is self.ffi._get_cached_btype(model.void_type) or + BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))): + CTypesPtr._automatic_casts = True + # + CTypesPtr._fix_class() + return CTypesPtr + + def new_array_type(self, CTypesPtr, length): + if length is None: + brackets = ' &[]' + else: + brackets = ' &[%d]' % length + BItem = CTypesPtr._BItem + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'char' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'byte' + else: + kind = 'generic' + # + class CTypesArray(CTypesGenericArray): + __slots__ = ['_blob', '_own'] + if length is not None: + _ctype = BItem._ctype * length + else: + __slots__.append('_ctype') + _reftypename = BItem._get_c_name(brackets) + _declared_length = length + _CTPtr = CTypesPtr + + def __init__(self, init): + if length is None: + if isinstance(init, (int, long)): + len1 = init + 
init = None + elif kind == 'char' and isinstance(init, bytes): + len1 = len(init) + 1 # extra null + else: + init = tuple(init) + len1 = len(init) + self._ctype = BItem._ctype * len1 + self._blob = self._ctype() + self._own = True + if init is not None: + self._initialize(self._blob, init) + + @staticmethod + def _initialize(blob, init): + if isinstance(init, bytes): + init = [init[i:i+1] for i in range(len(init))] + else: + if isinstance(init, CTypesGenericArray): + if (len(init) != len(blob) or + not isinstance(init, CTypesArray)): + raise TypeError("length/type mismatch: %s" % (init,)) + init = tuple(init) + if len(init) > len(blob): + raise IndexError("too many initializers") + addr = ctypes.cast(blob, ctypes.c_void_p).value + PTR = ctypes.POINTER(BItem._ctype) + itemsize = ctypes.sizeof(BItem._ctype) + for i, value in enumerate(init): + p = ctypes.cast(addr + i * itemsize, PTR) + BItem._initialize(p.contents, value) + + def __len__(self): + return len(self._blob) + + def __getitem__(self, index): + if not (0 <= index < len(self._blob)): + raise IndexError + return BItem._from_ctypes(self._blob[index]) + + def __setitem__(self, index, value): + if not (0 <= index < len(self._blob)): + raise IndexError + self._blob[index] = BItem._to_ctypes(value) + + if kind == 'char' or kind == 'byte': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = len(self._blob) + p = ctypes.cast(self._blob, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % (ctypes.sizeof(self._blob),) + return super(CTypesArray, self)._get_own_repr() + + def _convert_to_address(self, BClass): + if BClass in (CTypesPtr, None) or BClass._automatic_casts: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @staticmethod + def _from_ctypes(ctypes_array): + self = 
CTypesArray.__new__(CTypesArray) + self._blob = ctypes_array + return self + + @staticmethod + def _arg_to_ctypes(value): + return CTypesPtr._arg_to_ctypes(value) + + def __add__(self, other): + if isinstance(other, (int, long)): + return CTypesPtr._new_pointer_at( + ctypes.addressof(self._blob) + + other * ctypes.sizeof(BItem._ctype)) + else: + return NotImplemented + + @classmethod + def _cast_from(cls, source): + raise NotImplementedError("casting to %r" % ( + cls._get_c_name(),)) + # + CTypesArray._fix_class() + return CTypesArray + + def _new_struct_or_union(self, kind, name, base_ctypes_class): + # + class struct_or_union(base_ctypes_class): + pass + struct_or_union.__name__ = '%s_%s' % (kind, name) + kind1 = kind + # + class CTypesStructOrUnion(CTypesBaseStructOrUnion): + __slots__ = ['_blob'] + _ctype = struct_or_union + _reftypename = '%s &' % (name,) + _kind = kind = kind1 + # + CTypesStructOrUnion._fix_class() + return CTypesStructOrUnion + + def new_struct_type(self, name): + return self._new_struct_or_union('struct', name, ctypes.Structure) + + def new_union_type(self, name): + return self._new_struct_or_union('union', name, ctypes.Union) + + def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp, + totalsize=-1, totalalignment=-1, sflags=0, + pack=0): + if totalsize >= 0 or totalalignment >= 0: + raise NotImplementedError("the ctypes backend of CFFI does not support " + "structures completed by verify(); please " + "compile and install the _cffi_backend module.") + struct_or_union = CTypesStructOrUnion._ctype + fnames = [fname for (fname, BField, bitsize) in fields] + btypes = [BField for (fname, BField, bitsize) in fields] + bitfields = [bitsize for (fname, BField, bitsize) in fields] + # + bfield_types = {} + cfields = [] + for (fname, BField, bitsize) in fields: + if bitsize < 0: + cfields.append((fname, BField._ctype)) + bfield_types[fname] = BField + else: + cfields.append((fname, BField._ctype, bitsize)) + bfield_types[fname] = 
Ellipsis + if sflags & 8: + struct_or_union._pack_ = 1 + elif pack: + struct_or_union._pack_ = pack + struct_or_union._fields_ = cfields + CTypesStructOrUnion._bfield_types = bfield_types + # + @staticmethod + def _create_ctype_obj(init): + result = struct_or_union() + if init is not None: + initialize(result, init) + return result + CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj + # + def initialize(blob, init): + if is_union: + if len(init) > 1: + raise ValueError("union initializer: %d items given, but " + "only one supported (use a dict if needed)" + % (len(init),)) + if not isinstance(init, dict): + if isinstance(init, (bytes, unicode)): + raise TypeError("union initializer: got a str") + init = tuple(init) + if len(init) > len(fnames): + raise ValueError("too many values for %s initializer" % + CTypesStructOrUnion._get_c_name()) + init = dict(zip(fnames, init)) + addr = ctypes.addressof(blob) + for fname, value in init.items(): + BField, bitsize = name2fieldtype[fname] + assert bitsize < 0, \ + "not implemented: initializer with bit fields" + offset = CTypesStructOrUnion._offsetof(fname) + PTR = ctypes.POINTER(BField._ctype) + p = ctypes.cast(addr + offset, PTR) + BField._initialize(p.contents, value) + is_union = CTypesStructOrUnion._kind == 'union' + name2fieldtype = dict(zip(fnames, zip(btypes, bitfields))) + # + for fname, BField, bitsize in fields: + if fname == '': + raise NotImplementedError("nested anonymous structs/unions") + if hasattr(CTypesStructOrUnion, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + if bitsize < 0: + def getter(self, fname=fname, BField=BField, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BField._from_ctypes(p.contents) + def setter(self, value, fname=fname, BField=BField): + setattr(self._blob, fname, BField._to_ctypes(value)) + # + if 
issubclass(BField, CTypesGenericArray): + setter = None + if BField._declared_length == 0: + def getter(self, fname=fname, BFieldPtr=BField._CTPtr, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BFieldPtr._from_ctypes(p) + # + else: + def getter(self, fname=fname, BField=BField): + return BField._from_ctypes(getattr(self._blob, fname)) + def setter(self, value, fname=fname, BField=BField): + # xxx obscure workaround + value = BField._to_ctypes(value) + oldvalue = getattr(self._blob, fname) + setattr(self._blob, fname, value) + if value != getattr(self._blob, fname): + setattr(self._blob, fname, oldvalue) + raise OverflowError("value too large for bitfield") + setattr(CTypesStructOrUnion, fname, property(getter, setter)) + # + CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp)) + for fname in fnames: + if hasattr(CTypesPtr, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + def getter(self, fname=fname): + return getattr(self[0], fname) + def setter(self, value, fname=fname): + setattr(self[0], fname, value) + setattr(CTypesPtr, fname, property(getter, setter)) + + def new_function_type(self, BArgs, BResult, has_varargs): + nameargs = [BArg._get_c_name() for BArg in BArgs] + if has_varargs: + nameargs.append('...') + nameargs = ', '.join(nameargs) + # + class CTypesFunctionPtr(CTypesGenericPtr): + __slots__ = ['_own_callback', '_name'] + _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None), + *[BArg._ctype for BArg in BArgs], + use_errno=True) + _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,)) + + def __init__(self, init, error=None): + # create a callback to the Python callable init() + import traceback + assert not has_varargs, "varargs not supported for callbacks" + if getattr(BResult, '_ctype', None) is not None: + error = BResult._from_ctypes( + 
BResult._create_ctype_obj(error)) + else: + error = None + def callback(*args): + args2 = [] + for arg, BArg in zip(args, BArgs): + args2.append(BArg._from_ctypes(arg)) + try: + res2 = init(*args2) + res2 = BResult._to_ctypes(res2) + except: + traceback.print_exc() + res2 = error + if issubclass(BResult, CTypesGenericPtr): + if res2: + res2 = ctypes.cast(res2, ctypes.c_void_p).value + # .value: http://bugs.python.org/issue1574593 + else: + res2 = None + #print repr(res2) + return res2 + if issubclass(BResult, CTypesGenericPtr): + # The only pointers callbacks can return are void*s: + # http://bugs.python.org/issue5710 + callback_ctype = ctypes.CFUNCTYPE( + ctypes.c_void_p, + *[BArg._ctype for BArg in BArgs], + use_errno=True) + else: + callback_ctype = CTypesFunctionPtr._ctype + self._as_ctype_ptr = callback_ctype(callback) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own_callback = init + + @staticmethod + def _initialize(ctypes_ptr, value): + if value: + raise NotImplementedError("ctypes backend: not supported: " + "initializers for function pointers") + + def __repr__(self): + c_name = getattr(self, '_name', None) + if c_name: + i = self._reftypename.index('(* &)') + if self._reftypename[i-1] not in ' )*': + c_name = ' ' + c_name + c_name = self._reftypename.replace('(* &)', c_name) + return CTypesData.__repr__(self, c_name) + + def _get_own_repr(self): + if getattr(self, '_own_callback', None) is not None: + return 'calling %r' % (self._own_callback,) + return super(CTypesFunctionPtr, self)._get_own_repr() + + def __call__(self, *args): + if has_varargs: + assert len(args) >= len(BArgs) + extraargs = args[len(BArgs):] + args = args[:len(BArgs)] + else: + assert len(args) == len(BArgs) + ctypes_args = [] + for arg, BArg in zip(args, BArgs): + ctypes_args.append(BArg._arg_to_ctypes(arg)) + if has_varargs: + for i, arg in enumerate(extraargs): + if arg is None: + ctypes_args.append(ctypes.c_void_p(0)) # NULL + continue + if 
not isinstance(arg, CTypesData): + raise TypeError( + "argument %d passed in the variadic part " + "needs to be a cdata object (got %s)" % + (1 + len(BArgs) + i, type(arg).__name__)) + ctypes_args.append(arg._arg_to_ctypes(arg)) + result = self._as_ctype_ptr(*ctypes_args) + return BResult._from_ctypes(result) + # + CTypesFunctionPtr._fix_class() + return CTypesFunctionPtr + + def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): + assert isinstance(name, str) + reverse_mapping = dict(zip(reversed(enumvalues), + reversed(enumerators))) + # + class CTypesEnum(CTypesInt): + __slots__ = [] + _reftypename = '%s &' % name + + def _get_own_repr(self): + value = self._value + try: + return '%d: %s' % (value, reverse_mapping[value]) + except KeyError: + return str(value) + + def _to_string(self, maxlen): + value = self._value + try: + return reverse_mapping[value] + except KeyError: + return str(value) + # + CTypesEnum._fix_class() + return CTypesEnum + + def get_errno(self): + return ctypes.get_errno() + + def set_errno(self, value): + ctypes.set_errno(value) + + def string(self, b, maxlen=-1): + return b._to_string(maxlen) + + def buffer(self, bptr, size=-1): + raise NotImplementedError("buffer() with ctypes backend") + + def sizeof(self, cdata_or_BType): + if isinstance(cdata_or_BType, CTypesData): + return cdata_or_BType._get_size_of_instance() + else: + assert issubclass(cdata_or_BType, CTypesData) + return cdata_or_BType._get_size() + + def alignof(self, BType): + assert issubclass(BType, CTypesData) + return BType._alignment() + + def newp(self, BType, source): + if not issubclass(BType, CTypesData): + raise TypeError + return BType._newp(source) + + def cast(self, BType, source): + return BType._cast_from(source) + + def callback(self, BType, source, error, onerror): + assert onerror is None # XXX not implemented + return BType(source, error) + + _weakref_cache_ref = None + + def gcp(self, cdata, destructor, size=0): + if self._weakref_cache_ref is 
None: + import weakref + class MyRef(weakref.ref): + def __eq__(self, other): + myref = self() + return self is other or ( + myref is not None and myref is other()) + def __ne__(self, other): + return not (self == other) + def __hash__(self): + try: + return self._hash + except AttributeError: + self._hash = hash(self()) + return self._hash + self._weakref_cache_ref = {}, MyRef + weak_cache, MyRef = self._weakref_cache_ref + + if destructor is None: + try: + del weak_cache[MyRef(cdata)] + except KeyError: + raise TypeError("Can remove destructor only on a object " + "previously returned by ffi.gc()") + return None + + def remove(k): + cdata, destructor = weak_cache.pop(k, (None, None)) + if destructor is not None: + destructor(cdata) + + new_cdata = self.cast(self.typeof(cdata), cdata) + assert new_cdata is not cdata + weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor) + return new_cdata + + typeof = type + + def getcname(self, BType, replace_with): + return BType._get_c_name(replace_with) + + def typeoffsetof(self, BType, fieldname, num=0): + if isinstance(fieldname, str): + if num == 0 and issubclass(BType, CTypesGenericPtr): + BType = BType._BItem + if not issubclass(BType, CTypesBaseStructOrUnion): + raise TypeError("expected a struct or union ctype") + BField = BType._bfield_types[fieldname] + if BField is Ellipsis: + raise TypeError("not supported for bitfields") + return (BField, BType._offsetof(fieldname)) + elif isinstance(fieldname, (int, long)): + if issubclass(BType, CTypesGenericArray): + BType = BType._CTPtr + if not issubclass(BType, CTypesGenericPtr): + raise TypeError("expected an array or ptr ctype") + BItem = BType._BItem + offset = BItem._get_size() * fieldname + if offset > sys.maxsize: + raise OverflowError + return (BItem, offset) + else: + raise TypeError(type(fieldname)) + + def rawaddressof(self, BTypePtr, cdata, offset=None): + if isinstance(cdata, CTypesBaseStructOrUnion): + ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata)) + 
elif isinstance(cdata, CTypesGenericPtr): + if offset is None or not issubclass(type(cdata)._BItem, + CTypesBaseStructOrUnion): + raise TypeError("unexpected cdata type") + ptr = type(cdata)._to_ctypes(cdata) + elif isinstance(cdata, CTypesGenericArray): + ptr = type(cdata)._to_ctypes(cdata) + else: + raise TypeError("expected a ") + if offset: + ptr = ctypes.cast( + ctypes.c_void_p( + ctypes.cast(ptr, ctypes.c_void_p).value + offset), + type(ptr)) + return BTypePtr._from_ctypes(ptr) + + +class CTypesLibrary(object): + + def __init__(self, backend, cdll): + self.backend = backend + self.cdll = cdll + + def load_function(self, BType, name): + c_func = getattr(self.cdll, name) + funcobj = BType._from_ctypes(c_func) + funcobj._name = name + return funcobj + + def read_variable(self, BType, name): + try: + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + except AttributeError as e: + raise NotImplementedError(e) + return BType._from_ctypes(ctypes_obj) + + def write_variable(self, BType, name, value): + new_ctypes_obj = BType._to_ctypes(value) + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + ctypes.memmove(ctypes.addressof(ctypes_obj), + ctypes.addressof(new_ctypes_obj), + ctypes.sizeof(BType._ctype)) diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/cffi_opcode.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/cffi_opcode.py new file mode 100644 index 0000000000000000000000000000000000000000..a0df98d1c743790f4047672abcae0d00f993a2ce --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/cffi_opcode.py @@ -0,0 +1,187 @@ +from .error import VerificationError + +class CffiOp(object): + def __init__(self, op, arg): + self.op = op + self.arg = arg + + def as_c_expr(self): + if self.op is None: + assert isinstance(self.arg, str) + return '(_cffi_opcode_t)(%s)' % (self.arg,) + classname = CLASS_NAME[self.op] + return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, 
self.arg) + + def as_python_bytes(self): + if self.op is None and self.arg.isdigit(): + value = int(self.arg) # non-negative: '-' not in self.arg + if value >= 2**31: + raise OverflowError("cannot emit %r: limited to 2**31-1" + % (self.arg,)) + return format_four_bytes(value) + if isinstance(self.arg, str): + raise VerificationError("cannot emit to Python: %r" % (self.arg,)) + return format_four_bytes((self.arg << 8) | self.op) + + def __str__(self): + classname = CLASS_NAME.get(self.op, self.op) + return '(%s %s)' % (classname, self.arg) + +def format_four_bytes(num): + return '\\x%02X\\x%02X\\x%02X\\x%02X' % ( + (num >> 24) & 0xFF, + (num >> 16) & 0xFF, + (num >> 8) & 0xFF, + (num ) & 0xFF) + +OP_PRIMITIVE = 1 +OP_POINTER = 3 +OP_ARRAY = 5 +OP_OPEN_ARRAY = 7 +OP_STRUCT_UNION = 9 +OP_ENUM = 11 +OP_FUNCTION = 13 +OP_FUNCTION_END = 15 +OP_NOOP = 17 +OP_BITFIELD = 19 +OP_TYPENAME = 21 +OP_CPYTHON_BLTN_V = 23 # varargs +OP_CPYTHON_BLTN_N = 25 # noargs +OP_CPYTHON_BLTN_O = 27 # O (i.e. a single arg) +OP_CONSTANT = 29 +OP_CONSTANT_INT = 31 +OP_GLOBAL_VAR = 33 +OP_DLOPEN_FUNC = 35 +OP_DLOPEN_CONST = 37 +OP_GLOBAL_VAR_F = 39 +OP_EXTERN_PYTHON = 41 + +PRIM_VOID = 0 +PRIM_BOOL = 1 +PRIM_CHAR = 2 +PRIM_SCHAR = 3 +PRIM_UCHAR = 4 +PRIM_SHORT = 5 +PRIM_USHORT = 6 +PRIM_INT = 7 +PRIM_UINT = 8 +PRIM_LONG = 9 +PRIM_ULONG = 10 +PRIM_LONGLONG = 11 +PRIM_ULONGLONG = 12 +PRIM_FLOAT = 13 +PRIM_DOUBLE = 14 +PRIM_LONGDOUBLE = 15 + +PRIM_WCHAR = 16 +PRIM_INT8 = 17 +PRIM_UINT8 = 18 +PRIM_INT16 = 19 +PRIM_UINT16 = 20 +PRIM_INT32 = 21 +PRIM_UINT32 = 22 +PRIM_INT64 = 23 +PRIM_UINT64 = 24 +PRIM_INTPTR = 25 +PRIM_UINTPTR = 26 +PRIM_PTRDIFF = 27 +PRIM_SIZE = 28 +PRIM_SSIZE = 29 +PRIM_INT_LEAST8 = 30 +PRIM_UINT_LEAST8 = 31 +PRIM_INT_LEAST16 = 32 +PRIM_UINT_LEAST16 = 33 +PRIM_INT_LEAST32 = 34 +PRIM_UINT_LEAST32 = 35 +PRIM_INT_LEAST64 = 36 +PRIM_UINT_LEAST64 = 37 +PRIM_INT_FAST8 = 38 +PRIM_UINT_FAST8 = 39 +PRIM_INT_FAST16 = 40 +PRIM_UINT_FAST16 = 41 +PRIM_INT_FAST32 = 42 +PRIM_UINT_FAST32 = 43 
+PRIM_INT_FAST64 = 44 +PRIM_UINT_FAST64 = 45 +PRIM_INTMAX = 46 +PRIM_UINTMAX = 47 +PRIM_FLOATCOMPLEX = 48 +PRIM_DOUBLECOMPLEX = 49 +PRIM_CHAR16 = 50 +PRIM_CHAR32 = 51 + +_NUM_PRIM = 52 +_UNKNOWN_PRIM = -1 +_UNKNOWN_FLOAT_PRIM = -2 +_UNKNOWN_LONG_DOUBLE = -3 + +_IO_FILE_STRUCT = -1 + +PRIMITIVE_TO_INDEX = { + 'char': PRIM_CHAR, + 'short': PRIM_SHORT, + 'int': PRIM_INT, + 'long': PRIM_LONG, + 'long long': PRIM_LONGLONG, + 'signed char': PRIM_SCHAR, + 'unsigned char': PRIM_UCHAR, + 'unsigned short': PRIM_USHORT, + 'unsigned int': PRIM_UINT, + 'unsigned long': PRIM_ULONG, + 'unsigned long long': PRIM_ULONGLONG, + 'float': PRIM_FLOAT, + 'double': PRIM_DOUBLE, + 'long double': PRIM_LONGDOUBLE, + 'float _Complex': PRIM_FLOATCOMPLEX, + 'double _Complex': PRIM_DOUBLECOMPLEX, + '_Bool': PRIM_BOOL, + 'wchar_t': PRIM_WCHAR, + 'char16_t': PRIM_CHAR16, + 'char32_t': PRIM_CHAR32, + 'int8_t': PRIM_INT8, + 'uint8_t': PRIM_UINT8, + 'int16_t': PRIM_INT16, + 'uint16_t': PRIM_UINT16, + 'int32_t': PRIM_INT32, + 'uint32_t': PRIM_UINT32, + 'int64_t': PRIM_INT64, + 'uint64_t': PRIM_UINT64, + 'intptr_t': PRIM_INTPTR, + 'uintptr_t': PRIM_UINTPTR, + 'ptrdiff_t': PRIM_PTRDIFF, + 'size_t': PRIM_SIZE, + 'ssize_t': PRIM_SSIZE, + 'int_least8_t': PRIM_INT_LEAST8, + 'uint_least8_t': PRIM_UINT_LEAST8, + 'int_least16_t': PRIM_INT_LEAST16, + 'uint_least16_t': PRIM_UINT_LEAST16, + 'int_least32_t': PRIM_INT_LEAST32, + 'uint_least32_t': PRIM_UINT_LEAST32, + 'int_least64_t': PRIM_INT_LEAST64, + 'uint_least64_t': PRIM_UINT_LEAST64, + 'int_fast8_t': PRIM_INT_FAST8, + 'uint_fast8_t': PRIM_UINT_FAST8, + 'int_fast16_t': PRIM_INT_FAST16, + 'uint_fast16_t': PRIM_UINT_FAST16, + 'int_fast32_t': PRIM_INT_FAST32, + 'uint_fast32_t': PRIM_UINT_FAST32, + 'int_fast64_t': PRIM_INT_FAST64, + 'uint_fast64_t': PRIM_UINT_FAST64, + 'intmax_t': PRIM_INTMAX, + 'uintmax_t': PRIM_UINTMAX, + } + +F_UNION = 0x01 +F_CHECK_FIELDS = 0x02 +F_PACKED = 0x04 +F_EXTERNAL = 0x08 +F_OPAQUE = 0x10 + +G_FLAGS = dict([('_CFFI_' + _key, 
globals()[_key]) + for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED', + 'F_EXTERNAL', 'F_OPAQUE']]) + +CLASS_NAME = {} +for _name, _value in list(globals().items()): + if _name.startswith('OP_') and isinstance(_value, int): + CLASS_NAME[_value] = _name[3:] diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/commontypes.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/commontypes.py new file mode 100644 index 0000000000000000000000000000000000000000..8ec97c756a4b1023fd3963dd39b706f7c0e34373 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/commontypes.py @@ -0,0 +1,80 @@ +import sys +from . import model +from .error import FFIError + + +COMMON_TYPES = {} + +try: + # fetch "bool" and all simple Windows types + from _cffi_backend import _get_common_types + _get_common_types(COMMON_TYPES) +except ImportError: + pass + +COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE') +COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above + +for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + if _type.endswith('_t'): + COMMON_TYPES[_type] = _type +del _type + +_CACHE = {} + +def resolve_common_type(parser, commontype): + try: + return _CACHE[commontype] + except KeyError: + cdecl = COMMON_TYPES.get(commontype, commontype) + if not isinstance(cdecl, str): + result, quals = cdecl, 0 # cdecl is already a BaseType + elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + result, quals = model.PrimitiveType(cdecl), 0 + elif cdecl == 'set-unicode-needed': + raise FFIError("The Windows type %r is only available after " + "you call ffi.set_unicode()" % (commontype,)) + else: + if commontype == cdecl: + raise FFIError( + "Unsupported type: %r. Please look at " + "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations " + "and file an issue if you think this type should really " + "be supported." 
% (commontype,)) + result, quals = parser.parse_type_and_quals(cdecl) # recursive + + assert isinstance(result, model.BaseTypeByIdentity) + _CACHE[commontype] = result, quals + return result, quals + + +# ____________________________________________________________ +# extra types for Windows (most of them are in commontypes.c) + + +def win_common_types(): + return { + "UNICODE_STRING": model.StructType( + "_UNICODE_STRING", + ["Length", + "MaximumLength", + "Buffer"], + [model.PrimitiveType("unsigned short"), + model.PrimitiveType("unsigned short"), + model.PointerType(model.PrimitiveType("wchar_t"))], + [-1, -1, -1]), + "PUNICODE_STRING": "UNICODE_STRING *", + "PCUNICODE_STRING": "const UNICODE_STRING *", + + "TBYTE": "set-unicode-needed", + "TCHAR": "set-unicode-needed", + "LPCTSTR": "set-unicode-needed", + "PCTSTR": "set-unicode-needed", + "LPTSTR": "set-unicode-needed", + "PTSTR": "set-unicode-needed", + "PTBYTE": "set-unicode-needed", + "PTCHAR": "set-unicode-needed", + } + +if sys.platform == 'win32': + COMMON_TYPES.update(win_common_types()) diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/cparser.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/cparser.py new file mode 100644 index 0000000000000000000000000000000000000000..74830e913f21409f536febddae7769d0364cd24b --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/cparser.py @@ -0,0 +1,1006 @@ +from . import model +from .commontypes import COMMON_TYPES, resolve_common_type +from .error import FFIError, CDefError +try: + from . 
import _pycparser as pycparser +except ImportError: + import pycparser +import weakref, re, sys + +try: + if sys.version_info < (3,): + import thread as _thread + else: + import _thread + lock = _thread.allocate_lock() +except ImportError: + lock = None + +def _workaround_for_static_import_finders(): + # Issue #392: packaging tools like cx_Freeze can not find these + # because pycparser uses exec dynamic import. This is an obscure + # workaround. This function is never called. + import pycparser.yacctab + import pycparser.lextab + +CDEF_SOURCE_STRING = "" +_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$", + re.DOTALL | re.MULTILINE) +_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)" + r"\b((?:[^\n\\]|\\.)*?)$", + re.DOTALL | re.MULTILINE) +_r_line_directive = re.compile(r"^[ \t]*#[ \t]*(?:line|\d+)\b.*$", re.MULTILINE) +_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}") +_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$") +_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]") +_r_words = re.compile(r"\w+|\S") +_parser_cache = None +_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE) +_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b") +_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b") +_r_cdecl = re.compile(r"\b__cdecl\b") +_r_extern_python = re.compile(r'\bextern\s*"' + r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.') +_r_star_const_space = re.compile( # matches "* const " + r"[*]\s*((const|volatile|restrict)\b\s*)+") +_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+" + r"\.\.\.") +_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.") + +def _get_parser(): + global _parser_cache + if _parser_cache is None: + _parser_cache = pycparser.CParser() + return _parser_cache + +def _workaround_for_old_pycparser(csource): + # Workaround for a pycparser issue (fixed between pycparser 2.10 and + # 2.14): "char*const***" gives us a wrong syntax tree, the same as + # for 
"char***(*const)". This means we can't tell the difference + # afterwards. But "char(*const(***))" gives us the right syntax + # tree. The issue only occurs if there are several stars in + # sequence with no parenthesis inbetween, just possibly qualifiers. + # Attempt to fix it by adding some parentheses in the source: each + # time we see "* const" or "* const *", we add an opening + # parenthesis before each star---the hard part is figuring out where + # to close them. + parts = [] + while True: + match = _r_star_const_space.search(csource) + if not match: + break + #print repr(''.join(parts)+csource), '=>', + parts.append(csource[:match.start()]) + parts.append('('); closing = ')' + parts.append(match.group()) # e.g. "* const " + endpos = match.end() + if csource.startswith('*', endpos): + parts.append('('); closing += ')' + level = 0 + i = endpos + while i < len(csource): + c = csource[i] + if c == '(': + level += 1 + elif c == ')': + if level == 0: + break + level -= 1 + elif c in ',;=': + if level == 0: + break + i += 1 + csource = csource[endpos:i] + closing + csource[i:] + #print repr(''.join(parts)+csource) + parts.append(csource) + return ''.join(parts) + +def _preprocess_extern_python(csource): + # input: `extern "Python" int foo(int);` or + # `extern "Python" { int foo(int); }` + # output: + # void __cffi_extern_python_start; + # int foo(int); + # void __cffi_extern_python_stop; + # + # input: `extern "Python+C" int foo(int);` + # output: + # void __cffi_extern_python_plus_c_start; + # int foo(int); + # void __cffi_extern_python_stop; + parts = [] + while True: + match = _r_extern_python.search(csource) + if not match: + break + endpos = match.end() - 1 + #print + #print ''.join(parts)+csource + #print '=>' + parts.append(csource[:match.start()]) + if 'C' in match.group(1): + parts.append('void __cffi_extern_python_plus_c_start; ') + else: + parts.append('void __cffi_extern_python_start; ') + if csource[endpos] == '{': + # grouping variant + closing = 
csource.find('}', endpos) + if closing < 0: + raise CDefError("'extern \"Python\" {': no '}' found") + if csource.find('{', endpos + 1, closing) >= 0: + raise NotImplementedError("cannot use { } inside a block " + "'extern \"Python\" { ... }'") + parts.append(csource[endpos+1:closing]) + csource = csource[closing+1:] + else: + # non-grouping variant + semicolon = csource.find(';', endpos) + if semicolon < 0: + raise CDefError("'extern \"Python\": no ';' found") + parts.append(csource[endpos:semicolon+1]) + csource = csource[semicolon+1:] + parts.append(' void __cffi_extern_python_stop;') + #print ''.join(parts)+csource + #print + parts.append(csource) + return ''.join(parts) + +def _warn_for_string_literal(csource): + if '"' not in csource: + return + for line in csource.splitlines(): + if '"' in line and not line.lstrip().startswith('#'): + import warnings + warnings.warn("String literal found in cdef() or type source. " + "String literals are ignored here, but you should " + "remove them anyway because some character sequences " + "confuse pre-parsing.") + break + +def _warn_for_non_extern_non_static_global_variable(decl): + if not decl.storage: + import warnings + warnings.warn("Global variable '%s' in cdef(): for consistency " + "with C it should have a storage class specifier " + "(usually 'extern')" % (decl.name,)) + +def _remove_line_directives(csource): + # _r_line_directive matches whole lines, without the final \n, if they + # start with '#line' with some spacing allowed, or '#NUMBER'. This + # function stores them away and replaces them with exactly the string + # '#line@N', where N is the index in the list 'line_directives'. 
+ line_directives = [] + def replace(m): + i = len(line_directives) + line_directives.append(m.group()) + return '#line@%d' % i + csource = _r_line_directive.sub(replace, csource) + return csource, line_directives + +def _put_back_line_directives(csource, line_directives): + def replace(m): + s = m.group() + if not s.startswith('#line@'): + raise AssertionError("unexpected #line directive " + "(should have been processed and removed") + return line_directives[int(s[6:])] + return _r_line_directive.sub(replace, csource) + +def _preprocess(csource): + # First, remove the lines of the form '#line N "filename"' because + # the "filename" part could confuse the rest + csource, line_directives = _remove_line_directives(csource) + # Remove comments. NOTE: this only work because the cdef() section + # should not contain any string literals (except in line directives)! + def replace_keeping_newlines(m): + return ' ' + m.group().count('\n') * '\n' + csource = _r_comment.sub(replace_keeping_newlines, csource) + # Remove the "#define FOO x" lines + macros = {} + for match in _r_define.finditer(csource): + macroname, macrovalue = match.groups() + macrovalue = macrovalue.replace('\\\n', '').strip() + macros[macroname] = macrovalue + csource = _r_define.sub('', csource) + # + if pycparser.__version__ < '2.14': + csource = _workaround_for_old_pycparser(csource) + # + # BIG HACK: replace WINAPI or __stdcall with "volatile const". + # It doesn't make sense for the return type of a function to be + # "volatile volatile const", so we abuse it to detect __stdcall... + # Hack number 2 is that "int(volatile *fptr)();" is not valid C + # syntax, so we place the "volatile" before the opening parenthesis. 
+ csource = _r_stdcall2.sub(' volatile volatile const(', csource) + csource = _r_stdcall1.sub(' volatile volatile const ', csource) + csource = _r_cdecl.sub(' ', csource) + # + # Replace `extern "Python"` with start/end markers + csource = _preprocess_extern_python(csource) + # + # Now there should not be any string literal left; warn if we get one + _warn_for_string_literal(csource) + # + # Replace "[...]" with "[__dotdotdotarray__]" + csource = _r_partial_array.sub('[__dotdotdotarray__]', csource) + # + # Replace "...}" with "__dotdotdotNUM__}". This construction should + # occur only at the end of enums; at the end of structs we have "...;}" + # and at the end of vararg functions "...);". Also replace "=...[,}]" + # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when + # giving an unknown value. + matches = list(_r_partial_enum.finditer(csource)) + for number, match in enumerate(reversed(matches)): + p = match.start() + if csource[p] == '=': + p2 = csource.find('...', p, match.end()) + assert p2 > p + csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number, + csource[p2+3:]) + else: + assert csource[p:p+3] == '...' + csource = '%s __dotdotdot%d__ %s' % (csource[:p], number, + csource[p+3:]) + # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__" + csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource) + # Replace "float ..." or "double..." with "__dotdotdotfloat__" + csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource) + # Replace all remaining "..." with the same name, "__dotdotdot__", + # which is declared with a typedef for the purpose of C parsing. + csource = csource.replace('...', ' __dotdotdot__ ') + # Finally, put back the line directives + csource = _put_back_line_directives(csource, line_directives) + return csource, macros + +def _common_type_names(csource): + # Look in the source for what looks like usages of types from the + # list of common types. 
A "usage" is approximated here as the + # appearance of the word, minus a "definition" of the type, which + # is the last word in a "typedef" statement. Approximative only + # but should be fine for all the common types. + look_for_words = set(COMMON_TYPES) + look_for_words.add(';') + look_for_words.add(',') + look_for_words.add('(') + look_for_words.add(')') + look_for_words.add('typedef') + words_used = set() + is_typedef = False + paren = 0 + previous_word = '' + for word in _r_words.findall(csource): + if word in look_for_words: + if word == ';': + if is_typedef: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + is_typedef = False + elif word == 'typedef': + is_typedef = True + paren = 0 + elif word == '(': + paren += 1 + elif word == ')': + paren -= 1 + elif word == ',': + if is_typedef and paren == 0: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + else: # word in COMMON_TYPES + words_used.add(word) + previous_word = word + return words_used + + +class Parser(object): + + def __init__(self): + self._declarations = {} + self._included_declarations = set() + self._anonymous_counter = 0 + self._structnode2type = weakref.WeakKeyDictionary() + self._options = {} + self._int_constants = {} + self._recomplete = [] + self._uses_new_feature = None + + def _parse(self, csource): + csource, macros = _preprocess(csource) + # XXX: for more efficiency we would need to poke into the + # internals of CParser... 
the following registers the + # typedefs, because their presence or absence influences the + # parsing itself (but what they are typedef'ed to plays no role) + ctn = _common_type_names(csource) + typenames = [] + for name in sorted(self._declarations): + if name.startswith('typedef '): + name = name[8:] + typenames.append(name) + ctn.discard(name) + typenames += sorted(ctn) + # + csourcelines = [] + csourcelines.append('# 1 ""') + for typename in typenames: + csourcelines.append('typedef int %s;' % typename) + csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,' + ' __dotdotdot__;') + # this forces pycparser to consider the following in the file + # called from line 1 + csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,)) + csourcelines.append(csource) + fullcsource = '\n'.join(csourcelines) + if lock is not None: + lock.acquire() # pycparser is not thread-safe... + try: + ast = _get_parser().parse(fullcsource) + except pycparser.c_parser.ParseError as e: + self.convert_pycparser_error(e, csource) + finally: + if lock is not None: + lock.release() + # csource will be used to find buggy source text + return ast, macros, csource + + def _convert_pycparser_error(self, e, csource): + # xxx look for ":NUM:" at the start of str(e) + # and interpret that as a line number. This will not work if + # the user gives explicit ``# NUM "FILE"`` directives. 
+ line = None + msg = str(e) + match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg) + if match: + linenum = int(match.group(1), 10) + csourcelines = csource.splitlines() + if 1 <= linenum <= len(csourcelines): + line = csourcelines[linenum-1] + return line + + def convert_pycparser_error(self, e, csource): + line = self._convert_pycparser_error(e, csource) + + msg = str(e) + if line: + msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) + else: + msg = 'parse error\n%s' % (msg,) + raise CDefError(msg) + + def parse(self, csource, override=False, packed=False, pack=None, + dllexport=False): + if packed: + if packed != True: + raise ValueError("'packed' should be False or True; use " + "'pack' to give another value") + if pack: + raise ValueError("cannot give both 'pack' and 'packed'") + pack = 1 + elif pack: + if pack & (pack - 1): + raise ValueError("'pack' must be a power of two, not %r" % + (pack,)) + else: + pack = 0 + prev_options = self._options + try: + self._options = {'override': override, + 'packed': pack, + 'dllexport': dllexport} + self._internal_parse(csource) + finally: + self._options = prev_options + + def _internal_parse(self, csource): + ast, macros, csource = self._parse(csource) + # add the macros + self._process_macros(macros) + # find the first "__dotdotdot__" and use that as a separator + # between the repeated typedefs and the real csource + iterator = iter(ast.ext) + for decl in iterator: + if decl.name == '__dotdotdot__': + break + else: + assert 0 + current_decl = None + # + try: + self._inside_extern_python = '__cffi_extern_python_stop' + for decl in iterator: + current_decl = decl + if isinstance(decl, pycparser.c_ast.Decl): + self._parse_decl(decl) + elif isinstance(decl, pycparser.c_ast.Typedef): + if not decl.name: + raise CDefError("typedef does not declare any name", + decl) + quals = 0 + if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and + decl.type.type.names[-1].startswith('__dotdotdot')): + realtype = 
self._get_unknown_type(decl) + elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and + isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and + isinstance(decl.type.type.type, + pycparser.c_ast.IdentifierType) and + decl.type.type.type.names[-1].startswith('__dotdotdot')): + realtype = self._get_unknown_ptr_type(decl) + else: + realtype, quals = self._get_type_and_quals( + decl.type, name=decl.name, partial_length_ok=True, + typedef_example="*(%s *)0" % (decl.name,)) + self._declare('typedef ' + decl.name, realtype, quals=quals) + elif decl.__class__.__name__ == 'Pragma': + pass # skip pragma, only in pycparser 2.15 + else: + raise CDefError("unexpected <%s>: this construct is valid " + "C but not valid in cdef()" % + decl.__class__.__name__, decl) + except CDefError as e: + if len(e.args) == 1: + e.args = e.args + (current_decl,) + raise + except FFIError as e: + msg = self._convert_pycparser_error(e, csource) + if msg: + e.args = (e.args[0] + "\n *** Err: %s" % msg,) + raise + + def _add_constants(self, key, val): + if key in self._int_constants: + if self._int_constants[key] == val: + return # ignore identical double declarations + raise FFIError( + "multiple declarations of constant: %s" % (key,)) + self._int_constants[key] = val + + def _add_integer_constant(self, name, int_str): + int_str = int_str.lower().rstrip("ul") + neg = int_str.startswith('-') + if neg: + int_str = int_str[1:] + # "010" is not valid oct in py3 + if (int_str.startswith("0") and int_str != '0' + and not int_str.startswith("0x")): + int_str = "0o" + int_str[1:] + pyvalue = int(int_str, 0) + if neg: + pyvalue = -pyvalue + self._add_constants(name, pyvalue) + self._declare('macro ' + name, pyvalue) + + def _process_macros(self, macros): + for key, value in macros.items(): + value = value.strip() + if _r_int_literal.match(value): + self._add_integer_constant(key, value) + elif value == '...': + self._declare('macro ' + key, value) + else: + raise CDefError( + 'only supports one of the 
following syntax:\n' + ' #define %s ... (literally dot-dot-dot)\n' + ' #define %s NUMBER (with NUMBER an integer' + ' constant, decimal/hex/octal)\n' + 'got:\n' + ' #define %s %s' + % (key, key, key, value)) + + def _declare_function(self, tp, quals, decl): + tp = self._get_type_pointer(tp, quals) + if self._options.get('dllexport'): + tag = 'dllexport_python ' + elif self._inside_extern_python == '__cffi_extern_python_start': + tag = 'extern_python ' + elif self._inside_extern_python == '__cffi_extern_python_plus_c_start': + tag = 'extern_python_plus_c ' + else: + tag = 'function ' + self._declare(tag + decl.name, tp) + + def _parse_decl(self, decl): + node = decl.type + if isinstance(node, pycparser.c_ast.FuncDecl): + tp, quals = self._get_type_and_quals(node, name=decl.name) + assert isinstance(tp, model.RawFunctionType) + self._declare_function(tp, quals, decl) + else: + if isinstance(node, pycparser.c_ast.Struct): + self._get_struct_union_enum_type('struct', node) + elif isinstance(node, pycparser.c_ast.Union): + self._get_struct_union_enum_type('union', node) + elif isinstance(node, pycparser.c_ast.Enum): + self._get_struct_union_enum_type('enum', node) + elif not decl.name: + raise CDefError("construct does not declare any variable", + decl) + # + if decl.name: + tp, quals = self._get_type_and_quals(node, + partial_length_ok=True) + if tp.is_raw_function: + self._declare_function(tp, quals, decl) + elif (tp.is_integer_type() and + hasattr(decl, 'init') and + hasattr(decl.init, 'value') and + _r_int_literal.match(decl.init.value)): + self._add_integer_constant(decl.name, decl.init.value) + elif (tp.is_integer_type() and + isinstance(decl.init, pycparser.c_ast.UnaryOp) and + decl.init.op == '-' and + hasattr(decl.init.expr, 'value') and + _r_int_literal.match(decl.init.expr.value)): + self._add_integer_constant(decl.name, + '-' + decl.init.expr.value) + elif (tp is model.void_type and + decl.name.startswith('__cffi_extern_python_')): + # hack: `extern 
"Python"` in the C source is replaced + # with "void __cffi_extern_python_start;" and + # "void __cffi_extern_python_stop;" + self._inside_extern_python = decl.name + else: + if self._inside_extern_python !='__cffi_extern_python_stop': + raise CDefError( + "cannot declare constants or " + "variables with 'extern \"Python\"'") + if (quals & model.Q_CONST) and not tp.is_array_type: + self._declare('constant ' + decl.name, tp, quals=quals) + else: + _warn_for_non_extern_non_static_global_variable(decl) + self._declare('variable ' + decl.name, tp, quals=quals) + + def parse_type(self, cdecl): + return self.parse_type_and_quals(cdecl)[0] + + def parse_type_and_quals(self, cdecl): + ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] + assert not macros + exprnode = ast.ext[-1].type.args.params[0] + if isinstance(exprnode, pycparser.c_ast.ID): + raise CDefError("unknown identifier '%s'" % (exprnode.name,)) + return self._get_type_and_quals(exprnode.type) + + def _declare(self, name, obj, included=False, quals=0): + if name in self._declarations: + prevobj, prevquals = self._declarations[name] + if prevobj is obj and prevquals == quals: + return + if not self._options.get('override'): + raise FFIError( + "multiple declarations of %s (for interactive usage, " + "try cdef(xx, override=True))" % (name,)) + assert '__dotdotdot__' not in name.split() + self._declarations[name] = (obj, quals) + if included: + self._included_declarations.add(obj) + + def _extract_quals(self, type): + quals = 0 + if isinstance(type, (pycparser.c_ast.TypeDecl, + pycparser.c_ast.PtrDecl)): + if 'const' in type.quals: + quals |= model.Q_CONST + if 'volatile' in type.quals: + quals |= model.Q_VOLATILE + if 'restrict' in type.quals: + quals |= model.Q_RESTRICT + return quals + + def _get_type_pointer(self, type, quals, declname=None): + if isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + if (isinstance(type, model.StructOrUnionOrEnum) and + 
type.name.startswith('$') and type.name[1:].isdigit() and + type.forcename is None and declname is not None): + return model.NamedPointerType(type, declname, quals) + return model.PointerType(type, quals) + + def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False, + typedef_example=None): + # first, dereference typedefs, if we have it already parsed, we're good + if (isinstance(typenode, pycparser.c_ast.TypeDecl) and + isinstance(typenode.type, pycparser.c_ast.IdentifierType) and + len(typenode.type.names) == 1 and + ('typedef ' + typenode.type.names[0]) in self._declarations): + tp, quals = self._declarations['typedef ' + typenode.type.names[0]] + quals |= self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.ArrayDecl): + # array type + if typenode.dim is None: + length = None + else: + length = self._parse_constant( + typenode.dim, partial_length_ok=partial_length_ok) + # a hack: in 'typedef int foo_t[...][...];', don't use '...' as + # the length but use directly the C expression that would be + # generated by recompiler.py. This lets the typedef be used in + # many more places within recompiler.py + if typedef_example is not None: + if length == '...': + length = '_cffi_array_len(%s)' % (typedef_example,) + typedef_example = "*" + typedef_example + # + tp, quals = self._get_type_and_quals(typenode.type, + partial_length_ok=partial_length_ok, + typedef_example=typedef_example) + return model.ArrayType(tp, length), quals + # + if isinstance(typenode, pycparser.c_ast.PtrDecl): + # pointer type + itemtype, itemquals = self._get_type_and_quals(typenode.type) + tp = self._get_type_pointer(itemtype, itemquals, declname=name) + quals = self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.TypeDecl): + quals = self._extract_quals(typenode) + type = typenode.type + if isinstance(type, pycparser.c_ast.IdentifierType): + # assume a primitive type. 
get it from .names, but reduce + # synonyms to a single chosen combination + names = list(type.names) + if names != ['signed', 'char']: # keep this unmodified + prefixes = {} + while names: + name = names[0] + if name in ('short', 'long', 'signed', 'unsigned'): + prefixes[name] = prefixes.get(name, 0) + 1 + del names[0] + else: + break + # ignore the 'signed' prefix below, and reorder the others + newnames = [] + for prefix in ('unsigned', 'short', 'long'): + for i in range(prefixes.get(prefix, 0)): + newnames.append(prefix) + if not names: + names = ['int'] # implicitly + if names == ['int']: # but kill it if 'short' or 'long' + if 'short' in prefixes or 'long' in prefixes: + names = [] + names = newnames + names + ident = ' '.join(names) + if ident == 'void': + return model.void_type, quals + if ident == '__dotdotdot__': + raise FFIError(':%d: bad usage of "..."' % + typenode.coord.line) + tp0, quals0 = resolve_common_type(self, ident) + return tp0, (quals | quals0) + # + if isinstance(type, pycparser.c_ast.Struct): + # 'struct foobar' + tp = self._get_struct_union_enum_type('struct', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Union): + # 'union foobar' + tp = self._get_struct_union_enum_type('union', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Enum): + # 'enum foobar' + tp = self._get_struct_union_enum_type('enum', type, name) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.FuncDecl): + # a function type + return self._parse_function_type(typenode, name), 0 + # + # nested anonymous structs or unions end up here + if isinstance(typenode, pycparser.c_ast.Struct): + return self._get_struct_union_enum_type('struct', typenode, name, + nested=True), 0 + if isinstance(typenode, pycparser.c_ast.Union): + return self._get_struct_union_enum_type('union', typenode, name, + nested=True), 0 + # + raise FFIError(":%d: bad or unsupported type declaration" % + typenode.coord.line) + + def 
_parse_function_type(self, typenode, funcname=None): + params = list(getattr(typenode.args, 'params', [])) + for i, arg in enumerate(params): + if not hasattr(arg, 'type'): + raise CDefError("%s arg %d: unknown type '%s'" + " (if you meant to use the old C syntax of giving" + " untyped arguments, it is not supported)" + % (funcname or 'in expression', i + 1, + getattr(arg, 'name', '?'))) + ellipsis = ( + len(params) > 0 and + isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and + isinstance(params[-1].type.type, + pycparser.c_ast.IdentifierType) and + params[-1].type.type.names == ['__dotdotdot__']) + if ellipsis: + params.pop() + if not params: + raise CDefError( + "%s: a function with only '(...)' as argument" + " is not correct C" % (funcname or 'in expression')) + args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type)) + for argdeclnode in params] + if not ellipsis and args == [model.void_type]: + args = [] + result, quals = self._get_type_and_quals(typenode.type) + # the 'quals' on the result type are ignored. HACK: we absure them + # to detect __stdcall functions: we textually replace "__stdcall" + # with "volatile volatile const" above. + abi = None + if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway + if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']: + abi = '__stdcall' + return model.RawFunctionType(tuple(args), result, ellipsis, abi) + + def _as_func_arg(self, type, quals): + if isinstance(type, model.ArrayType): + return model.PointerType(type.item, quals) + elif isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + else: + return type + + def _get_struct_union_enum_type(self, kind, type, name=None, nested=False): + # First, a level of caching on the exact 'type' node of the AST. 
+ # This is obscure, but needed because pycparser "unrolls" declarations + # such as "typedef struct { } foo_t, *foo_p" and we end up with + # an AST that is not a tree, but a DAG, with the "type" node of the + # two branches foo_t and foo_p of the trees being the same node. + # It's a bit silly but detecting "DAG-ness" in the AST tree seems + # to be the only way to distinguish this case from two independent + # structs. See test_struct_with_two_usages. + try: + return self._structnode2type[type] + except KeyError: + pass + # + # Note that this must handle parsing "struct foo" any number of + # times and always return the same StructType object. Additionally, + # one of these times (not necessarily the first), the fields of + # the struct can be specified with "struct foo { ...fields... }". + # If no name is given, then we have to create a new anonymous struct + # with no caching; in this case, the fields are either specified + # right now or never. + # + force_name = name + name = type.name + # + # get the type or create it if needed + if name is None: + # 'force_name' is used to guess a more readable name for + # anonymous structs, for the common case "typedef struct { } foo". 
+ if force_name is not None: + explicit_name = '$%s' % force_name + else: + self._anonymous_counter += 1 + explicit_name = '$%d' % self._anonymous_counter + tp = None + else: + explicit_name = name + key = '%s %s' % (kind, name) + tp, _ = self._declarations.get(key, (None, None)) + # + if tp is None: + if kind == 'struct': + tp = model.StructType(explicit_name, None, None, None) + elif kind == 'union': + tp = model.UnionType(explicit_name, None, None, None) + elif kind == 'enum': + if explicit_name == '__dotdotdot__': + raise CDefError("Enums cannot be declared with ...") + tp = self._build_enum_type(explicit_name, type.values) + else: + raise AssertionError("kind = %r" % (kind,)) + if name is not None: + self._declare(key, tp) + else: + if kind == 'enum' and type.values is not None: + raise NotImplementedError( + "enum %s: the '{}' declaration should appear on the first " + "time the enum is mentioned, not later" % explicit_name) + if not tp.forcename: + tp.force_the_name(force_name) + if tp.forcename and '$' in tp.name: + self._declare('anonymous %s' % tp.forcename, tp) + # + self._structnode2type[type] = tp + # + # enums: done here + if kind == 'enum': + return tp + # + # is there a 'type.decls'? If yes, then this is the place in the + # C sources that declare the fields. If no, then just return the + # existing type, possibly still incomplete. + if type.decls is None: + return tp + # + if tp.fldnames is not None: + raise CDefError("duplicate declaration of struct %s" % name) + fldnames = [] + fldtypes = [] + fldbitsize = [] + fldquals = [] + for decl in type.decls: + if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and + ''.join(decl.type.names) == '__dotdotdot__'): + # XXX pycparser is inconsistent: 'names' should be a list + # of strings, but is sometimes just one string. Use + # str.join() as a way to cope with both. 
+ self._make_partial(tp, nested) + continue + if decl.bitsize is None: + bitsize = -1 + else: + bitsize = self._parse_constant(decl.bitsize) + self._partial_length = False + type, fqual = self._get_type_and_quals(decl.type, + partial_length_ok=True) + if self._partial_length: + self._make_partial(tp, nested) + if isinstance(type, model.StructType) and type.partial: + self._make_partial(tp, nested) + fldnames.append(decl.name or '') + fldtypes.append(type) + fldbitsize.append(bitsize) + fldquals.append(fqual) + tp.fldnames = tuple(fldnames) + tp.fldtypes = tuple(fldtypes) + tp.fldbitsize = tuple(fldbitsize) + tp.fldquals = tuple(fldquals) + if fldbitsize != [-1] * len(fldbitsize): + if isinstance(tp, model.StructType) and tp.partial: + raise NotImplementedError("%s: using both bitfields and '...;'" + % (tp,)) + tp.packed = self._options.get('packed') + if tp.completed: # must be re-completed: it is not opaque any more + tp.completed = 0 + self._recomplete.append(tp) + return tp + + def _make_partial(self, tp, nested): + if not isinstance(tp, model.StructOrUnion): + raise CDefError("%s cannot be partial" % (tp,)) + if not tp.has_c_name() and not nested: + raise NotImplementedError("%s is partial but has no C name" %(tp,)) + tp.partial = True + + def _parse_constant(self, exprnode, partial_length_ok=False): + # for now, limited to expressions that are an immediate number + # or positive/negative number + if isinstance(exprnode, pycparser.c_ast.Constant): + s = exprnode.value + if '0' <= s[0] <= '9': + s = s.rstrip('uUlL') + try: + if s.startswith('0'): + return int(s, 8) + else: + return int(s, 10) + except ValueError: + if len(s) > 1: + if s.lower()[0:2] == '0x': + return int(s, 16) + elif s.lower()[0:2] == '0b': + return int(s, 2) + raise CDefError("invalid constant %r" % (s,)) + elif s[0] == "'" and s[-1] == "'" and ( + len(s) == 3 or (len(s) == 4 and s[1] == "\\")): + return ord(s[-2]) + else: + raise CDefError("invalid constant %r" % (s,)) + # + if 
(isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '+'): + return self._parse_constant(exprnode.expr) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '-'): + return -self._parse_constant(exprnode.expr) + # load previously defined int constant + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name in self._int_constants): + return self._int_constants[exprnode.name] + # + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name == '__dotdotdotarray__'): + if partial_length_ok: + self._partial_length = True + return '...' + raise FFIError(":%d: unsupported '[...]' here, cannot derive " + "the actual array length in this context" + % exprnode.coord.line) + # + if isinstance(exprnode, pycparser.c_ast.BinaryOp): + left = self._parse_constant(exprnode.left) + right = self._parse_constant(exprnode.right) + if exprnode.op == '+': + return left + right + elif exprnode.op == '-': + return left - right + elif exprnode.op == '*': + return left * right + elif exprnode.op == '/': + return self._c_div(left, right) + elif exprnode.op == '%': + return left - self._c_div(left, right) * right + elif exprnode.op == '<<': + return left << right + elif exprnode.op == '>>': + return left >> right + elif exprnode.op == '&': + return left & right + elif exprnode.op == '|': + return left | right + elif exprnode.op == '^': + return left ^ right + # + raise FFIError(":%d: unsupported expression: expected a " + "simple numeric constant" % exprnode.coord.line) + + def _c_div(self, a, b): + result = a // b + if ((a < 0) ^ (b < 0)) and (a % b) != 0: + result += 1 + return result + + def _build_enum_type(self, explicit_name, decls): + if decls is not None: + partial = False + enumerators = [] + enumvalues = [] + nextenumvalue = 0 + for enum in decls.enumerators: + if _r_enum_dotdotdot.match(enum.name): + partial = True + continue + if enum.value is not None: + nextenumvalue = self._parse_constant(enum.value) + 
enumerators.append(enum.name) + enumvalues.append(nextenumvalue) + self._add_constants(enum.name, nextenumvalue) + nextenumvalue += 1 + enumerators = tuple(enumerators) + enumvalues = tuple(enumvalues) + tp = model.EnumType(explicit_name, enumerators, enumvalues) + tp.partial = partial + else: # opaque enum + tp = model.EnumType(explicit_name, (), ()) + return tp + + def include(self, other): + for name, (tp, quals) in other._declarations.items(): + if name.startswith('anonymous $enum_$'): + continue # fix for test_anonymous_enum_include + kind = name.split(' ', 1)[0] + if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'): + self._declare(name, tp, included=True, quals=quals) + for k, v in other._int_constants.items(): + self._add_constants(k, v) + + def _get_unknown_type(self, decl): + typenames = decl.type.type.names + if typenames == ['__dotdotdot__']: + return model.unknown_type(decl.name) + + if typenames == ['__dotdotdotint__']: + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef int... %s'" % decl.name + return model.UnknownIntegerType(decl.name) + + if typenames == ['__dotdotdotfloat__']: + # note: not for 'long double' so far + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef float... %s'" % decl.name + return model.UnknownFloatType(decl.name) + + raise FFIError(':%d: unsupported usage of "..." in typedef' + % decl.coord.line) + + def _get_unknown_ptr_type(self, decl): + if decl.type.type.type.names == ['__dotdotdot__']: + return model.unknown_ptr_type(decl.name) + raise FFIError(':%d: unsupported usage of "..." 
in typedef' + % decl.coord.line) diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/error.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/error.py new file mode 100644 index 0000000000000000000000000000000000000000..0a27247c32a381ab7cecedd0f985b781619c1ea5 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/error.py @@ -0,0 +1,31 @@ + +class FFIError(Exception): + __module__ = 'cffi' + +class CDefError(Exception): + __module__ = 'cffi' + def __str__(self): + try: + current_decl = self.args[1] + filename = current_decl.coord.file + linenum = current_decl.coord.line + prefix = '%s:%d: ' % (filename, linenum) + except (AttributeError, TypeError, IndexError): + prefix = '' + return '%s%s' % (prefix, self.args[0]) + +class VerificationError(Exception): + """ An error raised when verification fails + """ + __module__ = 'cffi' + +class VerificationMissing(Exception): + """ An error raised when incomplete structures are passed into + cdef, but no verification has been done + """ + __module__ = 'cffi' + +class PkgConfigError(Exception): + """ An error raised for missing modules in pkg-config + """ + __module__ = 'cffi' diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/ffiplatform.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/ffiplatform.py new file mode 100644 index 0000000000000000000000000000000000000000..adca28f1a480bb04a11977d26457fe8886139043 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/ffiplatform.py @@ -0,0 +1,113 @@ +import sys, os +from .error import VerificationError + + +LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs', + 'extra_objects', 'depends'] + +def get_extension(srcfilename, modname, sources=(), **kwds): + from cffi._shimmed_dist_utils import Extension + allsources = [srcfilename] + for src in sources: + 
allsources.append(os.path.normpath(src)) + return Extension(name=modname, sources=allsources, **kwds) + +def compile(tmpdir, ext, compiler_verbose=0, debug=None): + """Compile a C extension module using distutils.""" + + saved_environ = os.environ.copy() + try: + outputfilename = _build(tmpdir, ext, compiler_verbose, debug) + outputfilename = os.path.abspath(outputfilename) + finally: + # workaround for a distutils bugs where some env vars can + # become longer and longer every time it is used + for key, value in saved_environ.items(): + if os.environ.get(key) != value: + os.environ[key] = value + return outputfilename + +def _build(tmpdir, ext, compiler_verbose=0, debug=None): + # XXX compact but horrible :-( + from cffi._shimmed_dist_utils import Distribution, CompileError, LinkError, set_threshold, set_verbosity + + dist = Distribution({'ext_modules': [ext]}) + dist.parse_config_files() + options = dist.get_option_dict('build_ext') + if debug is None: + debug = sys.flags.debug + options['debug'] = ('ffiplatform', debug) + options['force'] = ('ffiplatform', True) + options['build_lib'] = ('ffiplatform', tmpdir) + options['build_temp'] = ('ffiplatform', tmpdir) + # + try: + old_level = set_threshold(0) or 0 + try: + set_verbosity(compiler_verbose) + dist.run_command('build_ext') + cmd_obj = dist.get_command_obj('build_ext') + [soname] = cmd_obj.get_outputs() + finally: + set_threshold(old_level) + except (CompileError, LinkError) as e: + raise VerificationError('%s: %s' % (e.__class__.__name__, e)) + # + return soname + +try: + from os.path import samefile +except ImportError: + def samefile(f1, f2): + return os.path.abspath(f1) == os.path.abspath(f2) + +def maybe_relative_path(path): + if not os.path.isabs(path): + return path # already relative + dir = path + names = [] + while True: + prevdir = dir + dir, name = os.path.split(prevdir) + if dir == prevdir or not dir: + return path # failed to make it relative + names.append(name) + try: + if samefile(dir, 
os.curdir): + names.reverse() + return os.path.join(*names) + except OSError: + pass + +# ____________________________________________________________ + +try: + int_or_long = (int, long) + import cStringIO +except NameError: + int_or_long = int # Python 3 + import io as cStringIO + +def _flatten(x, f): + if isinstance(x, str): + f.write('%ds%s' % (len(x), x)) + elif isinstance(x, dict): + keys = sorted(x.keys()) + f.write('%dd' % len(keys)) + for key in keys: + _flatten(key, f) + _flatten(x[key], f) + elif isinstance(x, (list, tuple)): + f.write('%dl' % len(x)) + for value in x: + _flatten(value, f) + elif isinstance(x, int_or_long): + f.write('%di' % (x,)) + else: + raise TypeError( + "the keywords to verify() contains unsupported object %r" % (x,)) + +def flatten(x): + f = cStringIO.StringIO() + _flatten(x, f) + return f.getvalue() diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/lock.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/lock.py new file mode 100644 index 0000000000000000000000000000000000000000..db91b7158c4ee9aa653462fe38e79ed1b553db87 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/lock.py @@ -0,0 +1,30 @@ +import sys + +if sys.version_info < (3,): + try: + from thread import allocate_lock + except ImportError: + from dummy_thread import allocate_lock +else: + try: + from _thread import allocate_lock + except ImportError: + from _dummy_thread import allocate_lock + + +##import sys +##l1 = allocate_lock + +##class allocate_lock(object): +## def __init__(self): +## self._real = l1() +## def __enter__(self): +## for i in range(4, 0, -1): +## print sys._getframe(i).f_code +## print +## return self._real.__enter__() +## def __exit__(self, *args): +## return self._real.__exit__(*args) +## def acquire(self, f): +## assert f is False +## return self._real.acquire(f) diff --git 
a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/model.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/model.py new file mode 100644 index 0000000000000000000000000000000000000000..1708f43df3cceef9a1fbd9fcf159c59f523b393e --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/model.py @@ -0,0 +1,618 @@ +import types +import weakref + +from .lock import allocate_lock +from .error import CDefError, VerificationError, VerificationMissing + +# type qualifiers +Q_CONST = 0x01 +Q_RESTRICT = 0x02 +Q_VOLATILE = 0x04 + +def qualify(quals, replace_with): + if quals & Q_CONST: + replace_with = ' const ' + replace_with.lstrip() + if quals & Q_VOLATILE: + replace_with = ' volatile ' + replace_with.lstrip() + if quals & Q_RESTRICT: + # It seems that __restrict is supported by gcc and msvc. + # If you hit some different compiler, add a #define in + # _cffi_include.h for it (and in its copies, documented there) + replace_with = ' __restrict ' + replace_with.lstrip() + return replace_with + + +class BaseTypeByIdentity(object): + is_array_type = False + is_raw_function = False + + def get_c_name(self, replace_with='', context='a C file', quals=0): + result = self.c_name_with_marker + assert result.count('&') == 1 + # some logic duplication with ffi.getctype()... 
:-( + replace_with = replace_with.strip() + if replace_with: + if replace_with.startswith('*') and '&[' in result: + replace_with = '(%s)' % replace_with + elif not replace_with[0] in '[(': + replace_with = ' ' + replace_with + replace_with = qualify(quals, replace_with) + result = result.replace('&', replace_with) + if '$' in result: + raise VerificationError( + "cannot generate '%s' in %s: unknown type name" + % (self._get_c_name(), context)) + return result + + def _get_c_name(self): + return self.c_name_with_marker.replace('&', '') + + def has_c_name(self): + return '$' not in self._get_c_name() + + def is_integer_type(self): + return False + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + try: + BType = ffi._cached_btypes[self] + except KeyError: + BType = self.build_backend_type(ffi, finishlist) + BType2 = ffi._cached_btypes.setdefault(self, BType) + assert BType2 is BType + return BType + + def __repr__(self): + return '<%s>' % (self._get_c_name(),) + + def _get_items(self): + return [(name, getattr(self, name)) for name in self._attrs_] + + +class BaseType(BaseTypeByIdentity): + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self._get_items() == other._get_items()) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.__class__, tuple(self._get_items()))) + + +class VoidType(BaseType): + _attrs_ = () + + def __init__(self): + self.c_name_with_marker = 'void&' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_void_type') + +void_type = VoidType() + + +class BasePrimitiveType(BaseType): + def is_complex_type(self): + return False + + +class PrimitiveType(BasePrimitiveType): + _attrs_ = ('name',) + + ALL_PRIMITIVE_TYPES = { + 'char': 'c', + 'short': 'i', + 'int': 'i', + 'long': 'i', + 'long long': 'i', + 'signed char': 'i', + 'unsigned char': 'i', + 'unsigned short': 'i', + 'unsigned int': 'i', + 'unsigned long': 'i', + 'unsigned 
long long': 'i', + 'float': 'f', + 'double': 'f', + 'long double': 'f', + 'float _Complex': 'j', + 'double _Complex': 'j', + '_Bool': 'i', + # the following types are not primitive in the C sense + 'wchar_t': 'c', + 'char16_t': 'c', + 'char32_t': 'c', + 'int8_t': 'i', + 'uint8_t': 'i', + 'int16_t': 'i', + 'uint16_t': 'i', + 'int32_t': 'i', + 'uint32_t': 'i', + 'int64_t': 'i', + 'uint64_t': 'i', + 'int_least8_t': 'i', + 'uint_least8_t': 'i', + 'int_least16_t': 'i', + 'uint_least16_t': 'i', + 'int_least32_t': 'i', + 'uint_least32_t': 'i', + 'int_least64_t': 'i', + 'uint_least64_t': 'i', + 'int_fast8_t': 'i', + 'uint_fast8_t': 'i', + 'int_fast16_t': 'i', + 'uint_fast16_t': 'i', + 'int_fast32_t': 'i', + 'uint_fast32_t': 'i', + 'int_fast64_t': 'i', + 'uint_fast64_t': 'i', + 'intptr_t': 'i', + 'uintptr_t': 'i', + 'intmax_t': 'i', + 'uintmax_t': 'i', + 'ptrdiff_t': 'i', + 'size_t': 'i', + 'ssize_t': 'i', + } + + def __init__(self, name): + assert name in self.ALL_PRIMITIVE_TYPES + self.name = name + self.c_name_with_marker = name + '&' + + def is_char_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'c' + def is_integer_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'i' + def is_float_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'f' + def is_complex_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'j' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_primitive_type', self.name) + + +class UnknownIntegerType(BasePrimitiveType): + _attrs_ = ('name',) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def is_integer_type(self): + return True + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("integer type '%s' can only be used after " + "compilation" % self.name) + +class UnknownFloatType(BasePrimitiveType): + _attrs_ = ('name', ) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + 
+ def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("float type '%s' can only be used after " + "compilation" % self.name) + + +class BaseFunctionType(BaseType): + _attrs_ = ('args', 'result', 'ellipsis', 'abi') + + def __init__(self, args, result, ellipsis, abi=None): + self.args = args + self.result = result + self.ellipsis = ellipsis + self.abi = abi + # + reprargs = [arg._get_c_name() for arg in self.args] + if self.ellipsis: + reprargs.append('...') + reprargs = reprargs or ['void'] + replace_with = self._base_pattern % (', '.join(reprargs),) + if abi is not None: + replace_with = replace_with[:1] + abi + ' ' + replace_with[1:] + self.c_name_with_marker = ( + self.result.c_name_with_marker.replace('&', replace_with)) + + +class RawFunctionType(BaseFunctionType): + # Corresponds to a C type like 'int(int)', which is the C type of + # a function, but not a pointer-to-function. The backend has no + # notion of such a type; it's used temporarily by parsing. + _base_pattern = '(&)(%s)' + is_raw_function = True + + def build_backend_type(self, ffi, finishlist): + raise CDefError("cannot render the type %r: it is a function " + "type, not a pointer-to-function type" % (self,)) + + def as_function_pointer(self): + return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi) + + +class FunctionPtrType(BaseFunctionType): + _base_pattern = '(*&)(%s)' + + def build_backend_type(self, ffi, finishlist): + result = self.result.get_cached_btype(ffi, finishlist) + args = [] + for tp in self.args: + args.append(tp.get_cached_btype(ffi, finishlist)) + abi_args = () + if self.abi == "__stdcall": + if not self.ellipsis: # __stdcall ignored for variadic funcs + try: + abi_args = (ffi._backend.FFI_STDCALL,) + except AttributeError: + pass + return global_cache(self, ffi, 'new_function_type', + tuple(args), result, self.ellipsis, *abi_args) + + def as_raw_function(self): + return RawFunctionType(self.args, self.result, self.ellipsis, self.abi) + + 
+class PointerType(BaseType): + _attrs_ = ('totype', 'quals') + + def __init__(self, totype, quals=0): + self.totype = totype + self.quals = quals + extra = " *&" + if totype.is_array_type: + extra = "(%s)" % (extra.lstrip(),) + extra = qualify(quals, extra) + self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) + + def build_backend_type(self, ffi, finishlist): + BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True) + return global_cache(self, ffi, 'new_pointer_type', BItem) + +voidp_type = PointerType(void_type) + +def ConstPointerType(totype): + return PointerType(totype, Q_CONST) + +const_voidp_type = ConstPointerType(void_type) + + +class NamedPointerType(PointerType): + _attrs_ = ('totype', 'name') + + def __init__(self, totype, name, quals=0): + PointerType.__init__(self, totype, quals) + self.name = name + self.c_name_with_marker = name + '&' + + +class ArrayType(BaseType): + _attrs_ = ('item', 'length') + is_array_type = True + + def __init__(self, item, length): + self.item = item + self.length = length + # + if length is None: + brackets = '&[]' + elif length == '...': + brackets = '&[/*...*/]' + else: + brackets = '&[%s]' % length + self.c_name_with_marker = ( + self.item.c_name_with_marker.replace('&', brackets)) + + def length_is_unknown(self): + return isinstance(self.length, str) + + def resolve_length(self, newlength): + return ArrayType(self.item, newlength) + + def build_backend_type(self, ffi, finishlist): + if self.length_is_unknown(): + raise CDefError("cannot render the type %r: unknown length" % + (self,)) + self.item.get_cached_btype(ffi, finishlist) # force the item BType + BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) + return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) + +char_array_type = ArrayType(PrimitiveType('char'), None) + + +class StructOrUnionOrEnum(BaseTypeByIdentity): + _attrs_ = ('name',) + forcename = None + + def build_c_name_with_marker(self): + 
name = self.forcename or '%s %s' % (self.kind, self.name) + self.c_name_with_marker = name + '&' + + def force_the_name(self, forcename): + self.forcename = forcename + self.build_c_name_with_marker() + + def get_official_name(self): + assert self.c_name_with_marker.endswith('&') + return self.c_name_with_marker[:-1] + + +class StructOrUnion(StructOrUnionOrEnum): + fixedlayout = None + completed = 0 + partial = False + packed = 0 + + def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): + self.name = name + self.fldnames = fldnames + self.fldtypes = fldtypes + self.fldbitsize = fldbitsize + self.fldquals = fldquals + self.build_c_name_with_marker() + + def anonymous_struct_fields(self): + if self.fldtypes is not None: + for name, type in zip(self.fldnames, self.fldtypes): + if name == '' and isinstance(type, StructOrUnion): + yield type + + def enumfields(self, expand_anonymous_struct_union=True): + fldquals = self.fldquals + if fldquals is None: + fldquals = (0,) * len(self.fldnames) + for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, + self.fldbitsize, fldquals): + if (name == '' and isinstance(type, StructOrUnion) + and expand_anonymous_struct_union): + # nested anonymous struct/union + for result in type.enumfields(): + yield result + else: + yield (name, type, bitsize, quals) + + def force_flatten(self): + # force the struct or union to have a declaration that lists + # directly all fields returned by enumfields(), flattening + # nested anonymous structs/unions. 
+ names = [] + types = [] + bitsizes = [] + fldquals = [] + for name, type, bitsize, quals in self.enumfields(): + names.append(name) + types.append(type) + bitsizes.append(bitsize) + fldquals.append(quals) + self.fldnames = tuple(names) + self.fldtypes = tuple(types) + self.fldbitsize = tuple(bitsizes) + self.fldquals = tuple(fldquals) + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, + can_delay) + if not can_delay: + self.finish_backend_type(ffi, finishlist) + return BType + + def finish_backend_type(self, ffi, finishlist): + if self.completed: + if self.completed != 2: + raise NotImplementedError("recursive structure declaration " + "for '%s'" % (self.name,)) + return + BType = ffi._cached_btypes[self] + # + self.completed = 1 + # + if self.fldtypes is None: + pass # not completing it: it's an opaque struct + # + elif self.fixedlayout is None: + fldtypes = [tp.get_cached_btype(ffi, finishlist) + for tp in self.fldtypes] + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) + extra_flags = () + if self.packed: + if self.packed == 1: + extra_flags = (8,) # SF_PACKED + else: + extra_flags = (0, self.packed) + ffi._backend.complete_struct_or_union(BType, lst, self, + -1, -1, *extra_flags) + # + else: + fldtypes = [] + fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout + for i in range(len(self.fldnames)): + fsize = fieldsize[i] + ftype = self.fldtypes[i] + # + if isinstance(ftype, ArrayType) and ftype.length_is_unknown(): + # fix the length to match the total size + BItemType = ftype.item.get_cached_btype(ffi, finishlist) + nlen, nrest = divmod(fsize, ffi.sizeof(BItemType)) + if nrest != 0: + self._verification_error( + "field '%s.%s' has a bogus size?" 
% ( + self.name, self.fldnames[i] or '{}')) + ftype = ftype.resolve_length(nlen) + self.fldtypes = (self.fldtypes[:i] + (ftype,) + + self.fldtypes[i+1:]) + # + BFieldType = ftype.get_cached_btype(ffi, finishlist) + if isinstance(ftype, ArrayType) and ftype.length is None: + assert fsize == 0 + else: + bitemsize = ffi.sizeof(BFieldType) + if bitemsize != fsize: + self._verification_error( + "field '%s.%s' is declared as %d bytes, but is " + "really %d bytes" % (self.name, + self.fldnames[i] or '{}', + bitemsize, fsize)) + fldtypes.append(BFieldType) + # + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs)) + ffi._backend.complete_struct_or_union(BType, lst, self, + totalsize, totalalignment) + self.completed = 2 + + def _verification_error(self, msg): + raise VerificationError(msg) + + def check_not_partial(self): + if self.partial and self.fixedlayout is None: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + finishlist.append(self) + # + return global_cache(self, ffi, 'new_%s_type' % self.kind, + self.get_official_name(), key=self) + + +class StructType(StructOrUnion): + kind = 'struct' + + +class UnionType(StructOrUnion): + kind = 'union' + + +class EnumType(StructOrUnionOrEnum): + kind = 'enum' + partial = False + partial_resolved = False + + def __init__(self, name, enumerators, enumvalues, baseinttype=None): + self.name = name + self.enumerators = enumerators + self.enumvalues = enumvalues + self.baseinttype = baseinttype + self.build_c_name_with_marker() + + def force_the_name(self, forcename): + StructOrUnionOrEnum.force_the_name(self, forcename) + if self.forcename is None: + name = self.get_official_name() + self.forcename = '$' + name.replace(' ', '_') + + def check_not_partial(self): + if self.partial and not self.partial_resolved: + raise VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + 
base_btype = self.build_baseinttype(ffi, finishlist) + return global_cache(self, ffi, 'new_enum_type', + self.get_official_name(), + self.enumerators, self.enumvalues, + base_btype, key=self) + + def build_baseinttype(self, ffi, finishlist): + if self.baseinttype is not None: + return self.baseinttype.get_cached_btype(ffi, finishlist) + # + if self.enumvalues: + smallest_value = min(self.enumvalues) + largest_value = max(self.enumvalues) + else: + import warnings + try: + # XXX! The goal is to ensure that the warnings.warn() + # will not suppress the warning. We want to get it + # several times if we reach this point several times. + __warningregistry__.clear() + except NameError: + pass + warnings.warn("%r has no values explicitly defined; " + "guessing that it is equivalent to 'unsigned int'" + % self._get_c_name()) + smallest_value = largest_value = 0 + if smallest_value < 0: # needs a signed type + sign = 1 + candidate1 = PrimitiveType("int") + candidate2 = PrimitiveType("long") + else: + sign = 0 + candidate1 = PrimitiveType("unsigned int") + candidate2 = PrimitiveType("unsigned long") + btype1 = candidate1.get_cached_btype(ffi, finishlist) + btype2 = candidate2.get_cached_btype(ffi, finishlist) + size1 = ffi.sizeof(btype1) + size2 = ffi.sizeof(btype2) + if (smallest_value >= ((-1) << (8*size1-1)) and + largest_value < (1 << (8*size1-sign))): + return btype1 + if (smallest_value >= ((-1) << (8*size2-1)) and + largest_value < (1 << (8*size2-sign))): + return btype2 + raise CDefError("%s values don't all fit into either 'long' " + "or 'unsigned long'" % self._get_c_name()) + +def unknown_type(name, structname=None): + if structname is None: + structname = '$%s' % name + tp = StructType(structname, None, None, None) + tp.force_the_name(name) + tp.origin = "unknown_type" + return tp + +def unknown_ptr_type(name, structname=None): + if structname is None: + structname = '$$%s' % name + tp = StructType(structname, None, None, None) + return NamedPointerType(tp, 
name) + + +global_lock = allocate_lock() +_typecache_cffi_backend = weakref.WeakValueDictionary() + +def get_typecache(backend): + # returns _typecache_cffi_backend if backend is the _cffi_backend + # module, or type(backend).__typecache if backend is an instance of + # CTypesBackend (or some FakeBackend class during tests) + if isinstance(backend, types.ModuleType): + return _typecache_cffi_backend + with global_lock: + if not hasattr(type(backend), '__typecache'): + type(backend).__typecache = weakref.WeakValueDictionary() + return type(backend).__typecache + +def global_cache(srctype, ffi, funcname, *args, **kwds): + key = kwds.pop('key', (funcname, args)) + assert not kwds + try: + return ffi._typecache[key] + except KeyError: + pass + try: + res = getattr(ffi._backend, funcname)(*args) + except NotImplementedError as e: + raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e)) + # note that setdefault() on WeakValueDictionary is not atomic + # and contains a rare bug (http://bugs.python.org/issue19542); + # we have to use a lock and do it ourselves + cache = ffi._typecache + with global_lock: + res1 = cache.get(key) + if res1 is None: + cache[key] = res + return res + else: + return res1 + +def pointer_cache(ffi, BType): + return global_cache('?', ffi, 'new_pointer_type', BType) + +def attach_exception_info(e, name): + if e.args and type(e.args[0]) is str: + e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:] diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/parse_c_type.h b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/parse_c_type.h new file mode 100644 index 0000000000000000000000000000000000000000..84e4ef85659eb63e6453d8af9f024f1866182342 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/parse_c_type.h @@ -0,0 +1,181 @@ + +/* This part is from file 'cffi/parse_c_type.h'. 
It is copied at the + beginning of C sources generated by CFFI's ffi.set_source(). */ + +typedef void *_cffi_opcode_t; + +#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) +#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) +#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) + +#define _CFFI_OP_PRIMITIVE 1 +#define _CFFI_OP_POINTER 3 +#define _CFFI_OP_ARRAY 5 +#define _CFFI_OP_OPEN_ARRAY 7 +#define _CFFI_OP_STRUCT_UNION 9 +#define _CFFI_OP_ENUM 11 +#define _CFFI_OP_FUNCTION 13 +#define _CFFI_OP_FUNCTION_END 15 +#define _CFFI_OP_NOOP 17 +#define _CFFI_OP_BITFIELD 19 +#define _CFFI_OP_TYPENAME 21 +#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs +#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs +#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg) +#define _CFFI_OP_CONSTANT 29 +#define _CFFI_OP_CONSTANT_INT 31 +#define _CFFI_OP_GLOBAL_VAR 33 +#define _CFFI_OP_DLOPEN_FUNC 35 +#define _CFFI_OP_DLOPEN_CONST 37 +#define _CFFI_OP_GLOBAL_VAR_F 39 +#define _CFFI_OP_EXTERN_PYTHON 41 + +#define _CFFI_PRIM_VOID 0 +#define _CFFI_PRIM_BOOL 1 +#define _CFFI_PRIM_CHAR 2 +#define _CFFI_PRIM_SCHAR 3 +#define _CFFI_PRIM_UCHAR 4 +#define _CFFI_PRIM_SHORT 5 +#define _CFFI_PRIM_USHORT 6 +#define _CFFI_PRIM_INT 7 +#define _CFFI_PRIM_UINT 8 +#define _CFFI_PRIM_LONG 9 +#define _CFFI_PRIM_ULONG 10 +#define _CFFI_PRIM_LONGLONG 11 +#define _CFFI_PRIM_ULONGLONG 12 +#define _CFFI_PRIM_FLOAT 13 +#define _CFFI_PRIM_DOUBLE 14 +#define _CFFI_PRIM_LONGDOUBLE 15 + +#define _CFFI_PRIM_WCHAR 16 +#define _CFFI_PRIM_INT8 17 +#define _CFFI_PRIM_UINT8 18 +#define _CFFI_PRIM_INT16 19 +#define _CFFI_PRIM_UINT16 20 +#define _CFFI_PRIM_INT32 21 +#define _CFFI_PRIM_UINT32 22 +#define _CFFI_PRIM_INT64 23 +#define _CFFI_PRIM_UINT64 24 +#define _CFFI_PRIM_INTPTR 25 +#define _CFFI_PRIM_UINTPTR 26 +#define _CFFI_PRIM_PTRDIFF 27 +#define _CFFI_PRIM_SIZE 28 +#define _CFFI_PRIM_SSIZE 29 +#define _CFFI_PRIM_INT_LEAST8 30 +#define _CFFI_PRIM_UINT_LEAST8 
31 +#define _CFFI_PRIM_INT_LEAST16 32 +#define _CFFI_PRIM_UINT_LEAST16 33 +#define _CFFI_PRIM_INT_LEAST32 34 +#define _CFFI_PRIM_UINT_LEAST32 35 +#define _CFFI_PRIM_INT_LEAST64 36 +#define _CFFI_PRIM_UINT_LEAST64 37 +#define _CFFI_PRIM_INT_FAST8 38 +#define _CFFI_PRIM_UINT_FAST8 39 +#define _CFFI_PRIM_INT_FAST16 40 +#define _CFFI_PRIM_UINT_FAST16 41 +#define _CFFI_PRIM_INT_FAST32 42 +#define _CFFI_PRIM_UINT_FAST32 43 +#define _CFFI_PRIM_INT_FAST64 44 +#define _CFFI_PRIM_UINT_FAST64 45 +#define _CFFI_PRIM_INTMAX 46 +#define _CFFI_PRIM_UINTMAX 47 +#define _CFFI_PRIM_FLOATCOMPLEX 48 +#define _CFFI_PRIM_DOUBLECOMPLEX 49 +#define _CFFI_PRIM_CHAR16 50 +#define _CFFI_PRIM_CHAR32 51 + +#define _CFFI__NUM_PRIM 52 +#define _CFFI__UNKNOWN_PRIM (-1) +#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) +#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) + +#define _CFFI__IO_FILE_STRUCT (-1) + + +struct _cffi_global_s { + const char *name; + void *address; + _cffi_opcode_t type_op; + void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown + // OP_CPYTHON_BLTN_*: addr of direct function +}; + +struct _cffi_getconst_s { + unsigned long long value; + const struct _cffi_type_context_s *ctx; + int gindex; +}; + +struct _cffi_struct_union_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_STRUCT_UNION + int flags; // _CFFI_F_* flags below + size_t size; + int alignment; + int first_field_index; // -> _cffi_fields array + int num_fields; +}; +#define _CFFI_F_UNION 0x01 // is a union, not a struct +#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the + // "standard layout" or if some are missing +#define _CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct +#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include() +#define _CFFI_F_OPAQUE 0x10 // opaque + +struct _cffi_field_s { + const char *name; + size_t field_offset; + size_t field_size; + _cffi_opcode_t field_type_op; +}; + +struct _cffi_enum_s { + const char *name; + int type_index; // -> 
_cffi_types, on a OP_ENUM + int type_prim; // _CFFI_PRIM_xxx + const char *enumerators; // comma-delimited string +}; + +struct _cffi_typename_s { + const char *name; + int type_index; /* if opaque, points to a possibly artificial + OP_STRUCT which is itself opaque */ +}; + +struct _cffi_type_context_s { + _cffi_opcode_t *types; + const struct _cffi_global_s *globals; + const struct _cffi_field_s *fields; + const struct _cffi_struct_union_s *struct_unions; + const struct _cffi_enum_s *enums; + const struct _cffi_typename_s *typenames; + int num_globals; + int num_struct_unions; + int num_enums; + int num_typenames; + const char *const *includes; + int num_types; + int flags; /* future extension */ +}; + +struct _cffi_parse_info_s { + const struct _cffi_type_context_s *ctx; + _cffi_opcode_t *output; + unsigned int output_size; + size_t error_location; + const char *error_message; +}; + +struct _cffi_externpy_s { + const char *name; + size_t size_of_result; + void *reserved1, *reserved2; +}; + +#ifdef _CFFI_INTERNAL +static int parse_c_type(struct _cffi_parse_info_s *info, const char *input); +static int search_in_globals(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +static int search_in_struct_unions(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +#endif diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/pkgconfig.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/pkgconfig.py new file mode 100644 index 0000000000000000000000000000000000000000..5c93f15a60e6f904b2dd108d6e22044a5890bcb4 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/pkgconfig.py @@ -0,0 +1,121 @@ +# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi +import sys, os, subprocess + +from .error import PkgConfigError + + +def merge_flags(cfg1, cfg2): + """Merge values from cffi config flags 
cfg2 to cf1 + + Example: + merge_flags({"libraries": ["one"]}, {"libraries": ["two"]}) + {"libraries": ["one", "two"]} + """ + for key, value in cfg2.items(): + if key not in cfg1: + cfg1[key] = value + else: + if not isinstance(cfg1[key], list): + raise TypeError("cfg1[%r] should be a list of strings" % (key,)) + if not isinstance(value, list): + raise TypeError("cfg2[%r] should be a list of strings" % (key,)) + cfg1[key].extend(value) + return cfg1 + + +def call(libname, flag, encoding=sys.getfilesystemencoding()): + """Calls pkg-config and returns the output if found + """ + a = ["pkg-config", "--print-errors"] + a.append(flag) + a.append(libname) + try: + pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except EnvironmentError as e: + raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),)) + + bout, berr = pc.communicate() + if pc.returncode != 0: + try: + berr = berr.decode(encoding) + except Exception: + pass + raise PkgConfigError(berr.strip()) + + if sys.version_info >= (3,) and not isinstance(bout, str): # Python 3.x + try: + bout = bout.decode(encoding) + except UnicodeDecodeError: + raise PkgConfigError("pkg-config %s %s returned bytes that cannot " + "be decoded with encoding %r:\n%r" % + (flag, libname, encoding, bout)) + + if os.altsep != '\\' and '\\' in bout: + raise PkgConfigError("pkg-config %s %s returned an unsupported " + "backslash-escaped output:\n%r" % + (flag, libname, bout)) + return bout + + +def flags_from_pkgconfig(libs): + r"""Return compiler line flags for FFI.set_source based on pkg-config output + + Usage + ... + ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"]) + + If pkg-config is installed on build machine, then arguments include_dirs, + library_dirs, libraries, define_macros, extra_compile_args and + extra_link_args are extended with an output of pkg-config for libfoo and + libbar. + + Raises PkgConfigError in case the pkg-config call fails. 
+ """ + + def get_include_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-I")] + + def get_library_dirs(string): + return [x[2:] for x in string.split() if x.startswith("-L")] + + def get_libraries(string): + return [x[2:] for x in string.split() if x.startswith("-l")] + + # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils + def get_macros(string): + def _macro(x): + x = x[2:] # drop "-D" + if '=' in x: + return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar") + else: + return (x, None) # "-Dfoo" => ("foo", None) + return [_macro(x) for x in string.split() if x.startswith("-D")] + + def get_other_cflags(string): + return [x for x in string.split() if not x.startswith("-I") and + not x.startswith("-D")] + + def get_other_libs(string): + return [x for x in string.split() if not x.startswith("-L") and + not x.startswith("-l")] + + # return kwargs for given libname + def kwargs(libname): + fse = sys.getfilesystemencoding() + all_cflags = call(libname, "--cflags") + all_libs = call(libname, "--libs") + return { + "include_dirs": get_include_dirs(all_cflags), + "library_dirs": get_library_dirs(all_libs), + "libraries": get_libraries(all_libs), + "define_macros": get_macros(all_cflags), + "extra_compile_args": get_other_cflags(all_cflags), + "extra_link_args": get_other_libs(all_libs), + } + + # merge all arguments together + ret = {} + for libname in libs: + lib_flags = kwargs(libname) + merge_flags(ret, lib_flags) + return ret diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/recompiler.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/recompiler.py new file mode 100644 index 0000000000000000000000000000000000000000..4167bc05f97826f9452fdd2dd04eaf94f55034b2 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/recompiler.py @@ -0,0 +1,1581 @@ +import os, sys, io +from . 
import ffiplatform, model +from .error import VerificationError +from .cffi_opcode import * + +VERSION_BASE = 0x2601 +VERSION_EMBEDDED = 0x2701 +VERSION_CHAR16CHAR32 = 0x2801 + +USE_LIMITED_API = (sys.platform != 'win32' or sys.version_info < (3, 0) or + sys.version_info >= (3, 5)) + + +class GlobalExpr: + def __init__(self, name, address, type_op, size=0, check_value=0): + self.name = name + self.address = address + self.type_op = type_op + self.size = size + self.check_value = check_value + + def as_c_expr(self): + return ' { "%s", (void *)%s, %s, (void *)%s },' % ( + self.name, self.address, self.type_op.as_c_expr(), self.size) + + def as_python_expr(self): + return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, + self.check_value) + +class FieldExpr: + def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): + self.name = name + self.field_offset = field_offset + self.field_size = field_size + self.fbitsize = fbitsize + self.field_type_op = field_type_op + + def as_c_expr(self): + spaces = " " * len(self.name) + return (' { "%s", %s,\n' % (self.name, self.field_offset) + + ' %s %s,\n' % (spaces, self.field_size) + + ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) + + def as_python_expr(self): + raise NotImplementedError + + def as_field_python_expr(self): + if self.field_type_op.op == OP_NOOP: + size_expr = '' + elif self.field_type_op.op == OP_BITFIELD: + size_expr = format_four_bytes(self.fbitsize) + else: + raise NotImplementedError + return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), + size_expr, + self.name) + +class StructUnionExpr: + def __init__(self, name, type_index, flags, size, alignment, comment, + first_field_index, c_fields): + self.name = name + self.type_index = type_index + self.flags = flags + self.size = size + self.alignment = alignment + self.comment = comment + self.first_field_index = first_field_index + self.c_fields = c_fields + + def as_c_expr(self): + return (' { "%s", %d, %s,' % 
(self.name, self.type_index, self.flags) + + '\n %s, %s, ' % (self.size, self.alignment) + + '%d, %d ' % (self.first_field_index, len(self.c_fields)) + + ('/* %s */ ' % self.comment if self.comment else '') + + '},') + + def as_python_expr(self): + flags = eval(self.flags, G_FLAGS) + fields_expr = [c_field.as_field_python_expr() + for c_field in self.c_fields] + return "(b'%s%s%s',%s)" % ( + format_four_bytes(self.type_index), + format_four_bytes(flags), + self.name, + ','.join(fields_expr)) + +class EnumExpr: + def __init__(self, name, type_index, size, signed, allenums): + self.name = name + self.type_index = type_index + self.size = size + self.signed = signed + self.allenums = allenums + + def as_c_expr(self): + return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' + ' "%s" },' % (self.name, self.type_index, + self.size, self.signed, self.allenums)) + + def as_python_expr(self): + prim_index = { + (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, + (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, + (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, + (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, + }[self.size, self.signed] + return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), + format_four_bytes(prim_index), + self.name, self.allenums) + +class TypenameExpr: + def __init__(self, name, type_index): + self.name = name + self.type_index = type_index + + def as_c_expr(self): + return ' { "%s", %d },' % (self.name, self.type_index) + + def as_python_expr(self): + return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) + + +# ____________________________________________________________ + + +class Recompiler: + _num_externpy = 0 + + def __init__(self, ffi, module_name, target_is_python=False): + self.ffi = ffi + self.module_name = module_name + self.target_is_python = target_is_python + self._version = VERSION_BASE + + def needs_version(self, ver): + self._version = max(self._version, ver) + + def collect_type_table(self): + self._typesdict = {} + self._generate("collecttype") + # + 
all_decls = sorted(self._typesdict, key=str) + # + # prepare all FUNCTION bytecode sequences first + self.cffi_types = [] + for tp in all_decls: + if tp.is_raw_function: + assert self._typesdict[tp] is None + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + for tp1 in tp.args: + assert isinstance(tp1, (model.VoidType, + model.BasePrimitiveType, + model.PointerType, + model.StructOrUnionOrEnum, + model.FunctionPtrType)) + if self._typesdict[tp1] is None: + self._typesdict[tp1] = len(self.cffi_types) + self.cffi_types.append(tp1) # placeholder + self.cffi_types.append('END') # placeholder + # + # prepare all OTHER bytecode sequences + for tp in all_decls: + if not tp.is_raw_function and self._typesdict[tp] is None: + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + if tp.is_array_type and tp.length is not None: + self.cffi_types.append('LEN') # placeholder + assert None not in self._typesdict.values() + # + # collect all structs and unions and enums + self._struct_unions = {} + self._enums = {} + for tp in all_decls: + if isinstance(tp, model.StructOrUnion): + self._struct_unions[tp] = None + elif isinstance(tp, model.EnumType): + self._enums[tp] = None + for i, tp in enumerate(sorted(self._struct_unions, + key=lambda tp: tp.name)): + self._struct_unions[tp] = i + for i, tp in enumerate(sorted(self._enums, + key=lambda tp: tp.name)): + self._enums[tp] = i + # + # emit all bytecode sequences now + for tp in all_decls: + method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) + method(tp, self._typesdict[tp]) + # + # consistency check + for op in self.cffi_types: + assert isinstance(op, CffiOp) + self.cffi_types = tuple(self.cffi_types) # don't change any more + + def _enum_fields(self, tp): + # When producing C, expand all anonymous struct/union fields. + # That's necessary to have C code checking the offsets of the + # individual fields contained in them. 
When producing Python, + # don't do it and instead write it like it is, with the + # corresponding fields having an empty name. Empty names are + # recognized at runtime when we import the generated Python + # file. + expand_anonymous_struct_union = not self.target_is_python + return tp.enumfields(expand_anonymous_struct_union) + + def _do_collect_type(self, tp): + if not isinstance(tp, model.BaseTypeByIdentity): + if isinstance(tp, tuple): + for x in tp: + self._do_collect_type(x) + return + if tp not in self._typesdict: + self._typesdict[tp] = None + if isinstance(tp, model.FunctionPtrType): + self._do_collect_type(tp.as_raw_function()) + elif isinstance(tp, model.StructOrUnion): + if tp.fldtypes is not None and ( + tp not in self.ffi._parser._included_declarations): + for name1, tp1, _, _ in self._enum_fields(tp): + self._do_collect_type(self._field_type(tp, name1, tp1)) + else: + for _, x in tp._get_items(): + self._do_collect_type(x) + + def _generate(self, step_name): + lst = self.ffi._parser._declarations.items() + for name, (tp, quals) in sorted(lst): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in recompile(): %r" % name) + try: + self._current_quals = quals + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + # ---------- + + ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] + + def collect_step_tables(self): + # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. 
+ self._lsts = {} + for step_name in self.ALL_STEPS: + self._lsts[step_name] = [] + self._seen_struct_unions = set() + self._generate("ctx") + self._add_missing_struct_unions() + # + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if step_name != "field": + lst.sort(key=lambda entry: entry.name) + self._lsts[step_name] = tuple(lst) # don't change any more + # + # check for a possible internal inconsistency: _cffi_struct_unions + # should have been generated with exactly self._struct_unions + lst = self._lsts["struct_union"] + for tp, i in self._struct_unions.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._struct_unions) + # same with enums + lst = self._lsts["enum"] + for tp, i in self._enums.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._enums) + + # ---------- + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self, f, preamble): + if self.target_is_python: + assert preamble is None + self.write_py_source_to_f(f) + else: + assert preamble is not None + self.write_c_source_to_f(f, preamble) + + def _rel_readlines(self, filename): + g = open(os.path.join(os.path.dirname(__file__), filename), 'r') + lines = g.readlines() + g.close() + return lines + + def write_c_source_to_f(self, f, preamble): + self._f = f + prnt = self._prnt + if self.ffi._embedding is not None: + prnt('#define _CFFI_USE_EMBEDDING') + if not USE_LIMITED_API: + prnt('#define _CFFI_NO_LIMITED_API') + # + # first the '#include' (actually done by inlining the file's content) + lines = self._rel_readlines('_cffi_include.h') + i = lines.index('#include "parse_c_type.h"\n') + lines[i:i+1] = self._rel_readlines('parse_c_type.h') + prnt(''.join(lines)) + # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define 
_CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {') + self._print_string_literal_in_array(self.ffi._embedding) + prnt('0 };') + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + i = lines.index('#include "_cffi_errors.h"\n') + lines[i:i+1] = self._rel_readlines('_cffi_errors.h') + prnt(''.join(lines)) + self.needs_version(VERSION_EMBEDDED) + # + # then paste the C source given by the user, verbatim. + prnt('/************************************************************/') + prnt() + prnt(preamble) + prnt() + prnt('/************************************************************/') + prnt() + # + # the declaration of '_cffi_types' + prnt('static void *_cffi_types[] = {') + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + for i, op in enumerate(self.cffi_types): + comment = '' + if i in typeindex2type: + comment = ' // ' + typeindex2type[i]._get_c_name() + prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) + if not self.cffi_types: + prnt(' 0') + prnt('};') + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. 
+ self._seen_constants = set() + self._generate("decl") + # + # the declaration of '_cffi_globals' and '_cffi_typenames' + nums = {} + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + nums[step_name] = len(lst) + if nums[step_name] > 0: + prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( + step_name, step_name)) + for entry in lst: + prnt(entry.as_c_expr()) + prnt('};') + prnt() + # + # the declaration of '_cffi_includes' + if self.ffi._included_ffis: + prnt('static const char * const _cffi_includes[] = {') + for ffi_to_include in self.ffi._included_ffis: + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is None: + raise VerificationError( + "not implemented yet: ffi.include() of a Python-based " + "ffi inside a C-based ffi") + prnt(' "%s",' % (included_module_name,)) + prnt(' NULL') + prnt('};') + prnt() + # + # the declaration of '_cffi_type_context' + prnt('static const struct _cffi_type_context_s _cffi_type_context = {') + prnt(' _cffi_types,') + for step_name in self.ALL_STEPS: + if nums[step_name] > 0: + prnt(' _cffi_%ss,' % step_name) + else: + prnt(' NULL, /* no %ss */' % step_name) + for step_name in self.ALL_STEPS: + if step_name != "field": + prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) + if self.ffi._included_ffis: + prnt(' _cffi_includes,') + else: + prnt(' NULL, /* no includes */') + prnt(' %d, /* num_types */' % (len(self.cffi_types),)) + flags = 0 + if self._num_externpy > 0 or self.ffi._embedding is not None: + flags |= 1 # set to mean that we use extern "Python" + prnt(' %d, /* flags */' % flags) + prnt('};') + prnt() + # + # the init function + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility push(default) /* for -fvisibility= */') + prnt('#endif') + prnt() + 
prnt('#ifdef PYPY_VERSION') + prnt('PyMODINIT_FUNC') + prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) + prnt('{') + if flags & 1: + prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') + prnt(' _cffi_call_python_org = ' + '(void(*)(struct _cffi_externpy_s *, char *))p[1];') + prnt(' }') + prnt(' p[0] = (const void *)0x%x;' % self._version) + prnt(' p[1] = &_cffi_type_context;') + prnt('#if PY_MAJOR_VERSION >= 3') + prnt(' return NULL;') + prnt('#endif') + prnt('}') + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + prnt('# ifdef _MSC_VER') + prnt(' PyMODINIT_FUNC') + prnt('# if PY_MAJOR_VERSION >= 3') + prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) + prnt('# else') + prnt(' init%s(void) { }' % (base_module_name,)) + prnt('# endif') + prnt('# endif') + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % (base_module_name,)) + prnt('{') + prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#else') + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % (base_module_name,)) + prnt('{') + prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( + self.module_name, self._version)) + prnt('}') + prnt('#endif') + prnt() + prnt('#ifdef __GNUC__') + prnt('# pragma GCC visibility pop') + prnt('#endif') + self._version = None + + def _to_py(self, x): + if isinstance(x, str): + return "b'%s'" % (x,) + if isinstance(x, (list, tuple)): + rep = [self._to_py(item) for item in x] + if len(rep) == 1: + rep.append('') + return "(%s)" % (','.join(rep),) + return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. 
+ + def write_py_source_to_f(self, f): + self._f = f + prnt = self._prnt + # + # header + prnt("# auto-generated file") + prnt("import _cffi_backend") + # + # the 'import' of the included ffis + num_includes = len(self.ffi._included_ffis or ()) + for i in range(num_includes): + ffi_to_include = self.ffi._included_ffis[i] + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is not None: + raise VerificationError( + "not implemented yet: ffi.include() of a C-based " + "ffi inside a Python-based ffi") + prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) + prnt() + prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) + prnt(" _version = 0x%x," % (self._version,)) + self._version = None + # + # the '_types' keyword argument + self.cffi_types = tuple(self.cffi_types) # don't change any more + types_lst = [op.as_python_bytes() for op in self.cffi_types] + prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + # + # the keyword arguments from ALL_STEPS + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if len(lst) > 0 and step_name != "field": + prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) + # + # the '_includes' keyword argument + if num_includes > 0: + prnt(' _includes = (%s,),' % ( + ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) + # + # the footer + prnt(')') + + # ---------- + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! 
:-) + return self._typesdict[type] + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type(): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif isinstance(tp, model.UnknownFloatType): + # don't check with is_float_type(): it may be a 'long + # double' here, and _cffi_to_c_double would loose precision + converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) + else: + cname = tp.get_c_name('') + converter = '(%s)_cffi_to_c_%s' % (cname, + tp.name.replace(' ', '_')) + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif (isinstance(tp, model.StructOrUnionOrEnum) or + isinstance(tp, model.BasePrimitiveType)): + # a struct (not a struct pointer) as a function argument; + # or, a complex (the same code works) + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def 
_convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? ' + '(%s)alloca((size_t)datasize) : NULL;' % ( + tovar, tp.get_c_name(''))) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif isinstance(tp, model.UnknownFloatType): + return '_cffi_from_c_double(%s)' % (var,) + elif tp.name != 'long double' and not tp.is_complex_type(): + cname = tp.name.replace(' ', '_') + if cname in ('char16_t', 'char32_t'): + self.needs_version(VERSION_CHAR16CHAR32) + return '_cffi_from_c_%s(%s)' % (cname, var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # 
---------- + # typedefs + + def _typedef_type(self, tp, name): + return self._global_type(tp, "(*(%s *)0)" % (name,)) + + def _generate_cpy_typedef_collecttype(self, tp, name): + self._do_collect_type(self._typedef_type(tp, name)) + + def _generate_cpy_typedef_decl(self, tp, name): + pass + + def _typedef_ctx(self, tp, name): + type_index = self._typesdict[tp] + self._lsts["typename"].append(TypenameExpr(name, type_index)) + + def _generate_cpy_typedef_ctx(self, tp, name): + tp = self._typedef_type(tp, name) + self._typedef_ctx(tp, name) + if getattr(tp, "origin", None) == "unknown_type": + self._struct_ctx(tp, tp.name, approxname=None) + elif isinstance(tp, model.NamedPointerType): + self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, + named_ptr=tp) + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + self._do_collect_type(tp.as_raw_function()) + if tp.ellipsis and not self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_function_decl(self, tp, name): + assert not self.target_is_python + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_constant_decl(tp, name) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + # + # ------------------------------ + # the 'd' version of the function, only for addressof(lib, 'func') + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arguments.append(type.get_c_name(' x%d' % i, context)) + call_arguments.append('x%d' % i) + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + 
name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments) + prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) + prnt('{') + call_arguments = ', '.join(call_arguments) + result_code = 'return ' + if isinstance(tp.result, model.VoidType): + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, call_arguments)) + prnt('}') + # + prnt('#ifndef PYPY_VERSION') # ------------------------------ + # + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' x%d' % i, context) + prnt(' %s;' % arg) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + result_decl = ' %s;' % tp.result.get_c_name(' result', context) + prnt(result_decl) + prnt(' PyObject *pyresult;') + else: + result_decl = None + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % ( + name, len(rng), len(rng), + ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + call_arguments = ['x%d' % i for i in range(len(tp.args))] + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + 
self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + # + prnt('#else') # ------------------------------ + # + # the PyPy version: need to replace struct/union arguments with + # pointers, and if the result is a struct/union, insert a first + # arg that is a pointer to the result. We also do that for + # complex args and return type. + def need_indirection(type): + return (isinstance(type, model.StructOrUnion) or + (isinstance(type, model.PrimitiveType) and + type.is_complex_type())) + difference = False + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + indirection = '' + if need_indirection(type): + indirection = '*' + difference = True + arg = type.get_c_name(' %sx%d' % (indirection, i), context) + arguments.append(arg) + call_arguments.append('%sx%d' % (indirection, i)) + tp_result = tp.result + if need_indirection(tp_result): + context = 'result of %s' % name + arg = tp_result.get_c_name(' *result', context) + arguments.insert(0, arg) + tp_result = model.void_type + result_decl = None + result_code = '*result = ' + difference = True + if difference: + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, + repr_arguments) + prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) + prnt('{') + if result_decl: + prnt(result_decl) + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + if result_decl: + prnt(' return result;') + prnt('}') + else: + prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) + # + prnt('#endif') # ------------------------------ + prnt() + + def _generate_cpy_function_ctx(self, tp, name): + if tp.ellipsis and not 
self.target_is_python: + self._generate_cpy_constant_ctx(tp, name) + return + type_index = self._typesdict[tp.as_raw_function()] + numargs = len(tp.args) + if self.target_is_python: + meth_kind = OP_DLOPEN_FUNC + elif numargs == 0: + meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' + elif numargs == 1: + meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' + else: + meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' + self._lsts["global"].append( + GlobalExpr(name, '_cffi_f_%s' % name, + CffiOp(meth_kind, type_index), + size='_cffi_d_%s' % name)) + + # ---------- + # named structs or unions + + def _field_type(self, tp_struct, field_name, tp_field): + if isinstance(tp_field, model.ArrayType): + actual_length = tp_field.length + if actual_length == '...': + ptr_struct_name = tp_struct.get_c_name('*') + actual_length = '_cffi_array_len(((%s)0)->%s)' % ( + ptr_struct_name, field_name) + tp_item = self._field_type(tp_struct, '%s[0]' % field_name, + tp_field.item) + tp_field = model.ArrayType(tp_item, actual_length) + return tp_field + + def _struct_collecttype(self, tp): + self._do_collect_type(tp) + if self.target_is_python: + # also requires nested anon struct/unions in ABI mode, recursively + for fldtype in tp.anonymous_struct_fields(): + self._struct_collecttype(fldtype) + + def _struct_decl(self, tp, cname, approxname): + if tp.fldtypes is None: + return + prnt = self._prnt + checkfuncname = '_cffi_checkfld_%s' % (approxname,) + prnt('_CFFI_UNUSED_FN') + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in self._enum_fields(tp): + try: + if ftype.is_integer_type() or fbitsize >= 0: + # accept all integers, but complain on float or double + if fname != '': + prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " + "an integer */" % (fname, cname, fname)) + continue + # only accept exactly the type declared, except that '[]' + # is interpreted as a 
'*' and so will match any array length. + # (It would also match '*', but that's harder to detect...) + while (isinstance(ftype, model.ArrayType) + and (ftype.length is None or ftype.length == '...')): + ftype = ftype.item + fname = fname + '[0]' + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) + prnt() + + def _struct_ctx(self, tp, cname, approxname, named_ptr=None): + type_index = self._typesdict[tp] + reason_for_not_expanding = None + flags = [] + if isinstance(tp, model.UnionType): + flags.append("_CFFI_F_UNION") + if tp.fldtypes is None: + flags.append("_CFFI_F_OPAQUE") + reason_for_not_expanding = "opaque" + if (tp not in self.ffi._parser._included_declarations and + (named_ptr is None or + named_ptr not in self.ffi._parser._included_declarations)): + if tp.fldtypes is None: + pass # opaque + elif tp.partial or any(tp.anonymous_struct_fields()): + pass # field layout obtained silently from the C compiler + else: + flags.append("_CFFI_F_CHECK_FIELDS") + if tp.packed: + if tp.packed > 1: + raise NotImplementedError( + "%r is declared with 'pack=%r'; only 0 or 1 are " + "supported in API mode (try to use \"...;\", which " + "does not require a 'pack' declaration)" % + (tp, tp.packed)) + flags.append("_CFFI_F_PACKED") + else: + flags.append("_CFFI_F_EXTERNAL") + reason_for_not_expanding = "external" + flags = '|'.join(flags) or '0' + c_fields = [] + if reason_for_not_expanding is None: + enumfields = list(self._enum_fields(tp)) + for fldname, fldtype, fbitsize, fqual in enumfields: + fldtype = self._field_type(tp, fldname, fldtype) + self._check_not_opaque(fldtype, + "field '%s.%s'" % (tp.name, fldname)) + # cname is None for _add_missing_struct_unions() only + op = OP_NOOP + if fbitsize >= 0: + op = OP_BITFIELD + size = '%d /* 
bits */' % fbitsize + elif cname is None or ( + isinstance(fldtype, model.ArrayType) and + fldtype.length is None): + size = '(size_t)-1' + else: + size = 'sizeof(((%s)0)->%s)' % ( + tp.get_c_name('*') if named_ptr is None + else named_ptr.name, + fldname) + if cname is None or fbitsize >= 0: + offset = '(size_t)-1' + elif named_ptr is not None: + offset = '((char *)&((%s)0)->%s) - (char *)0' % ( + named_ptr.name, fldname) + else: + offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) + c_fields.append( + FieldExpr(fldname, offset, size, fbitsize, + CffiOp(op, self._typesdict[fldtype]))) + first_field_index = len(self._lsts["field"]) + self._lsts["field"].extend(c_fields) + # + if cname is None: # unknown name, for _add_missing_struct_unions + size = '(size_t)-2' + align = -2 + comment = "unnamed" + else: + if named_ptr is not None: + size = 'sizeof(*(%s)0)' % (named_ptr.name,) + align = '-1 /* unknown alignment */' + else: + size = 'sizeof(%s)' % (cname,) + align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) + comment = None + else: + size = '(size_t)-1' + align = -1 + first_field_index = -1 + comment = reason_for_not_expanding + self._lsts["struct_union"].append( + StructUnionExpr(tp.name, type_index, flags, size, align, comment, + first_field_index, c_fields)) + self._seen_struct_unions.add(tp) + + def _check_not_opaque(self, tp, location): + while isinstance(tp, model.ArrayType): + tp = tp.item + if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None: + raise TypeError( + "%s is of an opaque type (not declared in cdef())" % location) + + def _add_missing_struct_unions(self): + # not very nice, but some struct declarations might be missing + # because they don't have any known C name. Check that they are + # not partial (we can't complete or verify them!) and emit them + # anonymously. 
+ lst = list(self._struct_unions.items()) + lst.sort(key=lambda tp_order: tp_order[1]) + for tp, order in lst: + if tp not in self._seen_struct_unions: + if tp.partial: + raise NotImplementedError("internal inconsistency: %r is " + "partial but was not seen at " + "this point" % (tp,)) + if tp.name.startswith('$') and tp.name[1:].isdigit(): + approxname = tp.name[1:] + elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': + approxname = 'FILE' + self._typedef_ctx(tp, 'FILE') + else: + raise NotImplementedError("internal inconsistency: %r" % + (tp,)) + self._struct_ctx(tp, None, approxname) + + def _generate_cpy_struct_collecttype(self, tp, name): + self._struct_collecttype(tp) + _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype + + def _struct_names(self, tp): + cname = tp.get_c_name('') + if ' ' in cname: + return cname, cname.replace(' ', '_') + else: + return cname, '_' + cname + + def _generate_cpy_struct_decl(self, tp, name): + self._struct_decl(tp, *self._struct_names(tp)) + _generate_cpy_union_decl = _generate_cpy_struct_decl + + def _generate_cpy_struct_ctx(self, tp, name): + self._struct_ctx(tp, *self._struct_names(tp)) + _generate_cpy_union_ctx = _generate_cpy_struct_ctx + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. + + def _generate_cpy_anonymous_collecttype(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_collecttype(tp, name) + else: + self._struct_collecttype(tp) + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp) + else: + self._struct_decl(tp, name, 'typedef_' + name) + + def _generate_cpy_anonymous_ctx(self, tp, name): + if isinstance(tp, model.EnumType): + self._enum_ctx(tp, name) + else: + self._struct_ctx(tp, name, 'typedef_' + name) + + # ---------- + # constants, declared with "static const ..." 
+ + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + check_value=None): + if (category, name) in self._seen_constants: + raise VerificationError( + "duplicate declaration of %s '%s'" % (category, name)) + self._seen_constants.add((category, name)) + # + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + if is_int: + prnt('static int %s(unsigned long long *o)' % funcname) + prnt('{') + prnt(' int n = (%s) <= 0;' % (name,)) + prnt(' *o = (unsigned long long)((%s) | 0);' + ' /* check that %s is an integer */' % (name, name)) + if check_value is not None: + if check_value > 0: + check_value = '%dU' % (check_value,) + prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) + prnt(' n |= 2;') + prnt(' return n;') + prnt('}') + else: + assert check_value is None + prnt('static void %s(char *o)' % funcname) + prnt('{') + prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = tp.is_integer_type() + if not is_int or self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + def _generate_cpy_constant_ctx(self, tp, name): + if not self.target_is_python and tp.is_integer_type(): + type_op = CffiOp(OP_CONSTANT_INT, -1) + else: + if self.target_is_python: + const_kind = OP_DLOPEN_CONST + else: + const_kind = OP_CONSTANT + type_index = self._typesdict[tp] + type_op = CffiOp(const_kind, type_index) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op)) + + # ---------- + # enums + + def _generate_cpy_enum_collecttype(self, tp, name): + self._do_collect_type(tp) + + def _generate_cpy_enum_decl(self, tp, name=None): + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator) + + def _enum_ctx(self, tp, cname): + type_index = self._typesdict[tp] + type_op = CffiOp(OP_ENUM, -1) + if 
self.target_is_python: + tp.check_not_partial() + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._lsts["global"].append( + GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, + check_value=enumvalue)) + # + if cname is not None and '$' not in cname and not self.target_is_python: + size = "sizeof(%s)" % cname + signed = "((%s)-1) <= 0" % cname + else: + basetp = tp.build_baseinttype(self.ffi, []) + size = self.ffi.sizeof(basetp) + signed = int(int(self.ffi.cast(basetp, -1)) < 0) + allenums = ",".join(tp.enumerators) + self._lsts["enum"].append( + EnumExpr(tp.name, type_index, size, signed, allenums)) + + def _generate_cpy_enum_ctx(self, tp, name): + self._enum_ctx(tp, tp._get_c_name()) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_collecttype(self, tp, name): + pass + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + def _generate_cpy_macro_ctx(self, tp, name): + if tp == '...': + if self.target_is_python: + raise VerificationError( + "cannot use the syntax '...' in '#define %s ...' 
when " + "using the ABI mode" % (name,)) + check_value = None + else: + check_value = tp # an integer + type_op = CffiOp(OP_CONSTANT_INT, -1) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op, + check_value=check_value)) + + # ---------- + # global variables + + def _global_type(self, tp, global_name): + if isinstance(tp, model.ArrayType): + actual_length = tp.length + if actual_length == '...': + actual_length = '_cffi_array_len(%s)' % (global_name,) + tp_item = self._global_type(tp.item, '%s[0]' % global_name) + tp = model.ArrayType(tp_item, actual_length) + return tp + + def _generate_cpy_variable_collecttype(self, tp, name): + self._do_collect_type(self._global_type(tp, name)) + + def _generate_cpy_variable_decl(self, tp, name): + prnt = self._prnt + tp = self._global_type(tp, name) + if isinstance(tp, model.ArrayType) and tp.length is None: + tp = tp.item + ampersand = '' + else: + ampersand = '&' + # This code assumes that casts from "tp *" to "void *" is a + # no-op, i.e. a function that returns a "tp *" can be called + # as if it returned a "void *". This should be generally true + # on any modern machine. The only exception to that rule (on + # uncommon architectures, and as far as I can tell) might be + # if 'tp' were a function type, but that is not possible here. + # (If 'tp' is a function _pointer_ type, then casts from "fn_t + # **" to "void *" are again no-ops, as far as I can tell.) 
+ decl = '*_cffi_var_%s(void)' % (name,) + prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) + prnt('{') + prnt(' return %s(%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_cpy_variable_ctx(self, tp, name): + tp = self._global_type(tp, name) + type_index = self._typesdict[tp] + if self.target_is_python: + op = OP_GLOBAL_VAR + else: + op = OP_GLOBAL_VAR_F + self._lsts["global"].append( + GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) + + # ---------- + # extern "Python" + + def _generate_cpy_extern_python_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + self._do_collect_type(tp) + _generate_cpy_dllexport_python_collecttype = \ + _generate_cpy_extern_python_plus_c_collecttype = \ + _generate_cpy_extern_python_collecttype + + def _extern_python_decl(self, tp, name, tag_and_space): + prnt = self._prnt + if isinstance(tp.result, model.VoidType): + size_of_result = '0' + else: + context = 'result of %s' % name + size_of_result = '(int)sizeof(%s)' % ( + tp.result.get_c_name('', context),) + prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name) + prnt(' { "%s.%s", %s, 0, 0 };' % ( + self.module_name, name, size_of_result)) + prnt() + # + arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' a%d' % i, context) + arguments.append(arg) + # + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments + # + def may_need_128_bits(tp): + return (isinstance(tp, model.PrimitiveType) and + tp.name == 'long double') + # + size_of_a = max(len(tp.args)*8, 8) + if may_need_128_bits(tp.result): + size_of_a = max(size_of_a, 16) + if isinstance(tp.result, model.StructOrUnion): + size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( + tp.result.get_c_name(''), size_of_a, + tp.result.get_c_name(''), size_of_a) + prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments))) + prnt('{') + prnt(' char a[%s];' % size_of_a) + prnt(' char *p = a;') + for i, type in enumerate(tp.args): + arg = 'a%d' % i + if (isinstance(type, model.StructOrUnion) or + may_need_128_bits(type)): + arg = '&' + arg + type = model.PointerType(type) + prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg)) + prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name) + if not isinstance(tp.result, model.VoidType): + prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),)) + prnt('}') + prnt() + self._num_externpy += 1 + + def _generate_cpy_extern_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'static ') + + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ') + + def _generate_cpy_extern_python_plus_c_decl(self, tp, name): + self._extern_python_decl(tp, name, '') + + def _generate_cpy_extern_python_ctx(self, tp, name): + if self.target_is_python: + raise VerificationError( + "cannot use 'extern \"Python\"' in the ABI mode") + if tp.ellipsis: + raise NotImplementedError("a vararg function is extern \"Python\"") + type_index = self._typesdict[tp] + type_op = CffiOp(OP_EXTERN_PYTHON, type_index) + self._lsts["global"].append( + GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + + _generate_cpy_dllexport_python_ctx = \ + _generate_cpy_extern_python_plus_c_ctx = \ + _generate_cpy_extern_python_ctx + + def _print_string_literal_in_array(self, s): + prnt = self._prnt + prnt('// # NB. 
this is not a string because of a size limit in MSVC') + if not isinstance(s, bytes): # unicode + s = s.encode('utf-8') # -> bytes + else: + s.decode('utf-8') # got bytes, check for valid utf-8 + try: + s.decode('ascii') + except UnicodeDecodeError: + s = b'# -*- encoding: utf8 -*-\n' + s + for line in s.splitlines(True): + comment = line + if type('//') is bytes: # python2 + line = map(ord, line) # make a list of integers + else: # python3 + # type(line) is bytes, which enumerates like a list of integers + comment = ascii(comment)[1:-1] + prnt(('// ' + comment).rstrip()) + printed_line = '' + for c in line: + if len(printed_line) >= 76: + prnt(printed_line) + printed_line = '' + printed_line += '%d,' % (c,) + prnt(printed_line) + + # ---------- + # emitting the opcodes for individual types + + def _emit_bytecode_VoidType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) + + def _emit_bytecode_PrimitiveType(self, tp, index): + prim_index = PRIMITIVE_TO_INDEX[tp.name] + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) + + def _emit_bytecode_UnknownIntegerType(self, tp, index): + s = ('_cffi_prim_int(sizeof(%s), (\n' + ' ((%s)-1) | 0 /* check that %s is an integer type */\n' + ' ) <= 0)' % (tp.name, tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_UnknownFloatType(self, tp, index): + s = ('_cffi_prim_float(sizeof(%s) *\n' + ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' + ' )' % (tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_RawFunctionType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) + index += 1 + for tp1 in tp.args: + realindex = self._typesdict[tp1] + if index != realindex: + if isinstance(tp1, model.PrimitiveType): + self._emit_bytecode_PrimitiveType(tp1, index) + else: + self.cffi_types[index] = CffiOp(OP_NOOP, realindex) + index += 1 + flags = int(tp.ellipsis) + if tp.abi is not 
None: + if tp.abi == '__stdcall': + flags |= 2 + else: + raise NotImplementedError("abi=%r" % (tp.abi,)) + self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) + + def _emit_bytecode_PointerType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) + + _emit_bytecode_ConstPointerType = _emit_bytecode_PointerType + _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType + + def _emit_bytecode_FunctionPtrType(self, tp, index): + raw = tp.as_raw_function() + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) + + def _emit_bytecode_ArrayType(self, tp, index): + item_index = self._typesdict[tp.item] + if tp.length is None: + self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) + elif tp.length == '...': + raise VerificationError( + "type %s badly placed: the '...' array length can only be " + "used on global arrays or on fields of structures" % ( + str(tp).replace('/*...*/', '...'),)) + else: + assert self.cffi_types[index + 1] == 'LEN' + self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) + self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) + + def _emit_bytecode_StructType(self, tp, index): + struct_index = self._struct_unions[tp] + self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) + _emit_bytecode_UnionType = _emit_bytecode_StructType + + def _emit_bytecode_EnumType(self, tp, index): + enum_index = self._enums[tp] + self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose): + if verbose: + print("generating %s" % (target_file,)) + recompiler = Recompiler(ffi, module_name, + target_is_python=(preamble is None)) + recompiler.collect_type_table() + recompiler.collect_step_tables() + f = NativeIO() + 
recompiler.write_source_to_f(f, preamble) + output = f.getvalue() + try: + with open(target_file, 'r') as f1: + if f1.read(len(output) + 1) != output: + raise IOError + if verbose: + print("(already up-to-date)") + return False # already up-to-date + except IOError: + tmp_file = '%s.~%d' % (target_file, os.getpid()) + with open(tmp_file, 'w') as f1: + f1.write(output) + try: + os.rename(tmp_file, target_file) + except OSError: + os.unlink(target_file) + os.rename(tmp_file, target_file) + return True + +def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False): + assert preamble is not None + return _make_c_or_py_source(ffi, module_name, preamble, target_c_file, + verbose) + +def make_py_source(ffi, module_name, target_py_file, verbose=False): + return _make_c_or_py_source(ffi, module_name, None, target_py_file, + verbose) + +def _modname_to_file(outputdir, modname, extension): + parts = modname.split('.') + try: + os.makedirs(os.path.join(outputdir, *parts[:-1])) + except OSError: + pass + parts[-1] += extension + return os.path.join(outputdir, *parts), parts + + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
+ from cffi._shimmed_dist_utils import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from cffi._shimmed_dist_utils import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from cffi._shimmed_dist_utils import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + +def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, + c_file=None, source_extension='.c', extradir=None, + compiler_verbose=1, target=None, debug=None, **kwds): + if not isinstance(module_name, str): + module_name = module_name.encode('ascii') + if ffi._windows_unicode: + ffi._apply_windows_unicode(kwds) + if preamble is not None: + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) + if c_file is None: + c_file, parts = _modname_to_file(tmpdir, module_name, + source_extension) + if extradir: + parts = [extradir] + parts + ext_c_file = os.path.join(*parts) + else: + ext_c_file = c_file + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + # + ext = ffiplatform.get_extension(ext_c_file, 
module_name, **kwds) + updated = make_c_source(ffi, module_name, preamble, c_file, + verbose=compiler_verbose) + if call_c_compiler: + patchlist = [] + cwd = os.getcwd() + try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) + if compiler_verbose: + if tmpdir == '.': + msg = 'the current directory is' + else: + msg = 'setting the current directory to' + print('%s %r' % (msg, os.path.abspath(tmpdir))) + os.chdir(tmpdir) + outputfilename = ffiplatform.compile('.', ext, + compiler_verbose, debug) + finally: + os.chdir(cwd) + _unpatch_meths(patchlist) + return outputfilename + else: + return ext, updated + else: + if c_file is None: + c_file, _ = _modname_to_file(tmpdir, module_name, '.py') + updated = make_py_source(ffi, module_name, c_file, + verbose=compiler_verbose) + if call_c_compiler: + return c_file + else: + return None, updated + diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/setuptools_ext.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/setuptools_ext.py new file mode 100644 index 0000000000000000000000000000000000000000..681b49d7ad964d9de4b6b32a24eec6fcebddf7ed --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/setuptools_ext.py @@ -0,0 +1,216 @@ +import os +import sys + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +def error(msg): + from cffi._shimmed_dist_utils import DistutilsSetupError + raise DistutilsSetupError(msg) + + +def execfile(filename, glob): + # We use execfile() (here rewritten for Python 3) instead of + # __import__() to load the build script. The problem with + # a normal import is that in some packages, the intermediate + # __init__.py files may already try to import the file that + # we are generating. 
+ with open(filename) as f: + src = f.read() + src += '\n' # Python 2.6 compatibility + code = compile(src, filename, 'exec') + exec(code, glob, glob) + + +def add_cffi_module(dist, mod_spec): + from cffi.api import FFI + + if not isinstance(mod_spec, basestring): + error("argument to 'cffi_modules=...' must be a str or a list of str," + " not %r" % (type(mod_spec).__name__,)) + mod_spec = str(mod_spec) + try: + build_file_name, ffi_var_name = mod_spec.split(':') + except ValueError: + error("%r must be of the form 'path/build.py:ffi_variable'" % + (mod_spec,)) + if not os.path.exists(build_file_name): + ext = '' + rewritten = build_file_name.replace('.', '/') + '.py' + if os.path.exists(rewritten): + ext = ' (rewrite cffi_modules to [%r])' % ( + rewritten + ':' + ffi_var_name,) + error("%r does not name an existing file%s" % (build_file_name, ext)) + + mod_vars = {'__name__': '__cffi__', '__file__': build_file_name} + execfile(build_file_name, mod_vars) + + try: + ffi = mod_vars[ffi_var_name] + except KeyError: + error("%r: object %r not found in module" % (mod_spec, + ffi_var_name)) + if not isinstance(ffi, FFI): + ffi = ffi() # maybe it's a function instead of directly an ffi + if not isinstance(ffi, FFI): + error("%r is not an FFI instance (got %r)" % (mod_spec, + type(ffi).__name__)) + if not hasattr(ffi, '_assigned_source'): + error("%r: the set_source() method was not called" % (mod_spec,)) + module_name, source, source_extension, kwds = ffi._assigned_source + if ffi._windows_unicode: + kwds = kwds.copy() + ffi._apply_windows_unicode(kwds) + + if source is None: + _add_py_module(dist, ffi, module_name) + else: + _add_c_module(dist, ffi, module_name, source, source_extension, kwds) + +def _set_py_limited_api(Extension, kwds): + """ + Add py_limited_api to kwds if setuptools >= 26 is in use. + Do not alter the setting if it already exists. + Setuptools takes care of ignoring the flag on Python 2 and PyPy. 
+ + CPython itself should ignore the flag in a debugging version + (by not listing .abi3.so in the extensions it supports), but + it doesn't so far, creating troubles. That's why we check + for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent + of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401) + + On Windows, with CPython <= 3.4, it's better not to use py_limited_api + because virtualenv *still* doesn't copy PYTHON3.DLL on these versions. + Recently (2020) we started shipping only >= 3.5 wheels, though. So + we'll give it another try and set py_limited_api on Windows >= 3.5. + """ + from cffi import recompiler + + if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount') + and recompiler.USE_LIMITED_API): + import setuptools + try: + setuptools_major_version = int(setuptools.__version__.partition('.')[0]) + if setuptools_major_version >= 26: + kwds['py_limited_api'] = True + except ValueError: # certain development versions of setuptools + # If we don't know the version number of setuptools, we + # try to set 'py_limited_api' anyway. At worst, we get a + # warning. + kwds['py_limited_api'] = True + return kwds + +def _add_c_module(dist, ffi, module_name, source, source_extension, kwds): + # We are a setuptools extension. Need this build_ext for py_limited_api. + from setuptools.command.build_ext import build_ext + from cffi._shimmed_dist_utils import Extension, log, mkpath + from cffi import recompiler + + allsources = ['$PLACEHOLDER'] + allsources.extend(kwds.pop('sources', [])) + kwds = _set_py_limited_api(Extension, kwds) + ext = Extension(name=module_name, sources=allsources, **kwds) + + def make_mod(tmpdir, pre_run=None): + c_file = os.path.join(tmpdir, module_name + source_extension) + log.info("generating cffi module %r" % c_file) + mkpath(tmpdir) + # a setuptools-only, API-only hook: called with the "ext" and "ffi" + # arguments just before we turn the ffi into C code. 
To use it, + # subclass the 'distutils.command.build_ext.build_ext' class and + # add a method 'def pre_run(self, ext, ffi)'. + if pre_run is not None: + pre_run(ext, ffi) + updated = recompiler.make_c_source(ffi, module_name, source, c_file) + if not updated: + log.info("already up-to-date") + return c_file + + if dist.ext_modules is None: + dist.ext_modules = [] + dist.ext_modules.append(ext) + + base_class = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class): + def run(self): + if ext.sources[0] == '$PLACEHOLDER': + pre_run = getattr(self, 'pre_run', None) + ext.sources[0] = make_mod(self.build_temp, pre_run) + base_class.run(self) + dist.cmdclass['build_ext'] = build_ext_make_mod + # NB. multiple runs here will create multiple 'build_ext_make_mod' + # classes. Even in this case the 'build_ext' command should be + # run once; but just in case, the logic above does nothing if + # called again. + + +def _add_py_module(dist, ffi, module_name): + from setuptools.command.build_py import build_py + from setuptools.command.build_ext import build_ext + from cffi._shimmed_dist_utils import log, mkpath + from cffi import recompiler + + def generate_mod(py_file): + log.info("generating cffi module %r" % py_file) + mkpath(os.path.dirname(py_file)) + updated = recompiler.make_py_source(ffi, module_name, py_file) + if not updated: + log.info("already up-to-date") + + base_class = dist.cmdclass.get('build_py', build_py) + class build_py_make_mod(base_class): + def run(self): + base_class.run(self) + module_path = module_name.split('.') + module_path[-1] += '.py' + generate_mod(os.path.join(self.build_lib, *module_path)) + def get_source_files(self): + # This is called from 'setup.py sdist' only. Exclude + # the generate .py module in this case. 
+ saved_py_modules = self.py_modules + try: + if saved_py_modules: + self.py_modules = [m for m in saved_py_modules + if m != module_name] + return base_class.get_source_files(self) + finally: + self.py_modules = saved_py_modules + dist.cmdclass['build_py'] = build_py_make_mod + + # distutils and setuptools have no notion I could find of a + # generated python module. If we don't add module_name to + # dist.py_modules, then things mostly work but there are some + # combination of options (--root and --record) that will miss + # the module. So we add it here, which gives a few apparently + # harmless warnings about not finding the file outside the + # build directory. + # Then we need to hack more in get_source_files(); see above. + if dist.py_modules is None: + dist.py_modules = [] + dist.py_modules.append(module_name) + + # the following is only for "build_ext -i" + base_class_2 = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class_2): + def run(self): + base_class_2.run(self) + if self.inplace: + # from get_ext_fullpath() in distutils/command/build_ext.py + module_path = module_name.split('.') + package = '.'.join(module_path[:-1]) + build_py = self.get_finalized_command('build_py') + package_dir = build_py.get_package_dir(package) + file_name = module_path[-1] + '.py' + generate_mod(os.path.join(package_dir, file_name)) + dist.cmdclass['build_ext'] = build_ext_make_mod + +def cffi_modules(dist, attr, value): + assert attr == 'cffi_modules' + if isinstance(value, basestring): + value = [value] + + for cffi_module in value: + add_cffi_module(dist, cffi_module) diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/vengine_cpy.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/vengine_cpy.py new file mode 100644 index 0000000000000000000000000000000000000000..49727d36e57b7743536a6b1e7f40efd06d4c4d47 --- /dev/null +++ 
b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/vengine_cpy.py @@ -0,0 +1,1077 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys +from . import model +from .error import VerificationError +from . import _imp_emulation as imp + + +class VCPythonEngine(object): + _class_key = 'x' + _gen_python_module = True + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self._struct_pending_verification = {} + self._types_of_builtin_functions = {} + + def patch_extension_kwds(self, kwds): + pass + + def find_module(self, module_name, path, so_suffixes): + try: + f, filename, descr = imp.find_module(module_name, path) + except ImportError: + return None + if f is not None: + f.close() + # Note that after a setuptools installation, there are both .py + # and .so files with the same basename. The code here relies on + # imp.find_module() locating the .so in priority. + if descr[0] not in so_suffixes: + return None + return filename + + def collect_types(self): + self._typesdict = {} + self._generate("collecttype") + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! :-) + return self._typesdict[type] + + def _do_collect_type(self, tp): + if ((not isinstance(tp, model.PrimitiveType) + or tp.name == 'long double') + and tp not in self._typesdict): + num = len(self._typesdict) + self._typesdict[tp] = num + + def write_source_to_f(self): + self.collect_types() + # + # The new module will have a _cffi_setup() function that receives + # objects from the ffi world, and that calls some setup code in + # the module. This setup code is split in several independent + # functions, e.g. one per constant. The functions are "chained" + # by ending in a tail call to each other. 
+ # + # This is further split in two chained lists, depending on if we + # can do it at import-time or if we must wait for _cffi_setup() to + # provide us with the objects. This is needed because we + # need the values of the enum constants in order to build the + # that we may have to pass to _cffi_setup(). + # + # The following two 'chained_list_constants' items contains + # the head of these two chained lists, as a string that gives the + # call to do, if any. + self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)'] + # + prnt = self._prnt + # first paste some standard set of lines that are mostly '#define' + prnt(cffimod_header) + prnt() + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._generate("decl") + # + # implement the function _cffi_setup_custom() as calling the + # head of the chained list. + self._generate_setup_custom() + prnt() + # + # produce the method table, including the entries for the + # generated Python->C function wrappers, which are done + # by generate_cpy_function_method(). + prnt('static PyMethodDef _cffi_methods[] = {') + self._generate("method") + prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},') + prnt(' {NULL, NULL, 0, NULL} /* Sentinel */') + prnt('};') + prnt() + # + # standard init. 
+ modname = self.verifier.get_module_name() + constants = self._chained_list_constants[False] + prnt('#if PY_MAJOR_VERSION >= 3') + prnt() + prnt('static struct PyModuleDef _cffi_module_def = {') + prnt(' PyModuleDef_HEAD_INIT,') + prnt(' "%s",' % modname) + prnt(' NULL,') + prnt(' -1,') + prnt(' _cffi_methods,') + prnt(' NULL, NULL, NULL, NULL') + prnt('};') + prnt() + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = PyModule_Create(&_cffi_module_def);') + prnt(' if (lib == NULL)') + prnt(' return NULL;') + prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,)) + prnt(' Py_DECREF(lib);') + prnt(' return NULL;') + prnt(' }') + prnt(' return lib;') + prnt('}') + prnt() + prnt('#else') + prnt() + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname) + prnt(' if (lib == NULL)') + prnt(' return;') + prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,)) + prnt(' return;') + prnt(' return;') + prnt('}') + prnt() + prnt('#endif') + + def load_library(self, flags=None): + # XXX review all usages of 'self' here! + # import it as a new extension module + imp.acquire_lock() + try: + if hasattr(sys, "getdlopenflags"): + previous_flags = sys.getdlopenflags() + try: + if hasattr(sys, "setdlopenflags") and flags is not None: + sys.setdlopenflags(flags) + module = imp.load_dynamic(self.verifier.get_module_name(), + self.verifier.modulefilename) + except ImportError as e: + error = "importing %r: %s" % (self.verifier.modulefilename, e) + raise VerificationError(error) + finally: + if hasattr(sys, "setdlopenflags"): + sys.setdlopenflags(previous_flags) + finally: + imp.release_lock() + # + # call loading_cpy_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + # + # the C code will need the objects. Collect them in + # order in a list. 
+ revmapping = dict([(value, key) + for (key, value) in self._typesdict.items()]) + lst = [revmapping[i] for i in range(len(revmapping))] + lst = list(map(self.ffi._get_cached_btype, lst)) + # + # build the FFILibrary class and instance and call _cffi_setup(). + # this will set up some fields like '_cffi_types', and only then + # it will invoke the chained list of functions that will really + # build (notably) the constant objects, as if they are + # pointers, and store them as attributes on the 'library' object. + class FFILibrary(object): + _cffi_python_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + list(self.__dict__) + library = FFILibrary() + if module._cffi_setup(lst, VerificationError, library): + import warnings + warnings.warn("reimporting %r might overwrite older definitions" + % (self.verifier.get_module_name())) + # + # finally, call the loaded_cpy_xxx() functions. This will perform + # the final adjustments, like copying the Python->C wrapper + # functions from the module to the 'library' object, and setting + # up the FFILibrary class with properties for the global C variables. 
+ self._load(module, 'loaded', library=library) + module._cffi_original_ffi = self.ffi + module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_cpy_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % 
tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars, freelines): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + localvars.add('struct _cffi_freeme_s *large_args_free = NULL') + freelines.add('if (large_args_free != NULL)' + ' _cffi_free_array_arguments(large_args_free);') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' %s = ((size_t)datasize) <= 640 ? ' + 'alloca((size_t)datasize) : NULL;' % (tovar,)) + self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' + '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) + self._prnt(' datasize, &large_args_free) < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, 
model.StructOrUnion): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs: generates no code so far + + _generate_cpy_typedef_collecttype = _generate_nothing + _generate_cpy_typedef_decl = _generate_nothing + _generate_cpy_typedef_method = _generate_nothing + _loading_cpy_typedef = _loaded_noop + _loaded_cpy_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + self._do_collect_type(tp) + else: + # don't call _do_collect_type(tp) in this common case, + # otherwise test_autofilled_struct_as_argument fails + for type in tp.args: + self._do_collect_type(type) + self._do_collect_type(tp.result) + + def _generate_cpy_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + prnt(' %s;' % type.get_c_name(' x%d' % i, context)) + # + localvars = set() + freelines = set() + for type in tp.args: + self._extra_local_variables(type, localvars, freelines) + for decl in sorted(localvars): + 
prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + prnt(' %s;' % tp.result.get_c_name(' result', context)) + prnt(' PyObject *pyresult;') + else: + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % ( + 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + prnt(' { %s%s(%s); }' % ( + result_code, name, + ', '.join(['x%d' % i for i in range(len(tp.args))]))) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' pyresult = %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + for freeline in freelines: + prnt(' ' + freeline) + prnt(' return pyresult;') + else: + for freeline in freelines: + prnt(' ' + freeline) + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + prnt() + + def _generate_cpy_function_method(self, tp, name): + if tp.ellipsis: + return + numargs = len(tp.args) + if numargs == 0: + meth = 'METH_NOARGS' + elif numargs == 1: + meth = 'METH_O' + else: + meth = 'METH_VARARGS' + self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth)) + + _loading_cpy_function = _loaded_noop + + def _loaded_cpy_function(self, tp, name, module, library): + if tp.ellipsis: + return + func = getattr(module, name) + setattr(library, name, func) + self._types_of_builtin_functions[func] = tp + + # ---------- + # named structs + + _generate_cpy_struct_collecttype = _generate_nothing + def _generate_cpy_struct_decl(self, tp, name): + assert name 
== tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + def _generate_cpy_struct_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'struct', name) + def _loading_cpy_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + def _loaded_cpy_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + _generate_cpy_union_collecttype = _generate_nothing + def _generate_cpy_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + def _generate_cpy_union_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'union', name) + def _loading_cpy_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + def _loaded_cpy_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('static PyObject *') + prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static Py_ssize_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' (void)self; /* unused */') + prnt(' (void)noarg; /* unused */') + prnt(' return _cffi_get_struct_layout(nums);') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _generate_struct_or_union_method(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname, + layoutfuncname)) + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + function = getattr(module, layoutfuncname) + layout = function() + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = 
fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
+ + _generate_cpy_anonymous_collecttype = _generate_nothing + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _generate_cpy_anonymous_method(self, tp, name): + if not isinstance(tp, model.EnumType): + self._generate_struct_or_union_method(tp, '', name) + + def _loading_cpy_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_cpy_enum(tp, name, module) + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_cpy_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_cpy_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + vartp=None, delayed=True, size_too=False, + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + prnt(' PyObject *o;') + prnt(' int res;') + if not is_int: + prnt(' %s;' % (vartp or tp).get_c_name(' i', name)) + else: + assert category == 'const' + # + if check_value is not None: + self._check_int_constant_value(name, check_value) + # + if not is_int: + if category == 'var': + realexpr = '&' + name + else: + realexpr = name + prnt(' i = (%s);' % (realexpr,)) + prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i', + 'variable type'),)) + assert delayed + else: + prnt(' o = _cffi_from_c_int_const(%s);' % name) + prnt(' if (o == NULL)') + prnt(' return -1;') + if size_too: + prnt(' {') + prnt(' PyObject *o1 = o;') + prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));' + % (name,)) + prnt(' Py_DECREF(o1);') + prnt(' if (o == NULL)') + prnt(' return -1;') + prnt(' }') + prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name) + prnt(' 
Py_DECREF(o);') + prnt(' if (res < 0)') + prnt(' return -1;') + prnt(' return %s;' % self._chained_list_constants[delayed]) + self._chained_list_constants[delayed] = funcname + '(lib)' + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + if not is_int: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + _generate_cpy_constant_method = _generate_nothing + _loading_cpy_constant = _loaded_noop + _loaded_cpy_constant = _loaded_noop + + # ---------- + # enums + + def _check_int_constant_value(self, name, value, err_prefix=''): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + name) + prnt(' PyErr_Format(_cffi_VerificationError,') + prnt(' "%s%s has the real value %s, not %s",') + prnt(' "%s", "%s", buf, "%d");' % ( + err_prefix, name, value)) + prnt(' return -1;') + prnt(' }') + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_cpy_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator, delayed=False) + return + # + funcname = self._enum_funcname(prefix, name) + prnt = self._prnt + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue, + "enum %s: " % 
name) + prnt(' return %s;' % self._chained_list_constants[True]) + self._chained_list_constants[True] = funcname + '(lib)' + prnt('}') + prnt() + + _generate_cpy_enum_collecttype = _generate_nothing + _generate_cpy_enum_method = _generate_nothing + + def _loading_cpy_enum(self, tp, name, module): + if tp.partial: + enumvalues = [getattr(module, enumerator) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + + def _loaded_cpy_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + _generate_cpy_macro_collecttype = _generate_nothing + _generate_cpy_macro_method = _generate_nothing + _loading_cpy_macro = _loaded_noop + _loaded_cpy_macro = _loaded_noop + + # ---------- + # global variables + + def _generate_cpy_variable_collecttype(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + else: + tp_ptr = model.PointerType(tp) + self._do_collect_type(tp_ptr) + + def _generate_cpy_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + self._generate_cpy_const(False, name, tp, vartp=tp_ptr, + size_too = tp.length_is_unknown()) + else: + tp_ptr = model.PointerType(tp) + self._generate_cpy_const(False, name, tp_ptr, category='var') + + _generate_cpy_variable_method = _generate_nothing + _loading_cpy_variable = _loaded_noop + + def _loaded_cpy_variable(self, tp, name, module, library): + value = getattr(library, name) + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." 
is forbidden + if tp.length_is_unknown(): + assert isinstance(value, tuple) + (value, size) = value + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + # 'value' is a which we have to replace with + # a if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + return + # remove ptr= from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. + ptr = value + delattr(library, name) + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + + # ---------- + + def _generate_setup_custom(self): + prnt = self._prnt + prnt('static int _cffi_setup_custom(PyObject *lib)') + prnt('{') + prnt(' return %s;' % self._chained_list_constants[True]) + prnt('}') + +cffimod_header = r''' +#include +#include + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + 
typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif + +#if PY_MAJOR_VERSION < 3 +# undef PyCapsule_CheckExact +# undef PyCapsule_GetPointer +# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule)) +# define PyCapsule_GetPointer(capsule, name) \ + (PyCObject_AsVoidPtr(capsule)) +#endif + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong +#define _cffi_from_c__Bool PyBool_FromLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int_const(x) \ + (((x) > 0) ? \ + ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \ + ((long long)(x) >= (long long)LONG_MIN) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromLongLong((long long)(x))) + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? 
\ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12]) +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, 
PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_NUM_EXPORTS 25 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; +static PyObject *_cffi_types, *_cffi_VerificationError; + +static int _cffi_setup_custom(PyObject *lib); /* forward */ + +static PyObject *_cffi_setup(PyObject *self, PyObject *args) +{ + PyObject *library; + int was_alive = (_cffi_types != NULL); + (void)self; /* unused */ + if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, + &library)) + return NULL; + Py_INCREF(_cffi_types); + Py_INCREF(_cffi_VerificationError); + if (_cffi_setup_custom(library) < 0) + return NULL; + return PyBool_FromLong(was_alive); +} + +union _cffi_union_alignment_u { + unsigned char m_char; + unsigned short m_short; + unsigned int m_int; + unsigned long m_long; + unsigned long long m_longlong; + float m_float; + double m_double; + long double m_longdouble; +}; + +struct _cffi_freeme_s { + struct _cffi_freeme_s *next; + union _cffi_union_alignment_u alignment; +}; + +#ifdef __GNUC__ + __attribute__((unused)) +#endif +static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg, + char **output_data, Py_ssize_t datasize, + struct _cffi_freeme_s **freeme) +{ + char *p; + if (datasize < 0) + return -1; + + p = *output_data; + if (p == NULL) { + struct 
_cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( + offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); + if (fp == NULL) + return -1; + fp->next = *freeme; + *freeme = fp; + p = *output_data = (char *)&fp->alignment; + } + memset((void *)p, 0, (size_t)datasize); + return _cffi_convert_array_from_object(p, ctptr, arg); +} + +#ifdef __GNUC__ + __attribute__((unused)) +#endif +static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme) +{ + do { + void *p = (void *)freeme; + freeme = freeme->next; + PyObject_Free(p); + } while (freeme != NULL); +} + +static int _cffi_init(void) +{ + PyObject *module, *c_api_object = NULL; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + c_api_object = PyObject_GetAttrString(module, "_C_API"); + if (c_api_object == NULL) + goto failure; + if (!PyCapsule_CheckExact(c_api_object)) { + PyErr_SetNone(PyExc_ImportError); + goto failure; + } + memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), + _CFFI_NUM_EXPORTS * sizeof(void *)); + + Py_DECREF(module); + Py_DECREF(c_api_object); + return 0; + + failure: + Py_XDECREF(module); + Py_XDECREF(c_api_object); + return -1; +} + +#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) + +/**********/ +''' diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/vengine_gen.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/vengine_gen.py new file mode 100644 index 0000000000000000000000000000000000000000..26421526f62a07e04419cd57f1f19a64ecd36452 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/vengine_gen.py @@ -0,0 +1,675 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os +import types + +from . 
import model +from .error import VerificationError + + +class VGenericEngine(object): + _class_key = 'g' + _gen_python_module = False + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self.export_symbols = [] + self._struct_pending_verification = {} + + def patch_extension_kwds(self, kwds): + # add 'export_symbols' to the dictionary. Note that we add the + # list before filling it. When we fill it, it will thus also show + # up in kwds['export_symbols']. + kwds.setdefault('export_symbols', self.export_symbols) + + def find_module(self, module_name, path, so_suffixes): + for so_suffix in so_suffixes: + basename = module_name + so_suffix + if path is None: + path = sys.path + for dirname in path: + filename = os.path.join(dirname, basename) + if os.path.isfile(filename): + return filename + + def collect_types(self): + pass # not needed in the generic engine + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self): + prnt = self._prnt + # first paste some standard set of lines that are mostly '#include' + prnt(cffimod_header) + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + # + # call generate_gen_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. 
+ self._generate('decl') + # + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + if sys.platform == 'win32': + if sys.version_info >= (3,): + prefix = 'PyInit_' + else: + prefix = 'init' + modname = self.verifier.get_module_name() + prnt("void %s%s(void) { }\n" % (prefix, modname)) + + def load_library(self, flags=0): + # import it with the CFFI backend + backend = self.ffi._backend + # needs to make a path that contains '/', on Posix + filename = os.path.join(os.curdir, self.verifier.modulefilename) + module = backend.load_library(filename, flags) + # + # call loading_gen_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + + # build the FFILibrary class and instance, this is a module subclass + # because modules are expected to have usually-constant-attributes and + # in PyPy this means the JIT is able to treat attributes as constant, + # which we want. + class FFILibrary(types.ModuleType): + _cffi_generic_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + library = FFILibrary("") + # + # finally, call the loaded_gen_xxx() functions. This will set + # up the 'library' object. 
+ self._load(module, 'loaded', library=library) + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_gen_%s_%s' % (kind, + step_name)) + except AttributeError: + raise VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_gen_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + # typedefs: generates no code so far + + _generate_gen_typedef_decl = _generate_nothing + _loading_gen_typedef = _loaded_noop + _loaded_gen_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_gen_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no _cffi_f_%s wrapper) + self._generate_gen_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + argnames = [] + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + argnames.append('%sx%d' % (indirection, i)) + context = 'argument of %s' % name + arglist = [type.get_c_name(' %s' % arg, context) + for type, arg in zip(tp.args, argnames)] + tpresult = tp.result + if 
isinstance(tpresult, model.StructOrUnion): + arglist.insert(0, tpresult.get_c_name(' *r', context)) + tpresult = model.void_type + arglist = ', '.join(arglist) or 'void' + wrappername = '_cffi_f_%s' % name + self.export_symbols.append(wrappername) + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) + context = 'result of %s' % name + prnt(tpresult.get_c_name(funcdecl, context)) + prnt('{') + # + if isinstance(tp.result, model.StructOrUnion): + result_code = '*r = ' + elif not isinstance(tp.result, model.VoidType): + result_code = 'return ' + else: + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) + prnt('}') + prnt() + + _loading_gen_function = _loaded_noop + + def _loaded_gen_function(self, tp, name, module, library): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + newfunction = self._load_constant(False, tp, name, module) + else: + indirections = [] + base_tp = tp + if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) + or isinstance(tp.result, model.StructOrUnion)): + indirect_args = [] + for i, typ in enumerate(tp.args): + if isinstance(typ, model.StructOrUnion): + typ = model.PointerType(typ) + indirections.append((i, typ)) + indirect_args.append(typ) + indirect_result = tp.result + if isinstance(indirect_result, model.StructOrUnion): + if indirect_result.fldtypes is None: + raise TypeError("'%s' is used as result type, " + "but is opaque" % ( + indirect_result._get_c_name(),)) + indirect_result = model.PointerType(indirect_result) + indirect_args.insert(0, indirect_result) + indirections.insert(0, ("result", indirect_result)) + indirect_result = model.void_type + tp = model.FunctionPtrType(tuple(indirect_args), + indirect_result, tp.ellipsis) + BFunc = self.ffi._get_cached_btype(tp) + wrappername = '_cffi_f_%s' % name + newfunction = module.load_function(BFunc, wrappername) + for i, typ in indirections: + newfunction = 
self._make_struct_wrapper(newfunction, i, typ, + base_tp) + setattr(library, name, newfunction) + type(library)._cffi_dir.append(name) + + def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): + backend = self.ffi._backend + BType = self.ffi._get_cached_btype(tp) + if i == "result": + ffi = self.ffi + def newfunc(*args): + res = ffi.new(BType) + oldfunc(res, *args) + return res[0] + else: + def newfunc(*args): + args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] + return oldfunc(*args) + newfunc._cffi_base_type = base_tp + return newfunc + + # ---------- + # named structs + + def _generate_gen_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + + def _loading_gen_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + + def _loaded_gen_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_gen_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + + def _loading_gen_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + + def _loaded_gen_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: 
+ # only accept exactly the type declared. + try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + self.export_symbols.append(layoutfuncname) + prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static intptr_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' return nums[i];') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] + function = module.load_function(BFunc, layoutfuncname) + layout = [] + num = 0 + while True: + x = function(num) + if x < 0: break + layout.append(x) + num += 1 + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def 
_loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
+ + def _generate_gen_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_gen_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _loading_gen_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_gen_enum(tp, name, module, '') + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_gen_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_gen_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_gen_const(self, is_int, name, tp=None, category='const', + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + self.export_symbols.append(funcname) + if check_value is not None: + assert is_int + assert category == 'const' + prnt('int %s(char *out_error)' % funcname) + prnt('{') + self._check_int_constant_value(name, check_value) + prnt(' return 0;') + prnt('}') + elif is_int: + assert category == 'const' + prnt('int %s(long long *out_value)' % funcname) + prnt('{') + prnt(' *out_value = (long long)(%s);' % (name,)) + prnt(' return (%s) <= 0;' % (name,)) + prnt('}') + else: + assert tp is not None + assert check_value is None + if category == 'var': + ampersand = '&' + else: + ampersand = '' + extra = '' + if category == 'const' and isinstance(tp, model.StructOrUnion): + extra = 'const *' + ampersand = '&' + prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) + prnt('{') + prnt(' return (%s%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_gen_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_gen_const(is_int, name, tp) + + _loading_gen_constant = _loaded_noop + + def _load_constant(self, is_int, tp, name, module, check_value=None): + funcname = '_cffi_const_%s' % name + if 
check_value is not None: + assert is_int + self._load_known_int_constant(module, funcname) + value = check_value + elif is_int: + BType = self.ffi._typeof_locked("long long*")[0] + BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType) + negative = function(p) + value = int(p[0]) + if value < 0 and not negative: + BLongLong = self.ffi._typeof_locked("long long")[0] + value += (1 << (8*self.ffi.sizeof(BLongLong))) + else: + assert check_value is None + fntypeextra = '(*)(void)' + if isinstance(tp, model.StructOrUnion): + fntypeextra = '*' + fntypeextra + BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] + function = module.load_function(BFunc, funcname) + value = function() + if isinstance(tp, model.StructOrUnion): + value = value[0] + return value + + def _loaded_gen_constant(self, tp, name, module, library): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + value = self._load_constant(is_int, tp, name, module) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # enums + + def _check_int_constant_value(self, name, value): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' sprintf(buf, "%%lu", (unsigned long)(%s));' % + name) + prnt(' sprintf(out_error, "%s has the real value %s, not %s",') + prnt(' "%s", buf, "%d");' % (name[:100], value)) + prnt(' return -1;') + prnt(' }') + + def _load_known_int_constant(self, module, funcname): + BType = self.ffi._typeof_locked("char[]")[0] + BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType, 256) + if function(p) 
< 0: + error = self.ffi.string(p) + if sys.version_info >= (3,): + error = str(error, 'utf-8') + raise VerificationError(error) + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_gen_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_gen_const(True, enumerator) + return + # + funcname = self._enum_funcname(prefix, name) + self.export_symbols.append(funcname) + prnt = self._prnt + prnt('int %s(char *out_error)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue) + prnt(' return 0;') + prnt('}') + prnt() + + def _loading_gen_enum(self, tp, name, module, prefix='enum'): + if tp.partial: + enumvalues = [self._load_constant(True, tp, enumerator, module) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + else: + funcname = self._enum_funcname(prefix, name) + self._load_known_int_constant(module, funcname) + + def _loaded_gen_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + type(library)._cffi_dir.append(enumerator) + + # ---------- + # macros: for now only for integers + + def _generate_gen_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_gen_const(True, name, check_value=check_value) + + _loading_gen_macro = _loaded_noop + + def _loaded_gen_macro(self, tp, name, module, library): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + value = self._load_constant(True, tp, name, module, + check_value=check_value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # global variables + + def 
_generate_gen_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + if tp.length_is_unknown(): + prnt = self._prnt + funcname = '_cffi_sizeof_%s' % (name,) + self.export_symbols.append(funcname) + prnt("size_t %s(void)" % funcname) + prnt("{") + prnt(" return sizeof(%s);" % (name,)) + prnt("}") + tp_ptr = model.PointerType(tp.item) + self._generate_gen_const(False, name, tp_ptr) + else: + tp_ptr = model.PointerType(tp) + self._generate_gen_const(False, name, tp_ptr, category='var') + + _loading_gen_variable = _loaded_noop + + def _loaded_gen_variable(self, tp, name, module, library): + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." is forbidden + if tp.length_is_unknown(): + funcname = '_cffi_sizeof_%s' % (name,) + BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0] + function = module.load_function(BFunc, funcname) + size = function() + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + tp_ptr = model.PointerType(tp.item) + value = self._load_constant(False, tp_ptr, name, module) + # 'value' is a which we have to replace with + # a if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + return + # remove ptr= from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. 
+ funcname = '_cffi_var_%s' % name + BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0] + function = module.load_function(BFunc, funcname) + ptr = function() + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + +cffimod_header = r''' +#include +#include +#include +#include +#include /* XXX for ssize_t on some platforms */ + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ +# ifndef __cplusplus + typedef unsigned char _Bool; +# endif +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif +''' diff --git 
a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/verifier.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/verifier.py new file mode 100644 index 0000000000000000000000000000000000000000..e392a2b7fdab66662f5a32885cbe865d6c538ebe --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cffi/verifier.py @@ -0,0 +1,306 @@ +# +# DEPRECATED: implementation for ffi.verify() +# +import sys, os, binascii, shutil, io +from . import __version_verifier_modules__ +from . import ffiplatform +from .error import VerificationError + +if sys.version_info >= (3, 3): + import importlib.machinery + def _extension_suffixes(): + return importlib.machinery.EXTENSION_SUFFIXES[:] +else: + import imp + def _extension_suffixes(): + return [suffix for suffix, _, type in imp.get_suffixes() + if type == imp.C_EXTENSION] + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + + +class Verifier(object): + + def __init__(self, ffi, preamble, tmpdir=None, modulename=None, + ext_package=None, tag='', force_generic_engine=False, + source_extension='.c', flags=None, relative_to=None, **kwds): + if ffi._parser._uses_new_feature: + raise VerificationError( + "feature not supported with ffi.verify(), but only " + "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) + self.ffi = ffi + self.preamble = preamble + if not modulename: + flattened_kwds = ffiplatform.flatten(kwds) + vengine_class = _locate_engine_class(ffi, force_generic_engine) + self._vengine = vengine_class(self) + self._vengine.patch_extension_kwds(kwds) + self.flags = flags + self.kwds = self.make_relative_to(kwds, relative_to) + # + if modulename: + if tag: + raise TypeError("can't specify both 'modulename' and 'tag'") + else: + key = '\x00'.join(['%d.%d' % sys.version_info[:2], + 
__version_verifier_modules__, + preamble, flattened_kwds] + + ffi._cdefsources) + if sys.version_info >= (3,): + key = key.encode('utf-8') + k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) + k1 = k1.lstrip('0x').rstrip('L') + k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) + k2 = k2.lstrip('0').rstrip('L') + modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, + k1, k2) + suffix = _get_so_suffixes()[0] + self.tmpdir = tmpdir or _caller_dir_pycache() + self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) + self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) + self.ext_package = ext_package + self._has_source = False + self._has_module = False + + def write_source(self, file=None): + """Write the C source code. It is produced in 'self.sourcefilename', + which can be tweaked beforehand.""" + with self.ffi._lock: + if self._has_source and file is None: + raise VerificationError( + "source code already written") + self._write_source(file) + + def compile_module(self): + """Write the C source code (if not done already) and compile it. + This produces a dynamic link library in 'self.modulefilename'.""" + with self.ffi._lock: + if self._has_module: + raise VerificationError("module already compiled") + if not self._has_source: + self._write_source() + self._compile_module() + + def load_library(self): + """Get a C module from this Verifier instance. + Returns an instance of a FFILibrary class that behaves like the + objects returned by ffi.dlopen(), but that delegates all + operations to the C module. If necessary, the C code is written + and compiled first. 
+ """ + with self.ffi._lock: + if not self._has_module: + self._locate_module() + if not self._has_module: + if not self._has_source: + self._write_source() + self._compile_module() + return self._load_library() + + def get_module_name(self): + basename = os.path.basename(self.modulefilename) + # kill both the .so extension and the other .'s, as introduced + # by Python 3: 'basename.cpython-33m.so' + basename = basename.split('.', 1)[0] + # and the _d added in Python 2 debug builds --- but try to be + # conservative and not kill a legitimate _d + if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): + basename = basename[:-2] + return basename + + def get_extension(self): + if not self._has_source: + with self.ffi._lock: + if not self._has_source: + self._write_source() + sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) + modname = self.get_module_name() + return ffiplatform.get_extension(sourcename, modname, **self.kwds) + + def generates_python_module(self): + return self._vengine._gen_python_module + + def make_relative_to(self, kwds, relative_to): + if relative_to and os.path.dirname(relative_to): + dirname = os.path.dirname(relative_to) + kwds = kwds.copy() + for key in ffiplatform.LIST_OF_FILE_NAMES: + if key in kwds: + lst = kwds[key] + if not isinstance(lst, (list, tuple)): + raise TypeError("keyword '%s' should be a list or tuple" + % (key,)) + lst = [os.path.join(dirname, fn) for fn in lst] + kwds[key] = lst + return kwds + + # ---------- + + def _locate_module(self): + if not os.path.isfile(self.modulefilename): + if self.ext_package: + try: + pkg = __import__(self.ext_package, None, None, ['__doc__']) + except ImportError: + return # cannot import the package itself, give up + # (e.g. 
it might be called differently before installation) + path = pkg.__path__ + else: + path = None + filename = self._vengine.find_module(self.get_module_name(), path, + _get_so_suffixes()) + if filename is None: + return + self.modulefilename = filename + self._vengine.collect_types() + self._has_module = True + + def _write_source_to(self, file): + self._vengine._f = file + try: + self._vengine.write_source_to_f() + finally: + del self._vengine._f + + def _write_source(self, file=None): + if file is not None: + self._write_source_to(file) + else: + # Write our source file to an in memory file. + f = NativeIO() + self._write_source_to(f) + source_data = f.getvalue() + + # Determine if this matches the current file + if os.path.exists(self.sourcefilename): + with open(self.sourcefilename, "r") as fp: + needs_written = not (fp.read() == source_data) + else: + needs_written = True + + # Actually write the file out if it doesn't match + if needs_written: + _ensure_dir(self.sourcefilename) + with open(self.sourcefilename, "w") as fp: + fp.write(source_data) + + # Set this flag + self._has_source = True + + def _compile_module(self): + # compile this C source + tmpdir = os.path.dirname(self.sourcefilename) + outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) + try: + same = ffiplatform.samefile(outputfilename, self.modulefilename) + except OSError: + same = False + if not same: + _ensure_dir(self.modulefilename) + shutil.move(outputfilename, self.modulefilename) + self._has_module = True + + def _load_library(self): + assert self._has_module + if self.flags is not None: + return self._vengine.load_library(self.flags) + else: + return self._vengine.load_library() + +# ____________________________________________________________ + +_FORCE_GENERIC_ENGINE = False # for tests + +def _locate_engine_class(ffi, force_generic_engine): + if _FORCE_GENERIC_ENGINE: + force_generic_engine = True + if not force_generic_engine: + if '__pypy__' in sys.builtin_module_names: 
+ force_generic_engine = True + else: + try: + import _cffi_backend + except ImportError: + _cffi_backend = '?' + if ffi._backend is not _cffi_backend: + force_generic_engine = True + if force_generic_engine: + from . import vengine_gen + return vengine_gen.VGenericEngine + else: + from . import vengine_cpy + return vengine_cpy.VCPythonEngine + +# ____________________________________________________________ + +_TMPDIR = None + +def _caller_dir_pycache(): + if _TMPDIR: + return _TMPDIR + result = os.environ.get('CFFI_TMPDIR') + if result: + return result + filename = sys._getframe(2).f_code.co_filename + return os.path.abspath(os.path.join(os.path.dirname(filename), + '__pycache__')) + +def set_tmpdir(dirname): + """Set the temporary directory to use instead of __pycache__.""" + global _TMPDIR + _TMPDIR = dirname + +def cleanup_tmpdir(tmpdir=None, keep_so=False): + """Clean up the temporary directory by removing all files in it + called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" + tmpdir = tmpdir or _caller_dir_pycache() + try: + filelist = os.listdir(tmpdir) + except OSError: + return + if keep_so: + suffix = '.c' # only remove .c files + else: + suffix = _get_so_suffixes()[0].lower() + for fn in filelist: + if fn.lower().startswith('_cffi_') and ( + fn.lower().endswith(suffix) or fn.lower().endswith('.c')): + try: + os.unlink(os.path.join(tmpdir, fn)) + except OSError: + pass + clean_dir = [os.path.join(tmpdir, 'build')] + for dir in clean_dir: + try: + for fn in os.listdir(dir): + fn = os.path.join(dir, fn) + if os.path.isdir(fn): + clean_dir.append(fn) + else: + os.unlink(fn) + except OSError: + pass + +def _get_so_suffixes(): + suffixes = _extension_suffixes() + if not suffixes: + # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext + if sys.platform == 'win32': + suffixes = [".pyd"] + else: + suffixes = [".so"] + + return suffixes + +def _ensure_dir(filename): + dirname = os.path.dirname(filename) + if dirname and not os.path.isdir(dirname): + os.makedirs(dirname) diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/PKG-INFO b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/PKG-INFO new file mode 100644 index 0000000000000000000000000000000000000000..8afa884f5b803b5971baa04662fbbbd48318ac71 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/PKG-INFO @@ -0,0 +1,32 @@ +Metadata-Version: 2.1 +Name: conda +Version: 4.14.0 +Summary: OS-agnostic, system-level binary package manager. +Home-page: https://github.com/conda/conda +Author: Anaconda, Inc. +Author-email: conda@continuum.io +License: BSD-3-Clause +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Requires-Python: >=3.7 +License-File: LICENSE.txt +License-File: AUTHORS.md + + +.. image:: https://s3.amazonaws.com/conda-dev/conda_logo.svg + :alt: Conda Logo + +Conda is a cross-platform, language-agnostic binary package manager. It is the +package manager used by `Anaconda +`_ installations, but it may be +used for other systems as well. Conda makes environments first-class +citizens, making it easy to create independent environments even for C +libraries. Conda is written entirely in Python, and is BSD licensed open +source. 
+ diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/SOURCES.txt b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/SOURCES.txt new file mode 100644 index 0000000000000000000000000000000000000000..1bcdfa6942f6cebd06bacd04e539d9298c7a1106 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/SOURCES.txt @@ -0,0 +1,240 @@ +AUTHORS.md +CHANGELOG.md +LICENSE.txt +MANIFEST.in +README.md +pyproject.toml +setup.cfg +setup.py +conda/__init__.py +conda/__main__.py +conda/activate.py +conda/api.py +conda/exceptions.py +conda/exports.py +conda/history.py +conda/instructions.py +conda/lock.py +conda/misc.py +conda/plan.py +conda/resolve.py +conda/utils.py +conda.egg-info/PKG-INFO +conda.egg-info/SOURCES.txt +conda.egg-info/dependency_links.txt +conda.egg-info/entry_points.txt +conda.egg-info/not-zip-safe +conda.egg-info/requires.txt +conda.egg-info/top_level.txt +conda/../conda/shell/cli-32.exe +conda/../conda/shell/cli-64.exe +conda/../conda/shell/conda.xsh +conda/../conda/shell/conda_icon.ico +conda/../conda/shell/Library/bin/conda.bat +conda/../conda/shell/Scripts/activate.bat +conda/../conda/shell/bin/activate +conda/../conda/shell/bin/conda +conda/../conda/shell/bin/deactivate +conda/../conda/shell/condabin/Conda.psm1 +conda/../conda/shell/condabin/_conda_activate.bat +conda/../conda/shell/condabin/activate.bat +conda/../conda/shell/condabin/conda-hook.ps1 +conda/../conda/shell/condabin/conda.bat +conda/../conda/shell/condabin/conda_auto_activate.bat +conda/../conda/shell/condabin/conda_hook.bat +conda/../conda/shell/condabin/deactivate.bat +conda/../conda/shell/condabin/rename_tmp.bat +conda/../conda/shell/etc/fish/conf.d/conda.fish +conda/../conda/shell/etc/profile.d/conda.csh +conda/../conda/shell/etc/profile.d/conda.sh +conda/_vendor/__init__.py +conda/_vendor/appdirs.py +conda/_vendor/distro.py 
+conda/_vendor/six.py +conda/_vendor/boltons/LICENSE +conda/_vendor/boltons/__init__.py +conda/_vendor/boltons/setutils.py +conda/_vendor/boltons/timeutils.py +conda/_vendor/cpuinfo/__init__.py +conda/_vendor/cpuinfo/cpuinfo.py +conda/_vendor/frozendict/__init__.py +conda/_vendor/toolz/__init__.py +conda/_vendor/toolz/compatibility.py +conda/_vendor/toolz/dicttoolz.py +conda/_vendor/toolz/itertoolz.py +conda/_vendor/toolz/recipes.py +conda/_vendor/toolz/utils.py +conda/_vendor/tqdm/__init__.py +conda/_vendor/tqdm/__main__.py +conda/_vendor/tqdm/_main.py +conda/_vendor/tqdm/_monitor.py +conda/_vendor/tqdm/_tqdm.py +conda/_vendor/tqdm/_utils.py +conda/_vendor/tqdm/asyncio.py +conda/_vendor/tqdm/auto.py +conda/_vendor/tqdm/cli.py +conda/_vendor/tqdm/std.py +conda/_vendor/tqdm/utils.py +conda/_vendor/tqdm/version.py +conda/auxlib/LICENSE +conda/auxlib/__init__.py +conda/auxlib/collection.py +conda/auxlib/compat.py +conda/auxlib/decorators.py +conda/auxlib/entity.py +conda/auxlib/exceptions.py +conda/auxlib/ish.py +conda/auxlib/logz.py +conda/auxlib/packaging.py +conda/auxlib/type_coercion.py +conda/base/__init__.py +conda/base/constants.py +conda/base/context.py +conda/base/exceptions.py +conda/cli/__init__.py +conda/cli/common.py +conda/cli/conda_argparse.py +conda/cli/find_commands.py +conda/cli/install.py +conda/cli/main.py +conda/cli/main_clean.py +conda/cli/main_compare.py +conda/cli/main_config.py +conda/cli/main_create.py +conda/cli/main_info.py +conda/cli/main_init.py +conda/cli/main_install.py +conda/cli/main_list.py +conda/cli/main_notices.py +conda/cli/main_package.py +conda/cli/main_pip.py +conda/cli/main_remove.py +conda/cli/main_rename.py +conda/cli/main_run.py +conda/cli/main_search.py +conda/cli/main_update.py +conda/cli/python_api.py +conda/common/__init__.py +conda/common/_logic.py +conda/common/compat.py +conda/common/configuration.py +conda/common/constants.py +conda/common/cuda.py +conda/common/decorators.py +conda/common/disk.py 
+conda/common/io.py +conda/common/logic.py +conda/common/path.py +conda/common/serialize.py +conda/common/signals.py +conda/common/toposort.py +conda/common/url.py +conda/common/_os/__init__.py +conda/common/_os/linux.py +conda/common/_os/unix.py +conda/common/_os/windows.py +conda/common/pkg_formats/__init__.py +conda/common/pkg_formats/python.py +conda/core/__init__.py +conda/core/envs_manager.py +conda/core/index.py +conda/core/initialize.py +conda/core/link.py +conda/core/package_cache.py +conda/core/package_cache_data.py +conda/core/path_actions.py +conda/core/portability.py +conda/core/prefix_data.py +conda/core/solve.py +conda/core/subdir_data.py +conda/gateways/__init__.py +conda/gateways/anaconda_client.py +conda/gateways/logging.py +conda/gateways/subprocess.py +conda/gateways/connection/__init__.py +conda/gateways/connection/download.py +conda/gateways/connection/session.py +conda/gateways/connection/adapters/__init__.py +conda/gateways/connection/adapters/ftp.py +conda/gateways/connection/adapters/localfs.py +conda/gateways/connection/adapters/s3.py +conda/gateways/disk/__init__.py +conda/gateways/disk/create.py +conda/gateways/disk/delete.py +conda/gateways/disk/link.py +conda/gateways/disk/permissions.py +conda/gateways/disk/read.py +conda/gateways/disk/test.py +conda/gateways/disk/update.py +conda/models/__init__.py +conda/models/channel.py +conda/models/dist.py +conda/models/enums.py +conda/models/leased_path_entry.py +conda/models/match_spec.py +conda/models/package_info.py +conda/models/prefix_graph.py +conda/models/records.py +conda/models/version.py +conda/notices/__init__.py +conda/notices/cache.py +conda/notices/core.py +conda/notices/http.py +conda/notices/types.py +conda/notices/views.py +conda/testing/__init__.py +conda/testing/cases.py +conda/testing/decorators.py +conda/testing/fixtures.py +conda/testing/helpers.py +conda/testing/integration.py +conda/testing/solver_helpers.py +conda/testing/gateways/__init__.py 
+conda/testing/gateways/fixtures.py +conda/testing/notices/__init__.py +conda/testing/notices/fixtures.py +conda/testing/notices/helpers.py +conda_env/__init__.py +conda_env/__main__.py +conda_env/env.py +conda_env/pip_util.py +conda_env/../conda/shell/cli-32.exe +conda_env/../conda/shell/cli-64.exe +conda_env/../conda/shell/conda.xsh +conda_env/../conda/shell/conda_icon.ico +conda_env/../conda/shell/Library/bin/conda.bat +conda_env/../conda/shell/Scripts/activate.bat +conda_env/../conda/shell/bin/activate +conda_env/../conda/shell/bin/conda +conda_env/../conda/shell/bin/deactivate +conda_env/../conda/shell/condabin/Conda.psm1 +conda_env/../conda/shell/condabin/_conda_activate.bat +conda_env/../conda/shell/condabin/activate.bat +conda_env/../conda/shell/condabin/conda-hook.ps1 +conda_env/../conda/shell/condabin/conda.bat +conda_env/../conda/shell/condabin/conda_auto_activate.bat +conda_env/../conda/shell/condabin/conda_hook.bat +conda_env/../conda/shell/condabin/deactivate.bat +conda_env/../conda/shell/condabin/rename_tmp.bat +conda_env/../conda/shell/etc/fish/conf.d/conda.fish +conda_env/../conda/shell/etc/profile.d/conda.csh +conda_env/../conda/shell/etc/profile.d/conda.sh +conda_env/cli/__init__.py +conda_env/cli/common.py +conda_env/cli/main.py +conda_env/cli/main_config.py +conda_env/cli/main_create.py +conda_env/cli/main_export.py +conda_env/cli/main_list.py +conda_env/cli/main_remove.py +conda_env/cli/main_update.py +conda_env/cli/main_vars.py +conda_env/installers/__init__.py +conda_env/installers/base.py +conda_env/installers/conda.py +conda_env/installers/pip.py +conda_env/specs/__init__.py +conda_env/specs/binstar.py +conda_env/specs/notebook.py +conda_env/specs/requirements.py +conda_env/specs/yaml_file.py \ No newline at end of file diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/dependency_links.txt 
b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/dependency_links.txt new file mode 100644 index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/entry_points.txt b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..6d7da3a6e231bb52990a75097606ab894b85d227 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +conda = conda.cli.main_pip:main diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/not-zip-safe b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/not-zip-safe new file mode 100644 index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/requires.txt b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/requires.txt new file mode 100644 index 0000000000000000000000000000000000000000..4a493e171a7f8cf2379071d0ad6c291b8a168ac5 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/requires.txt @@ -0,0 +1,6 @@ +pycosat>=0.6.3 +requests>=2.20.1 +ruamel_yaml_conda>=0.11.14 + 
+[:platform_system == "Windows"] +menuinst diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/top_level.txt b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..d956fbede620347e4cf280da4f052de49a08f305 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/conda-4.14.0-py3.8.egg-info/top_level.txt @@ -0,0 +1,2 @@ +conda +conda_env diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib-3.5.2-py3.8-nspkg.pth b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib-3.5.2-py3.8-nspkg.pth new file mode 100644 index 0000000000000000000000000000000000000000..d701079367221560672e910b45f0903eb9baa558 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib-3.5.2-py3.8-nspkg.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1603bfdee834ef511710a4fc9a038f054872aecc0fb4f0988cea540b26aeed45 +size 569 diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/networkx-2.8.5.dist-info/LICENSE.txt b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/networkx-2.8.5.dist-info/LICENSE.txt new file mode 100644 index 0000000000000000000000000000000000000000..a274a66aa1bbd483ea7dcc135e582e9057778398 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/networkx-2.8.5.dist-info/LICENSE.txt @@ -0,0 +1,37 @@ +NetworkX is distributed with the 3-clause BSD license. + +:: + + Copyright (C) 2004-2022, NetworkX Developers + Aric Hagberg + Dan Schult + Pieter Swart + All rights reserved. 
+ + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of the NetworkX Developers nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/networkx-2.8.5.dist-info/METADATA b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/networkx-2.8.5.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..dc385d7df4a5a1552c29fb550b3b3527752c0be3 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/networkx-2.8.5.dist-info/METADATA @@ -0,0 +1,131 @@ +Metadata-Version: 2.1 +Name: networkx +Version: 2.8.5 +Summary: Python package for creating and manipulating graphs and networks +Home-page: https://networkx.org/ +Author: Aric Hagberg +Author-email: hagberg@lanl.gov +Maintainer: NetworkX Developers +Maintainer-email: networkx-discuss@googlegroups.com +Project-URL: Bug Tracker, https://github.com/networkx/networkx/issues +Project-URL: Documentation, https://networkx.org/documentation/stable/ +Project-URL: Source Code, https://github.com/networkx/networkx +Keywords: Networks,Graph Theory,Mathematics,network,graph,discrete mathematics,math +Platform: Linux +Platform: Mac OSX +Platform: Windows +Platform: Unix +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Scientific/Engineering :: Bio-Informatics +Classifier: Topic :: Scientific/Engineering :: Information Analysis +Classifier: Topic :: Scientific/Engineering :: Mathematics +Classifier: Topic :: Scientific/Engineering :: Physics +Requires-Python: >=3.8 
+License-File: LICENSE.txt +Provides-Extra: default +Requires-Dist: numpy (>=1.19) ; extra == 'default' +Requires-Dist: scipy (>=1.8) ; extra == 'default' +Requires-Dist: matplotlib (>=3.4) ; extra == 'default' +Requires-Dist: pandas (>=1.3) ; extra == 'default' +Provides-Extra: developer +Requires-Dist: pre-commit (>=2.19) ; extra == 'developer' +Requires-Dist: mypy (>=0.960) ; extra == 'developer' +Provides-Extra: doc +Requires-Dist: sphinx (>=5) ; extra == 'doc' +Requires-Dist: pydata-sphinx-theme (>=0.9) ; extra == 'doc' +Requires-Dist: sphinx-gallery (>=0.10) ; extra == 'doc' +Requires-Dist: numpydoc (>=1.4) ; extra == 'doc' +Requires-Dist: pillow (>=9.1) ; extra == 'doc' +Requires-Dist: nb2plots (>=0.6) ; extra == 'doc' +Requires-Dist: texext (>=0.6.6) ; extra == 'doc' +Provides-Extra: extra +Requires-Dist: lxml (>=4.6) ; extra == 'extra' +Requires-Dist: pygraphviz (>=1.9) ; extra == 'extra' +Requires-Dist: pydot (>=1.4.2) ; extra == 'extra' +Requires-Dist: sympy (>=1.10) ; extra == 'extra' +Provides-Extra: test +Requires-Dist: pytest (>=7.1) ; extra == 'test' +Requires-Dist: pytest-cov (>=3.0) ; extra == 'test' +Requires-Dist: codecov (>=2.1) ; extra == 'test' + +NetworkX +======== + +.. image:: https://github.com/networkx/networkx/workflows/test/badge.svg?tag=networkx-2.8.5 + :target: https://github.com/networkx/networkx/actions?query=branch%3Anetworkx-2.8.5 + +.. image:: https://codecov.io/gh/networkx/networkx/branch/main/graph/badge.svg + :target: https://app.codecov.io/gh/networkx/networkx/branch/main + +.. image:: https://img.shields.io/github/labels/networkx/networkx/Good%20First%20Issue?color=green&label=Contribute%20&style=flat-square + :target: https://github.com/networkx/networkx/issues?q=is%3Aopen+is%3Aissue+label%3A%22Good+First+Issue%22 + + +NetworkX is a Python package for the creation, manipulation, +and study of the structure, dynamics, and functions +of complex networks. 
+ +- **Website (including documentation):** https://networkx.org +- **Mailing list:** https://groups.google.com/forum/#!forum/networkx-discuss +- **Source:** https://github.com/networkx/networkx +- **Bug reports:** https://github.com/networkx/networkx/issues +- **Tutorial:** https://networkx.org/documentation/latest/tutorial.html +- **GitHub Discussions:** https://github.com/networkx/networkx/discussions + +Simple example +-------------- + +Find the shortest path between two nodes in an undirected graph: + +.. code:: pycon + + >>> import networkx as nx + >>> G = nx.Graph() + >>> G.add_edge("A", "B", weight=4) + >>> G.add_edge("B", "D", weight=2) + >>> G.add_edge("A", "C", weight=3) + >>> G.add_edge("C", "D", weight=4) + >>> nx.shortest_path(G, "A", "D", weight="weight") + ['A', 'B', 'D'] + +Install +------- + +Install the latest version of NetworkX:: + + $ pip install networkx + +Install with all optional dependencies:: + + $ pip install networkx[all] + +For additional details, please see `INSTALL.rst`. + +Bugs +---- + +Please report any bugs that you find `here `_. +Or, even better, fork the repository on `GitHub `_ +and create a pull request (PR). We welcome all changes, big or small, and we +will help you make the PR if you are new to `git` (just ask on the issue and/or +see `CONTRIBUTING.rst`). 
+ +License +------- + +Released under the 3-Clause BSD license (see `LICENSE.txt`):: + + Copyright (C) 2004-2022 NetworkX Developers + Aric Hagberg + Dan Schult + Pieter Swart diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/networkx-2.8.5.dist-info/RECORD b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/networkx-2.8.5.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..d106f6898d78c1a6c0a3320945ebc0ff0788aa7c --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/networkx-2.8.5.dist-info/RECORD @@ -0,0 +1,1258 @@ +../../../share/doc/networkx-2.8.5/LICENSE.txt,sha256=qun4CfsaFZLtcun1LfEM-tj-ImuNTSg4X5DdQ0HOcRk,1763 +../../../share/doc/networkx-2.8.5/examples/3d_drawing/README.txt,sha256=s5-t1C9VR7xuGe6I6LoAHyLZypgxt6nacxqhlFV_cq0,22 +../../../share/doc/networkx-2.8.5/examples/3d_drawing/__pycache__/mayavi2_spring.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/3d_drawing/__pycache__/plot_basic.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/3d_drawing/mayavi2_spring.py,sha256=m3CEGHEYxTwuWe4jObonMM7ZS62hdEBvas-SW1UCvrk,934 +../../../share/doc/networkx-2.8.5/examples/3d_drawing/plot_basic.py,sha256=PWG-R4COK5xRUpO9fzuYp7jIRkPqPINe9ThiSas1PA8,1149 +../../../share/doc/networkx-2.8.5/examples/README.txt,sha256=4fcFf8kOy3-lR9Mt5JabLTrR5CJU-TwR0qykp4WJaPs,185 +../../../share/doc/networkx-2.8.5/examples/algorithms/README.txt,sha256=xn-_KUQ8ego4sNw2nrr4axL38uzGmSgO1jiK3kC0_X4,22 +../../../share/doc/networkx-2.8.5/examples/algorithms/WormNet.v3.benchmark.txt,sha256=UvbM0_uQawr_W5rjxhICvH_W8n01FBiX8T-le19ufr8,1346746 +../../../share/doc/networkx-2.8.5/examples/algorithms/__pycache__/plot_beam_search.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/algorithms/__pycache__/plot_betweenness_centrality.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/algorithms/__pycache__/plot_blockmodel.cpython-38.pyc,, 
+../../../share/doc/networkx-2.8.5/examples/algorithms/__pycache__/plot_circuits.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/algorithms/__pycache__/plot_davis_club.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/algorithms/__pycache__/plot_dedensification.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/algorithms/__pycache__/plot_iterated_dynamical_systems.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/algorithms/__pycache__/plot_krackhardt_centrality.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/algorithms/__pycache__/plot_parallel_betweenness.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/algorithms/__pycache__/plot_rcm.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/algorithms/__pycache__/plot_snap.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/algorithms/__pycache__/plot_subgraphs.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/algorithms/hartford_drug.edgelist,sha256=Nwzo8P1bWNq1e_JEodTLFgUCx80bpkg4PhtSd3G8aZk,2335 +../../../share/doc/networkx-2.8.5/examples/algorithms/plot_beam_search.py,sha256=SmksU_kiGquXokBB6midxx9IDFSph9SfZK9FKH4Z6O4,4119 +../../../share/doc/networkx-2.8.5/examples/algorithms/plot_betweenness_centrality.py,sha256=y4AnvUdMdUe39X-gGIGRohIHPwN4oSaf6Y2te7E5OsY,2122 +../../../share/doc/networkx-2.8.5/examples/algorithms/plot_blockmodel.py,sha256=wvdheWrt-KcX2e_zMARdPwOJHlDLKM8H8aCVkIx2gbM,2679 +../../../share/doc/networkx-2.8.5/examples/algorithms/plot_circuits.py,sha256=f56Ct7i_s5ImJ71OMAg8BPqcFYQygdEQnTzrAgl7fW8,3496 +../../../share/doc/networkx-2.8.5/examples/algorithms/plot_davis_club.py,sha256=YrUUnaWm82MEE4qoGeCfmUWmHqV7vpDBckDSmFn5T8U,1201 +../../../share/doc/networkx-2.8.5/examples/algorithms/plot_dedensification.py,sha256=BdXLYidxo-5swuseC8viiIQwNB-lqshQ6EdLU_vQsP4,2250 
+../../../share/doc/networkx-2.8.5/examples/algorithms/plot_iterated_dynamical_systems.py,sha256=AXlEyF2UPtHosV8rZcmL_KjWOMOoDV-2dm7YzOT54Ro,5996 +../../../share/doc/networkx-2.8.5/examples/algorithms/plot_krackhardt_centrality.py,sha256=Ff4MdZyWKlnQ0HZuPinRrlAQlBtW_tOIeXZXn5CBY4w,637 +../../../share/doc/networkx-2.8.5/examples/algorithms/plot_parallel_betweenness.py,sha256=AVQaNwga0QPUrr3qr3g2hOupyKsf-jp08Dvw4gLz7M0,2453 +../../../share/doc/networkx-2.8.5/examples/algorithms/plot_rcm.py,sha256=nu4JylMqrESBo36J9O6dayk5cQjQ147-UISNzvK6r6E,1039 +../../../share/doc/networkx-2.8.5/examples/algorithms/plot_snap.py,sha256=02GOuZxjHHo0h8eO8ErhktIFFw4FFnOx8CVkt8uIaTI,3088 +../../../share/doc/networkx-2.8.5/examples/algorithms/plot_subgraphs.py,sha256=F-3_AjHvqZidxoJfyMIkfmZHkAEoroQrJM9GW7tJeSg,6474 +../../../share/doc/networkx-2.8.5/examples/basic/README.txt,sha256=SZoFiP7cQAXsOc0zCWRT7NsGznY1aiAk-0zD8dXsaPY,12 +../../../share/doc/networkx-2.8.5/examples/basic/__pycache__/plot_properties.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/basic/__pycache__/plot_read_write.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/basic/__pycache__/plot_simple_graph.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/basic/plot_properties.py,sha256=NIoZX3qYpbb1R_mfgrYnldFwe1qOWYQgjfZZGhUHovk,1065 +../../../share/doc/networkx-2.8.5/examples/basic/plot_read_write.py,sha256=AN5KVYbINladQ3IEODapTqT-4hRCteonClbhUEvbUYM,525 +../../../share/doc/networkx-2.8.5/examples/basic/plot_simple_graph.py,sha256=1QGvIhSGRNJAi4lLzpE_W1q4OMzn0DpdCJpbEZgqeUw,1240 +../../../share/doc/networkx-2.8.5/examples/drawing/README.txt,sha256=DoWMcDCC_TTjREqBqc_YMatITP1_2lHp7Rv5IxrM2bA,16 +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_chess_masters.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_custom_node_icons.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_degree.cpython-38.pyc,, 
+../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_directed.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_edge_colormap.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_ego_graph.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_eigenvalues.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_four_grids.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_house_with_colors.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_knuth_miles.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_labels_and_colors.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_multipartite_graph.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_node_colormap.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_rainbow_coloring.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_random_geometric_graph.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_sampson.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_selfloops.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_simple_path.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_spectral_grid.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_tsp.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_unix_email.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/drawing/__pycache__/plot_weighted_graph.cpython-38.pyc,, 
+../../../share/doc/networkx-2.8.5/examples/drawing/chess_masters_WCC.pgn.bz2,sha256=2e-170bYxxtpmUW4mrs4UEnsF89vIfJWJYOk8gYYGew,100224 +../../../share/doc/networkx-2.8.5/examples/drawing/knuth_miles.txt.gz,sha256=e-sV93FBC11m7Rdn-4K_uGoPTNgt3dtkMuyvQ0ovTZU,20317 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_chess_masters.py,sha256=7OZXxlTA8TKyyis7L6yJuqZYNSmzdszmRVNln1BSgrg,4583 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_custom_node_icons.py,sha256=2y_68MamGfIQhpM3YFHSxYqMqzLl4CnhzaqKDKFs4jI,2139 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_degree.py,sha256=US7sGbPQsyVccE3URvirRAHM9t6W_QfWgmkGin2O8Bc,1556 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_directed.py,sha256=Wyzt-aq3seUJVVmiEgPEzXP7YWatFTKDHNc3LvVtwwQ,1108 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_edge_colormap.py,sha256=3cTm9jX3vh635lGtLMmjwR2FJHlxa87WVK0GoU80zxA,441 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_ego_graph.py,sha256=E_LRplVi2knRK9hDuO1z2DvIfvV3uTMp0t4G2kVuDa0,910 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_eigenvalues.py,sha256=cu1VnGCbPgotHgqMzoixOdGNKzZ0rIVn9WjKKweuRhw,544 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_four_grids.py,sha256=P6LJaQLA-9_78LVyHkxdRHlLHj6s-vL21NkOiINjsxk,1054 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_house_with_colors.py,sha256=Bs1z9SqsQ5DMV2a8X8rXU6qpSyUyqvnH-O7y5nBsOks,665 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_knuth_miles.py,sha256=2i4voltKotkkbn_jiiHYaCxAkbJEliMpkQivVL8aKw4,4111 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_labels_and_colors.py,sha256=GtcCrvzGiuyRdeN2_gCH6WCpvtUgUGU2EXqXtMVOkRE,1243 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_multipartite_graph.py,sha256=0X0-mrxVEJD51i16mc5mLTQL0z6ix82NdYQs2ha_ZWs,995 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_node_colormap.py,sha256=Lgc9GuyL9F1sBtcA5pUMnrjdIbPoO3EESyAjLn4e5xg,288 
+../../../share/doc/networkx-2.8.5/examples/drawing/plot_rainbow_coloring.py,sha256=pjlo110knte-vQhfryAIQXTlQHBgL8jQBdij9KNZApk,2172 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_random_geometric_graph.py,sha256=HVfm2eKbzs5NcV0p0DxYJmpQ0V_z2ayykrWG3F1vftM,938 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_sampson.py,sha256=FYwBC6iCL6chHnnKCRgQk1WwkL2CcEBs9oq1RWBBCgA,1228 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_selfloops.py,sha256=x-1Ma-TlydzoU7VWahWynKMFACn1OcuR_Uwzk6LaZog,753 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_simple_path.py,sha256=QA26fUWDPmJT_4s1Mo-wG9LWKTyCXPIe-0mRJYXAW5w,252 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_spectral_grid.py,sha256=JhYZQcyrsKHvJfQR_9X4pBaKS7kaM0qvGFzbunqQzq0,1592 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_tsp.py,sha256=g-0NDgIPXQANaqfS3GufbvAoiaS0Y4s6O4TWSjQk5Vc,1301 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_unix_email.py,sha256=tZJBr60SvQSYf1OcrHGi6Oxwmev_skQzUsLUwuGb8wM,1968 +../../../share/doc/networkx-2.8.5/examples/drawing/plot_weighted_graph.py,sha256=HqH3lkhg62XD6ytrhi-nhBCiB6i0tcBv30rwxcQfIqw,1124 +../../../share/doc/networkx-2.8.5/examples/drawing/unix_email.mbox,sha256=i20mxjWonqmAbgbr1qBNIG9BTb3qQt0WQ7YLfHUwu9U,1709 +../../../share/doc/networkx-2.8.5/examples/graph/README.txt,sha256=etRvcBQG30Fb6dIiQZaYHbnl1a6Tz_peks4MqXKwRhg,12 +../../../share/doc/networkx-2.8.5/examples/graph/__pycache__/plot_degree_sequence.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/graph/__pycache__/plot_erdos_renyi.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/graph/__pycache__/plot_expected_degree_sequence.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/graph/__pycache__/plot_football.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/graph/__pycache__/plot_karate_club.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/graph/__pycache__/plot_morse_trie.cpython-38.pyc,, 
+../../../share/doc/networkx-2.8.5/examples/graph/__pycache__/plot_napoleon_russian_campaign.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/graph/__pycache__/plot_roget.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/graph/__pycache__/plot_words.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/graph/plot_degree_sequence.py,sha256=TyzJyRmLHsaXEJ8IivYF416SggsM8lZE-zYxEdkngfo,799 +../../../share/doc/networkx-2.8.5/examples/graph/plot_erdos_renyi.py,sha256=lvwPtraq5q-vuuOyrlZFYABXfbfM47dR89S2k1h9GZ8,841 +../../../share/doc/networkx-2.8.5/examples/graph/plot_expected_degree_sequence.py,sha256=DWIyy0S3eBZksKAdk9dL9unIa1Cfw0rQiTcm8if0Qak,496 +../../../share/doc/networkx-2.8.5/examples/graph/plot_football.py,sha256=Dj0-gyMVUw80vyMIOqA5sFBzddL8t1R7BzIFoD1S1o8,1171 +../../../share/doc/networkx-2.8.5/examples/graph/plot_karate_club.py,sha256=6H2R2SRxZ6ZpCtIC7CUl0fhgdJuorZpo4NOok7ThFYk,494 +../../../share/doc/networkx-2.8.5/examples/graph/plot_morse_trie.py,sha256=w8sFNW1XaVm6Q4XBahxl2IArm3fVyUwTHQM2ZA-AjbU,2965 +../../../share/doc/networkx-2.8.5/examples/graph/plot_napoleon_russian_campaign.py,sha256=WS4ETV1ESBi8tCcPp6pe0uXvFiFht_xNx-hgK1TcTn0,2901 +../../../share/doc/networkx-2.8.5/examples/graph/plot_roget.py,sha256=jHuZLofFw7gTwxzMiNtwewnn3nh4SMeW8SvgabnSnx0,2126 +../../../share/doc/networkx-2.8.5/examples/graph/plot_words.py,sha256=Q-4VlEdCya3ik8kLgp2H4vIk5GP3hmWkH6Y9cLw0LQM,2683 +../../../share/doc/networkx-2.8.5/examples/graph/roget_dat.txt.gz,sha256=XhxtczVtfkcdZxE4P7KWs9ENRy3mvub739cXKdkgBk0,15758 +../../../share/doc/networkx-2.8.5/examples/graph/words_dat.txt.gz,sha256=nuZl5rQHvOTxrOvZjbLg0tIHXvbYGSIB5bwxuXZXcQA,33695 +../../../share/doc/networkx-2.8.5/examples/subclass/README.txt,sha256=9DLXetYO600sLeOY_fFc8Msnli8-qQrVbGB2f5gP20g,18 +../../../share/doc/networkx-2.8.5/examples/subclass/__pycache__/plot_antigraph.cpython-38.pyc,, +../../../share/doc/networkx-2.8.5/examples/subclass/__pycache__/plot_printgraph.cpython-38.pyc,, 
+../../../share/doc/networkx-2.8.5/examples/subclass/plot_antigraph.py,sha256=yImsNH9uoXJLAe8v37cDr0bR9DtjvQTESXozFEse12w,6023 +../../../share/doc/networkx-2.8.5/examples/subclass/plot_printgraph.py,sha256=-IZlxW9m8nRfqS16AAhWP5u_YGPKq005MjL0diyBcc8,2292 +networkx-2.8.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +networkx-2.8.5.dist-info/LICENSE.txt,sha256=qun4CfsaFZLtcun1LfEM-tj-ImuNTSg4X5DdQ0HOcRk,1763 +networkx-2.8.5.dist-info/METADATA,sha256=3ax0xIRcItYCi7YqISxtFJWZuW1p_3v_SIsDy1y2EBk,5010 +networkx-2.8.5.dist-info/RECORD,, +networkx-2.8.5.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx-2.8.5.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +networkx-2.8.5.dist-info/top_level.txt,sha256=s3Mk-7KOlu-kD39w8Xg_KXoP5Z_MVvgB-upkyuOE4Hk,9 +networkx/__init__.py,sha256=MovMyewNUEF1O6pE02yVCvVjVdZntb0rksfnBC1tgiY,2939 +networkx/__pycache__/__init__.cpython-38.pyc,, +networkx/__pycache__/conftest.cpython-38.pyc,, +networkx/__pycache__/convert.cpython-38.pyc,, +networkx/__pycache__/convert_matrix.cpython-38.pyc,, +networkx/__pycache__/exception.cpython-38.pyc,, +networkx/__pycache__/lazy_imports.cpython-38.pyc,, +networkx/__pycache__/relabel.cpython-38.pyc,, +networkx/algorithms/__init__.py,sha256=eJ_V2reAkgQo0cvUYnADoZt5iTe0fhe9he2zrA6pGEs,6364 +networkx/algorithms/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/__pycache__/asteroidal.cpython-38.pyc,, +networkx/algorithms/__pycache__/boundary.cpython-38.pyc,, +networkx/algorithms/__pycache__/bridges.cpython-38.pyc,, +networkx/algorithms/__pycache__/chains.cpython-38.pyc,, +networkx/algorithms/__pycache__/chordal.cpython-38.pyc,, +networkx/algorithms/__pycache__/clique.cpython-38.pyc,, +networkx/algorithms/__pycache__/cluster.cpython-38.pyc,, +networkx/algorithms/__pycache__/communicability_alg.cpython-38.pyc,, +networkx/algorithms/__pycache__/core.cpython-38.pyc,, +networkx/algorithms/__pycache__/covering.cpython-38.pyc,, 
+networkx/algorithms/__pycache__/cuts.cpython-38.pyc,, +networkx/algorithms/__pycache__/cycles.cpython-38.pyc,, +networkx/algorithms/__pycache__/d_separation.cpython-38.pyc,, +networkx/algorithms/__pycache__/dag.cpython-38.pyc,, +networkx/algorithms/__pycache__/distance_measures.cpython-38.pyc,, +networkx/algorithms/__pycache__/distance_regular.cpython-38.pyc,, +networkx/algorithms/__pycache__/dominance.cpython-38.pyc,, +networkx/algorithms/__pycache__/dominating.cpython-38.pyc,, +networkx/algorithms/__pycache__/efficiency_measures.cpython-38.pyc,, +networkx/algorithms/__pycache__/euler.cpython-38.pyc,, +networkx/algorithms/__pycache__/graph_hashing.cpython-38.pyc,, +networkx/algorithms/__pycache__/graphical.cpython-38.pyc,, +networkx/algorithms/__pycache__/hierarchy.cpython-38.pyc,, +networkx/algorithms/__pycache__/hybrid.cpython-38.pyc,, +networkx/algorithms/__pycache__/isolate.cpython-38.pyc,, +networkx/algorithms/__pycache__/link_prediction.cpython-38.pyc,, +networkx/algorithms/__pycache__/lowest_common_ancestors.cpython-38.pyc,, +networkx/algorithms/__pycache__/matching.cpython-38.pyc,, +networkx/algorithms/__pycache__/mis.cpython-38.pyc,, +networkx/algorithms/__pycache__/moral.cpython-38.pyc,, +networkx/algorithms/__pycache__/non_randomness.cpython-38.pyc,, +networkx/algorithms/__pycache__/planar_drawing.cpython-38.pyc,, +networkx/algorithms/__pycache__/planarity.cpython-38.pyc,, +networkx/algorithms/__pycache__/polynomials.cpython-38.pyc,, +networkx/algorithms/__pycache__/reciprocity.cpython-38.pyc,, +networkx/algorithms/__pycache__/regular.cpython-38.pyc,, +networkx/algorithms/__pycache__/richclub.cpython-38.pyc,, +networkx/algorithms/__pycache__/similarity.cpython-38.pyc,, +networkx/algorithms/__pycache__/simple_paths.cpython-38.pyc,, +networkx/algorithms/__pycache__/smallworld.cpython-38.pyc,, +networkx/algorithms/__pycache__/smetric.cpython-38.pyc,, +networkx/algorithms/__pycache__/sparsifiers.cpython-38.pyc,, 
+networkx/algorithms/__pycache__/structuralholes.cpython-38.pyc,, +networkx/algorithms/__pycache__/summarization.cpython-38.pyc,, +networkx/algorithms/__pycache__/swap.cpython-38.pyc,, +networkx/algorithms/__pycache__/threshold.cpython-38.pyc,, +networkx/algorithms/__pycache__/tournament.cpython-38.pyc,, +networkx/algorithms/__pycache__/triads.cpython-38.pyc,, +networkx/algorithms/__pycache__/vitality.cpython-38.pyc,, +networkx/algorithms/__pycache__/voronoi.cpython-38.pyc,, +networkx/algorithms/__pycache__/wiener.cpython-38.pyc,, +networkx/algorithms/approximation/__init__.py,sha256=hwi6EOHU1OJEDOxYr8USLexbUOubH76aiU9P4WRDZrw,1197 +networkx/algorithms/approximation/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/approximation/__pycache__/clique.cpython-38.pyc,, +networkx/algorithms/approximation/__pycache__/clustering_coefficient.cpython-38.pyc,, +networkx/algorithms/approximation/__pycache__/connectivity.cpython-38.pyc,, +networkx/algorithms/approximation/__pycache__/distance_measures.cpython-38.pyc,, +networkx/algorithms/approximation/__pycache__/dominating_set.cpython-38.pyc,, +networkx/algorithms/approximation/__pycache__/kcomponents.cpython-38.pyc,, +networkx/algorithms/approximation/__pycache__/matching.cpython-38.pyc,, +networkx/algorithms/approximation/__pycache__/maxcut.cpython-38.pyc,, +networkx/algorithms/approximation/__pycache__/ramsey.cpython-38.pyc,, +networkx/algorithms/approximation/__pycache__/steinertree.cpython-38.pyc,, +networkx/algorithms/approximation/__pycache__/traveling_salesman.cpython-38.pyc,, +networkx/algorithms/approximation/__pycache__/treewidth.cpython-38.pyc,, +networkx/algorithms/approximation/__pycache__/vertex_cover.cpython-38.pyc,, +networkx/algorithms/approximation/clique.py,sha256=t-G-wOQ3SFcCOWSrmH8pKmPhtnp2UouXM4HctPshVNc,7181 +networkx/algorithms/approximation/clustering_coefficient.py,sha256=mfgwpJN1Jk9da7KLBOMNqXiU187r3WoUQLxiTwlI9gw,2009 
+networkx/algorithms/approximation/connectivity.py,sha256=lfjwwTlS_JC--GJBC4Lu65XO50Jz3vpKwhAqTE2lwEs,12716 +networkx/algorithms/approximation/distance_measures.py,sha256=6q6J3VRqxAkqqh_d519mItyjfhdgTqS8xePHo4D3ZWI,5550 +networkx/algorithms/approximation/dominating_set.py,sha256=1WAJnJ8AFyFSNO7TLAsJ0fFIVcIPoYhupRDhcJsDpb0,4143 +networkx/algorithms/approximation/kcomponents.py,sha256=fsRl3wBAe5LTQGftJ71OFIMN4k1XGH9JZlsOUJC03QE,13223 +networkx/algorithms/approximation/matching.py,sha256=iN-ofr7Gs1XkCHSAxc5HkwSrHkMyUY7i7bTPQkIuR-o,1155 +networkx/algorithms/approximation/maxcut.py,sha256=DD1mktczrwD79NR3A0bAegXWSld_47rpQfJtZr09Mdk,3594 +networkx/algorithms/approximation/ramsey.py,sha256=mD3edR4mp8qyjV1oFp6vV3yX0w7LbOuOsj94LmRNDYA,1339 +networkx/algorithms/approximation/steinertree.py,sha256=X0IpQh3972Wl1qubybz3sNQb4BW6LJ1uidb9ZUEQNXU,3376 +networkx/algorithms/approximation/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/algorithms/approximation/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/approximation/tests/__pycache__/test_approx_clust_coeff.cpython-38.pyc,, +networkx/algorithms/approximation/tests/__pycache__/test_clique.cpython-38.pyc,, +networkx/algorithms/approximation/tests/__pycache__/test_connectivity.cpython-38.pyc,, +networkx/algorithms/approximation/tests/__pycache__/test_distance_measures.cpython-38.pyc,, +networkx/algorithms/approximation/tests/__pycache__/test_dominating_set.cpython-38.pyc,, +networkx/algorithms/approximation/tests/__pycache__/test_kcomponents.cpython-38.pyc,, +networkx/algorithms/approximation/tests/__pycache__/test_matching.cpython-38.pyc,, +networkx/algorithms/approximation/tests/__pycache__/test_maxcut.cpython-38.pyc,, +networkx/algorithms/approximation/tests/__pycache__/test_ramsey.cpython-38.pyc,, +networkx/algorithms/approximation/tests/__pycache__/test_steinertree.cpython-38.pyc,, +networkx/algorithms/approximation/tests/__pycache__/test_traveling_salesman.cpython-38.pyc,, 
+networkx/algorithms/approximation/tests/__pycache__/test_treewidth.cpython-38.pyc,, +networkx/algorithms/approximation/tests/__pycache__/test_vertex_cover.cpython-38.pyc,, +networkx/algorithms/approximation/tests/test_approx_clust_coeff.py,sha256=PGOVEKf2BcJu1vvjZrgTlBBpwM8V6t7yCANjyS9nWF0,1171 +networkx/algorithms/approximation/tests/test_clique.py,sha256=JZ_ja03aVU7vnZ42Joy1ze0vjdcm_CnDhD96Z4W_Dcc,3022 +networkx/algorithms/approximation/tests/test_connectivity.py,sha256=gDG6tsgP3ux7Dgu0x7r0nso7_yknIxicV42Gq0It5pc,5952 +networkx/algorithms/approximation/tests/test_distance_measures.py,sha256=GSyupA_jqSc_pLPSMnZFNcBgZc8-KFWgt6Q7uFegTqg,2024 +networkx/algorithms/approximation/tests/test_dominating_set.py,sha256=Rtdsu-0KjZMS2Qj4fd3nJgHDHxASDrwS907_TyaHUVw,2296 +networkx/algorithms/approximation/tests/test_kcomponents.py,sha256=MCQ1tNiFQrl0-MutM1N_Q6QHYEWCvDQ6cRM_Y7V3dDw,9213 +networkx/algorithms/approximation/tests/test_matching.py,sha256=nitZncaM0605kaIu1NO6_5TFV2--nohUCO46XTD_lnM,186 +networkx/algorithms/approximation/tests/test_maxcut.py,sha256=R0tx_0mP0vWKX564j4qoiljnG3Mn0XhGAhFRYOZEcHM,2430 +networkx/algorithms/approximation/tests/test_ramsey.py,sha256=2oJFufDcKZNFvRVmt66nLvuHC95c4b8ey-nrANKOluw,1142 +networkx/algorithms/approximation/tests/test_steinertree.py,sha256=g4Wvx6u8HNa8Q5XY-R_KCbY8vcKXMuOz8lC6zAJ7Vv0,3145 +networkx/algorithms/approximation/tests/test_traveling_salesman.py,sha256=LSAquDfYaz_MxM1Tb4uYM_XNCh8_tJINcUSxFOFrJ_U,30699 +networkx/algorithms/approximation/tests/test_treewidth.py,sha256=1AwP3bgaEG4JrrlG0cbjMcxirlKH4vk-weaP2isdDXM,8949 +networkx/algorithms/approximation/tests/test_vertex_cover.py,sha256=FobHNhG9CAMeB_AOEprUs-7XQdPoc1YvfmXhozDZ8pM,1942 +networkx/algorithms/approximation/traveling_salesman.py,sha256=eg2zjSJimcv_pnJrdcObmEFJWN48vC-lMto1v74-VKs,54262 +networkx/algorithms/approximation/treewidth.py,sha256=YekIUcogCMMvy2zR9aQsA866A5nSFr0ySTEzP7gDCc0,8019 
+networkx/algorithms/approximation/vertex_cover.py,sha256=GroE2Vc_Ieq4kUpj-pDQqlCTR-0PGqcZ9T33qV77xZ0,2741 +networkx/algorithms/assortativity/__init__.py,sha256=ov3HRRbeYB_6Qezvxp1OTl77GBpw-EWkWGUzgfT8G9c,294 +networkx/algorithms/assortativity/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/assortativity/__pycache__/connectivity.cpython-38.pyc,, +networkx/algorithms/assortativity/__pycache__/correlation.cpython-38.pyc,, +networkx/algorithms/assortativity/__pycache__/mixing.cpython-38.pyc,, +networkx/algorithms/assortativity/__pycache__/neighbor_degree.cpython-38.pyc,, +networkx/algorithms/assortativity/__pycache__/pairs.cpython-38.pyc,, +networkx/algorithms/assortativity/connectivity.py,sha256=eR-PMTFyY8fktn_nUZ2LLblEsoktAMFyGmdN_eXQfVc,4815 +networkx/algorithms/assortativity/correlation.py,sha256=KOOfrFosgyeTOz3zKeqh31o1f1WgssbESCmIWP1v3qc,8529 +networkx/algorithms/assortativity/mixing.py,sha256=VhbJxGlBRa_KrGmxfq9da5mibxnbg5Fl1PODfaWIwPo,9094 +networkx/algorithms/assortativity/neighbor_degree.py,sha256=qe33xcO9SNMNTbb3c9e0q-whYDcVl_O5fi2Pzke2p5s,5243 +networkx/algorithms/assortativity/pairs.py,sha256=hleMyWgHoy8hXV8ZOIqGJ_ChZQ2oVlukYfPvkaT9SQg,3297 +networkx/algorithms/assortativity/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/algorithms/assortativity/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/assortativity/tests/__pycache__/base_test.cpython-38.pyc,, +networkx/algorithms/assortativity/tests/__pycache__/test_connectivity.cpython-38.pyc,, +networkx/algorithms/assortativity/tests/__pycache__/test_correlation.cpython-38.pyc,, +networkx/algorithms/assortativity/tests/__pycache__/test_mixing.cpython-38.pyc,, +networkx/algorithms/assortativity/tests/__pycache__/test_neighbor_degree.cpython-38.pyc,, +networkx/algorithms/assortativity/tests/__pycache__/test_pairs.cpython-38.pyc,, +networkx/algorithms/assortativity/tests/base_test.py,sha256=DjNszEwQzYDPXvEIHTyVAExE6WoUEWtR2kgz_ATfgWo,2724 
+networkx/algorithms/assortativity/tests/test_connectivity.py,sha256=HQGuZTnGab1yWaKLilXwwIg-GgSR7alMcH-0CcLXszE,5092 +networkx/algorithms/assortativity/tests/test_correlation.py,sha256=qS0Nv7rqKEYDrsrGkOfpm49jXi-D7YKs39dwkcdLxCM,4527 +networkx/algorithms/assortativity/tests/test_mixing.py,sha256=8zS_xTryZ7_MPmsbRgcaeiPfpnwzhqH8TbYS4f_pEbI,6903 +networkx/algorithms/assortativity/tests/test_neighbor_degree.py,sha256=AFlcIF0CoTD2F_j5i1AHeOFJzvyEn7Z4ww2axdT0D3E,3706 +networkx/algorithms/assortativity/tests/test_pairs.py,sha256=t05qP_-gfkbiR6aTLtE1owYl9otBSsuJcRkuZsa63UQ,3008 +networkx/algorithms/asteroidal.py,sha256=rdFr50v0T9t5VtNN1z74NQ0COgtstlBR4W1BTPoatC8,5810 +networkx/algorithms/bipartite/__init__.py,sha256=P6prxqUpq0T1xikH3DLNggcGxEEf6gu6z8tcwd3Pbq0,3768 +networkx/algorithms/bipartite/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/bipartite/__pycache__/basic.cpython-38.pyc,, +networkx/algorithms/bipartite/__pycache__/centrality.cpython-38.pyc,, +networkx/algorithms/bipartite/__pycache__/cluster.cpython-38.pyc,, +networkx/algorithms/bipartite/__pycache__/covering.cpython-38.pyc,, +networkx/algorithms/bipartite/__pycache__/edgelist.cpython-38.pyc,, +networkx/algorithms/bipartite/__pycache__/generators.cpython-38.pyc,, +networkx/algorithms/bipartite/__pycache__/matching.cpython-38.pyc,, +networkx/algorithms/bipartite/__pycache__/matrix.cpython-38.pyc,, +networkx/algorithms/bipartite/__pycache__/projection.cpython-38.pyc,, +networkx/algorithms/bipartite/__pycache__/redundancy.cpython-38.pyc,, +networkx/algorithms/bipartite/__pycache__/spectral.cpython-38.pyc,, +networkx/algorithms/bipartite/basic.py,sha256=VY0cmgU2KHSklcvJ8xvZAZLzOk7jan3HZ2dg0TMuf4Y,8220 +networkx/algorithms/bipartite/centrality.py,sha256=cabaDLe_RxmYKd3Aa93PeRdH7TvlKKQsBMbSp9kaRH4,8412 +networkx/algorithms/bipartite/cluster.py,sha256=W0ZfqKOwz-UazQ6niOkr4S5xlcvsvVSMFHhFV96EmqE,6845 +networkx/algorithms/bipartite/covering.py,sha256=-_fHiWsMuzF8KrmpeOXFWiQctkKq2jiIlbHIN0u_HdM,2091 
+networkx/algorithms/bipartite/edgelist.py,sha256=OQ8hTszNynjYVbhVS0ilgobEPvcIIhisivu9B1cTGw8,11198 +networkx/algorithms/bipartite/generators.py,sha256=pnyIACA6E5BXRGea7KvgXJYWzrcAQxRqWwTn2hYmfcc,19961 +networkx/algorithms/bipartite/matching.py,sha256=l7sahM0AhZ0GGmYj0m1xiRGCcd4Dfr-pc07pD6SL6GY,21273 +networkx/algorithms/bipartite/matrix.py,sha256=GJWEfVMwqfet589UNvDaZnJVdvQQYcVFSNqGu80CnUI,6382 +networkx/algorithms/bipartite/projection.py,sha256=XR81J4o7eUiBCMZV3JsqXq4tmE1ZYyVZEdE6t8xvMxY,17304 +networkx/algorithms/bipartite/redundancy.py,sha256=U6oV3JS6hxoMuqSznTr7Wp3iH90pkzXA5kuVWTutYzU,3479 +networkx/algorithms/bipartite/spectral.py,sha256=DqX2CJNvUIdEwblNbMIhCBb3LsEP5Gvk-yRytP0vlg4,1890 +networkx/algorithms/bipartite/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/algorithms/bipartite/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/bipartite/tests/__pycache__/test_basic.cpython-38.pyc,, +networkx/algorithms/bipartite/tests/__pycache__/test_centrality.cpython-38.pyc,, +networkx/algorithms/bipartite/tests/__pycache__/test_cluster.cpython-38.pyc,, +networkx/algorithms/bipartite/tests/__pycache__/test_covering.cpython-38.pyc,, +networkx/algorithms/bipartite/tests/__pycache__/test_edgelist.cpython-38.pyc,, +networkx/algorithms/bipartite/tests/__pycache__/test_generators.cpython-38.pyc,, +networkx/algorithms/bipartite/tests/__pycache__/test_matching.cpython-38.pyc,, +networkx/algorithms/bipartite/tests/__pycache__/test_matrix.cpython-38.pyc,, +networkx/algorithms/bipartite/tests/__pycache__/test_project.cpython-38.pyc,, +networkx/algorithms/bipartite/tests/__pycache__/test_redundancy.cpython-38.pyc,, +networkx/algorithms/bipartite/tests/__pycache__/test_spectral_bipartivity.cpython-38.pyc,, +networkx/algorithms/bipartite/tests/test_basic.py,sha256=gzbtsQqPi85BznX5REdGBBJVyr9aH4nO06c3eEI4634,4291 +networkx/algorithms/bipartite/tests/test_centrality.py,sha256=gIXOB8ZP-uO9r5Cp23Yng_C4o-qCUKM_48GzFrlCWQ0,5901 
+networkx/algorithms/bipartite/tests/test_cluster.py,sha256=8aJH5Ac8QbuknAA65w0rXUHwAjqqEImEYUyrt5pboM4,2809 +networkx/algorithms/bipartite/tests/test_covering.py,sha256=SbnZQTZY3jjt9Ncv--Q0tG7ywATk4vem2FPx7rV_Ixg,1229 +networkx/algorithms/bipartite/tests/test_edgelist.py,sha256=1_9UI5pv6qbD696ibnmSzf1rVLmWeYrRohZ_Xazg3Yg,6486 +networkx/algorithms/bipartite/tests/test_generators.py,sha256=GLMThTKIfZ96NwTxIL0P0o0OAESZFfnySRkRjtKhao8,12794 +networkx/algorithms/bipartite/tests/test_matching.py,sha256=xDP9qjK1xlfu1eVFGAOi6sk_w1AgWvMp5p2vBmz5J1w,11967 +networkx/algorithms/bipartite/tests/test_matrix.py,sha256=EoqQKTMcPPPPUZYTzc-AAtl5F77qT0X3FI3E1tYppxM,2900 +networkx/algorithms/bipartite/tests/test_project.py,sha256=Hx6P2NQII1O9-cF3GgHqfIZxUfyNjUtZ7i5-beAu4mM,14714 +networkx/algorithms/bipartite/tests/test_redundancy.py,sha256=F6z_h713fkLOAEhR_4LXWaRdP1amduCQYiVESGml61A,785 +networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py,sha256=HZr6gYzQEpMTjnm7IaIne3lN9yr7pkKwYQXh8ljj9SY,2359 +networkx/algorithms/boundary.py,sha256=UXpJDuXRvEv6GDY1BFULHyPJJK0rXx-K5XN491KYpgg,4695 +networkx/algorithms/bridges.py,sha256=a8WCc02kWwhu3F1POie0Jzhlo0yH80MlOIl9DjJq0Ws,5886 +networkx/algorithms/centrality/__init__.py,sha256=SQty4JnRqEKKomu9sE99VcVTouO5A_B0QUUicUIbY60,533 +networkx/algorithms/centrality/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/betweenness.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/betweenness_subset.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/closeness.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/current_flow_betweenness.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/current_flow_betweenness_subset.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/current_flow_closeness.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/degree_alg.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/dispersion.cpython-38.pyc,, 
+networkx/algorithms/centrality/__pycache__/eigenvector.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/flow_matrix.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/group.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/harmonic.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/katz.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/load.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/percolation.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/reaching.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/second_order.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/subgraph_alg.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/trophic.cpython-38.pyc,, +networkx/algorithms/centrality/__pycache__/voterank_alg.cpython-38.pyc,, +networkx/algorithms/centrality/betweenness.py,sha256=em6GYnXMzvu1PdX0roN1eAIUTtaw6r8cHDvH6156_o8,14324 +networkx/algorithms/centrality/betweenness_subset.py,sha256=k7RNfHSR-2ZmK3UhhHD0ExibXGxjp8Iaa_BfEPCphIg,9655 +networkx/algorithms/centrality/closeness.py,sha256=2caNHg1O4ZnbmX-GOSBYDbsY2yMvCzGRj0wl1f3Lu10,10201 +networkx/algorithms/centrality/current_flow_betweenness.py,sha256=R_QKBxJuEN8jMPd0b-rbGrKfIowQ1Tk4lfhW-8RlhOM,11766 +networkx/algorithms/centrality/current_flow_betweenness_subset.py,sha256=1x3rUl2FdtWVdvY8uWGh_yme6Eyy4kamK40jApZqPwI,7976 +networkx/algorithms/centrality/current_flow_closeness.py,sha256=wqSzFa1CRsiJyqr5M0DpJXtfSPyxqqM9cIwpg5lfbao,3316 +networkx/algorithms/centrality/degree_alg.py,sha256=P1wQno5RrAK0jnZm9ZDO3UBcAoNIoygEmfCXURmO7Z4,3239 +networkx/algorithms/centrality/dispersion.py,sha256=WzE9_ECrM7K0QGTqgwTJJ-cvU2c-D41FMlZ8OYNr9Sg,3318 +networkx/algorithms/centrality/eigenvector.py,sha256=4NDbPe7NRS075XW4WwsJnZcuNngYlVxmGKjWT1-H_JU,8150 +networkx/algorithms/centrality/flow_matrix.py,sha256=GoXCdw0Cno58Fgv3wdfGJYQ2FkMiUIUI03486OyvE44,3919 
+networkx/algorithms/centrality/group.py,sha256=VY7FcOQegFhCgEZ3mOmAGaAymBEWCntxYB6qriYjNt0,27731 +networkx/algorithms/centrality/harmonic.py,sha256=CbLCDYB54B-YqCGgEBsDLkC03SW-s0KitYhsbIN4J88,2589 +networkx/algorithms/centrality/katz.py,sha256=_FUTkTRHF73MdMkIBIumGndSF24XnoFNjSJMN-5nVOc,10674 +networkx/algorithms/centrality/load.py,sha256=2FAn5AO-KfWlkfTxcMJ0z9XDr2dmFjCIafsLJvaCyiQ,6801 +networkx/algorithms/centrality/percolation.py,sha256=KEs6W66WxXQaWlboYLziVhpVO_bGt4iSP4MTj3t2AA4,4088 +networkx/algorithms/centrality/reaching.py,sha256=JB4NXxR8QBiACGNS3ACJkyFsM1jEHumm9O5EXc5kKXI,6947 +networkx/algorithms/centrality/second_order.py,sha256=v7GhTeIRcRPevAc89MPNjkUp8CnTRfOJhNKQf4AGHlg,4728 +networkx/algorithms/centrality/subgraph_alg.py,sha256=3T8X153DaTttL7NSeZXuo1Vn8nC1rzc-tXgtoiEwdcA,9506 +networkx/algorithms/centrality/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/algorithms/centrality/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_betweenness_centrality.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_betweenness_centrality_subset.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_closeness_centrality.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_current_flow_betweenness_centrality.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_current_flow_betweenness_centrality_subset.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_current_flow_closeness.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_degree_centrality.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_dispersion.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_eigenvector_centrality.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_group.cpython-38.pyc,, 
+networkx/algorithms/centrality/tests/__pycache__/test_harmonic_centrality.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_katz_centrality.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_load_centrality.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_percolation_centrality.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_reaching.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_second_order_centrality.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_subgraph.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_trophic.cpython-38.pyc,, +networkx/algorithms/centrality/tests/__pycache__/test_voterank.cpython-38.pyc,, +networkx/algorithms/centrality/tests/test_betweenness_centrality.py,sha256=pKoPAP1hnQSgrOxYeW5-LdUiFDANiwTn_NdOdgccbo8,26795 +networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py,sha256=wW2PkIEQe-DQu2lCnRchlch2Jl2oh9stOESUbRM_gDM,8388 +networkx/algorithms/centrality/tests/test_closeness_centrality.py,sha256=XWZivyLjxYlF41U4ktUmvULC2PMvxKs2U6BHDXRZVdE,10209 +networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py,sha256=Wvu6wi3BPpsJPLyleV0OQhK_c7W7JTOSfGZCeYqzYPs,7204 +networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py,sha256=JfRGgPuiF-vJu5fc2_pcJYREEboxcK_dmy-np39c4Aw,5839 +networkx/algorithms/centrality/tests/test_current_flow_closeness.py,sha256=E5LdZVJL2KNbfPeBbAVGOLCoE53ZaWTFqx4GdjL2pbg,1153 +networkx/algorithms/centrality/tests/test_degree_centrality.py,sha256=EKduYez3hTUWixAW0NN89l_a7A9j3XhF9ZYCOG4QKls,4106 +networkx/algorithms/centrality/tests/test_dispersion.py,sha256=YB67oQmolVh6PgEvrXApHrOJlK-f1TUGKPymSk6j2WE,1605 +networkx/algorithms/centrality/tests/test_eigenvector_centrality.py,sha256=BzZWTPAKHoWo0GnKfG2OCVuAzqK8djsZ44DSCHQxazY,4634 
+networkx/algorithms/centrality/tests/test_group.py,sha256=YmWifoTgw2gSS5BnA9G2T_Voauk_WG6v90JrZEt-Kjk,8686 +networkx/algorithms/centrality/tests/test_harmonic_centrality.py,sha256=wYP0msmB5hh5OMIxPl9t0G4QSpG3Brxw98Kh9BrRoag,3658 +networkx/algorithms/centrality/tests/test_katz_centrality.py,sha256=hI2uNM3_LJhlEbWbiq4iB6L_NsTt_6XCfI6jl9yG6ik,11247 +networkx/algorithms/centrality/tests/test_load_centrality.py,sha256=eOBgwPIyaShzibQ61jEQNlIpEd8-Kh6iauRmmUupyO4,11080 +networkx/algorithms/centrality/tests/test_percolation_centrality.py,sha256=JRuGdrzHwhvsjCe2YKq7povPVWsanKOAXIia0_-KfCU,2699 +networkx/algorithms/centrality/tests/test_reaching.py,sha256=RxNFfPsMfbYpPLZcg2RWWKMWKqE9MFcN0JvjCnOgpKA,3865 +networkx/algorithms/centrality/tests/test_second_order_centrality.py,sha256=xqfVYRYPSv7x0AwUFlkoE1_m8xxG60koN-ychM6lrwE,1921 +networkx/algorithms/centrality/tests/test_subgraph.py,sha256=vhE9Uh-_Hlk49k-ny6ORHCgqk7LWH8OHIYOEYM96uz0,3729 +networkx/algorithms/centrality/tests/test_trophic.py,sha256=AzV6rwcTa4b4tcenoKh95o6VF-z7w75l81ZOdhhi6yE,8705 +networkx/algorithms/centrality/tests/test_voterank.py,sha256=7sNbtXv3578jKtCTPY4LTwR0LUIQq1u_qcWL5Lqm1Kw,1592 +networkx/algorithms/centrality/trophic.py,sha256=3wjkoPEWEgHXHp1i_t08sq4M4JNTGtc8iybNK_rPliE,4549 +networkx/algorithms/centrality/voterank_alg.py,sha256=OllLOtaPLPoBpbWobUXKS9UrGGKSy7tfFwUdr6IoR5I,3191 +networkx/algorithms/chains.py,sha256=NFjIqQYheEmX0Hwyk9bBt3aCQ2WSWe2eMODGXKZ0cGo,6769 +networkx/algorithms/chordal.py,sha256=D71-vEWs8-I096n_vpUuONVIHcLxNQhRkMQS8zhKcgI,14376 +networkx/algorithms/clique.py,sha256=pEBXbMVV2db69hHYXeux9IJwhzIgiyqwBu06jAmvuJs,26631 +networkx/algorithms/cluster.py,sha256=6voNVbLepilh6HQggtn0JB3iiyA0xoVEHNmdOoG_Uq4,18789 +networkx/algorithms/coloring/__init__.py,sha256=P1cmqrAjcaCdObkNZ1e6Hp__ZpxBAhQx0iIipOVW8jg,182 +networkx/algorithms/coloring/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/coloring/__pycache__/equitable_coloring.cpython-38.pyc,, 
+networkx/algorithms/coloring/__pycache__/greedy_coloring.cpython-38.pyc,, +networkx/algorithms/coloring/equitable_coloring.py,sha256=w82VwGjfpvry3j9h9zpgtcuAGJQNG3FtYUbJl4z2Umk,16571 +networkx/algorithms/coloring/greedy_coloring.py,sha256=BIE4ibWUD9mJtytevadGTSfiKGJA6wPi_iHjWm1SjQ0,19573 +networkx/algorithms/coloring/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/algorithms/coloring/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/coloring/tests/__pycache__/test_coloring.cpython-38.pyc,, +networkx/algorithms/coloring/tests/test_coloring.py,sha256=p_84Boj4XK1BAVwTEvQoO3rUjMjzBoEDA9b5X8m5zeY,20631 +networkx/algorithms/communicability_alg.py,sha256=ClwaQ5CH0loQJCXtutMCDwqNStNqo0B4eOQECoXs7UM,4551 +networkx/algorithms/community/__init__.py,sha256=SqTYf-Rsu4-4qOA4s_1pPz-eFx_AAHA01X5qm_LX3TE,1245 +networkx/algorithms/community/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/community/__pycache__/asyn_fluid.cpython-38.pyc,, +networkx/algorithms/community/__pycache__/centrality.cpython-38.pyc,, +networkx/algorithms/community/__pycache__/community_utils.cpython-38.pyc,, +networkx/algorithms/community/__pycache__/kclique.cpython-38.pyc,, +networkx/algorithms/community/__pycache__/kernighan_lin.cpython-38.pyc,, +networkx/algorithms/community/__pycache__/label_propagation.cpython-38.pyc,, +networkx/algorithms/community/__pycache__/louvain.cpython-38.pyc,, +networkx/algorithms/community/__pycache__/lukes.cpython-38.pyc,, +networkx/algorithms/community/__pycache__/modularity_max.cpython-38.pyc,, +networkx/algorithms/community/__pycache__/quality.cpython-38.pyc,, +networkx/algorithms/community/asyn_fluid.py,sha256=j2YyD_crV2o3ufiMsiFvItnAedqO4wW-ptG2LlyQFsc,5829 +networkx/algorithms/community/centrality.py,sha256=8L0Cq1upur2Bt6BgEDxqQR1MdRvYoDclWsrL7rUd0nw,6497 +networkx/algorithms/community/community_utils.py,sha256=_jpb_2iem4BoS8tDFKohvgOEl4Fyv0LcwvYJiHpwi5w,867 
+networkx/algorithms/community/kclique.py,sha256=Yw4kW2Yn2Ru10RAJ_0xtMyhRIhiol_t9m_hjy_4mbHM,2487 +networkx/algorithms/community/kernighan_lin.py,sha256=AsKOcF07J6-ZYy9nkDDxnAblU43MrlwsqiUw_-hMYRw,4264 +networkx/algorithms/community/label_propagation.py,sha256=QbLnbpZLDTVVEkCFHInz4RzJrcXjaYx-yZgcDRQlwPY,7209 +networkx/algorithms/community/louvain.py,sha256=XHqCYWdWfi3sTIgxfjLoNGZ1hzkMapiFLHY0HWj5wnU,13576 +networkx/algorithms/community/lukes.py,sha256=m3uLqjY7LAONgAVknzp-XtpDW72rcjdERzcaD2mpsWU,8048 +networkx/algorithms/community/modularity_max.py,sha256=XMIkXc1vdgzTnl5w_rvBNWoee6WklKLjru_esf9o1RU,19281 +networkx/algorithms/community/quality.py,sha256=oEkmGVeMYpeVwjWLhVdveIOc-rAVxdyj0Szvy0HFnwE,14610 +networkx/algorithms/community/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/algorithms/community/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/community/tests/__pycache__/test_asyn_fluid.cpython-38.pyc,, +networkx/algorithms/community/tests/__pycache__/test_centrality.cpython-38.pyc,, +networkx/algorithms/community/tests/__pycache__/test_kclique.cpython-38.pyc,, +networkx/algorithms/community/tests/__pycache__/test_kernighan_lin.cpython-38.pyc,, +networkx/algorithms/community/tests/__pycache__/test_label_propagation.cpython-38.pyc,, +networkx/algorithms/community/tests/__pycache__/test_louvain.cpython-38.pyc,, +networkx/algorithms/community/tests/__pycache__/test_lukes.cpython-38.pyc,, +networkx/algorithms/community/tests/__pycache__/test_modularity_max.cpython-38.pyc,, +networkx/algorithms/community/tests/__pycache__/test_quality.cpython-38.pyc,, +networkx/algorithms/community/tests/__pycache__/test_utils.cpython-38.pyc,, +networkx/algorithms/community/tests/test_asyn_fluid.py,sha256=YVLh9HVky72n9oywcIuVMGJPjJ97ga5O8E_l3qI8bnU,3046 +networkx/algorithms/community/tests/test_centrality.py,sha256=L0S-Dz8MKefObM9m0gFfZhxcfaQ4iwDNq-kovC9UEQc,2922 
+networkx/algorithms/community/tests/test_kclique.py,sha256=LpHZPexPoLnrMmJXDxYSLr7XbjIRhiEAdZaSXJFa8fI,2407 +networkx/algorithms/community/tests/test_kernighan_lin.py,sha256=s8bK53Y1a87zvlZ1AJE-QJ2vItnbscSOlHQSrMpetGI,2709 +networkx/algorithms/community/tests/test_label_propagation.py,sha256=-SmqnH_9L8pbLW7NkGxg52Qrjm9j98W3zTe0xY8pcoc,5035 +networkx/algorithms/community/tests/test_louvain.py,sha256=rpn6896mZdz3EjBS0awwxch4hsNStSbo3-XKAXK_0Fk,4781 +networkx/algorithms/community/tests/test_lukes.py,sha256=PpFaCFeiUWhcW0k6A0cPjcIH4yF4cFhjivMw3-AoBZw,3951 +networkx/algorithms/community/tests/test_modularity_max.py,sha256=IaWS3VO-QbVTYTF780xpgKP-TUWyDIi8erya4UbbEzg,10373 +networkx/algorithms/community/tests/test_quality.py,sha256=FlL8fW0Gs-w_Y28Bd7tIqWJFImIz0pB7FEubgeB4VdA,5615 +networkx/algorithms/community/tests/test_utils.py,sha256=WLBssBjJR2ihRIVu78022n2O8Qv8xuLlJqz47kWP3SA,670 +networkx/algorithms/components/__init__.py,sha256=Dt74KZWp_cJ_j0lL5hd_S50_hia5DKcC2SjuRnubr6M,173 +networkx/algorithms/components/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/components/__pycache__/attracting.cpython-38.pyc,, +networkx/algorithms/components/__pycache__/biconnected.cpython-38.pyc,, +networkx/algorithms/components/__pycache__/connected.cpython-38.pyc,, +networkx/algorithms/components/__pycache__/semiconnected.cpython-38.pyc,, +networkx/algorithms/components/__pycache__/strongly_connected.cpython-38.pyc,, +networkx/algorithms/components/__pycache__/weakly_connected.cpython-38.pyc,, +networkx/algorithms/components/attracting.py,sha256=3HRqyJ3HrCOFVNZn_sdcgixIvhfp5BOsVxKk0CALrAU,2657 +networkx/algorithms/components/biconnected.py,sha256=MIDPAAFM5vsViksdzNpH-bpZI9ezKVilBB-FOzYFjQA,12501 +networkx/algorithms/components/connected.py,sha256=a1z8iYqMOLyRMoRhxqry0xm0t8uW5IFkFp1ahRPnUms,4147 +networkx/algorithms/components/semiconnected.py,sha256=JzUqo-WMBDw8dow8TAOZ8w2ARpTn_e_crKbJB_5vnWI,1588 
+networkx/algorithms/components/strongly_connected.py,sha256=1VZbq98LKnECUvj46a92Fr8QNfo9ZATAAFyI7KnoCfw,11201 +networkx/algorithms/components/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/algorithms/components/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/components/tests/__pycache__/test_attracting.cpython-38.pyc,, +networkx/algorithms/components/tests/__pycache__/test_biconnected.cpython-38.pyc,, +networkx/algorithms/components/tests/__pycache__/test_connected.cpython-38.pyc,, +networkx/algorithms/components/tests/__pycache__/test_semiconnected.cpython-38.pyc,, +networkx/algorithms/components/tests/__pycache__/test_strongly_connected.cpython-38.pyc,, +networkx/algorithms/components/tests/__pycache__/test_weakly_connected.cpython-38.pyc,, +networkx/algorithms/components/tests/test_attracting.py,sha256=b3N3ZR9E5gLSQWGgaqhcRfRs4KBW6GnnkVYeAjdxC_o,2243 +networkx/algorithms/components/tests/test_biconnected.py,sha256=N-J-dgBgI77ytYUUrXjduLxtDydH7jS-af98fyPBkYc,6036 +networkx/algorithms/components/tests/test_connected.py,sha256=805NWi0g8doZ3WUguSY59ITrPSuE5J-VTj5j7l9xWsc,3675 +networkx/algorithms/components/tests/test_semiconnected.py,sha256=q860lIxZF5M2JmDwwdzy-SGSXnrillOefMx23GcJpw0,1792 +networkx/algorithms/components/tests/test_strongly_connected.py,sha256=r-H5xAbZiK0k-SGstJPy00xzlA0I9ym5spCGhRJjLvA,6554 +networkx/algorithms/components/tests/test_weakly_connected.py,sha256=yi23wxW2Vw6JOMqaWMEuqNRxnleriuAQrZ5JGWE48Jk,2887 +networkx/algorithms/components/weakly_connected.py,sha256=ZsBSLEe4elp2qPipBPWFhAuIdXuvWFL_cOb9IgtF2oM,4074 +networkx/algorithms/connectivity/__init__.py,sha256=VuUXTkagxX-tHjgmeYJ3K4Eq_luK6kSpv1nZwiwGFd8,281 +networkx/algorithms/connectivity/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/connectivity/__pycache__/connectivity.cpython-38.pyc,, +networkx/algorithms/connectivity/__pycache__/cuts.cpython-38.pyc,, 
+networkx/algorithms/connectivity/__pycache__/disjoint_paths.cpython-38.pyc,, +networkx/algorithms/connectivity/__pycache__/edge_augmentation.cpython-38.pyc,, +networkx/algorithms/connectivity/__pycache__/edge_kcomponents.cpython-38.pyc,, +networkx/algorithms/connectivity/__pycache__/kcomponents.cpython-38.pyc,, +networkx/algorithms/connectivity/__pycache__/kcutsets.cpython-38.pyc,, +networkx/algorithms/connectivity/__pycache__/stoerwagner.cpython-38.pyc,, +networkx/algorithms/connectivity/__pycache__/utils.cpython-38.pyc,, +networkx/algorithms/connectivity/connectivity.py,sha256=6FnncJG1-syvQk5_ToG2swxEqD04dVM6MX2O9hDMbEs,29734 +networkx/algorithms/connectivity/cuts.py,sha256=qp1kb9DGg3EDj-i-Z4hJCozgdNBMcZyz9nzkNOOovD8,22640 +networkx/algorithms/connectivity/disjoint_paths.py,sha256=H39ge1ZmxGK4TEYxES1_fLLoqUvgUANf_rgS3PchLIE,14425 +networkx/algorithms/connectivity/edge_augmentation.py,sha256=XP1zZb0Lp2-uySoLHA3iZ5Pqr-qPwOawY3qetrLOm9Q,43788 +networkx/algorithms/connectivity/edge_kcomponents.py,sha256=TM0EnginLyOgYifqcCVWrB-SNB5kg2s1cSO9mc85hlo,20687 +networkx/algorithms/connectivity/kcomponents.py,sha256=nLMMqfL5VwDcmQm_Vd1faoaAOU5JcgaLjPW9dj1nDLQ,8222 +networkx/algorithms/connectivity/kcutsets.py,sha256=454eDEjD6l1rI6pMR711LtGxE-GNpUM46YpqXRxqepU,9330 +networkx/algorithms/connectivity/stoerwagner.py,sha256=uP4AWBMqBZDfSzzoj2Wa989ykO6pNMPVdMDE6aeK7Ww,5340 +networkx/algorithms/connectivity/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/algorithms/connectivity/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/connectivity/tests/__pycache__/test_connectivity.cpython-38.pyc,, +networkx/algorithms/connectivity/tests/__pycache__/test_cuts.cpython-38.pyc,, +networkx/algorithms/connectivity/tests/__pycache__/test_disjoint_paths.cpython-38.pyc,, +networkx/algorithms/connectivity/tests/__pycache__/test_edge_augmentation.cpython-38.pyc,, 
+networkx/algorithms/connectivity/tests/__pycache__/test_edge_kcomponents.cpython-38.pyc,, +networkx/algorithms/connectivity/tests/__pycache__/test_kcomponents.cpython-38.pyc,, +networkx/algorithms/connectivity/tests/__pycache__/test_kcutsets.cpython-38.pyc,, +networkx/algorithms/connectivity/tests/__pycache__/test_stoer_wagner.cpython-38.pyc,, +networkx/algorithms/connectivity/tests/test_connectivity.py,sha256=PLOwCLA2ZyGsmCLeGqM9r8lEIIfYvJ5T-ZXxIafGDYo,15029 +networkx/algorithms/connectivity/tests/test_cuts.py,sha256=aYAluKaswU-HHx6iZnWx-MI-kwNJ7uXXA39Jx79IWiA,10358 +networkx/algorithms/connectivity/tests/test_disjoint_paths.py,sha256=0IqgdjbNpE3ziWREV8vOdjWHF7cx57BKrenqf8CfR3Y,8399 +networkx/algorithms/connectivity/tests/test_edge_augmentation.py,sha256=-26SQv4AW5oJoRc6RymCQDmeVHcTmc5KwcmetDLEXWQ,15522 +networkx/algorithms/connectivity/tests/test_edge_kcomponents.py,sha256=CZ26Dy91WOUqhw1X73mqLGX-WHWzBBIeBCgrp6KK4Zo,16453 +networkx/algorithms/connectivity/tests/test_kcomponents.py,sha256=ohoSX8GACeszRZdzTiNuWXSFitfU9DzP0hqllS2gvMU,8554 +networkx/algorithms/connectivity/tests/test_kcutsets.py,sha256=qzrxmW3-KEnkNvkP-nWtSQj40_pfzUFadDvv2bjGDT4,8488 +networkx/algorithms/connectivity/tests/test_stoer_wagner.py,sha256=A291C30_t2CI1erPCqN1W0DoAj3zqNA8fThPIj4Rku0,3011 +networkx/algorithms/connectivity/utils.py,sha256=mx7_WRUDm-VWVt5PgbnSMH9MbX0pW-adFrS-UNY7U3s,3144 +networkx/algorithms/core.py,sha256=QsAvXtz0YtF9d10mh_4hKzJ6Hb0uWf3Aq1Sa8Xh37XY,15869 +networkx/algorithms/covering.py,sha256=my5V4PHT3zHLK0QvanEqQn65jJ5_nNVy6ul_0wHRT18,5262 +networkx/algorithms/cuts.py,sha256=HCrDvHqAAMcXqPXaW4qh5TKFM4wVbGnCVIOv4EDL6rs,9722 +networkx/algorithms/cycles.py,sha256=HKG2fERA6_6ZrMQvBvoNcOkYyKn6WE6JB-RhFKOFlmw,21677 +networkx/algorithms/d_separation.py,sha256=RhTA7HwR_NUOVvi6BOBuH5n9L6Fv7Z52DMcPmSn0GSc,4165 +networkx/algorithms/dag.py,sha256=nPMzzIKxPnaI8vo86Q3NmiSRIcMkyz8VDC-Ecu14hVE,34557 
+networkx/algorithms/distance_measures.py,sha256=MXpBzHD_xoioe19B13h9ftzV40_Yf-jD7B-0phpQBEI,22803 +networkx/algorithms/distance_regular.py,sha256=DARGJQPSdf_xxoInuEBBKN4kun0Izu-LGX8c1-hiuvk,6872 +networkx/algorithms/dominance.py,sha256=lK7FRWAr6TiENtmuTniW1q7KesopvW6kJLKqNR5-SeI,3394 +networkx/algorithms/dominating.py,sha256=lvyePsQ08fGU9OdZZ5oi_d7P_lg7HR5xn8CNdpWxNP8,2647 +networkx/algorithms/efficiency_measures.py,sha256=y21IgKH5o-IZhdka0_4J0naw1XIXkxa3ymEBey75KEw,4277 +networkx/algorithms/euler.py,sha256=HMAahIPnIXzFW6tMKnWgW0jnYQ5hzR2CWByfK612pzg,13600 +networkx/algorithms/flow/__init__.py,sha256=rVtMUy6dViPLewjDRntmn15QF0bQwiDdQbZZx9j7Drc,341 +networkx/algorithms/flow/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/flow/__pycache__/boykovkolmogorov.cpython-38.pyc,, +networkx/algorithms/flow/__pycache__/capacityscaling.cpython-38.pyc,, +networkx/algorithms/flow/__pycache__/dinitz_alg.cpython-38.pyc,, +networkx/algorithms/flow/__pycache__/edmondskarp.cpython-38.pyc,, +networkx/algorithms/flow/__pycache__/gomory_hu.cpython-38.pyc,, +networkx/algorithms/flow/__pycache__/maxflow.cpython-38.pyc,, +networkx/algorithms/flow/__pycache__/mincost.cpython-38.pyc,, +networkx/algorithms/flow/__pycache__/networksimplex.cpython-38.pyc,, +networkx/algorithms/flow/__pycache__/preflowpush.cpython-38.pyc,, +networkx/algorithms/flow/__pycache__/shortestaugmentingpath.cpython-38.pyc,, +networkx/algorithms/flow/__pycache__/utils.cpython-38.pyc,, +networkx/algorithms/flow/boykovkolmogorov.py,sha256=KkcNNdbeipyuWMJ-UCJP326asD4uVtGsDKiTGYZrLos,13238 +networkx/algorithms/flow/capacityscaling.py,sha256=KeoOIrdZ_Ak_3b3KoEOF-o0LMGGdiceJE8FfamddRxE,14373 +networkx/algorithms/flow/dinitz_alg.py,sha256=u9_fgyD_B8MTHG6ZoExR5HR6G1HNR46uVRWp7c82JRE,7108 +networkx/algorithms/flow/edmondskarp.py,sha256=yidyYmtZNSPTarE1KuU1L53srNPq_VGXU_lIaGaeHcE,7956 +networkx/algorithms/flow/gomory_hu.py,sha256=YZajBDibtu1IX0_P47Bye_wXM9y1vGwiSfzeOO-rqB4,6267 
+networkx/algorithms/flow/maxflow.py,sha256=BNqpMcMkqfG-EpnIiGpEmXXgA5GKfdoTM-GgrQTAnHI,22715 +networkx/algorithms/flow/mincost.py,sha256=1xn5I2z66BIBGTwNs_LspRICBTFqf-zdRD_3cqXy788,11968 +networkx/algorithms/flow/networksimplex.py,sha256=M-fkIp_OTaMmXKEwd8WPiYdA1Nhmjj1raZhlgGgC_l8,25089 +networkx/algorithms/flow/preflowpush.py,sha256=x1EUeTLeaTaPK0s1f6CIVxhQ7UBHvLUB37S8wc7WKis,15621 +networkx/algorithms/flow/shortestaugmentingpath.py,sha256=fOTpKT_wW3CHMZbAFkDiCsiYZ2OXlfTR15nxZ9I2QnA,10272 +networkx/algorithms/flow/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/algorithms/flow/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/flow/tests/__pycache__/test_gomory_hu.cpython-38.pyc,, +networkx/algorithms/flow/tests/__pycache__/test_maxflow.cpython-38.pyc,, +networkx/algorithms/flow/tests/__pycache__/test_maxflow_large_graph.cpython-38.pyc,, +networkx/algorithms/flow/tests/__pycache__/test_mincost.cpython-38.pyc,, +networkx/algorithms/flow/tests/__pycache__/test_networksimplex.cpython-38.pyc,, +networkx/algorithms/flow/tests/gl1.gpickle.bz2,sha256=z4-BzrXqruFiGqYLiS2D5ZamFz9vZRc1m2ef89qhsPg,44623 +networkx/algorithms/flow/tests/gw1.gpickle.bz2,sha256=b3nw6Q-kxR7HkWXxWWPh7YlHdXbga8qmeuYiwmBBGTE,42248 +networkx/algorithms/flow/tests/netgen-2.gpickle.bz2,sha256=OxfmbN7ajtuNHexyYmx38fZd1GdeP3bcL8T9hKoDjjA,18972 +networkx/algorithms/flow/tests/test_gomory_hu.py,sha256=aWtbI3AHofIK6LDJnmj9UH1QOfulXsi5NyB7bNyV2Vw,4471 +networkx/algorithms/flow/tests/test_maxflow.py,sha256=NJK_V40GHZtsM8DR-sLpMeNFHp46K8KcMj-pSYI8zdQ,18425 +networkx/algorithms/flow/tests/test_maxflow_large_graph.py,sha256=w7kDHC3qJeD1vO538jMTlVJRx5_2Fe8HfINeY71_7TU,4504 +networkx/algorithms/flow/tests/test_mincost.py,sha256=fSF6LI098og2G7qA_dlNFirSr6jOoKDeg3Ju0pUnrck,17665 +networkx/algorithms/flow/tests/test_networksimplex.py,sha256=E2iJrte1jS1-ODBQIBx7EbDL-uh4WXB4nv0LME52cu8,11975 
+networkx/algorithms/flow/tests/wlm3.gpickle.bz2,sha256=zKy6Hg-_swvsNh8OSOyIyZnTR0_Npd35O9RErOF8-g4,88132 +networkx/algorithms/flow/utils.py,sha256=bugJtMMGob7Yz48xg8POSR_hG-yQd8k-SISiAOI7oWw,5743 +networkx/algorithms/graph_hashing.py,sha256=rQN5evPVZBCnuJHjJ7Fvf0znSuS6PZHX0wTsrnvp4gI,11392 +networkx/algorithms/graphical.py,sha256=r4a7Sk8GSDYvZlKs0I8pOvpCWAZspCAFvNM9O2WtrOk,13449 +networkx/algorithms/hierarchy.py,sha256=afmel-XstPOVjn5B3Pc4G2ejclgStf4Bk8zdWOSgWHw,1502 +networkx/algorithms/hybrid.py,sha256=U6hyTathxCYfenAnDBiqQm46r1zPDbTUdRI--yzg6TA,6152 +networkx/algorithms/isolate.py,sha256=oZU25X5zEKCnwnVMS7BFXnTckS7jwXkyTuI5T6TooME,2261 +networkx/algorithms/isomorphism/__init__.py,sha256=pODZ0ELhrxvY-qfcxL7FQcPPBfc6C9o39BcbFMs5O8E,354 +networkx/algorithms/isomorphism/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/isomorphism/__pycache__/ismags.cpython-38.pyc,, +networkx/algorithms/isomorphism/__pycache__/isomorph.cpython-38.pyc,, +networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-38.pyc,, +networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-38.pyc,, +networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-38.pyc,, +networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-38.pyc,, +networkx/algorithms/isomorphism/__pycache__/vf2userfunc.cpython-38.pyc,, +networkx/algorithms/isomorphism/ismags.py,sha256=ZmDQpO63by6ElTKvVk4-boy45JdP3UAYcHTSHja_r14,43599 +networkx/algorithms/isomorphism/isomorph.py,sha256=qHL_PU4ewMn7bt0d1qIaE3TEc6ZZCura8STinQ_h2ks,6382 +networkx/algorithms/isomorphism/isomorphvf2.py,sha256=PuPRVjHHtYJD4jeqOLsx-cvyz1tfjiL32_3ufWc1ff8,40556 +networkx/algorithms/isomorphism/matchhelpers.py,sha256=VN4eQjwhjOCHZsIKMziH0yr5yQlceOg2lnCnn8Gl2E0,10936 +networkx/algorithms/isomorphism/temporalisomorphvf2.py,sha256=N6yS-OSO_bqprlQgyNEGu27log0nG4RFqmQlAZny6zg,10949 +networkx/algorithms/isomorphism/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+networkx/algorithms/isomorphism/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/isomorphism/tests/__pycache__/test_ismags.cpython-38.pyc,, +networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphism.cpython-38.pyc,, +networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphvf2.cpython-38.pyc,, +networkx/algorithms/isomorphism/tests/__pycache__/test_match_helpers.cpython-38.pyc,, +networkx/algorithms/isomorphism/tests/__pycache__/test_temporalisomorphvf2.cpython-38.pyc,, +networkx/algorithms/isomorphism/tests/__pycache__/test_tree_isomorphism.cpython-38.pyc,, +networkx/algorithms/isomorphism/tests/__pycache__/test_vf2userfunc.cpython-38.pyc,, +networkx/algorithms/isomorphism/tests/iso_r01_s80.A99,sha256=hKzMtYLUR8Oqp9pmJR6RwG7qo31aNPZcnXy4KHDGhqU,1442 +networkx/algorithms/isomorphism/tests/iso_r01_s80.B99,sha256=AHx_W2xG4JEcz1xKoN5TwCHVE6-UO2PiMByynkd4TPE,1442 +networkx/algorithms/isomorphism/tests/si2_b06_m200.A99,sha256=NVnPFA52amNl3qM55G1V9eL9ZlP9NwugBlPf-zekTFU,310 +networkx/algorithms/isomorphism/tests/si2_b06_m200.B99,sha256=-clIDp05LFNRHA2BghhGTeyuXDqBBqA9XpEzpB7Ku7M,1602 +networkx/algorithms/isomorphism/tests/test_ismags.py,sha256=NBuHegns9BFxZCrelBg1ZbJ1c21ZPH49doBnHsJxLvM,10616 +networkx/algorithms/isomorphism/tests/test_isomorphism.py,sha256=1GZmmqNWk605Qq9h55V_5SfEKPM50Ceq6DSICdh6ufs,1663 +networkx/algorithms/isomorphism/tests/test_isomorphvf2.py,sha256=lCC5KSNMfyEMSA4q1Xa41k2BTfmr2u2n6ImsDcupiC8,11480 +networkx/algorithms/isomorphism/tests/test_match_helpers.py,sha256=ocp3pd_JSCLHAp_mtsipu1XqehOgQCMuuhWEeRG5U7g,2456 +networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py,sha256=DZy2zAt74jiTAM-jGK5H9aGRn1ZsMgQl9K5UNsu178Y,7346 +networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py,sha256=NOrZQCy9nJFMeG-2oJGgsSAiq8sqItXhVqsOeH4bRh0,7154 +networkx/algorithms/isomorphism/tests/test_vf2userfunc.py,sha256=qOKeCm46kqdLO02H3wxMm2wEHzFQBdUFDdh_kC0KHwM,6630 
+networkx/algorithms/isomorphism/tree_isomorphism.py,sha256=MNKPIHdA1q05AmYv9z_rLWAeW6-_msWiXOLO-UJcbw4,9258 +networkx/algorithms/isomorphism/vf2userfunc.py,sha256=jdvaGLziSM2XURVfxbRjMDX84i_8ewpY7di-u67cXBI,7496 +networkx/algorithms/link_analysis/__init__.py,sha256=UkcgTDdzsIu-jsJ4jBwP8sF2CsRPC1YcZZT-q5Wlj3I,118 +networkx/algorithms/link_analysis/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/link_analysis/__pycache__/hits_alg.cpython-38.pyc,, +networkx/algorithms/link_analysis/__pycache__/pagerank_alg.cpython-38.pyc,, +networkx/algorithms/link_analysis/hits_alg.py,sha256=3xuTVFDQN2VYz8qGVG2TmJ6cQreZm8GY3ZvjyDkAXus,11889 +networkx/algorithms/link_analysis/pagerank_alg.py,sha256=ItC0SgX5TR6b8uQP3eOl4fjZVjArLaiaV_F3eroo0VU,17691 +networkx/algorithms/link_analysis/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/algorithms/link_analysis/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/link_analysis/tests/__pycache__/test_hits.cpython-38.pyc,, +networkx/algorithms/link_analysis/tests/__pycache__/test_pagerank.cpython-38.pyc,, +networkx/algorithms/link_analysis/tests/test_hits.py,sha256=qpjWeMHkNsepx_Al08wrHZpXYb5NRYgvliRDGEH2TWg,2953 +networkx/algorithms/link_analysis/tests/test_pagerank.py,sha256=QmYxqxmQaisUU3Dht9XY0NkHL4xmbuu7l83Gt4mS8LM,7562 +networkx/algorithms/link_prediction.py,sha256=QOei1_-_sL8FqTLyB-fEcwgUnbhQ_RdwiqpYzJBUZM8,19792 +networkx/algorithms/lowest_common_ancestors.py,sha256=Cez4vbJK2vws3LwTPiFADtkwG_eGIXSU9qTWQbe-IK0,14013 +networkx/algorithms/matching.py,sha256=fope8XNIJMJtmO7SdxNfTsmE0rdQeq0ovWjpNpx2DCk,42801 +networkx/algorithms/minors/__init__.py,sha256=ceeKdsZ6U1H40ED-KmtVGkbADxeWMTVG07Ja8P7N_Pg,587 +networkx/algorithms/minors/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/minors/__pycache__/contraction.cpython-38.pyc,, +networkx/algorithms/minors/contraction.py,sha256=t-RNVWgXjhqbNYUebJC7mZLpUZJAfFU4ju_WJrZ7Y-M,21746 
+networkx/algorithms/minors/tests/__pycache__/test_contraction.cpython-38.pyc,, +networkx/algorithms/minors/tests/test_contraction.py,sha256=EjNPMSAR_agCn6jclpx8ojhDg4G9FPVmQfpvgyiprMA,15918 +networkx/algorithms/mis.py,sha256=oUWPZAew3zVsW9DKcblo7mOt2dUUfdrsucSlz56jdok,2325 +networkx/algorithms/moral.py,sha256=tVX5HD9h8qmygjMSufsVrcYiUkpLMUdhDE_mWLw5WYo,1475 +networkx/algorithms/node_classification/__init__.py,sha256=ZCwCp9_7nCD1d_7pwzXU3F9vT13uKhY7wtmRzrZKYQw,1748 +networkx/algorithms/node_classification/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/node_classification/__pycache__/hmn.cpython-38.pyc,, +networkx/algorithms/node_classification/__pycache__/lgc.cpython-38.pyc,, +networkx/algorithms/node_classification/__pycache__/utils.cpython-38.pyc,, +networkx/algorithms/node_classification/hmn.py,sha256=GFlPFKPQtfb1_SzXtVVnK2Gbepa2vPQdsGG_8j897kU,2603 +networkx/algorithms/node_classification/lgc.py,sha256=QDv7oMECQCz9G7tjwXUHcwL6WxiVpzxRe4IYcYrfP24,2722 +networkx/algorithms/node_classification/utils.py,sha256=oFTTByxlC-VjnwazPBpdwyIzEWEylEtFiDKkTc7NqSE,1023 +networkx/algorithms/non_randomness.py,sha256=-hL-zHvyjj0Bpr8E2xj0rwbr7CPYUbXdicggpenIGrg,2858 +networkx/algorithms/operators/__init__.py,sha256=dJ3xOXvHxSzzM3-YcfvjGTJ_ndxULF1TybkIRzUS87Y,201 +networkx/algorithms/operators/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/operators/__pycache__/all.cpython-38.pyc,, +networkx/algorithms/operators/__pycache__/binary.cpython-38.pyc,, +networkx/algorithms/operators/__pycache__/product.cpython-38.pyc,, +networkx/algorithms/operators/__pycache__/unary.cpython-38.pyc,, +networkx/algorithms/operators/all.py,sha256=kzyYsQHxPZ1JZO91u58kriq_vqNejd8Eaeu4rRsmoEk,6741 +networkx/algorithms/operators/binary.py,sha256=DT_vht6aj62yAlXrK4V9sgmAkMUgitvL397lnHwBM3g,11979 +networkx/algorithms/operators/product.py,sha256=ny5SSemJ5I6Gt7eIl3aDxStsIjq4TPWWx896mR06wpk,13857 
+networkx/algorithms/operators/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/algorithms/operators/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/operators/tests/__pycache__/test_all.cpython-38.pyc,, +networkx/algorithms/operators/tests/__pycache__/test_binary.cpython-38.pyc,, +networkx/algorithms/operators/tests/__pycache__/test_product.cpython-38.pyc,, +networkx/algorithms/operators/tests/__pycache__/test_unary.cpython-38.pyc,, +networkx/algorithms/operators/tests/test_all.py,sha256=_WolA1dki0JqeDjLSXMbdGnUkxrP7WJKKIi30OEHQYA,7371 +networkx/algorithms/operators/tests/test_binary.py,sha256=_v7Tu9jzkjI5UYMp3SPKKswXIRqvrBZmwOJKh__b7jE,12033 +networkx/algorithms/operators/tests/test_product.py,sha256=EYTajjizRtZUjQk8o3vjPGoyfs4cDyevLyblCO6RsJI,13188 +networkx/algorithms/operators/tests/test_unary.py,sha256=UZdzbt5GI9hnflEizUWXihGqBWmSFJDkzjwVv6wziQE,1415 +networkx/algorithms/operators/unary.py,sha256=D647RNItgvd05brXLA7VQxEQ1uTsg1PMAzSZshFxkfw,1717 +networkx/algorithms/planar_drawing.py,sha256=vSlP1AToiIzaZPYmNJfTOLouJHndjZWzK2O7bbpNOo0,16320 +networkx/algorithms/planarity.py,sha256=nOX-8mUnFRJrZc9DtsRsMIMZ_anJoftjZU91sXxqlgs,39410 +networkx/algorithms/polynomials.py,sha256=tlbLcpQaYp1XVA7uQPasBHe15AuScZpVXPgzTX75xKI,10826 +networkx/algorithms/reciprocity.py,sha256=1iv5v1GkWu6cAorGxpiBmS5jzh_BkVi8XZaXtuLr7ms,2796 +networkx/algorithms/regular.py,sha256=oI3fx0ZoHceX_WYe1J-7IxhLtLVahkNZIhqv1HVXNJ8,6205 +networkx/algorithms/richclub.py,sha256=6twv21qc43m2N5j9zPwLOWZh7BeDwCN8XTdtx_Ph5KY,4152 +networkx/algorithms/shortest_paths/__init__.py,sha256=Rmxtsje-mPdQyeYhE8TP2NId-iZEOu4eAsWhVRm2Xqk,285 +networkx/algorithms/shortest_paths/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/shortest_paths/__pycache__/astar.cpython-38.pyc,, +networkx/algorithms/shortest_paths/__pycache__/dense.cpython-38.pyc,, +networkx/algorithms/shortest_paths/__pycache__/generic.cpython-38.pyc,, 
+networkx/algorithms/shortest_paths/__pycache__/unweighted.cpython-38.pyc,, +networkx/algorithms/shortest_paths/__pycache__/weighted.cpython-38.pyc,, +networkx/algorithms/shortest_paths/astar.py,sha256=Yk0U976Uq4HNsaaC7M7pw1zRyzw8NpptX12WMBYVHrE,6990 +networkx/algorithms/shortest_paths/dense.py,sha256=dxDu2puBcMzLlvxemBK-KSr3LUbyGzErTmXdj1Po-ok,7300 +networkx/algorithms/shortest_paths/generic.py,sha256=5TIjfQnbWdbhD5bS_oZJ9WwrEHFKHTX2K4-KrK9wwFg,20139 +networkx/algorithms/shortest_paths/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/algorithms/shortest_paths/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/shortest_paths/tests/__pycache__/test_astar.cpython-38.pyc,, +networkx/algorithms/shortest_paths/tests/__pycache__/test_dense.cpython-38.pyc,, +networkx/algorithms/shortest_paths/tests/__pycache__/test_dense_numpy.cpython-38.pyc,, +networkx/algorithms/shortest_paths/tests/__pycache__/test_generic.cpython-38.pyc,, +networkx/algorithms/shortest_paths/tests/__pycache__/test_unweighted.cpython-38.pyc,, +networkx/algorithms/shortest_paths/tests/__pycache__/test_weighted.cpython-38.pyc,, +networkx/algorithms/shortest_paths/tests/test_astar.py,sha256=CWFKGGoqhDMBA6pQP-osmHzmfuf1PxYKp1otgR0UV94,5507 +networkx/algorithms/shortest_paths/tests/test_dense.py,sha256=ievl4gu3Exl_31hp4OKcsAGPb3g3_xFUM4t3NnvrG_A,6747 +networkx/algorithms/shortest_paths/tests/test_dense_numpy.py,sha256=BNwXCe2wgNPE8o35-shPsFj8l19c_QG6Ye8tkIGphf8,2300 +networkx/algorithms/shortest_paths/tests/test_generic.py,sha256=5xmZuWeBaIKlzxp4Y1C_ADhNkJfCwUQSczFB-ezdKu8,15463 +networkx/algorithms/shortest_paths/tests/test_unweighted.py,sha256=H2j_MaafTzx2U-biPiViuFPOdk0H50s80HlGQaXBaAA,4601 +networkx/algorithms/shortest_paths/tests/test_weighted.py,sha256=D4MHcUhgYueBAAUccAmESnlcLAHBsd75kuUCiQ0vfg0,33263 +networkx/algorithms/shortest_paths/unweighted.py,sha256=z7Fs94f8R-jeBLH2TAZOQBKEMqQGt_czeI07_1WUUNM,14220 
+networkx/algorithms/shortest_paths/weighted.py,sha256=7KA42oSuVrdPY3k9z5yEAMu2xriPv_EobxxlkEYtvO4,80039 +networkx/algorithms/similarity.py,sha256=UpxBU63wyQeMqcWIxTUP6jh3F4DAty9aizoEBZ8pAkE,59768 +networkx/algorithms/simple_paths.py,sha256=5rOnA5bQ_nJCoBiX9eiJQC1OHY5moe9thp9YaM0Hp8w,29763 +networkx/algorithms/smallworld.py,sha256=re54UBdlwcXH2pHqmp3AoBf-lHGeSNe5AM1t7rliObw,13085 +networkx/algorithms/smetric.py,sha256=SXz5CHN9f2fQ_k4DJAW_iGgMgUs3pKHsvLx2Yk5wGfY,1177 +networkx/algorithms/sparsifiers.py,sha256=QETW6i73_YyvmM_bCm-53A3ln_YV926Cq2DzKY526X8,10038 +networkx/algorithms/structuralholes.py,sha256=FGO23J0-m8Dgkt8RgRWLy_v1wvO8wQ0mv96R2pxf1xI,9146 +networkx/algorithms/summarization.py,sha256=HzWpV8v0nc_z9njfvjGTUTZCk177IOqE6BltbN-Z79A,22926 +networkx/algorithms/swap.py,sha256=259xQ_uOB9e9KKqlJ4wprlARSfaThJM2ax1nbPEJ2os,9819 +networkx/algorithms/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/algorithms/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_asteroidal.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_boundary.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_bridges.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_chains.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_chordal.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_clique.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_cluster.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_communicability.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_core.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_covering.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_cuts.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_cycles.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_d_separation.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_dag.cpython-38.pyc,, 
+networkx/algorithms/tests/__pycache__/test_distance_measures.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_distance_regular.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_dominance.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_dominating.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_efficiency.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_euler.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_graph_hashing.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_graphical.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_hierarchy.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_hybrid.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_isolate.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_link_prediction.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_lowest_common_ancestors.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_matching.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_max_weight_clique.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_mis.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_moral.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_node_classification.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_node_classification_deprecations.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_non_randomness.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_planar_drawing.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_planarity.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_polynomials.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_reciprocity.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_regular.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_richclub.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_similarity.cpython-38.pyc,, 
+networkx/algorithms/tests/__pycache__/test_simple_paths.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_smallworld.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_smetric.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_sparsifiers.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_structuralholes.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_summarization.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_swap.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_threshold.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_tournament.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_triads.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_vitality.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_voronoi.cpython-38.pyc,, +networkx/algorithms/tests/__pycache__/test_wiener.cpython-38.pyc,, +networkx/algorithms/tests/test_asteroidal.py,sha256=uMY1UbtYj5pHEwZbCBT5Ep_jVv1uDa7rSXLWVKUkcWk,503 +networkx/algorithms/tests/test_boundary.py,sha256=ebnJRPyYZOjKE5x0PfXXbiEWA9w4mZtL1j19Lh46WtI,6227 +networkx/algorithms/tests/test_bridges.py,sha256=1STJG5IFSe1rnr64BKrnE7Gm__oCAHxOSHBwl-z97rY,2513 +networkx/algorithms/tests/test_chains.py,sha256=akKRVr3itHTDEll3t1QGhjElAWLWgqI6yiX8vQYJ5f0,4086 +networkx/algorithms/tests/test_chordal.py,sha256=tY-lw79emVZ27jb-zNpufDWiObdeZODWm5YbvI-meeo,4458 +networkx/algorithms/tests/test_clique.py,sha256=BT9VStBBVLLT5-mD2HB0E_HiwzcbmoBVISQRhs7VCpc,10519 +networkx/algorithms/tests/test_cluster.py,sha256=AltwLWAblpSLa-24KvNuxYxM2IeVl5p2d-kozA9QJ-0,15595 +networkx/algorithms/tests/test_communicability.py,sha256=dUKeV-abTQqHfNZY4lelu7aBR1fVgNrMCpKrE5V1P9Y,2939 +networkx/algorithms/tests/test_core.py,sha256=l29lU66JqKCmzdrt5ZnuHJ0Sy0Vu_pYGZRiPClMOm1k,6581 +networkx/algorithms/tests/test_covering.py,sha256=FZw0-_mOwDS7pMFH4tKOZ1YJ7te5i7DTS8Ji62flqO8,2417 
+networkx/algorithms/tests/test_cuts.py,sha256=2Ir5xyIG4cTC4Dgg1cceLXaEFiOCJ60ZTDDn33vz0Ns,5377 +networkx/algorithms/tests/test_cycles.py,sha256=NjHzk9QxldNf7NV25F_U4rT-U1B4rdUCrqVnRa5I1Ng,11758 +networkx/algorithms/tests/test_d_separation.py,sha256=wFRwZdvSAi4b9FDfWEIE93Lhc73kv44eyQq4rwrvevg,4312 +networkx/algorithms/tests/test_dag.py,sha256=RGAtEBz_HxpsD3hjXZY9Sh71C3Un1j_j4cb4V7_6Dho,25805 +networkx/algorithms/tests/test_distance_measures.py,sha256=2Z3tTpwpOehI4h9diSHmwPGO7oyG8zxswJkTaxPFAnE,8478 +networkx/algorithms/tests/test_distance_regular.py,sha256=pPZ2CPKo4QLjhxlcJhBQZif6-_2qwfh1kpbrN_mu5tg,2312 +networkx/algorithms/tests/test_dominance.py,sha256=ZeLzdelMFDPBdKnFykUAG565gs5ySUbEhdBMT3HX3hQ,9388 +networkx/algorithms/tests/test_dominating.py,sha256=hyta7ln6BbHaGlpEUla6jVzh2PRuSjvujLSGXrmwZbc,1228 +networkx/algorithms/tests/test_efficiency.py,sha256=QKWMvyjCG1Byt-oNp7Rz_qxnVeT77Zk27lrzI1qH0mA,1894 +networkx/algorithms/tests/test_euler.py,sha256=Q0GeWkKTyqEaLAJLM7kuyVrcYF_n73MiM13x8o5iJ0Y,10038 +networkx/algorithms/tests/test_graph_hashing.py,sha256=duR9DQLUpRuy9bv0ZKQPt9gy9WxiX_K0-BVMlnF-WHY,23517 +networkx/algorithms/tests/test_graphical.py,sha256=iwaAV-LLxzxdrQFHD7zYGzRdwhKiWIzHSlMypu0BF9w,5370 +networkx/algorithms/tests/test_hierarchy.py,sha256=g3-0pNfzRo-RDW1BsiLXxyi2LwWIJukXx2i4JCpN2fg,941 +networkx/algorithms/tests/test_hybrid.py,sha256=kQLzaMoqZcKFaJ3D7PKbY2O-FX59XDZ1pN5un8My-tk,720 +networkx/algorithms/tests/test_isolate.py,sha256=LyR0YYHJDH5vppQzGzGiJK-aaIV17_Jmla8dMf93olg,555 +networkx/algorithms/tests/test_link_prediction.py,sha256=7c322xESYdH5WEA0TsMw4Jcc_-lqfIsj-SjXP6Y0TVc,19442 +networkx/algorithms/tests/test_lowest_common_ancestors.py,sha256=BeuSAX3wqel2mn40cu1FehcvecRGeuCw1plWQFY-oZA,10922 +networkx/algorithms/tests/test_matching.py,sha256=MwQxn_YPJ_TzKabKjLUZfORbtDaXPh8jQtP35RkO6Eo,18836 +networkx/algorithms/tests/test_max_weight_clique.py,sha256=iYLkDGzYAmZ06IcT-0Rtay7UyjJ0A2y7ilXUTjDFg44,6742 
+networkx/algorithms/tests/test_mis.py,sha256=F8cf09mvzG3A_omw6wWR1-j9i8WUmpGI9BwGdi2bHes,1875 +networkx/algorithms/tests/test_moral.py,sha256=15PZgkx7O9aXQB1npQ2JNqBBkEqPPP2RfeZzKqY-GNU,452 +networkx/algorithms/tests/test_node_classification.py,sha256=ZGa_uSd6tRqDS775mrPaRLbIr5CDQEMS8qw1frO_pEU,4669 +networkx/algorithms/tests/test_node_classification_deprecations.py,sha256=5UcqqCFIZUiFZ1NsDhTQnMcecbQxFEe6qlzgNGDmPTQ,1286 +networkx/algorithms/tests/test_non_randomness.py,sha256=-8s-fJLYRxVNp7QpaMe5Dxrxi0kvewY78d4ja-nXNBk,782 +networkx/algorithms/tests/test_planar_drawing.py,sha256=FrpNWiGxNzBokpSZHfa8q55UyRn0v7gzwgUmNcMvT7I,8775 +networkx/algorithms/tests/test_planarity.py,sha256=Ts63BD2k38lWA8tDv95YkMFfskHHdZizmroeh5WYEgk,13169 +networkx/algorithms/tests/test_polynomials.py,sha256=baI0Kua1pRngRC6Scm5gRRwi1bl0iET5_Xxo3AZTP3A,1983 +networkx/algorithms/tests/test_reciprocity.py,sha256=MkdZ2w_7i0UPK6PdnStULwmzAt7RAe9xS0_BWxiK05s,1297 +networkx/algorithms/tests/test_regular.py,sha256=zGf7Mmh7XPtwunOoeTfgiICnfsVeCEbMop3NrDgIfqY,2457 +networkx/algorithms/tests/test_richclub.py,sha256=lugmYnRVZz37WWFVaZuVnfuOIZTCddIk1KxNoeNnPYY,2258 +networkx/algorithms/tests/test_similarity.py,sha256=Lrg6NjvE369HDxna5LwsvCPFjYpOx97Q0RjMQ-Iyskc,32195 +networkx/algorithms/tests/test_simple_paths.py,sha256=YT1PSijWs22dxoUI_lfzYOj1ukr--kNEtie-TGZXKF8,24008 +networkx/algorithms/tests/test_smallworld.py,sha256=ReQRXdtXRCRc9YTOATYz_CyOEL5YYiU73GZme_gpjrU,2153 +networkx/algorithms/tests/test_smetric.py,sha256=x2LR9IyimDRC29a0uBnPeBCxptSK90NLN6GQYAH9nRc,426 +networkx/algorithms/tests/test_sparsifiers.py,sha256=A12V4ljWxvXaSFJ73mHSFK2YNO-k8ax6Me4yEWTsI4s,4043 +networkx/algorithms/tests/test_structuralholes.py,sha256=p2PogSKedBHR1bT6x-tuj8aqV3L3tr842v6Z8QozYRI,5228 +networkx/algorithms/tests/test_summarization.py,sha256=msFYq5KWCMT4sK6qXhn_ZItJwlvaANqEGP_bhOl0atY,21393 +networkx/algorithms/tests/test_swap.py,sha256=SIQRGQv9E8phEzrxQKkx0GP-tyiwFEWnkONF06H6VtU,3067 
+networkx/algorithms/tests/test_threshold.py,sha256=n3dSpE3amPa49C4MsAffSpZ259saTU0c-1mMzERrh84,9760 +networkx/algorithms/tests/test_tournament.py,sha256=xxmLb9Lrmjkh9tKmyv2yYJrhB2PHWh-Bq71M-d1NjQo,4158 +networkx/algorithms/tests/test_triads.py,sha256=td8v-_0JiLvV0ZW6ADqE8V5iy5p9wfH-jYJtrA-LJ-Y,8952 +networkx/algorithms/tests/test_vitality.py,sha256=p5lPWCtVMtbvxDw6TJUaf8vpb0zKPoz5pND722xiypQ,1380 +networkx/algorithms/tests/test_voronoi.py,sha256=M4B6JtkJUw56ULEWRs1kyVEUsroNrnb5FBq9OioAyHM,3477 +networkx/algorithms/tests/test_wiener.py,sha256=NJJbXZ9L5ZeFGQpCpvYVWFNqyX3amkbuDQEBL7wCixw,2080 +networkx/algorithms/threshold.py,sha256=VCME2hzhkwnXGNqfaYSrCG0x8l63i_FH-N7zNl5CeNs,31010 +networkx/algorithms/tournament.py,sha256=hDrl96Nf5huz66YJsDuV9ruRrRFFTGKJDctQq9fj4ls,11584 +networkx/algorithms/traversal/__init__.py,sha256=YtFrfNjciqTOI6jGePQaJ01tRSEQXTHqTGGNhDEDb_8,142 +networkx/algorithms/traversal/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/traversal/__pycache__/beamsearch.cpython-38.pyc,, +networkx/algorithms/traversal/__pycache__/breadth_first_search.cpython-38.pyc,, +networkx/algorithms/traversal/__pycache__/depth_first_search.cpython-38.pyc,, +networkx/algorithms/traversal/__pycache__/edgebfs.cpython-38.pyc,, +networkx/algorithms/traversal/__pycache__/edgedfs.cpython-38.pyc,, +networkx/algorithms/traversal/beamsearch.py,sha256=2iRO5_t4ZweZ2Jg9nBshRBOdWkcb8KB-fDgTFyhviRo,3388 +networkx/algorithms/traversal/breadth_first_search.py,sha256=xB5bwg8qRfndDMRrz8lA5XI-kawbNjt_f7KsK8gVgyY,12930 +networkx/algorithms/traversal/depth_first_search.py,sha256=1ladJNGDxeu0aRved6IG1EWU8lcxJJBm1b_JZWJ-2oc,12842 +networkx/algorithms/traversal/edgebfs.py,sha256=7eVnh6dqqpoCAW_dNIACvWgBj1-RytVGBft9V8Qr8eE,6233 +networkx/algorithms/traversal/edgedfs.py,sha256=sqssJqQ3M-xDdU1sPdxMd_VDQcxymYMv-wKnSEZEHOE,5938 +networkx/algorithms/traversal/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+networkx/algorithms/traversal/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/traversal/tests/__pycache__/test_beamsearch.cpython-38.pyc,, +networkx/algorithms/traversal/tests/__pycache__/test_bfs.cpython-38.pyc,, +networkx/algorithms/traversal/tests/__pycache__/test_dfs.cpython-38.pyc,, +networkx/algorithms/traversal/tests/__pycache__/test_edgebfs.cpython-38.pyc,, +networkx/algorithms/traversal/tests/__pycache__/test_edgedfs.cpython-38.pyc,, +networkx/algorithms/traversal/tests/test_beamsearch.py,sha256=XutDm5yWOB6lU0p_Wtd6RSSOdd7OfBMkhvQ70wxzb6w,897 +networkx/algorithms/traversal/tests/test_bfs.py,sha256=nm9WzJgBwB_AuPDQD5DKkZc5vpj8Xks08sSf-dL4E9o,4060 +networkx/algorithms/traversal/tests/test_dfs.py,sha256=_pk1X-gJr3eIZGKulPmP2apz6K0AMSrs21cijyRI0vU,5487 +networkx/algorithms/traversal/tests/test_edgebfs.py,sha256=8oplCu0fct3QipT0JB0-292EA2aOm8zWlMkPedfe6iY,4702 +networkx/algorithms/traversal/tests/test_edgedfs.py,sha256=HGmC3GUYSn9XLMHQpdefdE6g-Uh3KqbmgEEXBcckdYc,4775 +networkx/algorithms/tree/__init__.py,sha256=wm_FjX3G7hqJfyNmeEaJsRjZI-8Kkv0Nb5jAmQNXzSc,149 +networkx/algorithms/tree/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/tree/__pycache__/branchings.cpython-38.pyc,, +networkx/algorithms/tree/__pycache__/coding.cpython-38.pyc,, +networkx/algorithms/tree/__pycache__/decomposition.cpython-38.pyc,, +networkx/algorithms/tree/__pycache__/mst.cpython-38.pyc,, +networkx/algorithms/tree/__pycache__/operations.cpython-38.pyc,, +networkx/algorithms/tree/__pycache__/recognition.cpython-38.pyc,, +networkx/algorithms/tree/branchings.py,sha256=lrFHGdFFgkEHrFY_D5j9BoDZoZVmxa2rE6KSpYWTVJQ,36255 +networkx/algorithms/tree/coding.py,sha256=RrzQtnGmZilhyXeVVMWYLGmdMMkh0wZHtF5MMTRdRbo,12987 +networkx/algorithms/tree/decomposition.py,sha256=9FoC5jiOM_TbrKs3MlV3NZZ5t8t8a3Qbya88vlM6igM,3034 +networkx/algorithms/tree/mst.py,sha256=hd4GDW3CcV76dv9lZhsS0m5G2Li-sRwGy6zv4U7e1_g,39261 
+networkx/algorithms/tree/operations.py,sha256=bAIIsuZ5CJQOFtMzG6ZIEEiE23QGzu1eIY7uqLuu7LA,3499 +networkx/algorithms/tree/recognition.py,sha256=u36v_SVmQ2W77gB55vvsU6ayG12zYwKOGHPwVxvSnRY,7497 +networkx/algorithms/tree/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/algorithms/tree/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/algorithms/tree/tests/__pycache__/test_branchings.cpython-38.pyc,, +networkx/algorithms/tree/tests/__pycache__/test_coding.cpython-38.pyc,, +networkx/algorithms/tree/tests/__pycache__/test_decomposition.cpython-38.pyc,, +networkx/algorithms/tree/tests/__pycache__/test_mst.cpython-38.pyc,, +networkx/algorithms/tree/tests/__pycache__/test_operations.cpython-38.pyc,, +networkx/algorithms/tree/tests/__pycache__/test_recognition.cpython-38.pyc,, +networkx/algorithms/tree/tests/test_branchings.py,sha256=-jSj7I0dCOxcK3kTnXQRu-NDOWOQH7tzXlOZqkJcRKk,15249 +networkx/algorithms/tree/tests/test_coding.py,sha256=f3A5dvfkWImC6Jp2qkuw2Sz3whOsabnaOfu6Eh9r65I,3954 +networkx/algorithms/tree/tests/test_decomposition.py,sha256=vnl_xoQzi1LnlZL25vXOZWwvaWmon3-x222OKt4eDqE,1871 +networkx/algorithms/tree/tests/test_mst.py,sha256=Pgym7C6ZMb5npBbQbIf6aZ4bjfkYaLoHTRx0fJ2GV4I,21428 +networkx/algorithms/tree/tests/test_operations.py,sha256=0IevbCpr0F6AQBLlxbp5qSO9ENf-y1lg1ZkZxuWpHfc,1124 +networkx/algorithms/tree/tests/test_recognition.py,sha256=1Wz3PHAvAkt2Q_00HUXcfabRY0E63VBwNREAeWoz9N0,4173 +networkx/algorithms/triads.py,sha256=U5t8GatjmTraeuxltj8ocvqCY-iiostYzKgIBRmdyzc,13476 +networkx/algorithms/vitality.py,sha256=f1fAmEm1n7JE6TMBix9iFPmK9EOtgGg0YuRj8BYZnGM,2296 +networkx/algorithms/voronoi.py,sha256=BFykl7dsAXeQGDrK-Z6-sGbsyaQUYMXkkzZTcN_Pa78,3158 +networkx/algorithms/wiener.py,sha256=b_W7Beo_MKyetq6o8w2jLu0v8okmfX3n9F4Bc_NTi-M,2270 +networkx/classes/__init__.py,sha256=AzOWEJxd7rt8jdPvdMmPdKvhbQTksRvSN86TGRS8HnU,335 +networkx/classes/__pycache__/__init__.cpython-38.pyc,, 
+networkx/classes/__pycache__/coreviews.cpython-38.pyc,, +networkx/classes/__pycache__/digraph.cpython-38.pyc,, +networkx/classes/__pycache__/filters.cpython-38.pyc,, +networkx/classes/__pycache__/function.cpython-38.pyc,, +networkx/classes/__pycache__/graph.cpython-38.pyc,, +networkx/classes/__pycache__/graphviews.cpython-38.pyc,, +networkx/classes/__pycache__/multidigraph.cpython-38.pyc,, +networkx/classes/__pycache__/multigraph.cpython-38.pyc,, +networkx/classes/__pycache__/ordered.cpython-38.pyc,, +networkx/classes/__pycache__/reportviews.cpython-38.pyc,, +networkx/classes/coreviews.py,sha256=i0EEXzpKsW3W62_aP83JrHOxhlaS5K0xZiVE7IBBFCM,15471 +networkx/classes/digraph.py,sha256=YDnFAxhtaMYOyWfipiCu_x62BLlFldgNalDJQ_yBgEs,44427 +networkx/classes/filters.py,sha256=47OFApfkvvohVMoZ2v9sniM6sgv9rka869BDwmbdww4,1715 +networkx/classes/function.py,sha256=jXB672qCM3kNUWjTJG7_Rj7IqXYTRvrLJCqdkWj6Ync,35449 +networkx/classes/graph.py,sha256=whquojGtufS_LgPbPVKtiNUOgDQqzyNTPfIs6gu8HUQ,65984 +networkx/classes/graphviews.py,sha256=p-TcscD--cuB9zn4LdmsfohdUFn3izL30J-88MSb-To,6567 +networkx/classes/multidigraph.py,sha256=GFEH-3FWb1hlNjSeQgSlciLs0zTMt2XWDZAJSnvgCQs,35867 +networkx/classes/multigraph.py,sha256=Mt6njKgMg1gjVC33LMRlbEH1yEuYNOlIwH5fLZfC6K4,45810 +networkx/classes/ordered.py,sha256=RxgDUaJMlO887FsXws4SiPib7hCB6kUCAdCz0vwqRGg,5405 +networkx/classes/reportviews.py,sha256=H9BCIugWdoAsAsQO1F0oDbjNCQIdyI4JltQbqVc8Mb4,45714 +networkx/classes/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/classes/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/classes/tests/__pycache__/historical_tests.cpython-38.pyc,, +networkx/classes/tests/__pycache__/test_coreviews.cpython-38.pyc,, +networkx/classes/tests/__pycache__/test_digraph.cpython-38.pyc,, +networkx/classes/tests/__pycache__/test_digraph_historical.cpython-38.pyc,, +networkx/classes/tests/__pycache__/test_filters.cpython-38.pyc,, 
+networkx/classes/tests/__pycache__/test_function.cpython-38.pyc,, +networkx/classes/tests/__pycache__/test_graph.cpython-38.pyc,, +networkx/classes/tests/__pycache__/test_graph_historical.cpython-38.pyc,, +networkx/classes/tests/__pycache__/test_graphviews.cpython-38.pyc,, +networkx/classes/tests/__pycache__/test_multidigraph.cpython-38.pyc,, +networkx/classes/tests/__pycache__/test_multigraph.cpython-38.pyc,, +networkx/classes/tests/__pycache__/test_ordered.cpython-38.pyc,, +networkx/classes/tests/__pycache__/test_reportviews.cpython-38.pyc,, +networkx/classes/tests/__pycache__/test_special.cpython-38.pyc,, +networkx/classes/tests/__pycache__/test_subgraphviews.cpython-38.pyc,, +networkx/classes/tests/historical_tests.py,sha256=mMQy3mZ9o5ybkTUvtcl6eFflcCX4Ocntb1L35MOwZ08,16173 +networkx/classes/tests/test_coreviews.py,sha256=KcvLfvPXgIKYJrW97M5I6HgJrbHa3mb60CQc3QlV3Bc,15409 +networkx/classes/tests/test_digraph.py,sha256=3OgbdTjpyaZBi6SIoUdH3p2c6ia0d0UpJmC1qs7R9Ak,12059 +networkx/classes/tests/test_digraph_historical.py,sha256=xb2sylutpDIPXM1stcGW-tsW0xFOPQ-WZ7UbqapHEm0,3689 +networkx/classes/tests/test_filters.py,sha256=fBLig8z548gsBBlQw6VJdGZb4IcqJj7_0mi2Fd2ncEM,5851 +networkx/classes/tests/test_function.py,sha256=aYAhSFHAhH2L_LCyrfxLEt1aYcItYAEwq9NyzDblzOs,26327 +networkx/classes/tests/test_graph.py,sha256=2PsivQzvN3od05YOtKRsqQe2NXGLibf-npnwaNP_RQ0,29739 +networkx/classes/tests/test_graph_historical.py,sha256=-jf961vQCuQLyly0ju50q9dbzWG5m2OAs9H6IVS670c,273 +networkx/classes/tests/test_graphviews.py,sha256=7PaY2AuqgCb-TS1pvjkf51GZMsTY179XUxLGRNTx-M0,11524 +networkx/classes/tests/test_multidigraph.py,sha256=nyCRNuIURmk3wFKnLfuPIb9BI5Rjkwd4gjmt5kwc8FE,16034 +networkx/classes/tests/test_multigraph.py,sha256=9z0ocdUGW0kq_zL2XQgBrdNecTQwwys6h-5GbaxgGr8,18568 +networkx/classes/tests/test_ordered.py,sha256=QlDQyKzstSOUQWfIMSMl3hTq9e65pWNYhLlH2U0xkRI,1148 +networkx/classes/tests/test_reportviews.py,sha256=ek-FMPYupoFUFIP-Yv3aJXDYmPs4jcYhreZOrI7Rn88,41317 
+networkx/classes/tests/test_special.py,sha256=Uq25h522f3ndVE7P8jc4IwghGggLK6vkP-MtxNoVgJY,5760 +networkx/classes/tests/test_subgraphviews.py,sha256=yEhUPLdS7fW7gLuZzEyQu6zGWnBGvS0GbUbYpuDHTHA,13192 +networkx/conftest.py,sha256=JXFOt642CF00x-Ov8PFmw_XzTtlrpt1CNvJWzSKrG1k,12271 +networkx/convert.py,sha256=ZBOGJBt4e8hKBH-Rr7xdertkWHyv57HAU-jBdN1VBIE,15858 +networkx/convert_matrix.py,sha256=r80YqseF6JglHFwZqDixXxueom7_PChnu_esHGLw05o,56245 +networkx/drawing/__init__.py,sha256=rwVeOR7MCDwBUhXnx5VyiP0YfXkCeT7waocuqwXEPw8,136 +networkx/drawing/__pycache__/__init__.cpython-38.pyc,, +networkx/drawing/__pycache__/layout.cpython-38.pyc,, +networkx/drawing/__pycache__/nx_agraph.cpython-38.pyc,, +networkx/drawing/__pycache__/nx_pydot.cpython-38.pyc,, +networkx/drawing/__pycache__/nx_pylab.cpython-38.pyc,, +networkx/drawing/layout.py,sha256=U3hFZNtAarnHtNrnUCEnDXw-f71QhGFT7jGvTveTV4w,35778 +networkx/drawing/nx_agraph.py,sha256=SKICRNITffboFQN-m_bo4sxKteiHTdqHBpKTQybf48M,14699 +networkx/drawing/nx_pydot.py,sha256=icNbna-3F2kXQGGtnjO4oKiH368Cc4Dtbozuv-kHsVg,14091 +networkx/drawing/nx_pylab.py,sha256=Hj4Bl4hZgr5lBin2goU4jvvkaqQ_-v4oUNotsu6es6c,48387 +networkx/drawing/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/drawing/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/drawing/tests/__pycache__/test_agraph.cpython-38.pyc,, +networkx/drawing/tests/__pycache__/test_layout.cpython-38.pyc,, +networkx/drawing/tests/__pycache__/test_pydot.cpython-38.pyc,, +networkx/drawing/tests/__pycache__/test_pylab.cpython-38.pyc,, +networkx/drawing/tests/baseline/test_house_with_colors.png,sha256=FQi9pIRFwjq4gvgB8cDdBHL5euQUJFw6sQlABf2kRVo,21918 +networkx/drawing/tests/test_agraph.py,sha256=LM4XfZ1C7MPdNDYgqhy3ImHIafIIp4RgetDVe14khv0,8633 +networkx/drawing/tests/test_layout.py,sha256=fhL7o4pi1FT5t32FsWHMimTCeyOuFC4CcaunDreNsdw,17005 +networkx/drawing/tests/test_pydot.py,sha256=zxq8cyjl0ANJg63aXv87D1xvsju6wHQGPt94C8AFLB0,6241 
+networkx/drawing/tests/test_pylab.py,sha256=06GFkbfc4KAX4Dq9VoNzlamyAWMlutyLQJyuQ-SdQQo,26085 +networkx/exception.py,sha256=5v8tPTpYcuu3OFgSitgC8-wMUGNwfgxZog2gsBNeRPk,3537 +networkx/generators/__init__.py,sha256=vgbZl18dH6eXOnKI60TJjxrVRsfg-PSPo16i7OSEMr8,1272 +networkx/generators/__pycache__/__init__.cpython-38.pyc,, +networkx/generators/__pycache__/atlas.cpython-38.pyc,, +networkx/generators/__pycache__/classic.cpython-38.pyc,, +networkx/generators/__pycache__/cographs.cpython-38.pyc,, +networkx/generators/__pycache__/community.cpython-38.pyc,, +networkx/generators/__pycache__/degree_seq.cpython-38.pyc,, +networkx/generators/__pycache__/directed.cpython-38.pyc,, +networkx/generators/__pycache__/duplication.cpython-38.pyc,, +networkx/generators/__pycache__/ego.cpython-38.pyc,, +networkx/generators/__pycache__/expanders.cpython-38.pyc,, +networkx/generators/__pycache__/geometric.cpython-38.pyc,, +networkx/generators/__pycache__/harary_graph.cpython-38.pyc,, +networkx/generators/__pycache__/internet_as_graphs.cpython-38.pyc,, +networkx/generators/__pycache__/intersection.cpython-38.pyc,, +networkx/generators/__pycache__/interval_graph.cpython-38.pyc,, +networkx/generators/__pycache__/joint_degree_seq.cpython-38.pyc,, +networkx/generators/__pycache__/lattice.cpython-38.pyc,, +networkx/generators/__pycache__/line.cpython-38.pyc,, +networkx/generators/__pycache__/mycielski.cpython-38.pyc,, +networkx/generators/__pycache__/nonisomorphic_trees.cpython-38.pyc,, +networkx/generators/__pycache__/random_clustered.cpython-38.pyc,, +networkx/generators/__pycache__/random_graphs.cpython-38.pyc,, +networkx/generators/__pycache__/small.cpython-38.pyc,, +networkx/generators/__pycache__/social.cpython-38.pyc,, +networkx/generators/__pycache__/spectral_graph_forge.cpython-38.pyc,, +networkx/generators/__pycache__/stochastic.cpython-38.pyc,, +networkx/generators/__pycache__/sudoku.cpython-38.pyc,, +networkx/generators/__pycache__/trees.cpython-38.pyc,, 
+networkx/generators/__pycache__/triads.cpython-38.pyc,, +networkx/generators/atlas.dat.gz,sha256=c_xBbfAWSSNgd1HLdZ9K6B3rX2VQvyW-Wcht47dH5B0,8887 +networkx/generators/atlas.py,sha256=f0276nWepfl7P5nbCmZ6Fcd8zntPbRuZKxoQqweM0C4,5548 +networkx/generators/classic.py,sha256=mvK7lT0Y6LX1ipgwk2guIDj_Th1QQtccrae_qJr5_6o,23801 +networkx/generators/cographs.py,sha256=4ebIRwmgDiffa2ofAVGolkpnxQzN1xfkq0r1crnwpEE,1844 +networkx/generators/community.py,sha256=jI-PGI-mC6mubahXRIwj-qJDz9nRX4VE8vMDNosoTGY,34429 +networkx/generators/degree_seq.py,sha256=B3_2OW9EK-u_UHz9vjChwDNTPZG1TrRLMoeYYwGz-Wo,29817 +networkx/generators/directed.py,sha256=Ukdw9h3taJGO3x0zEAIzMsw0n5B5ZOAk2BqcKl-jrpk,16676 +networkx/generators/duplication.py,sha256=qpX1nx37qtJt2NN9EOJQJqusFJLLNpzJmLJ8aE0mkhM,4959 +networkx/generators/ego.py,sha256=CbIbzM9Dxk8OM48Xw4Qu9AwoL9J9SeEzvc5KWXaomHg,1836 +networkx/generators/expanders.py,sha256=Q-fnJKbIc_XXHaHOk9D4rVLXJKHk2Yg6ZgrMfoEJk88,6191 +networkx/generators/geometric.py,sha256=NvQSapYw2pgEbDlI2FwmfX2gxmwoHzP5HjDQesdKxyk,29482 +networkx/generators/harary_graph.py,sha256=zDhVtwJJkr-UjOY8jGhuvPQ7b_Mjve7XpLNNA4I2cLo,6072 +networkx/generators/internet_as_graphs.py,sha256=MyaBr9iRyTDij94ukTKZJUTFrx3X792VcquDnm-bzqs,14128 +networkx/generators/intersection.py,sha256=n2molGcwS9tE7Lo65clvJqTynymOmtqXrn5jQPKpTbQ,3947 +networkx/generators/interval_graph.py,sha256=5bV5ZL66tsDazbbe2IZY3uZFqLoTAqj39F1HJKvvct4,2186 +networkx/generators/joint_degree_seq.py,sha256=LGV7USEfG-t6zUzfip1xnCAgAVn5qkaLAgi1zHt8-50,24785 +networkx/generators/lattice.py,sha256=qDwNX7eSXsn0YKiV42aF_7sjGLyov0N-uBsMRmTgbRs,13222 +networkx/generators/line.py,sha256=CFqpIqDUhDNQKn4eK43aOAS_bqddRpqf-P8l6RM9MhA,17654 +networkx/generators/mycielski.py,sha256=aWhsBSq5fagkxfRqN45Gswpo64jv9lSu9A_g2mMV2P0,3225 +networkx/generators/nonisomorphic_trees.py,sha256=GZJaC09tck-8vtzxH1px9RJTcl8B5WbWaC9ldqTA_MQ,5180 +networkx/generators/random_clustered.py,sha256=DBWtgWQT4gHX7mb4iZjRhfDMEfcoWGzIynEOBQp2cZ4,4132 
+networkx/generators/random_graphs.py,sha256=eGaNxx06z3gJrv64vGcoeyXKBKqjYVXMmd8g29y23nA,44337 +networkx/generators/small.py,sha256=-MKuw9rirnVQP5C2JWM78nLq0onGr2Y2i8xlif0p5J8,30426 +networkx/generators/social.py,sha256=ghaDZVhcuUneZ1PS6mkMERgk8yPGAOGU9fHJgy7lAWo,22759 +networkx/generators/spectral_graph_forge.py,sha256=sZnPxv8jKs43Y0gCaHKqZrC-_BEunYaZqKyzKaWl9zU,4246 +networkx/generators/stochastic.py,sha256=5H1kxP4za36KRkfXF8CGGg_X15GtCfQdx-6gzXdHtok,1840 +networkx/generators/sudoku.py,sha256=sc2oGk0vvSmmSzlyHWCntJ8_fXvh9VomeJ-0v6p8yew,4243 +networkx/generators/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/generators/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_atlas.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_classic.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_cographs.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_community.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_degree_seq.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_directed.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_duplication.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_ego.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_expanders.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_geometric.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_harary_graph.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_internet_as_graphs.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_intersection.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_interval_graph.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_joint_degree_seq.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_lattice.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_line.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_mycielski.cpython-38.pyc,, 
+networkx/generators/tests/__pycache__/test_nonisomorphic_trees.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_random_clustered.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_random_graphs.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_small.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_spectral_graph_forge.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_stochastic.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_sudoku.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_trees.cpython-38.pyc,, +networkx/generators/tests/__pycache__/test_triads.cpython-38.pyc,, +networkx/generators/tests/test_atlas.py,sha256=nwXJL4O5jUqhTwqhkPxHY8s3KXHQTDEdsfbg4MsSzVQ,2530 +networkx/generators/tests/test_classic.py,sha256=5iBXKn9DZKvxmtpqWwF7LEpV4TRcSK9iRaeBGagjjUw,19550 +networkx/generators/tests/test_cographs.py,sha256=DkiQzP69sjw3QtjWVX2XV0EXoOuEvR42dixPWwuawSE,460 +networkx/generators/tests/test_community.py,sha256=-8AKUr10zQ_KZKoGud00ypxsby7-YNGoO80qibw8h5E,8681 +networkx/generators/tests/test_degree_seq.py,sha256=xCcfFEHee6AzVdRKHZOZBEHbv03UC8PDYAWM9BON3sc,7106 +networkx/generators/tests/test_directed.py,sha256=fGzUOVnwNR1aOv-9R6caYQeT-xZ_Ea1v6qPFxi19swE,4179 +networkx/generators/tests/test_duplication.py,sha256=IIzcHEfHp0NHsH7GTXSb4E4kgXAlt83q4IMibfx2FBw,1915 +networkx/generators/tests/test_ego.py,sha256=8v1Qjmkli9wIhhUuqzgqCzysr0C1Z2C3oJMCUoNvgY4,1327 +networkx/generators/tests/test_expanders.py,sha256=aYS2zuodq7AeVISebU_O_QIFxIPBgto8J-vEsaqPhy8,2389 +networkx/generators/tests/test_geometric.py,sha256=3ZHjCZJC9DH8jKO5I19h_TisEwVYQC21ADE7LJreclc,11237 +networkx/generators/tests/test_harary_graph.py,sha256=_k00U6jwuGSLu444Cb4q4zRaLV7ufNWNTHJBMOnFmf4,4958 +networkx/generators/tests/test_internet_as_graphs.py,sha256=lR3_KuyETCVxPnRaNeks0sEcNljHmi2ohOYkxaplgmM,7137 +networkx/generators/tests/test_intersection.py,sha256=hcIit5fKfOn3VjMhz9KqovZK9tzxZfmC6ezvA7gZAvM,819 
+networkx/generators/tests/test_interval_graph.py,sha256=-1yXDZDW-ygmNva9Bu-TsS_SYGLcW1KJplwZHFFYyWM,4278 +networkx/generators/tests/test_joint_degree_seq.py,sha256=fHK-hW_9aGdf13AlnvoZZmNMTik8CXPoJelmhSzzcXM,4272 +networkx/generators/tests/test_lattice.py,sha256=EFhg_eA-q9x2e56FMIT_Jw3ZXqhjW1yt6Iy-EhUIzzU,9292 +networkx/generators/tests/test_line.py,sha256=f_6YjUGctdlw6ZbSZqoFVpBfTWZLKsW4w8TE5RfLnZ4,8471 +networkx/generators/tests/test_mycielski.py,sha256=cAg2J6o_RrbwEdAc0vCuSF6zeS6w1KT4leTM0vkIeoA,822 +networkx/generators/tests/test_nonisomorphic_trees.py,sha256=Y_qWyj_qZU9O_DC4BHEVD9xnIEALCmfdmZAYJjTxUYE,2384 +networkx/generators/tests/test_random_clustered.py,sha256=LTfigb1swnYWS59OJoBmNcjFcUjsodnHVOwFxBXl7xg,979 +networkx/generators/tests/test_random_graphs.py,sha256=kA2Qo-DbbnBDN8PVcbE9-y22L7JD4B56DfaNU52myY8,12827 +networkx/generators/tests/test_small.py,sha256=yXMFFqC2IWlW_KSvt77H_JYE1i8P-7kmiB7FgR1_iZQ,7354 +networkx/generators/tests/test_spectral_graph_forge.py,sha256=x4jyTiQiydaUPWYaGsNFsIB47PAzSSwQYCNXGa2B4SU,1594 +networkx/generators/tests/test_stochastic.py,sha256=nPupQ2mG2oy22gGCs1dPFg4nZHYObsQZ1bBxTXTsEG0,1822 +networkx/generators/tests/test_sudoku.py,sha256=dgOmk-B7MxCVkbHdZzsLZppQ61FAArVy4McSVL8Afzo,1968 +networkx/generators/tests/test_trees.py,sha256=6FIXOXkiYs4_fr7HCq5q8TcQ87ML_73fZNqT8VI2hVA,2901 +networkx/generators/tests/test_triads.py,sha256=mgpHFf0Z34CqtnXgkdf7gK1dC77ppYAqwviXsaU1HVs,332 +networkx/generators/trees.py,sha256=ayXZBWb6uUgHWFG5kYkZAtV6yC3vy-5_7gS75J_CtJ4,14146 +networkx/generators/triads.py,sha256=OAVGc07yKJ2d2IAvSF94z8f1KHgFS9lMeC7HoWCn8V0,2184 +networkx/lazy_imports.py,sha256=3PZ69R_MO23e3U8-6M37bpfXQ6lx5ywV61sETo4k9Cg,5777 +networkx/linalg/__init__.py,sha256=7iyNZ_YYBnlsW8zSfhUgvEkywOrUWfpIuyS86ZOKlG8,568 +networkx/linalg/__pycache__/__init__.cpython-38.pyc,, +networkx/linalg/__pycache__/algebraicconnectivity.cpython-38.pyc,, +networkx/linalg/__pycache__/attrmatrix.cpython-38.pyc,, 
+networkx/linalg/__pycache__/bethehessianmatrix.cpython-38.pyc,, +networkx/linalg/__pycache__/graphmatrix.cpython-38.pyc,, +networkx/linalg/__pycache__/laplacianmatrix.cpython-38.pyc,, +networkx/linalg/__pycache__/modularitymatrix.cpython-38.pyc,, +networkx/linalg/__pycache__/spectrum.cpython-38.pyc,, +networkx/linalg/algebraicconnectivity.py,sha256=yRu4HOy1EWdk5jSWTRV1vnSumYvIwsd28ffCyh1BPFA,18281 +networkx/linalg/attrmatrix.py,sha256=Swp4TdcOo7OWvJEdwdSvmZBCeI56SxJQpmc4GSLnmEI,15658 +networkx/linalg/bethehessianmatrix.py,sha256=_Ni1dtnow03zj9J3OENlrqK08_p2e9FOKBOwIiQzIIk,2996 +networkx/linalg/graphmatrix.py,sha256=srUndtkU9U6I6VSFDrm9MgH6qm1SChIJ5RYaY1g1maE,6071 +networkx/linalg/laplacianmatrix.py,sha256=aPJPMP-WT85hy6jZcC6w9m7S2_gOQP2_3Fz3fb_sSQc,13986 +networkx/linalg/modularitymatrix.py,sha256=7hgSPeoMrkDFKOETaMgw9inYJ2T8uSlHeoIib2r3fKA,5119 +networkx/linalg/spectrum.py,sha256=Iek8aRJw_TPVGiyxVyKGCmVLCVOxb3ukvBXp6X1Lhxk,3876 +networkx/linalg/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/linalg/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/linalg/tests/__pycache__/test_algebraic_connectivity.cpython-38.pyc,, +networkx/linalg/tests/__pycache__/test_attrmatrix.cpython-38.pyc,, +networkx/linalg/tests/__pycache__/test_bethehessian.cpython-38.pyc,, +networkx/linalg/tests/__pycache__/test_graphmatrix.cpython-38.pyc,, +networkx/linalg/tests/__pycache__/test_laplacian.cpython-38.pyc,, +networkx/linalg/tests/__pycache__/test_modularity.cpython-38.pyc,, +networkx/linalg/tests/__pycache__/test_spectrum.cpython-38.pyc,, +networkx/linalg/tests/test_algebraic_connectivity.py,sha256=I9kbAwLsGvVUt_1bAwHcp4UmqSg_peWv41j37bg5EUo,13344 +networkx/linalg/tests/test_attrmatrix.py,sha256=XD3YuPc5yXKWbhwVSI8YiV_wABWM-rLtwf1uwwWlnI0,2833 +networkx/linalg/tests/test_bethehessian.py,sha256=0r-Do902ywV10TyqTlIJ2Ls3iMqM6sSs2PZbod7kWBM,1327 +networkx/linalg/tests/test_graphmatrix.py,sha256=0cMwzfK6gX7yGCtwRpVXcUn0RWDQDH-HALM5volWA20,9090 
+networkx/linalg/tests/test_laplacian.py,sha256=K8p2upJTJLfNHfAf0B9ohPXBZ4k_2VMpSvIc-jXZ_rM,9934 +networkx/linalg/tests/test_modularity.py,sha256=mfKUvwc3bj6Rud1aG4oK3Eu1qg12o6cB8-pv5ZFicYY,3115 +networkx/linalg/tests/test_spectrum.py,sha256=agP2DsiEIvtkNUkT94mdPtJjwnobnjMTUOwjIQa4giA,2828 +networkx/readwrite/__init__.py,sha256=smim3hE7gh6EUjVvCP2rypK0Lgb30y5LOWfgnl79kq4,2490 +networkx/readwrite/__pycache__/__init__.cpython-38.pyc,, +networkx/readwrite/__pycache__/adjlist.cpython-38.pyc,, +networkx/readwrite/__pycache__/edgelist.cpython-38.pyc,, +networkx/readwrite/__pycache__/gexf.cpython-38.pyc,, +networkx/readwrite/__pycache__/gml.cpython-38.pyc,, +networkx/readwrite/__pycache__/gpickle.cpython-38.pyc,, +networkx/readwrite/__pycache__/graph6.cpython-38.pyc,, +networkx/readwrite/__pycache__/graphml.cpython-38.pyc,, +networkx/readwrite/__pycache__/leda.cpython-38.pyc,, +networkx/readwrite/__pycache__/multiline_adjlist.cpython-38.pyc,, +networkx/readwrite/__pycache__/nx_shp.cpython-38.pyc,, +networkx/readwrite/__pycache__/nx_yaml.cpython-38.pyc,, +networkx/readwrite/__pycache__/p2g.cpython-38.pyc,, +networkx/readwrite/__pycache__/pajek.cpython-38.pyc,, +networkx/readwrite/__pycache__/sparse6.cpython-38.pyc,, +networkx/readwrite/__pycache__/text.cpython-38.pyc,, +networkx/readwrite/adjlist.py,sha256=U4Fit0feX6snGWKqfLslqcTT6EL638_YeZfxnSWTkFM,7743 +networkx/readwrite/edgelist.py,sha256=cV6lWh70mU_W5uBDl_VywZIugIJaRAu9IhoQ2Rthzjo,14079 +networkx/readwrite/gexf.py,sha256=AEtYSWVBRmsiBuF0C5G8z7YbQ0pQRIbQkK--zKj7IMY,39511 +networkx/readwrite/gml.py,sha256=T1xccYR11GzZBXsNOM-4_jYcQbOjQydxc8Z3ECppygg,30016 +networkx/readwrite/gpickle.py,sha256=hiL8LmPRtOWWwUSaPYLzxEtK_pen-GQff70SYL-XXLo,2985 +networkx/readwrite/graph6.py,sha256=3N50F0-58G4YohISmgV2G9emKDBUpfHcKhomYGYHhNE,11277 +networkx/readwrite/graphml.py,sha256=OtynmKyxpS-1V8stzwuIQrk0K3FYZci5eGbuTglRhHM,39163 +networkx/readwrite/json_graph/__init__.py,sha256=6U_kUzFQRiTn_wtklqdxSJ1O7DLykeS6EPLIc-u0Dk4,724 
+networkx/readwrite/json_graph/__pycache__/__init__.cpython-38.pyc,, +networkx/readwrite/json_graph/__pycache__/adjacency.cpython-38.pyc,, +networkx/readwrite/json_graph/__pycache__/cytoscape.cpython-38.pyc,, +networkx/readwrite/json_graph/__pycache__/jit.cpython-38.pyc,, +networkx/readwrite/json_graph/__pycache__/node_link.cpython-38.pyc,, +networkx/readwrite/json_graph/__pycache__/tree.cpython-38.pyc,, +networkx/readwrite/json_graph/adjacency.py,sha256=2g2cn9OmwSihhFAJesPdhmU6yIC9Gp6x0reuhKxzjAM,4745 +networkx/readwrite/json_graph/cytoscape.py,sha256=fAT2T4c0IQvAMT9wmAw5eJMjbwJ2T_sAWoMsdLtX61A,8142 +networkx/readwrite/json_graph/jit.py,sha256=xuURaJrdy4gMgwlLIAUiezNM_OCwsalgin8og2qofVc,3082 +networkx/readwrite/json_graph/node_link.py,sha256=1bjjTqnQotI3bee5PxPl1HnhLvB-KCF5zQxcqQ77THE,6111 +networkx/readwrite/json_graph/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/readwrite/json_graph/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/readwrite/json_graph/tests/__pycache__/test_adjacency.cpython-38.pyc,, +networkx/readwrite/json_graph/tests/__pycache__/test_cytoscape.cpython-38.pyc,, +networkx/readwrite/json_graph/tests/__pycache__/test_jit.cpython-38.pyc,, +networkx/readwrite/json_graph/tests/__pycache__/test_node_link.cpython-38.pyc,, +networkx/readwrite/json_graph/tests/__pycache__/test_tree.cpython-38.pyc,, +networkx/readwrite/json_graph/tests/test_adjacency.py,sha256=fIhI53yZlt4_8xSpCLy8-z0K132QTmiVLr49bHfKG5Y,1766 +networkx/readwrite/json_graph/tests/test_cytoscape.py,sha256=dDVWM8IYP4xJPp8Jaqv_p3Bg6TkdbM2aAEEK7GDXcRQ,2548 +networkx/readwrite/json_graph/tests/test_jit.py,sha256=_JpDVaWHwQSPsOCgPRZ7BPBvp8VtVNXOruV7uAz2w78,2068 +networkx/readwrite/json_graph/tests/test_node_link.py,sha256=YrEUySBplVAWbLfU73p4nJNYl7gq6SDkgkx0mm-0lXA,3177 +networkx/readwrite/json_graph/tests/test_tree.py,sha256=xhXn1hsh2mN5AlYpFFod4_vReWWC5n8KnpMXzq82Tpw,1887 
+networkx/readwrite/json_graph/tree.py,sha256=dXtA1N16m-at8UdZXtZwy2oI05OdmoNpHiglJCWVeuk,6750 +networkx/readwrite/leda.py,sha256=RSM1kW6F7HWkWliXeBTEMA_3rgXmRfX63MGM_d3DHUU,2712 +networkx/readwrite/multiline_adjlist.py,sha256=LEBRqDIUjxqeeYb-OiL64fDCPagmYVUA0uj0PMFJFcY,11201 +networkx/readwrite/nx_shp.py,sha256=RoING1m00mCFQ3w7YFgmxV_uClb8_MbXw2nba8acLB0,12183 +networkx/readwrite/nx_yaml.py,sha256=fEhgNB23RPNN_upmkPsRvZ2aUtQPC3vJegkRSrVKeS8,2174 +networkx/readwrite/p2g.py,sha256=QzjBrvkRR7_BlIM7fzzQeBBbDo0t4mHttJ1SYQg40Wg,2995 +networkx/readwrite/pajek.py,sha256=EDiFLozCQJb19g-D5EXpNBO2a1Ago2iro07Wp0xUkNE,8636 +networkx/readwrite/sparse6.py,sha256=0dGmYXB-_wPnJ_A0SHb0CvkTCoM9vw0ge4-ZjgOlCB4,10193 +networkx/readwrite/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/readwrite/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/readwrite/tests/__pycache__/test_adjlist.cpython-38.pyc,, +networkx/readwrite/tests/__pycache__/test_edgelist.cpython-38.pyc,, +networkx/readwrite/tests/__pycache__/test_getattr_nxyaml_removal.cpython-38.pyc,, +networkx/readwrite/tests/__pycache__/test_gexf.cpython-38.pyc,, +networkx/readwrite/tests/__pycache__/test_gml.cpython-38.pyc,, +networkx/readwrite/tests/__pycache__/test_gpickle.cpython-38.pyc,, +networkx/readwrite/tests/__pycache__/test_graph6.cpython-38.pyc,, +networkx/readwrite/tests/__pycache__/test_graphml.cpython-38.pyc,, +networkx/readwrite/tests/__pycache__/test_leda.cpython-38.pyc,, +networkx/readwrite/tests/__pycache__/test_p2g.cpython-38.pyc,, +networkx/readwrite/tests/__pycache__/test_pajek.cpython-38.pyc,, +networkx/readwrite/tests/__pycache__/test_shp.cpython-38.pyc,, +networkx/readwrite/tests/__pycache__/test_sparse6.cpython-38.pyc,, +networkx/readwrite/tests/__pycache__/test_text.cpython-38.pyc,, +networkx/readwrite/tests/test_adjlist.py,sha256=dLEv3txnBrHYxajOYAQhA8CA7axiuPw1ECbaHL5p338,9922 
+networkx/readwrite/tests/test_edgelist.py,sha256=l_o1-VbHFMIkNwOLWDdekoprzrfMEDf-tYhIKoxTG3c,9997 +networkx/readwrite/tests/test_getattr_nxyaml_removal.py,sha256=OBwlS280q6jFsrM7DY1MFmKLqHQZSlEswH7P0aY28bE,1005 +networkx/readwrite/tests/test_gexf.py,sha256=PLphmfUB8fYGJCt3EM-rVkqXjHJaCF1GNFN7qccyrxM,18928 +networkx/readwrite/tests/test_gml.py,sha256=stS4rg6N4-YBBYNVOagxSnwL2kXFr2DX-7oQBiE4Mns,20182 +networkx/readwrite/tests/test_gpickle.py,sha256=kr_-moaOpvzk1N5VkuW-biJroHA_kCKH11Va-7Z6jT4,2143 +networkx/readwrite/tests/test_graph6.py,sha256=IjBpfTr-czBLHb8UT_JzvOTBROpnOf5TKKkfCnEeQT8,6069 +networkx/readwrite/tests/test_graphml.py,sha256=vGbgGGZEMtQa4-Cjhxv6lj1GiIjVtZYdARsisdAccQQ,67496 +networkx/readwrite/tests/test_leda.py,sha256=_5F4nLLQ1oAZQMZtTQoFncZL0Oc-IsztFBglEdQeH3k,1392 +networkx/readwrite/tests/test_p2g.py,sha256=mtO2mA_2qeV7q8tQz9jf1fuo13blIhLW0SUKb_4S7j0,1327 +networkx/readwrite/tests/test_pajek.py,sha256=XTsnaCaYjroysCHlTsYwMGGrDR0B1MRwWkA-WXbAXTg,4703 +networkx/readwrite/tests/test_shp.py,sha256=w6MnK1LlVNLTPS5d2ufVynN7B537Vfm9RWxlluLu8nM,9166 +networkx/readwrite/tests/test_sparse6.py,sha256=fLpTG0YgcptNOpUipcCcVlni5i8IyC21kkk3ZeD0XhM,5470 +networkx/readwrite/tests/test_text.py,sha256=oEKc-VO5vk3E5XkACtpybaf1xUIcdAoETDkK5LZFiPM,7933 +networkx/readwrite/text.py,sha256=7h437jkHOxid7-DMl09Zsy1bzigTGVWLsribapfCG_I,6498 +networkx/relabel.py,sha256=WQZv_Gil6Bh5oX8imiw4foZQdw6yMQhtYWFNNRHEPJg,10010 +networkx/testing/__init__.py,sha256=7r6YEG1MsWE913m_gOxgwLaENn0d8oW58qyYDnCjlQA,75 +networkx/testing/__pycache__/__init__.cpython-38.pyc,, +networkx/testing/__pycache__/test.cpython-38.pyc,, +networkx/testing/__pycache__/utils.cpython-38.pyc,, +networkx/testing/test.py,sha256=50q8FPBi5Uks6rwzu58yIZ8XXttuDtG7TvU5h1m0WXg,934 +networkx/testing/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/testing/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/testing/tests/__pycache__/test_utils.cpython-38.pyc,, 
+networkx/testing/tests/test_utils.py,sha256=jDmedlD9406YVdccNmRtiM4VEHuzRGRrbiKMB6UsA6Q,4953 +networkx/testing/utils.py,sha256=WsN3scG0pYJhueHG97wItgMf8htJqcQTwdNDQRI2CMM,1491 +networkx/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/tests/__pycache__/test_all_random_functions.cpython-38.pyc,, +networkx/tests/__pycache__/test_convert.cpython-38.pyc,, +networkx/tests/__pycache__/test_convert_numpy.cpython-38.pyc,, +networkx/tests/__pycache__/test_convert_pandas.cpython-38.pyc,, +networkx/tests/__pycache__/test_convert_scipy.cpython-38.pyc,, +networkx/tests/__pycache__/test_exceptions.cpython-38.pyc,, +networkx/tests/__pycache__/test_import.cpython-38.pyc,, +networkx/tests/__pycache__/test_lazy_imports.cpython-38.pyc,, +networkx/tests/__pycache__/test_relabel.cpython-38.pyc,, +networkx/tests/test_all_random_functions.py,sha256=V1ETv6d-3wbbrinZHGUrjWQ6WOi1G4evQCdWJB87KZQ,8643 +networkx/tests/test_convert.py,sha256=cZJhrpPscLfvDOz4zJqhcq1IUJlJil-JQbiH3__16lU,12752 +networkx/tests/test_convert_numpy.py,sha256=mL7nQ2IezZtdb99IwBQb3iUoI5vP_AjBPOAaVWYu2qY,25240 +networkx/tests/test_convert_pandas.py,sha256=rNZ8UZiZN-9TBNVtFFj2BnY4Qb8lnetvcJndBBn1Nac,12259 +networkx/tests/test_convert_scipy.py,sha256=edYqfKczdlX5HqTNZs1HS8_uhVsbQCN2lzp9IzSxWoU,10880 +networkx/tests/test_exceptions.py,sha256=XYkpPzqMepSw3MPRUJN5LcFsUsy3YT_fiRDhm0OeAeQ,927 +networkx/tests/test_import.py,sha256=Gm4ujfH9JkQtDrSjOlwXXXUuubI057wskKLCkF6Z92k,220 +networkx/tests/test_lazy_imports.py,sha256=hq-0vf78aLRkIMOviEW-nowtAfZ3A778McPZaFKmn5w,2663 +networkx/tests/test_relabel.py,sha256=XNRvVrLwRaHJSuXxBidxl5ojcFGs6l4kHwtCNtgLykI,12448 +networkx/utils/__init__.py,sha256=2FD4Wn9_6sbJFpNCB3bGVKcNvvAn7BOT-dDSH_qvLEc,272 +networkx/utils/__pycache__/__init__.cpython-38.pyc,, +networkx/utils/__pycache__/contextmanagers.cpython-38.pyc,, +networkx/utils/__pycache__/decorators.cpython-38.pyc,, 
+networkx/utils/__pycache__/heaps.cpython-38.pyc,, +networkx/utils/__pycache__/mapped_queue.cpython-38.pyc,, +networkx/utils/__pycache__/misc.cpython-38.pyc,, +networkx/utils/__pycache__/random_sequence.cpython-38.pyc,, +networkx/utils/__pycache__/rcm.cpython-38.pyc,, +networkx/utils/__pycache__/union_find.cpython-38.pyc,, +networkx/utils/contextmanagers.py,sha256=7WkfpRXxITV6xlm-Fo2Gc3b4DJNzMNRFIedN3Tzrnlg,1269 +networkx/utils/decorators.py,sha256=DKBd2zd6ndFbgpQzLr5OVs5QP_Ka78s8QEmMDr1gIds,46493 +networkx/utils/heaps.py,sha256=rb-eoEAgSx0t2EjUj09XgzpIQzdneZIT2p6opJw8TLI,10414 +networkx/utils/mapped_queue.py,sha256=Oj1L2DyqhGZ1TFhcHyJq5IK8ZTkvm3VNmh4F1zsYX7M,9138 +networkx/utils/misc.py,sha256=1MOrkvjftO3xuWwz0Zjftu-he7pkwXBbt__nm2ZIBFA,20390 +networkx/utils/random_sequence.py,sha256=ADFgbqUxPF6bcCQ7jJeUWoWSOI5w8zMDfMY-6yHmQW4,4240 +networkx/utils/rcm.py,sha256=Sx3iAwZvrzW-eMoQb2-ZprwmXeGFSMaNwpI6xBOEKvs,4629 +networkx/utils/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +networkx/utils/tests/__pycache__/__init__.cpython-38.pyc,, +networkx/utils/tests/__pycache__/test__init.cpython-38.pyc,, +networkx/utils/tests/__pycache__/test_contextmanager.cpython-38.pyc,, +networkx/utils/tests/__pycache__/test_decorators.cpython-38.pyc,, +networkx/utils/tests/__pycache__/test_heaps.cpython-38.pyc,, +networkx/utils/tests/__pycache__/test_mapped_queue.cpython-38.pyc,, +networkx/utils/tests/__pycache__/test_misc.cpython-38.pyc,, +networkx/utils/tests/__pycache__/test_random_sequence.cpython-38.pyc,, +networkx/utils/tests/__pycache__/test_rcm.cpython-38.pyc,, +networkx/utils/tests/__pycache__/test_unionfind.cpython-38.pyc,, +networkx/utils/tests/test__init.py,sha256=QE0i-lNE4pG2eYjB2mZ0uw7jPD-7TdL7Y9p73JoWQmo,363 +networkx/utils/tests/test_contextmanager.py,sha256=ewa7A464FOmm0uZD-kD4iN_9zEcQNwZJPIin0lE9u_c,310 +networkx/utils/tests/test_decorators.py,sha256=Waol9jmnOXS1FAHgE_6yK6zPUOk8zmOTVDePuknwG8E,13642 
+networkx/utils/tests/test_heaps.py,sha256=qCuWMzpcMH1Gwu014CAams78o151QD5YL0mB1fz16Yw,3711 +networkx/utils/tests/test_mapped_queue.py,sha256=x4xxZR4CbaSiLyMBahgGt7eBS0BtIx6CYo3sXXM-LgM,6338 +networkx/utils/tests/test_misc.py,sha256=mJ3cXfQmRfWCO9Jn4Jqi1OEo-s5CfruvTi2ZxFFSPbM,9664 +networkx/utils/tests/test_random_sequence.py,sha256=Ou-IeCFybibZuycoin5gUQzzC-iy5yanZFmrqvdGt6Q,925 +networkx/utils/tests/test_rcm.py,sha256=UvUAkgmQMGk_Nn94TJyQsle4A5SLQFqMQWld1tiQ2lk,1421 +networkx/utils/tests/test_unionfind.py,sha256=j-DF5XyeJzq1hoeAgN5Nye2Au7EPD040t8oS4Aw2IwU,1579 +networkx/utils/union_find.py,sha256=PFEk5AyjSq8piU1E1qD4MIoaBWmeBLp1OTwkLeDTmmk,3323 diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/networkx-2.8.5.dist-info/WHEEL b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/networkx-2.8.5.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..becc9a66ea739ba941d48a749e248761cc6e658a --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/networkx-2.8.5.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/networkx-2.8.5.dist-info/top_level.txt b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/networkx-2.8.5.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..4d07dfe2f85d6849d7f416dcce756b2501ba847e --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/networkx-2.8.5.dist-info/top_level.txt @@ -0,0 +1 @@ +networkx diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/INSTALLER b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/INSTALLER new file mode 100644 index 
0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/LICENSE.rst b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/LICENSE.rst new file mode 100644 index 0000000000000000000000000000000000000000..ac630e821cbba5705553194064946097ef575247 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/LICENSE.rst @@ -0,0 +1,24 @@ +Copyright (c) 2005-2019, Ilya Etingof +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/RECORD b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..8b63431c4557b8888bff47448a9dbdffd67be46c --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/RECORD @@ -0,0 +1,80 @@ +pyasn1-0.4.8.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pyasn1-0.4.8.dist-info/LICENSE.rst,sha256=IsXMaSKrXWn7oy2MXuTN0UmBUIy1OvwOvYVZOEf9laU,1334 +pyasn1-0.4.8.dist-info/METADATA,sha256=Mx_DbLo2GA_t9nOIsqu-18vjHdTjMR1LtUzdcfLzE0Y,1521 +pyasn1-0.4.8.dist-info/RECORD,, +pyasn1-0.4.8.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pyasn1-0.4.8.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110 +pyasn1-0.4.8.dist-info/top_level.txt,sha256=dnNEQt3nIDIO5mSCCOB5obQHrjDOUsRycdBujc2vrWE,7 +pyasn1-0.4.8.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +pyasn1/__init__.py,sha256=1Rn8wrJioqfDz7ORFwMehoT15xHOVeiiQD5pZW37D8s,175 +pyasn1/__pycache__/__init__.cpython-38.pyc,, +pyasn1/__pycache__/debug.cpython-38.pyc,, +pyasn1/__pycache__/error.cpython-38.pyc,, +pyasn1/codec/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 +pyasn1/codec/__pycache__/__init__.cpython-38.pyc,, 
+pyasn1/codec/ber/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 +pyasn1/codec/ber/__pycache__/__init__.cpython-38.pyc,, +pyasn1/codec/ber/__pycache__/decoder.cpython-38.pyc,, +pyasn1/codec/ber/__pycache__/encoder.cpython-38.pyc,, +pyasn1/codec/ber/__pycache__/eoo.cpython-38.pyc,, +pyasn1/codec/ber/decoder.py,sha256=7-WINr38zVEa3KUkmshh8FjK6QnFaA8Y7j7XaTgYfRk,59708 +pyasn1/codec/ber/encoder.py,sha256=xHl01PCIAiHZXev4x01sjbCgAUKcsTT6SzaLI3nt-9E,27741 +pyasn1/codec/ber/eoo.py,sha256=eZ6lEyHdayMcMmNqtceDIyzf7u5lOeZoRK-WEUxVThI,626 +pyasn1/codec/cer/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 +pyasn1/codec/cer/__pycache__/__init__.cpython-38.pyc,, +pyasn1/codec/cer/__pycache__/decoder.cpython-38.pyc,, +pyasn1/codec/cer/__pycache__/encoder.cpython-38.pyc,, +pyasn1/codec/cer/decoder.py,sha256=ZYBqtDGNiYmKDpKDvioMDf-TYVWoJeZY3I8TEAKuk5s,3745 +pyasn1/codec/cer/encoder.py,sha256=PGtzcIelIHj5d5Yqc5FATMEIWCJybQYFlCaK1gy-NIA,9409 +pyasn1/codec/der/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 +pyasn1/codec/der/__pycache__/__init__.cpython-38.pyc,, +pyasn1/codec/der/__pycache__/decoder.cpython-38.pyc,, +pyasn1/codec/der/__pycache__/encoder.cpython-38.pyc,, +pyasn1/codec/der/decoder.py,sha256=kinXcogMDPGlR3f7hmAxRv2YbQyeP-UhuKM0r8gkbeA,2722 +pyasn1/codec/der/encoder.py,sha256=ZfRRxSCefQyLg0DLNb4zllaYf5_AWGIv3SPzB83Ln2I,3073 +pyasn1/codec/native/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 +pyasn1/codec/native/__pycache__/__init__.cpython-38.pyc,, +pyasn1/codec/native/__pycache__/decoder.cpython-38.pyc,, +pyasn1/codec/native/__pycache__/encoder.cpython-38.pyc,, +pyasn1/codec/native/decoder.py,sha256=4Q29tdKyytK3Oz-m94MSWxxPi_GhcBKvUfvPNKQcL0Y,7671 +pyasn1/codec/native/encoder.py,sha256=0eMLWR49dwMA1X4si0XswR1kX1aDAWyCeUNTpEbChag,8002 +pyasn1/compat/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 +pyasn1/compat/__pycache__/__init__.cpython-38.pyc,, 
+pyasn1/compat/__pycache__/binary.cpython-38.pyc,, +pyasn1/compat/__pycache__/calling.cpython-38.pyc,, +pyasn1/compat/__pycache__/dateandtime.cpython-38.pyc,, +pyasn1/compat/__pycache__/integer.cpython-38.pyc,, +pyasn1/compat/__pycache__/octets.cpython-38.pyc,, +pyasn1/compat/__pycache__/string.cpython-38.pyc,, +pyasn1/compat/binary.py,sha256=mgWqHmr_SMEdB2WVVr6jyYMnodSbPP6IByE5qKccWLM,698 +pyasn1/compat/calling.py,sha256=uTk3nJtGrElqJi8t34SoO8-eWFBG0gwNhXrlo1YmFEE,379 +pyasn1/compat/dateandtime.py,sha256=zHvXXBp4t3XJ6teg_tz6qgNDevzd93qnrLoEbNxZQ_E,482 +pyasn1/compat/integer.py,sha256=k6tqyxXMC0zJoU-Rz4oUPPoUpTmWXE6Prnzu0tkmmks,2988 +pyasn1/compat/octets.py,sha256=ICe-DVLBIOHmNSz-sp3ioMh--smodJ4VW3Ju0ogJMWA,1359 +pyasn1/compat/string.py,sha256=exqXJmPM6vYj4MjzsjciQdpUcJprRdgrLma8I4UcYHA,505 +pyasn1/debug.py,sha256=HWGbLlEPLoCNyHqBd1Vd_KK91TppEn3CA4YgUxktT2k,3726 +pyasn1/error.py,sha256=DIn2FWY3ACYNbk_42b3ny2bevkehpK2lOqfAsfdkvBE,2257 +pyasn1/type/__init__.py,sha256=EEDlJYS172EH39GUidN_8FbkNcWY9OVV8e30AV58pn0,59 +pyasn1/type/__pycache__/__init__.cpython-38.pyc,, +pyasn1/type/__pycache__/base.cpython-38.pyc,, +pyasn1/type/__pycache__/char.cpython-38.pyc,, +pyasn1/type/__pycache__/constraint.cpython-38.pyc,, +pyasn1/type/__pycache__/error.cpython-38.pyc,, +pyasn1/type/__pycache__/namedtype.cpython-38.pyc,, +pyasn1/type/__pycache__/namedval.cpython-38.pyc,, +pyasn1/type/__pycache__/opentype.cpython-38.pyc,, +pyasn1/type/__pycache__/tag.cpython-38.pyc,, +pyasn1/type/__pycache__/tagmap.cpython-38.pyc,, +pyasn1/type/__pycache__/univ.cpython-38.pyc,, +pyasn1/type/__pycache__/useful.cpython-38.pyc,, +pyasn1/type/base.py,sha256=TX7qdOX3EPiY7-11MY4fwK2Hy6nQsrdQ_M41aUcApno,22386 +pyasn1/type/char.py,sha256=5HH8r1IqZMDCsfDlQHVCRphLlFuZ93bE2NW78CgeUTI,11397 +pyasn1/type/constraint.py,sha256=0Qsth_0JctnDMvOSe5R-vd9IosgjqkKZT_X9lBRXtuI,22132 +pyasn1/type/error.py,sha256=4_BHdjX-AL5WMTpU-tX1Nfo_P88c2z1sDvqPU-S9Bns,246 
+pyasn1/type/namedtype.py,sha256=VIL3H3oPgA0zNrDSeAhKmi4CZGTb69uDBVNJzzRk3wM,16368 +pyasn1/type/namedval.py,sha256=dXYWiVTihvBy4RiebGY3AlIXsJvW78mJ1L7JSw-H7Qw,4886 +pyasn1/type/opentype.py,sha256=pUpnPqv8o4AFeIsmGHDTFfuxXAq7FvG3hrTEnoAgBO8,2848 +pyasn1/type/tag.py,sha256=nAK54C0_F_DL4_IaWRthIfIYBOTuXZoVVcbcbqgZiVA,9486 +pyasn1/type/tagmap.py,sha256=2bwm0hqxG2gvXYheOI_iasfl2Z_B93qU7y39EHteUvs,2998 +pyasn1/type/univ.py,sha256=FXc_VOStZfC-xIVTznpFO0qTq1aO4XyJFU0ayQWgPMY,108921 +pyasn1/type/useful.py,sha256=r_K6UhgcrJ0ej658X-s9522I9T7oYVdmEKcbXTkZMds,5368 diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/REQUESTED b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/WHEEL b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..8b701e93c23159bc1f4145f779049ce0a6a6cf77 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.6) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/top_level.txt b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..38fe4145754bf81c4dea2535da2bd438975e7da5 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/top_level.txt @@ -0,0 +1 @@ +pyasn1 diff --git 
a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/zip-safe b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/zip-safe new file mode 100644 index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pyasn1-0.4.8.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pytorch_lightning/__about__.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pytorch_lightning/__about__.py new file mode 100644 index 0000000000000000000000000000000000000000..fdaa499392c7eb51b7dd914ea08657d2deded05f --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pytorch_lightning/__about__.py @@ -0,0 +1,38 @@ +import time + +_this_year = time.strftime("%Y") +__version__ = "1.6.4" +__author__ = "William Falcon et al." +__author_email__ = "waf2107@columbia.edu" +__license__ = "Apache-2.0" +__copyright__ = f"Copyright (c) 2018-{_this_year}, {__author__}." +__homepage__ = "https://github.com/PyTorchLightning/pytorch-lightning" +__docs_url__ = "https://pytorch-lightning.readthedocs.io/en/stable/" +# this has to be simple string, see: https://github.com/pypa/twine/issues/522 +__docs__ = ( + "PyTorch Lightning is the lightweight PyTorch wrapper for ML researchers." + " Scale your models. Write less boilerplate." +) +__long_docs__ = """ +Lightning is a way to organize your PyTorch code to decouple the science code from the engineering. + It's more of a style-guide than a framework. + +In Lightning, you organize your code into 3 distinct categories: + +1. Research code (goes in the LightningModule). +2. Engineering code (you delete, and is handled by the Trainer). +3. Non-essential research code (logging, etc. this goes in Callbacks). 
+ +Although your research/production project might start simple, once you add things like GPU AND TPU training, + 16-bit precision, etc, you end up spending more time engineering than researching. + Lightning automates AND rigorously tests those parts for you. + +Overall, Lightning guarantees rigorously tested, correct, modern best practices for the automated parts. + +Documentation +------------- +- https://pytorch-lightning.readthedocs.io/en/latest +- https://pytorch-lightning.readthedocs.io/en/stable +""" + +__all__ = ["__author__", "__author_email__", "__copyright__", "__docs__", "__homepage__", "__license__", "__version__"] diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pytorch_lightning/__init__.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pytorch_lightning/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..adc63486d2f8b6bff7a6965d17cd75f31aa7c45a --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pytorch_lightning/__init__.py @@ -0,0 +1,38 @@ +"""Root package info.""" + +import logging +from typing import Any + +from pytorch_lightning.__about__ import * # noqa: F401, F403 + +_DETAIL = 15 # between logging.INFO and logging.DEBUG, used for logging in production use cases + + +def _detail(self: Any, message: str, *args: Any, **kwargs: Any) -> None: + if self.isEnabledFor(_DETAIL): + # logger takes its '*args' as 'args' + self._log(_DETAIL, message, args, **kwargs) + + +logging.addLevelName(_DETAIL, "DETAIL") +logging.detail = _detail +logging.Logger.detail = _detail + +_root_logger = logging.getLogger() +_logger = logging.getLogger(__name__) +_logger.setLevel(logging.INFO) + +# if root logger has handlers, propagate messages up and let root logger process them +if not _root_logger.hasHandlers(): + _logger.addHandler(logging.StreamHandler()) + _logger.propagate = False + +from pytorch_lightning.callbacks import Callback # noqa: E402 
+from pytorch_lightning.core import LightningDataModule, LightningModule # noqa: E402 +from pytorch_lightning.trainer import Trainer # noqa: E402 +from pytorch_lightning.utilities.seed import seed_everything # noqa: E402 + +__all__ = ["Trainer", "LightningDataModule", "LightningModule", "Callback", "seed_everything"] + +# for compatibility with namespace packages +__import__("pkg_resources").declare_namespace(__name__) diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pytorch_lightning/py.typed b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pytorch_lightning/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pytorch_lightning/setup_tools.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pytorch_lightning/setup_tools.py new file mode 100644 index 0000000000000000000000000000000000000000..2df3c7946b4d9cb4e417da289274a9e06113173d --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pytorch_lightning/setup_tools.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python +# Copyright The PyTorch Lightning team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import os +import re +from typing import List + +_PROJECT_ROOT = os.path.dirname(os.path.dirname(__file__)) + + +def _load_requirements( + path_dir: str, file_name: str = "base.txt", comment_char: str = "#", unfreeze: bool = True +) -> List[str]: + """Load requirements from a file. + + >>> _load_requirements(os.path.join(_PROJECT_ROOT, "requirements")) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE + ['numpy...', 'torch...', ...] + """ + with open(os.path.join(path_dir, file_name)) as file: + lines = [ln.strip() for ln in file.readlines()] + reqs = [] + for ln in lines: + # filer all comments + comment = "" + if comment_char in ln: + comment = ln[ln.index(comment_char) :] + ln = ln[: ln.index(comment_char)] + req = ln.strip() + # skip directly installed dependencies + if not req or req.startswith("http") or "@http" in req: + continue + # remove version restrictions unless they are strict + if unfreeze and "<" in req and "strict" not in comment: + req = re.sub(r",? *<=? *[\d\.\*]+", "", req).strip() + reqs.append(req) + return reqs + + +def _load_readme_description(path_dir: str, homepage: str, version: str) -> str: + """Load readme as decribtion. + + >>> _load_readme_description(_PROJECT_ROOT, "", "") # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE + '
...' + """ + path_readme = os.path.join(path_dir, "README.md") + text = open(path_readme, encoding="utf-8").read() + + # drop images from readme + text = text.replace("![PT to PL](docs/source/_static/images/general/pl_quick_start_full_compressed.gif)", "") + + # https://github.com/PyTorchLightning/pytorch-lightning/raw/master/docs/source/_static/images/lightning_module/pt_to_pl.png + github_source_url = os.path.join(homepage, "raw", version) + # replace relative repository path to absolute link to the release + # do not replace all "docs" as in the readme we reger some other sources with particular path to docs + text = text.replace("docs/source/_static/", f"{os.path.join(github_source_url, 'docs/source/_static/')}") + + # readthedocs badge + text = text.replace("badge/?version=stable", f"badge/?version={version}") + text = text.replace("pytorch-lightning.readthedocs.io/en/stable/", f"pytorch-lightning.readthedocs.io/en/{version}") + # codecov badge + text = text.replace("/branch/master/graph/badge.svg", f"/release/{version}/graph/badge.svg") + # replace github badges for release ones + text = text.replace("badge.svg?branch=master&event=push", f"badge.svg?tag={version}") + # Azure... 
+ text = text.replace("?branchName=master", f"?branchName=refs%2Ftags%2F{version}") + text = re.sub(r"\?definitionId=\d+&branchName=master", f"?definitionId=2&branchName=refs%2Ftags%2F{version}", text) + + skip_begin = r"" + skip_end = r"" + # todo: wrap content as commented description + text = re.sub(rf"{skip_begin}.+?{skip_end}", "", text, flags=re.IGNORECASE + re.DOTALL) + + # # https://github.com/Borda/pytorch-lightning/releases/download/1.1.0a6/codecov_badge.png + # github_release_url = os.path.join(homepage, "releases", "download", version) + # # download badge and replace url with local file + # text = _parse_for_badge(text, github_release_url) + return text diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pytz/reference.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pytz/reference.py new file mode 100644 index 0000000000000000000000000000000000000000..f765ca0af0b24e66dc3b7d51b9bf97e71b2b67aa --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pytz/reference.py @@ -0,0 +1,140 @@ +''' +Reference tzinfo implementations from the Python docs. +Used for testing against as they are only correct for the years +1987 to 2006. Do not use these for real code. +''' + +from datetime import tzinfo, timedelta, datetime +from pytz import HOUR, ZERO, UTC + +__all__ = [ + 'FixedOffset', + 'LocalTimezone', + 'USTimeZone', + 'Eastern', + 'Central', + 'Mountain', + 'Pacific', + 'UTC' +] + + +# A class building tzinfo objects for fixed-offset time zones. +# Note that FixedOffset(0, "UTC") is a different way to build a +# UTC tzinfo object. 
+class FixedOffset(tzinfo): + """Fixed offset in minutes east from UTC.""" + + def __init__(self, offset, name): + self.__offset = timedelta(minutes=offset) + self.__name = name + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return self.__name + + def dst(self, dt): + return ZERO + + +import time as _time + +STDOFFSET = timedelta(seconds=-_time.timezone) +if _time.daylight: + DSTOFFSET = timedelta(seconds=-_time.altzone) +else: + DSTOFFSET = STDOFFSET + +DSTDIFF = DSTOFFSET - STDOFFSET + + +# A class capturing the platform's idea of local time. +class LocalTimezone(tzinfo): + + def utcoffset(self, dt): + if self._isdst(dt): + return DSTOFFSET + else: + return STDOFFSET + + def dst(self, dt): + if self._isdst(dt): + return DSTDIFF + else: + return ZERO + + def tzname(self, dt): + return _time.tzname[self._isdst(dt)] + + def _isdst(self, dt): + tt = (dt.year, dt.month, dt.day, + dt.hour, dt.minute, dt.second, + dt.weekday(), 0, -1) + stamp = _time.mktime(tt) + tt = _time.localtime(stamp) + return tt.tm_isdst > 0 + +Local = LocalTimezone() + + +def first_sunday_on_or_after(dt): + days_to_go = 6 - dt.weekday() + if days_to_go: + dt += timedelta(days_to_go) + return dt + + +# In the US, DST starts at 2am (standard time) on the first Sunday in April. +DSTSTART = datetime(1, 4, 1, 2) +# and ends at 2am (DST time; 1am standard time) on the last Sunday of Oct. +# which is the first Sunday on or after Oct 25. +DSTEND = datetime(1, 10, 25, 1) + + +# A complete implementation of current DST rules for major US time zones. 
+class USTimeZone(tzinfo): + + def __init__(self, hours, reprname, stdname, dstname): + self.stdoffset = timedelta(hours=hours) + self.reprname = reprname + self.stdname = stdname + self.dstname = dstname + + def __repr__(self): + return self.reprname + + def tzname(self, dt): + if self.dst(dt): + return self.dstname + else: + return self.stdname + + def utcoffset(self, dt): + return self.stdoffset + self.dst(dt) + + def dst(self, dt): + if dt is None or dt.tzinfo is None: + # An exception may be sensible here, in one or both cases. + # It depends on how you want to treat them. The default + # fromutc() implementation (called by the default astimezone() + # implementation) passes a datetime with dt.tzinfo is self. + return ZERO + assert dt.tzinfo is self + + # Find first Sunday in April & the last in October. + start = first_sunday_on_or_after(DSTSTART.replace(year=dt.year)) + end = first_sunday_on_or_after(DSTEND.replace(year=dt.year)) + + # Can't compare naive to aware objects, so strip the timezone from + # dt first. 
+ if start <= dt.replace(tzinfo=None) < end: + return HOUR + else: + return ZERO + +Eastern = USTimeZone(-5, "Eastern", "EST", "EDT") +Central = USTimeZone(-6, "Central", "CST", "CDT") +Mountain = USTimeZone(-7, "Mountain", "MST", "MDT") +Pacific = USTimeZone(-8, "Pacific", "PST", "PDT") diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/threadpoolctl-3.1.0.dist-info/INSTALLER b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/threadpoolctl-3.1.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/threadpoolctl-3.1.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/threadpoolctl-3.1.0.dist-info/LICENSE b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/threadpoolctl-3.1.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f2927f5f8147f137783bb5072794999e04655cfd --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/threadpoolctl-3.1.0.dist-info/LICENSE @@ -0,0 +1,24 @@ +Copyright (c) 2019, threadpoolctl contributors + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/threadpoolctl-3.1.0.dist-info/METADATA b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/threadpoolctl-3.1.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..6015744980a2df34560f836d20cae373b9ea6b94 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/threadpoolctl-3.1.0.dist-info/METADATA @@ -0,0 +1,281 @@ +Metadata-Version: 2.1 +Name: threadpoolctl +Version: 3.1.0 +Summary: threadpoolctl +Home-page: https://github.com/joblib/threadpoolctl +License: BSD-3-Clause +Author: Thomas Moreau +Author-email: thomas.moreau.2010@gmail.com +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Topic :: Software Development :: Libraries :: Python Modules + 
+# Thread-pool Controls [![Build Status](https://dev.azure.com/joblib/threadpoolctl/_apis/build/status/joblib.threadpoolctl?branchName=master)](https://dev.azure.com/joblib/threadpoolctl/_build/latest?definitionId=1&branchName=master) [![codecov](https://codecov.io/gh/joblib/threadpoolctl/branch/master/graph/badge.svg)](https://codecov.io/gh/joblib/threadpoolctl) + +Python helpers to limit the number of threads used in the +threadpool-backed of common native libraries used for scientific +computing and data science (e.g. BLAS and OpenMP). + +Fine control of the underlying thread-pool size can be useful in +workloads that involve nested parallelism so as to mitigate +oversubscription issues. + +## Installation + +- For users, install the last published version from PyPI: + + ```bash + pip install threadpoolctl + ``` + +- For contributors, install from the source repository in developer + mode: + + ```bash + pip install -r dev-requirements.txt + flit install --symlink + ``` + + then you run the tests with pytest: + + ```bash + pytest + ``` + +## Usage + +### Command Line Interface + +Get a JSON description of thread-pools initialized when importing python +packages such as numpy or scipy for instance: + +``` +python -m threadpoolctl -i numpy scipy.linalg +[ + { + "filepath": "/home/ogrisel/miniconda3/envs/tmp/lib/libmkl_rt.so", + "prefix": "libmkl_rt", + "user_api": "blas", + "internal_api": "mkl", + "version": "2019.0.4", + "num_threads": 2, + "threading_layer": "intel" + }, + { + "filepath": "/home/ogrisel/miniconda3/envs/tmp/lib/libiomp5.so", + "prefix": "libiomp", + "user_api": "openmp", + "internal_api": "openmp", + "version": null, + "num_threads": 4 + } +] +``` + +The JSON information is written on STDOUT. If some of the packages are missing, +a warning message is displayed on STDERR. 
+ +### Python Runtime Programmatic Introspection + +Introspect the current state of the threadpool-enabled runtime libraries +that are loaded when importing Python packages: + +```python +>>> from threadpoolctl import threadpool_info +>>> from pprint import pprint +>>> pprint(threadpool_info()) +[] + +>>> import numpy +>>> pprint(threadpool_info()) +[{'filepath': '/home/ogrisel/miniconda3/envs/tmp/lib/libmkl_rt.so', + 'internal_api': 'mkl', + 'num_threads': 2, + 'prefix': 'libmkl_rt', + 'threading_layer': 'intel', + 'user_api': 'blas', + 'version': '2019.0.4'}, + {'filepath': '/home/ogrisel/miniconda3/envs/tmp/lib/libiomp5.so', + 'internal_api': 'openmp', + 'num_threads': 4, + 'prefix': 'libiomp', + 'user_api': 'openmp', + 'version': None}] + +>>> import xgboost +>>> pprint(threadpool_info()) +[{'filepath': '/home/ogrisel/miniconda3/envs/tmp/lib/libmkl_rt.so', + 'internal_api': 'mkl', + 'num_threads': 2, + 'prefix': 'libmkl_rt', + 'threading_layer': 'intel', + 'user_api': 'blas', + 'version': '2019.0.4'}, + {'filepath': '/home/ogrisel/miniconda3/envs/tmp/lib/libiomp5.so', + 'internal_api': 'openmp', + 'num_threads': 4, + 'prefix': 'libiomp', + 'user_api': 'openmp', + 'version': None}, + {'filepath': '/home/ogrisel/miniconda3/envs/tmp/lib/libgomp.so.1.0.0', + 'internal_api': 'openmp', + 'num_threads': 4, + 'prefix': 'libgomp', + 'user_api': 'openmp', + 'version': None}] +``` + +In the above example, `numpy` was installed from the default anaconda channel and comes +with MKL and its Intel OpenMP (`libiomp5`) implementation while `xgboost` was installed +from pypi.org and links against GNU OpenMP (`libgomp`) so both OpenMP runtimes are +loaded in the same Python program. 
+ +The state of these libraries is also accessible through the object oriented API: + +```python +>>> from threadpoolctl import ThreadpoolController, threadpool_info +>>> from pprint import pprint +>>> import numpy +>>> controller = ThreadpoolController() +>>> pprint(controller.info()) +[{'architecture': 'Haswell', + 'filepath': '/home/jeremie/miniconda/envs/dev/lib/libopenblasp-r0.3.17.so', + 'internal_api': 'openblas', + 'num_threads': 4, + 'prefix': 'libopenblas', + 'threading_layer': 'pthreads', + 'user_api': 'blas', + 'version': '0.3.17'}] + +>>> controller.info() == threadpool_info() +True +``` + +### Setting the Maximum Size of Thread-Pools + +Control the number of threads used by the underlying runtime libraries +in specific sections of your Python program: + +```python +>>> from threadpoolctl import threadpool_limits +>>> import numpy as np + +>>> with threadpool_limits(limits=1, user_api='blas'): +... # In this block, calls to blas implementation (like openblas or MKL) +... # will be limited to use only one thread. They can thus be used jointly +... # with thread-parallelism. +... a = np.random.randn(1000, 1000) +... a_squared = a @ a +``` + +The threadpools can also be controlled via the object oriented API, which is especially +useful to avoid searching through all the loaded shared libraries each time. It will +however not act on libraries loaded after the instantiation of the +`ThreadpoolController`: + +```python +>>> from threadpoolctl import ThreadpoolController +>>> import numpy as np +>>> controller = ThreadpoolController() + +>>> with controller.limit(limits=1, user_api='blas'): +... a = np.random.randn(1000, 1000) +... a_squared = a @ a +``` + +### Restricting the limits to the scope of a function + +`threadpool_limits` and `ThreadpoolController` can also be used as decorators to set +the maximum number of threads used by the supported libraries at a function level. 
The +decorators are accessible through their `wrap` method: + +```python +>>> from threadpoolctl import ThreadpoolController, threadpool_limits +>>> import numpy as np +>>> controller = ThreadpoolController() + +>>> @controller.wrap(limits=1, user_api='blas') +... # or @threadpool_limits.wrap(limits=1, user_api='blas') +... def my_func(): +... # Inside this function, calls to blas implementation (like openblas or MKL) +... # will be limited to use only one thread. +... a = np.random.randn(1000, 1000) +... a_squared = a @ a +... +``` + +### Sequential BLAS within OpenMP parallel region + +When one wants to have sequential BLAS calls within an OpenMP parallel region, it's +safer to set `limits="sequential_blas_under_openmp"` since setting `limits=1` and `user_api="blas"` might not lead to the expected behavior in some configurations +(e.g. OpenBLAS with the OpenMP threading layer +https://github.com/xianyi/OpenBLAS/issues/2985). + +### Known Limitations + +- `threadpool_limits` can fail to limit the number of inner threads when nesting + parallel loops managed by distinct OpenMP runtime implementations (for instance + libgomp from GCC and libomp from clang/llvm or libiomp from ICC). + + See the `test_openmp_nesting` function in [tests/test_threadpoolctl.py]( + https://github.com/joblib/threadpoolctl/blob/master/tests/test_threadpoolctl.py) + for an example. More information can be found at: + https://github.com/jeremiedbb/Nested_OpenMP + + Note however that this problem does not happen when `threadpool_limits` is + used to limit the number of threads used internally by BLAS calls that are + themselves nested under OpenMP parallel loops. `threadpool_limits` works as + expected, even if the inner BLAS implementation relies on a distinct OpenMP + implementation. + +- Using Intel OpenMP (ICC) and LLVM OpenMP (clang) in the same Python program + under Linux is known to cause problems. 
See the following guide for more details + and workarounds: + https://github.com/joblib/threadpoolctl/blob/master/multiple_openmp.md + +- Setting the maximum number of threads of the OpenMP and BLAS libraries has a global + effect and impacts the whole Python process. There is no thread level isolation as + these libraries do not offer thread-local APIs to configure the number of threads to + use in nested parallel calls. + + +## Maintainers + +To make a release: + +Bump the version number (`__version__`) in `threadpoolctl.py`. + +Build the distribution archives: + +```bash +pip install flit +flit build +``` + +Check the contents of `dist/`. + +If everything is fine, make a commit for the release, tag it, push the +tag to github and then: + +```bash +flit publish +``` + +### Credits + +The initial dynamic library introspection code was written by @anton-malakhov +for the smp package available at https://github.com/IntelPython/smp . + +threadpoolctl extends this for other operating systems. Contrary to smp, +threadpoolctl does not attempt to limit the size of Python multiprocessing +pools (threads or processes) or set operating system-level CPU affinity +constraints: threadpoolctl only interacts with native libraries via their +public runtime APIs. 
+ diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/threadpoolctl-3.1.0.dist-info/RECORD b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/threadpoolctl-3.1.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..ea36bc59b94acda514bbc9f9497203c27d8de43f --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/threadpoolctl-3.1.0.dist-info/RECORD @@ -0,0 +1,8 @@ +__pycache__/threadpoolctl.cpython-38.pyc,, +threadpoolctl-3.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +threadpoolctl-3.1.0.dist-info/LICENSE,sha256=gaxhkHUkiwblNmC2UtEOSF9GdfXQrg-X6iI3DaH34js,1507 +threadpoolctl-3.1.0.dist-info/METADATA,sha256=0uFBgSmHr-7L63_M0E0eilLjoLUDVgwtOpn8gObeA6o,9204 +threadpoolctl-3.1.0.dist-info/RECORD,, +threadpoolctl-3.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +threadpoolctl-3.1.0.dist-info/WHEEL,sha256=jPMR_Dzkc4X4icQtmz81lnNY_kAsfog7ry7qoRvYLXw,81 +threadpoolctl.py,sha256=9nah2CGMkXS-jRBBSU8XjLlSvWADeodAktnYb6w4PH0,41112 diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/threadpoolctl-3.1.0.dist-info/REQUESTED b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/threadpoolctl-3.1.0.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/threadpoolctl-3.1.0.dist-info/WHEEL b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/threadpoolctl-3.1.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..c727d148239a36b0e8de2b97fdb23fb96da78869 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/threadpoolctl-3.1.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.6.0 +Root-Is-Purelib: true 
+Tag: py3-none-any diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_classes.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_classes.py new file mode 100644 index 0000000000000000000000000000000000000000..f36463d88198777ad3ffb229273e04e34a24b3c9 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_classes.py @@ -0,0 +1,51 @@ +import types +import torch._C + +class _ClassNamespace(types.ModuleType): + def __init__(self, name): + super(_ClassNamespace, self).__init__('torch.classes' + name) + self.name = name + + def __getattr__(self, attr): + proxy = torch._C._get_custom_class_python_wrapper(self.name, attr) + if proxy is None: + raise RuntimeError(f'Class {self.name}.{attr} not registered!') + return proxy + +class _Classes(types.ModuleType): + __file__ = '_classes.py' + + def __init__(self): + super(_Classes, self).__init__('torch.classes') + + def __getattr__(self, name): + namespace = _ClassNamespace(name) + setattr(self, name, namespace) + return namespace + + @property + def loaded_libraries(self): + return torch.ops.loaded_libraries + + def load_library(self, path): + """ + Loads a shared library from the given path into the current process. + + The library being loaded may run global initialization code to register + custom classes with the PyTorch JIT runtime. This allows dynamically + loading custom classes. For this, you should compile your class + and the static registration code into a shared library object, and then + call ``torch.classes.load_library('path/to/libcustom.so')`` to load the + shared object. + + After the library is loaded, it is added to the + ``torch.classes.loaded_libraries`` attribute, a set that may be inspected + for the paths of all libraries loaded using this function. + + Args: + path (str): A path to a shared library to load. 
+ """ + torch.ops.load_library(path) + +# The classes "namespace" +classes = _Classes() diff --git a/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_lobpcg.py b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_lobpcg.py new file mode 100644 index 0000000000000000000000000000000000000000..cb7a6723683ab0d2c21d7ba18ec4fbc5c50b8c47 --- /dev/null +++ b/my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torch/_lobpcg.py @@ -0,0 +1,1117 @@ +"""Locally Optimal Block Preconditioned Conjugate Gradient methods. +""" +# Author: Pearu Peterson +# Created: February 2020 + +from typing import Dict, Tuple, Optional + +import torch +from torch import Tensor +from . import _linalg_utils as _utils +from .overrides import has_torch_function, handle_torch_function + + +__all__ = ['lobpcg'] + +def _symeig_backward_complete_eigenspace(D_grad, U_grad, A, D, U): + # compute F, such that F_ij = (d_j - d_i)^{-1} for i != j, F_ii = 0 + F = D.unsqueeze(-2) - D.unsqueeze(-1) + F.diagonal(dim1=-2, dim2=-1).fill_(float('inf')) + F.pow_(-1) + + # A.grad = U (D.grad + (U^T U.grad * F)) U^T + Ut = U.mT.contiguous() + res = torch.matmul( + U, + torch.matmul( + torch.diag_embed(D_grad) + torch.matmul(Ut, U_grad) * F, + Ut + ) + ) + + return res + + +def _polynomial_coefficients_given_roots(roots): + """ + Given the `roots` of a polynomial, find the polynomial's coefficients. + + If roots = (r_1, ..., r_n), then the method returns + coefficients (a_0, a_1, ..., a_n (== 1)) so that + p(x) = (x - r_1) * ... * (x - r_n) + = x^n + a_{n-1} * x^{n-1} + ... a_1 * x_1 + a_0 + + Note: for better performance requires writing a low-level kernel + """ + poly_order = roots.shape[-1] + poly_coeffs_shape = list(roots.shape) + # we assume p(x) = x^n + a_{n-1} * x^{n-1} + ... 
+ a_1 * x + a_0, + # so poly_coeffs = {a_0, ..., a_n, a_{n+1}(== 1)}, + # but we insert one extra coefficient to enable better vectorization below + poly_coeffs_shape[-1] += 2 + poly_coeffs = roots.new_zeros(poly_coeffs_shape) + poly_coeffs[..., 0] = 1 + poly_coeffs[..., -1] = 1 + + # perform the Horner's rule + for i in range(1, poly_order + 1): + # note that it is computationally hard to compute backward for this method, + # because then given the coefficients it would require finding the roots and/or + # calculating the sensitivity based on the Vieta's theorem. + # So the code below tries to circumvent the explicit root finding by series + # of operations on memory copies imitating the Horner's method. + # The memory copies are required to construct nodes in the computational graph + # by exploting the explicit (not in-place, separate node for each step) + # recursion of the Horner's method. + # Needs more memory, O(... * k^2), but with only O(... * k^2) complexity. + poly_coeffs_new = poly_coeffs.clone() if roots.requires_grad else poly_coeffs + out = poly_coeffs_new.narrow(-1, poly_order - i, i + 1) + out -= roots.narrow(-1, i - 1, 1) * poly_coeffs.narrow(-1, poly_order - i + 1, i + 1) + poly_coeffs = poly_coeffs_new + + return poly_coeffs.narrow(-1, 1, poly_order + 1) + + +def _polynomial_value(poly, x, zero_power, transition): + """ + A generic method for computing poly(x) using the Horner's rule. + + Args: + poly (Tensor): the (possibly batched) 1D Tensor representing + polynomial coefficients such that + poly[..., i] = (a_{i_0}, ..., a{i_n} (==1)), and + poly(x) = poly[..., 0] * zero_power + ... + poly[..., n] * x^n + + x (Tensor): the value (possible batched) to evalate the polynomial `poly` at. + + zero_power (Tensor): the represenation of `x^0`. It is application-specific. + + transition (Callable): the function that accepts some intermediate result `int_val`, + the `x` and a specific polynomial coefficient + `poly[..., k]` for some iteration `k`. 
+ It basically performs one iteration of the Horner's rule + defined as `x * int_val + poly[..., k] * zero_power`. + Note that `zero_power` is not a parameter, + because the step `+ poly[..., k] * zero_power` depends on `x`, + whether it is a vector, a matrix, or something else, so this + functionality is delegated to the user. + """ + + res = zero_power.clone() + for k in range(poly.size(-1) - 2, -1, -1): + res = transition(res, x, poly[..., k]) + return res + +def _matrix_polynomial_value(poly, x, zero_power=None): + """ + Evaluates `poly(x)` for the (batched) matrix input `x`. + Check out `_polynomial_value` function for more details. + """ + + # matrix-aware Horner's rule iteration + def transition(curr_poly_val, x, poly_coeff): + res = x.matmul(curr_poly_val) + res.diagonal(dim1=-2, dim2=-1).add_(poly_coeff.unsqueeze(-1)) + return res + + if zero_power is None: + zero_power = torch.eye(x.size(-1), x.size(-1), dtype=x.dtype, device=x.device) \ + .view(*([1] * len(list(x.shape[:-2]))), x.size(-1), x.size(-1)) + + return _polynomial_value(poly, x, zero_power, transition) + +def _vector_polynomial_value(poly, x, zero_power=None): + """ + Evaluates `poly(x)` for the (batched) vector input `x`. + Check out `_polynomial_value` function for more details. 
+ """ + + # vector-aware Horner's rule iteration + def transition(curr_poly_val, x, poly_coeff): + res = torch.addcmul(poly_coeff.unsqueeze(-1), x, curr_poly_val) + return res + + if zero_power is None: + zero_power = x.new_ones(1).expand(x.shape) + + return _polynomial_value(poly, x, zero_power, transition) + +def _symeig_backward_partial_eigenspace(D_grad, U_grad, A, D, U, largest): + # compute a projection operator onto an orthogonal subspace spanned by the + # columns of U defined as (I - UU^T) + Ut = U.mT.contiguous() + proj_U_ortho = -U.matmul(Ut) + proj_U_ortho.diagonal(dim1=-2, dim2=-1).add_(1) + + # compute U_ortho, a basis for the orthogonal complement to the span(U), + # by projecting a random [..., m, m - k] matrix onto the subspace spanned + # by the columns of U. + # + # fix generator for determinism + gen = torch.Generator(A.device) + + # orthogonal complement to the span(U) + U_ortho = proj_U_ortho.matmul( + torch.randn( + (*A.shape[:-1], A.size(-1) - D.size(-1)), + dtype=A.dtype, + device=A.device, + generator=gen + ) + ) + U_ortho_t = U_ortho.mT.contiguous() + + # compute the coefficients of the characteristic polynomial of the tensor D. + # Note that D is diagonal, so the diagonal elements are exactly the roots + # of the characteristic polynomial. + chr_poly_D = _polynomial_coefficients_given_roots(D) + + # the code belows finds the explicit solution to the Sylvester equation + # U_ortho^T A U_ortho dX - dX D = -U_ortho^T A U + # and incorporates it into the whole gradient stored in the `res` variable. 
+ # + # Equivalent to the following naive implementation: + # res = A.new_zeros(A.shape) + # p_res = A.new_zeros(*A.shape[:-1], D.size(-1)) + # for k in range(1, chr_poly_D.size(-1)): + # p_res.zero_() + # for i in range(0, k): + # p_res += (A.matrix_power(k - 1 - i) @ U_grad) * D.pow(i).unsqueeze(-2) + # res -= chr_poly_D[k] * (U_ortho @ poly_D_at_A.inverse() @ U_ortho_t @ p_res @ U.t()) + # + # Note that dX is a differential, so the gradient contribution comes from the backward sensitivity + # Tr(f(U_grad, D_grad, A, U, D)^T dX) = Tr(g(U_grad, A, U, D)^T dA) for some functions f and g, + # and we need to compute g(U_grad, A, U, D) + # + # The naive implementation is based on the paper + # Hu, Qingxi, and Daizhan Cheng. + # "The polynomial solution to the Sylvester matrix equation." + # Applied mathematics letters 19.9 (2006): 859-864. + # + # We can modify the computation of `p_res` from above in a more efficient way + # p_res = U_grad * (chr_poly_D[1] * D.pow(0) + ... + chr_poly_D[k] * D.pow(k)).unsqueeze(-2) + # + A U_grad * (chr_poly_D[2] * D.pow(0) + ... + chr_poly_D[k] * D.pow(k - 1)).unsqueeze(-2) + # + ... 
+ # + A.matrix_power(k - 1) U_grad * chr_poly_D[k] + # Note that this saves us from redundant matrix products with A (elimination of matrix_power) + U_grad_projected = U_grad + series_acc = U_grad_projected.new_zeros(U_grad_projected.shape) + for k in range(1, chr_poly_D.size(-1)): + poly_D = _vector_polynomial_value(chr_poly_D[..., k:], D) + series_acc += U_grad_projected * poly_D.unsqueeze(-2) + U_grad_projected = A.matmul(U_grad_projected) + + # compute chr_poly_D(A) which essentially is: + # + # chr_poly_D_at_A = A.new_zeros(A.shape) + # for k in range(chr_poly_D.size(-1)): + # chr_poly_D_at_A += chr_poly_D[k] * A.matrix_power(k) + # + # Note, however, for better performance we use the Horner's rule + chr_poly_D_at_A = _matrix_polynomial_value(chr_poly_D, A) + + # compute the action of `chr_poly_D_at_A` restricted to U_ortho_t + chr_poly_D_at_A_to_U_ortho = torch.matmul( + U_ortho_t, + torch.matmul( + chr_poly_D_at_A, + U_ortho + ) + ) + # we need to invert 'chr_poly_D_at_A_to_U_ortho`, for that we compute its + # Cholesky decomposition and then use `torch.cholesky_solve` for better stability. + # Cholesky decomposition requires the input to be positive-definite. + # Note that `chr_poly_D_at_A_to_U_ortho` is positive-definite if + # 1. `largest` == False, or + # 2. `largest` == True and `k` is even + # under the assumption that `A` has distinct eigenvalues. 
+ # + # check if `chr_poly_D_at_A_to_U_ortho` is positive-definite or negative-definite + chr_poly_D_at_A_to_U_ortho_sign = -1 if (largest and (k % 2 == 1)) else +1 + chr_poly_D_at_A_to_U_ortho_L = torch.linalg.cholesky( + chr_poly_D_at_A_to_U_ortho_sign * chr_poly_D_at_A_to_U_ortho + ) + + # compute the gradient part in span(U) + res = _symeig_backward_complete_eigenspace( + D_grad, U_grad, A, D, U + ) + + # incorporate the Sylvester equation solution into the full gradient + # it resides in span(U_ortho) + res -= U_ortho.matmul( + chr_poly_D_at_A_to_U_ortho_sign * torch.cholesky_solve( + U_ortho_t.matmul(series_acc), + chr_poly_D_at_A_to_U_ortho_L + ) + ).matmul(Ut) + + return res + +def _symeig_backward(D_grad, U_grad, A, D, U, largest): + # if `U` is square, then the columns of `U` is a complete eigenspace + if U.size(-1) == U.size(-2): + return _symeig_backward_complete_eigenspace( + D_grad, U_grad, A, D, U + ) + else: + return _symeig_backward_partial_eigenspace( + D_grad, U_grad, A, D, U, largest + ) + +class LOBPCGAutogradFunction(torch.autograd.Function): + + @staticmethod + def forward(ctx, # type: ignore[override] + A: Tensor, + k: Optional[int] = None, + B: Optional[Tensor] = None, + X: Optional[Tensor] = None, + n: Optional[int] = None, + iK: Optional[Tensor] = None, + niter: Optional[int] = None, + tol: Optional[float] = None, + largest: Optional[bool] = None, + method: Optional[str] = None, + tracker: None = None, + ortho_iparams: Optional[Dict[str, int]] = None, + ortho_fparams: Optional[Dict[str, float]] = None, + ortho_bparams: Optional[Dict[str, bool]] = None + ) -> Tuple[Tensor, Tensor]: + + # makes sure that input is contiguous for efficiency. + # Note: autograd does not support dense gradients for sparse input yet. 
+ A = A.contiguous() if (not A.is_sparse) else A + if B is not None: + B = B.contiguous() if (not B.is_sparse) else B + + D, U = _lobpcg( + A, k, B, X, + n, iK, niter, tol, largest, method, tracker, + ortho_iparams, ortho_fparams, ortho_bparams + ) + + ctx.save_for_backward(A, B, D, U) + ctx.largest = largest + + return D, U + + @staticmethod + def backward(ctx, D_grad, U_grad): + A_grad = B_grad = None + grads = [None] * 14 + + A, B, D, U = ctx.saved_tensors + largest = ctx.largest + + # lobpcg.backward has some limitations. Checks for unsupported input + if A.is_sparse or (B is not None and B.is_sparse and ctx.needs_input_grad[2]): + raise ValueError( + 'lobpcg.backward does not support sparse input yet.' + 'Note that lobpcg.forward does though.' + ) + if A.dtype in (torch.complex64, torch.complex128) or \ + B is not None and B.dtype in (torch.complex64, torch.complex128): + raise ValueError( + 'lobpcg.backward does not support complex input yet.' + 'Note that lobpcg.forward does though.' + ) + if B is not None: + raise ValueError( + 'lobpcg.backward does not support backward with B != I yet.' 
+ ) + + if largest is None: + largest = True + + # symeig backward + if B is None: + A_grad = _symeig_backward( + D_grad, U_grad, A, D, U, largest + ) + + # A has index 0 + grads[0] = A_grad + # B has index 2 + grads[2] = B_grad + return tuple(grads) + + +def lobpcg(A: Tensor, + k: Optional[int] = None, + B: Optional[Tensor] = None, + X: Optional[Tensor] = None, + n: Optional[int] = None, + iK: Optional[Tensor] = None, + niter: Optional[int] = None, + tol: Optional[float] = None, + largest: Optional[bool] = None, + method: Optional[str] = None, + tracker: None = None, + ortho_iparams: Optional[Dict[str, int]] = None, + ortho_fparams: Optional[Dict[str, float]] = None, + ortho_bparams: Optional[Dict[str, bool]] = None + ) -> Tuple[Tensor, Tensor]: + + """Find the k largest (or smallest) eigenvalues and the corresponding + eigenvectors of a symmetric positive definite generalized + eigenvalue problem using matrix-free LOBPCG methods. + + This function is a front-end to the following LOBPCG algorithms + selectable via `method` argument: + + `method="basic"` - the LOBPCG method introduced by Andrew + Knyazev, see [Knyazev2001]. A less robust method, may fail when + Cholesky is applied to singular input. + + `method="ortho"` - the LOBPCG method with orthogonal basis + selection [StathopoulosEtal2002]. A robust method. + + Supported inputs are dense, sparse, and batches of dense matrices. + + .. note:: In general, the basic method spends least time per + iteration. However, the robust methods converge much faster and + are more stable. So, the usage of the basic method is generally + not recommended but there exist cases where the usage of the + basic method may be preferred. + + .. warning:: The backward method does not support sparse and complex inputs. + It works only when `B` is not provided (i.e. `B == None`). + We are actively working on extensions, and the details of + the algorithms are going to be published promptly. + + .. 
warning:: While it is assumed that `A` is symmetric, `A.grad` is not. + To make sure that `A.grad` is symmetric, so that `A - t * A.grad` is symmetric + in first-order optimization routines, prior to running `lobpcg` + we do the following symmetrization map: `A -> (A + A.t()) / 2`. + The map is performed only when the `A` requires gradients. + + Args: + + A (Tensor): the input tensor of size :math:`(*, m, m)` + + B (Tensor, optional): the input tensor of size :math:`(*, m, + m)`. When not specified, `B` is interpereted as + identity matrix. + + X (tensor, optional): the input tensor of size :math:`(*, m, n)` + where `k <= n <= m`. When specified, it is used as + initial approximation of eigenvectors. X must be a + dense tensor. + + iK (tensor, optional): the input tensor of size :math:`(*, m, + m)`. When specified, it will be used as preconditioner. + + k (integer, optional): the number of requested + eigenpairs. Default is the number of :math:`X` + columns (when specified) or `1`. + + n (integer, optional): if :math:`X` is not specified then `n` + specifies the size of the generated random + approximation of eigenvectors. Default value for `n` + is `k`. If :math:`X` is specified, the value of `n` + (when specified) must be the number of :math:`X` + columns. + + tol (float, optional): residual tolerance for stopping + criterion. Default is `feps ** 0.5` where `feps` is + smallest non-zero floating-point number of the given + input tensor `A` data type. + + largest (bool, optional): when True, solve the eigenproblem for + the largest eigenvalues. Otherwise, solve the + eigenproblem for smallest eigenvalues. Default is + `True`. + + method (str, optional): select LOBPCG method. See the + description of the function above. Default is + "ortho". + + niter (int, optional): maximum number of iterations. When + reached, the iteration process is hard-stopped and + the current approximation of eigenpairs is returned. 
+ For infinite iteration but until convergence criteria + is met, use `-1`. + + tracker (callable, optional) : a function for tracing the + iteration process. When specified, it is called at + each iteration step with LOBPCG instance as an + argument. The LOBPCG instance holds the full state of + the iteration process in the following attributes: + + `iparams`, `fparams`, `bparams` - dictionaries of + integer, float, and boolean valued input + parameters, respectively + + `ivars`, `fvars`, `bvars`, `tvars` - dictionaries + of integer, float, boolean, and Tensor valued + iteration variables, respectively. + + `A`, `B`, `iK` - input Tensor arguments. + + `E`, `X`, `S`, `R` - iteration Tensor variables. + + For instance: + + `ivars["istep"]` - the current iteration step + `X` - the current approximation of eigenvectors + `E` - the current approximation of eigenvalues + `R` - the current residual + `ivars["converged_count"]` - the current number of converged eigenpairs + `tvars["rerr"]` - the current state of convergence criteria + + Note that when `tracker` stores Tensor objects from + the LOBPCG instance, it must make copies of these. + + If `tracker` sets `bvars["force_stop"] = True`, the + iteration process will be hard-stopped. + + ortho_iparams, ortho_fparams, ortho_bparams (dict, optional): + various parameters to LOBPCG algorithm when using + `method="ortho"`. + + Returns: + + E (Tensor): tensor of eigenvalues of size :math:`(*, k)` + + X (Tensor): tensor of eigenvectors of size :math:`(*, m, k)` + + References: + + [Knyazev2001] Andrew V. Knyazev. (2001) Toward the Optimal + Preconditioned Eigensolver: Locally Optimal Block Preconditioned + Conjugate Gradient Method. SIAM J. Sci. Comput., 23(2), + 517-541. (25 pages) + https://epubs.siam.org/doi/abs/10.1137/S1064827500366124 + + [StathopoulosEtal2002] Andreas Stathopoulos and Kesheng + Wu. (2002) A Block Orthogonalization Procedure with Constant + Synchronization Requirements. SIAM J. Sci. 
Comput., 23(6), + 2165-2182. (18 pages) + https://epubs.siam.org/doi/10.1137/S1064827500370883 + + [DuerschEtal2018] Jed A. Duersch, Meiyue Shao, Chao Yang, Ming + Gu. (2018) A Robust and Efficient Implementation of LOBPCG. + SIAM J. Sci. Comput., 40(5), C655-C676. (22 pages) + https://epubs.siam.org/doi/abs/10.1137/17M1129830 + + """ + + if not torch.jit.is_scripting(): + tensor_ops = (A, B, X, iK) + if (not set(map(type, tensor_ops)).issubset((torch.Tensor, type(None))) and has_torch_function(tensor_ops)): + return handle_torch_function( + lobpcg, tensor_ops, A, k=k, + B=B, X=X, n=n, iK=iK, niter=niter, tol=tol, + largest=largest, method=method, tracker=tracker, + ortho_iparams=ortho_iparams, + ortho_fparams=ortho_fparams, + ortho_bparams=ortho_bparams) + + if not torch._jit_internal.is_scripting(): + if A.requires_grad or (B is not None and B.requires_grad): + # While it is expected that `A` is symmetric, + # the `A_grad` might be not. Therefore we perform the trick below, + # so that `A_grad` becomes symmetric. + # The symmetrization is important for first-order optimization methods, + # so that (A - alpha * A_grad) is still a symmetric matrix. + # Same holds for `B`. + A_sym = (A + A.mT) / 2 + B_sym = (B + B.mT) / 2 if (B is not None) else None + + return LOBPCGAutogradFunction.apply( + A_sym, k, B_sym, X, n, iK, niter, tol, largest, + method, tracker, ortho_iparams, ortho_fparams, ortho_bparams + ) + else: + if A.requires_grad or (B is not None and B.requires_grad): + raise RuntimeError( + 'Script and require grads is not supported atm.' 
+ 'If you just want to do the forward, use .detach()' + 'on A and B before calling into lobpcg' + ) + + return _lobpcg( + A, k, B, X, + n, iK, niter, tol, largest, method, tracker, + ortho_iparams, ortho_fparams, ortho_bparams + ) + +def _lobpcg(A: Tensor, + k: Optional[int] = None, + B: Optional[Tensor] = None, + X: Optional[Tensor] = None, + n: Optional[int] = None, + iK: Optional[Tensor] = None, + niter: Optional[int] = None, + tol: Optional[float] = None, + largest: Optional[bool] = None, + method: Optional[str] = None, + tracker: None = None, + ortho_iparams: Optional[Dict[str, int]] = None, + ortho_fparams: Optional[Dict[str, float]] = None, + ortho_bparams: Optional[Dict[str, bool]] = None + ) -> Tuple[Tensor, Tensor]: + + # A must be square: + assert A.shape[-2] == A.shape[-1], A.shape + if B is not None: + # A and B must have the same shapes: + assert A.shape == B.shape, (A.shape, B.shape) + + dtype = _utils.get_floating_dtype(A) + device = A.device + if tol is None: + feps = {torch.float32: 1.2e-07, + torch.float64: 2.23e-16}[dtype] + tol = feps ** 0.5 + + m = A.shape[-1] + k = (1 if X is None else X.shape[-1]) if k is None else k + n = (k if n is None else n) if X is None else X.shape[-1] + + if (m < 3 * n): + raise ValueError( + 'LPBPCG algorithm is not applicable when the number of A rows (={})' + ' is smaller than 3 x the number of requested eigenpairs (={})' + .format(m, n)) + + method = 'ortho' if method is None else method + + iparams = { + 'm': m, + 'n': n, + 'k': k, + 'niter': 1000 if niter is None else niter, + } + + fparams = { + 'tol': tol, + } + + bparams = { + 'largest': True if largest is None else largest + } + + if method == 'ortho': + if ortho_iparams is not None: + iparams.update(ortho_iparams) + if ortho_fparams is not None: + fparams.update(ortho_fparams) + if ortho_bparams is not None: + bparams.update(ortho_bparams) + iparams['ortho_i_max'] = iparams.get('ortho_i_max', 3) + iparams['ortho_j_max'] = iparams.get('ortho_j_max', 3) + 
fparams['ortho_tol'] = fparams.get('ortho_tol', tol) + fparams['ortho_tol_drop'] = fparams.get('ortho_tol_drop', tol) + fparams['ortho_tol_replace'] = fparams.get('ortho_tol_replace', tol) + bparams['ortho_use_drop'] = bparams.get('ortho_use_drop', False) + + if not torch.jit.is_scripting(): + LOBPCG.call_tracker = LOBPCG_call_tracker # type: ignore[assignment] + + if len(A.shape) > 2: + N = int(torch.prod(torch.tensor(A.shape[:-2]))) + bA = A.reshape((N,) + A.shape[-2:]) + bB = B.reshape((N,) + A.shape[-2:]) if B is not None else None + bX = X.reshape((N,) + X.shape[-2:]) if X is not None else None + bE = torch.empty((N, k), dtype=dtype, device=device) + bXret = torch.empty((N, m, k), dtype=dtype, device=device) + + for i in range(N): + A_ = bA[i] + B_ = bB[i] if bB is not None else None + X_ = torch.randn((m, n), dtype=dtype, device=device) if bX is None else bX[i] + assert len(X_.shape) == 2 and X_.shape == (m, n), (X_.shape, (m, n)) + iparams['batch_index'] = i + worker = LOBPCG(A_, B_, X_, iK, iparams, fparams, bparams, method, tracker) + worker.run() + bE[i] = worker.E[:k] + bXret[i] = worker.X[:, :k] + + if not torch.jit.is_scripting(): + LOBPCG.call_tracker = LOBPCG_call_tracker_orig # type: ignore[assignment] + + return bE.reshape(A.shape[:-2] + (k,)), bXret.reshape(A.shape[:-2] + (m, k)) + + X = torch.randn((m, n), dtype=dtype, device=device) if X is None else X + assert len(X.shape) == 2 and X.shape == (m, n), (X.shape, (m, n)) + + worker = LOBPCG(A, B, X, iK, iparams, fparams, bparams, method, tracker) + + worker.run() + + if not torch.jit.is_scripting(): + LOBPCG.call_tracker = LOBPCG_call_tracker_orig # type: ignore[assignment] + + return worker.E[:k], worker.X[:, :k] + + +class LOBPCG(object): + """Worker class of LOBPCG methods. 
+ """ + + def __init__(self, + A: Optional[Tensor], + B: Optional[Tensor], + X: Tensor, + iK: Optional[Tensor], + iparams: Dict[str, int], + fparams: Dict[str, float], + bparams: Dict[str, bool], + method: str, + tracker: None + ) -> None: + + # constant parameters + self.A = A + self.B = B + self.iK = iK + self.iparams = iparams + self.fparams = fparams + self.bparams = bparams + self.method = method + self.tracker = tracker + m = iparams['m'] + n = iparams['n'] + + # variable parameters + self.X = X + self.E = torch.zeros((n, ), dtype=X.dtype, device=X.device) + self.R = torch.zeros((m, n), dtype=X.dtype, device=X.device) + self.S = torch.zeros((m, 3 * n), dtype=X.dtype, device=X.device) + self.tvars: Dict[str, Tensor] = {} + self.ivars: Dict[str, int] = {'istep': 0} + self.fvars: Dict[str, float] = {'_': 0.0} + self.bvars: Dict[str, bool] = {'_': False} + + def __str__(self): + lines = ['LOPBCG:'] + lines += [' iparams={}'.format(self.iparams)] + lines += [' fparams={}'.format(self.fparams)] + lines += [' bparams={}'.format(self.bparams)] + lines += [' ivars={}'.format(self.ivars)] + lines += [' fvars={}'.format(self.fvars)] + lines += [' bvars={}'.format(self.bvars)] + lines += [' tvars={}'.format(self.tvars)] + lines += [' A={}'.format(self.A)] + lines += [' B={}'.format(self.B)] + lines += [' iK={}'.format(self.iK)] + lines += [' X={}'.format(self.X)] + lines += [' E={}'.format(self.E)] + r = '' + for line in lines: + r += line + '\n' + return r + + def update(self): + """Set and update iteration variables. 
+ """ + if self.ivars['istep'] == 0: + X_norm = float(torch.norm(self.X)) + iX_norm = X_norm ** -1 + A_norm = float(torch.norm(_utils.matmul(self.A, self.X))) * iX_norm + B_norm = float(torch.norm(_utils.matmul(self.B, self.X))) * iX_norm + self.fvars['X_norm'] = X_norm + self.fvars['A_norm'] = A_norm + self.fvars['B_norm'] = B_norm + self.ivars['iterations_left'] = self.iparams['niter'] + self.ivars['converged_count'] = 0 + self.ivars['converged_end'] = 0 + + if self.method == 'ortho': + self._update_ortho() + else: + self._update_basic() + + self.ivars['iterations_left'] = self.ivars['iterations_left'] - 1 + self.ivars['istep'] = self.ivars['istep'] + 1 + + def update_residual(self): + """Update residual R from A, B, X, E. + """ + mm = _utils.matmul + self.R = mm(self.A, self.X) - mm(self.B, self.X) * self.E + + def update_converged_count(self): + """Determine the number of converged eigenpairs using backward stable + convergence criterion, see discussion in Sec 4.3 of [DuerschEtal2018]. + + Users may redefine this method for custom convergence criteria. + """ + # (...) -> int + prev_count = self.ivars['converged_count'] + tol = self.fparams['tol'] + A_norm = self.fvars['A_norm'] + B_norm = self.fvars['B_norm'] + E, X, R = self.E, self.X, self.R + rerr = torch.norm(R, 2, (0, )) * (torch.norm(X, 2, (0, )) * (A_norm + E[:X.shape[-1]] * B_norm)) ** -1 + converged = rerr < tol + count = 0 + for b in converged: + if not b: + # ignore convergence of following pairs to ensure + # strict ordering of eigenpairs + break + count += 1 + assert count >= prev_count, 'the number of converged eigenpairs ' \ + '(was {}, got {}) cannot decrease'.format(prev_count, count) + self.ivars['converged_count'] = count + self.tvars['rerr'] = rerr + return count + + def stop_iteration(self): + """Return True to stop iterations. + + Note that tracker (if defined) can force-stop iterations by + setting ``worker.bvars['force_stop'] = True``. 
+ """ + return (self.bvars.get('force_stop', False) + or self.ivars['iterations_left'] == 0 + or self.ivars['converged_count'] >= self.iparams['k']) + + def run(self): + """Run LOBPCG iterations. + + Use this method as a template for implementing LOBPCG + iteration scheme with custom tracker that is compatible with + TorchScript. + """ + self.update() + + if not torch.jit.is_scripting() and self.tracker is not None: + self.call_tracker() + + while not self.stop_iteration(): + + self.update() + + if not torch.jit.is_scripting() and self.tracker is not None: + self.call_tracker() + + @torch.jit.unused + def call_tracker(self): + """Interface for tracking iteration process in Python mode. + + Tracking the iteration process is disabled in TorchScript + mode. In fact, one should specify tracker=None when JIT + compiling functions using lobpcg. + """ + # do nothing when in TorchScript mode + pass + + # Internal methods + + def _update_basic(self): + """ + Update or initialize iteration variables when `method == "basic"`. 
+ """ + mm = torch.matmul + ns = self.ivars['converged_end'] + nc = self.ivars['converged_count'] + n = self.iparams['n'] + largest = self.bparams['largest'] + + if self.ivars['istep'] == 0: + Ri = self._get_rayleigh_ritz_transform(self.X) + M = _utils.qform(_utils.qform(self.A, self.X), Ri) + E, Z = _utils.symeig(M, largest) + self.X[:] = mm(self.X, mm(Ri, Z)) + self.E[:] = E + np = 0 + self.update_residual() + nc = self.update_converged_count() + self.S[..., :n] = self.X + + W = _utils.matmul(self.iK, self.R) + self.ivars['converged_end'] = ns = n + np + W.shape[-1] + self.S[:, n + np:ns] = W + else: + S_ = self.S[:, nc:ns] + Ri = self._get_rayleigh_ritz_transform(S_) + M = _utils.qform(_utils.qform(self.A, S_), Ri) + E_, Z = _utils.symeig(M, largest) + self.X[:, nc:] = mm(S_, mm(Ri, Z[:, :n - nc])) + self.E[nc:] = E_[:n - nc] + P = mm(S_, mm(Ri, Z[:, n:2 * n - nc])) + np = P.shape[-1] + + self.update_residual() + nc = self.update_converged_count() + self.S[..., :n] = self.X + self.S[:, n:n + np] = P + W = _utils.matmul(self.iK, self.R[:, nc:]) + + self.ivars['converged_end'] = ns = n + np + W.shape[-1] + self.S[:, n + np:ns] = W + + def _update_ortho(self): + """ + Update or initialize iteration variables when `method == "ortho"`. 
+ """ + mm = torch.matmul + ns = self.ivars['converged_end'] + nc = self.ivars['converged_count'] + n = self.iparams['n'] + largest = self.bparams['largest'] + + if self.ivars['istep'] == 0: + Ri = self._get_rayleigh_ritz_transform(self.X) + M = _utils.qform(_utils.qform(self.A, self.X), Ri) + E, Z = _utils.symeig(M, largest) + self.X = mm(self.X, mm(Ri, Z)) + self.update_residual() + np = 0 + nc = self.update_converged_count() + self.S[:, :n] = self.X + W = self._get_ortho(self.R, self.X) + ns = self.ivars['converged_end'] = n + np + W.shape[-1] + self.S[:, n + np:ns] = W + + else: + S_ = self.S[:, nc:ns] + # Rayleigh-Ritz procedure + E_, Z = _utils.symeig(_utils.qform(self.A, S_), largest) + + # Update E, X, P + self.X[:, nc:] = mm(S_, Z[:, :n - nc]) + self.E[nc:] = E_[:n - nc] + P = mm(S_, mm(Z[:, n - nc:], _utils.basis(_utils.transpose(Z[:n - nc, n - nc:])))) + np = P.shape[-1] + + # check convergence + self.update_residual() + nc = self.update_converged_count() + + # update S + self.S[:, :n] = self.X + self.S[:, n:n + np] = P + W = self._get_ortho(self.R[:, nc:], self.S[:, :n + np]) + ns = self.ivars['converged_end'] = n + np + W.shape[-1] + self.S[:, n + np:ns] = W + + def _get_rayleigh_ritz_transform(self, S): + """Return a transformation matrix that is used in Rayleigh-Ritz + procedure for reducing a general eigenvalue problem :math:`(S^TAS) + C = (S^TBS) C E` to a standard eigenvalue problem :math: `(Ri^T + S^TAS Ri) Z = Z E` where `C = Ri Z`. + + .. 
note:: In the original Rayleight-Ritz procedure in + [DuerschEtal2018], the problem is formulated as follows:: + + SAS = S^T A S + SBS = S^T B S + D = () ** -1/2 + R^T R = Cholesky(D SBS D) + Ri = D R^-1 + solve symeig problem Ri^T SAS Ri Z = Theta Z + C = Ri Z + + To reduce the number of matrix products (denoted by empty + space between matrices), here we introduce element-wise + products (denoted by symbol `*`) so that the Rayleight-Ritz + procedure becomes:: + + SAS = S^T A S + SBS = S^T B S + d = () ** -1/2 # this is 1-d column vector + dd = d d^T # this is 2-d matrix + R^T R = Cholesky(dd * SBS) + Ri = R^-1 * d # broadcasting + solve symeig problem Ri^T SAS Ri Z = Theta Z + C = Ri Z + + where `dd` is 2-d matrix that replaces matrix products `D M + D` with one element-wise product `M * dd`; and `d` replaces + matrix product `D M` with element-wise product `M * + d`. Also, creating the diagonal matrix `D` is avoided. + + Args: + S (Tensor): the matrix basis for the search subspace, size is + :math:`(m, n)`. + + Returns: + Ri (tensor): upper-triangular transformation matrix of size + :math:`(n, n)`. + + """ + B = self.B + mm = torch.matmul + SBS = _utils.qform(B, S) + d_row = SBS.diagonal(0, -2, -1) ** -0.5 + d_col = d_row.reshape(d_row.shape[0], 1) + # TODO use torch.linalg.cholesky_solve once it is implemented + R = torch.linalg.cholesky((SBS * d_row) * d_col, upper=True) + return torch.linalg.solve_triangular(R, d_row.diag_embed(), upper=True, left=False) + + def _get_svqb(self, + U: Tensor, # Tensor + drop: bool, # bool + tau: float # float + ) -> Tensor: + """Return B-orthonormal U. + + .. note:: When `drop` is `False` then `svqb` is based on the + Algorithm 4 from [DuerschPhD2015] that is a slight + modification of the corresponding algorithm + introduced in [StathopolousWu2002]. + + Args: + + U (Tensor) : initial approximation, size is (m, n) + drop (bool) : when True, drop columns that + contribution to the `span([U])` is small. 
    def _get_svqb(self,
                  U: Tensor,  # Tensor
                  drop: bool,  # bool
                  tau: float  # float
                  ) -> Tensor:
        """Return B-orthonormal U.

        .. note:: When `drop` is `False` then `svqb` is based on the
                  Algorithm 4 from [DuerschPhD2015] that is a slight
                  modification of the corresponding algorithm
                  introduced in [StathopolousWu2002].

        Args:

          U (Tensor) : initial approximation, size is (m, n)
          drop (bool) : when True, drop columns that
                     contribution to the `span([U])` is small.
          tau (float) : positive tolerance

        Returns:

          U (Tensor) : B-orthonormal columns (:math:`U^T B U = I`), size
                       is (m, n1), where `n1 = n` if `drop` is `False`,
                       otherwise `n1 <= n`.

        """
        if torch.numel(U) == 0:
            return U
        UBU = _utils.qform(self.B, U)
        d = UBU.diagonal(0, -2, -1)

        # Detect and drop exact zero columns from U. While the test
        # `abs(d) == 0` is unlikely to be True for random data, it is
        # possible to construct input data to lobpcg where it will be
        # True leading to a failure (notice the `d ** -0.5` operation
        # in the original algorithm). To prevent the failure, we drop
        # the exact zero columns here and then continue with the
        # original algorithm below.
        nz = torch.where(abs(d) != 0.0)
        assert len(nz) == 1, nz
        if len(nz[0]) < len(d):
            U = U[:, nz[0]]
            if torch.numel(U) == 0:
                return U
            # Recompute the Gram matrix for the reduced U.
            UBU = _utils.qform(self.B, U)
            d = UBU.diagonal(0, -2, -1)
            nz = torch.where(abs(d) != 0.0)
            assert len(nz[0]) == len(d)

        # The original algorithm 4 from [DuerschPhD2015].
        # Scale UBU to unit diagonal before the eigendecomposition.
        d_col = (d ** -0.5).reshape(d.shape[0], 1)
        DUBUD = (UBU * d_col) * _utils.transpose(d_col)
        E, Z = _utils.symeig(DUBUD)
        # Relative tolerance threshold on the eigenvalues.
        t = tau * abs(E).max()
        if drop:
            # Drop eigen-directions with eigenvalue below the threshold.
            keep = torch.where(E > t)
            assert len(keep) == 1, keep
            E = E[keep[0]]
            Z = Z[:, keep[0]]
            d_col = d_col[keep[0]]
        else:
            # Clamp small eigenvalues to t instead of dropping columns.
            E[(torch.where(E < t))[0]] = t

        return torch.matmul(U * _utils.transpose(d_col), Z * E ** -0.5)
    def _get_ortho(self, U, V):
        """Return B-orthonormal U whose columns are B-orthogonal to V.

        .. note:: When `bparams["ortho_use_drop"] == False` then
                  `_get_ortho` is based on the Algorithm 3 from
                  [DuerschPhD2015] that is a slight modification of
                  the corresponding algorithm introduced in
                  [StathopolousWu2002]. Otherwise, the method
                  implements Algorithm 6 from [DuerschPhD2015]

        .. note:: If all U columns are B-collinear to V then the
                  returned tensor U will be empty.

        Args:

          U (Tensor) : initial approximation, size is (m, n)
          V (Tensor) : B-orthogonal external basis, size is (m, k)

        Returns:

          U (Tensor) : B-orthonormal columns (:math:`U^T B U = I`)
                       such that :math:`V^T B U=0`, size is (m, n1),
                       where `n1 = n` if `drop` is `False`, otherwise
                       `n1 <= n`.
        """
        mm = torch.matmul
        mm_B = _utils.matmul
        m = self.iparams['m']
        tau_ortho = self.fparams['ortho_tol']
        tau_drop = self.fparams['ortho_tol_drop']
        tau_replace = self.fparams['ortho_tol_replace']
        i_max = self.iparams['ortho_i_max']
        j_max = self.iparams['ortho_j_max']
        # when use_drop==True, enable dropping U columns that have
        # small contribution to the `span([U, V])`.
        use_drop = self.bparams['ortho_use_drop']

        # clean up variables from the previous call
        for vkey in list(self.fvars.keys()):
            if vkey.startswith('ortho_') and vkey.endswith('_rerr'):
                self.fvars.pop(vkey)
        self.ivars.pop('ortho_i', 0)
        self.ivars.pop('ortho_j', 0)

        BV_norm = torch.norm(mm_B(self.B, V))
        BU = mm_B(self.B, U)
        VBU = mm(_utils.transpose(V), BU)
        i = j = 0
        stats = ''  # NOTE(review): appears unused — kept as-is
        for i in range(i_max):
            # Block Gram-Schmidt: remove the V-components from U.
            U = U - mm(V, VBU)
            drop = False
            tau_svqb = tau_drop
            for j in range(j_max):
                # Inner loop: repeat svqb until U^T B U is close to identity.
                if use_drop:
                    U = self._get_svqb(U, drop, tau_svqb)
                    # After the first pass, allow dropping with the
                    # tighter replacement tolerance.
                    drop = True
                    tau_svqb = tau_replace
                else:
                    U = self._get_svqb(U, False, tau_replace)
                if torch.numel(U) == 0:
                    # all initial U columns are B-collinear to V
                    self.ivars['ortho_i'] = i
                    self.ivars['ortho_j'] = j
                    return U
                BU = mm_B(self.B, U)
                UBU = mm(_utils.transpose(U), BU)
                U_norm = torch.norm(U)
                BU_norm = torch.norm(BU)
                # Deviation of U^T B U from the identity matrix.
                R = UBU - torch.eye(UBU.shape[-1],
                                    device=UBU.device,
                                    dtype=UBU.dtype)
                R_norm = torch.norm(R)
                # https://github.com/pytorch/pytorch/issues/33810 workaround:
                rerr = float(R_norm) * float(BU_norm * U_norm) ** -1
                vkey = 'ortho_UBUmI_rerr[{}, {}]'.format(i, j)
                self.fvars[vkey] = rerr
                if rerr < tau_ortho:
                    break
            # Outer-loop convergence: V^T B U must be small relative to norms.
            VBU = mm(_utils.transpose(V), BU)
            VBU_norm = torch.norm(VBU)
            U_norm = torch.norm(U)
            rerr = float(VBU_norm) * float(BV_norm * U_norm) ** -1
            vkey = 'ortho_VBU_rerr[{}]'.format(i)
            self.fvars[vkey] = rerr
            if rerr < tau_ortho:
                break
        if m < U.shape[-1] + V.shape[-1]:
            # TorchScript needs the class var to be assigned to a local to
            # do optional type refinement
            B = self.B
            assert B is not None
            raise ValueError(
                'Overdetermined shape of U:'
                ' #B-cols(={}) >= #U-cols(={}) + #V-cols(={}) must hold'
                .format(B.shape[-1], U.shape[-1], V.shape[-1]))
        self.ivars['ortho_i'] = i
        self.ivars['ortho_j'] = j
        return U


# Calling tracker is separated from LOBPCG definitions because
# TorchScript does not support user-defined callback arguments:
LOBPCG_call_tracker_orig = LOBPCG.call_tracker
def LOBPCG_call_tracker(self):
    # Eager-mode replacement for LOBPCG.call_tracker: forwards the worker
    # instance to the user-supplied tracker callback.
    self.tracker(self)
CPU, CUDA
+- FPGA/AutogradOther: represents in-tree backends which we usually have backend specific
+  inference kernels, but they share the same autograd kernel specified in AutogradOther.
+  E.g. FPGA, SparseCsrCPU
+- XLA/AutogradXLA: represents out-of-tree backends which we don't have either inference or autograd
+  kernel defined in pytorch core library. Backend owner is responsible for registering both
+  inference & autograd kernels in their extensions(e.g. torch-xla) for the operators they support.
+  E.g. XLA, XPU, MPS
+- CompositeExplicitAutograd: alias key mapped to inference kernels of all backends like CPU, CUDA, XLA etc.
+  Kernels registered to this key MUST work for inference for all backends.
+- Autograd: alias key mapped to autograd of all backends like AutogradCPU, AutogradXLA, AutogradOther.
+  Kernels registered to this key MUST work for autograd for all backends.
+- CompositeImplicitAutograd: alias key CompositeImplicitAutograd = CompositeExplicitAutograd + Autograd
+  Kernels registered to this key MUST work for both inference + autograd for all backends.
+
+Note we only allow registrations to alias keys inside pytorch core library. E.g.
+you shouldn't register a CompositeImplicitAutograd or CompositeExplicitAutograd
+kernel from torch-xla extension, instead you should upstream the kernel into
+pytorch/pytorch repo so that it's available for all backends and continuously
+tested even without the extension.
+
+Usage:
+    dispatcher = PythonDispatcher()
+    dispatcher.register(["CPU", "XLA", "CompositeImplicitAutograd"])
+    print(dispatcher.dispatchTable()) # This tells you exactly which kernel is used for certain backend.
+    # For more debugging information
+    # print(dispatcher.keys())
+    # print(dispatcher.registrations())
+    # print(dispatcher.rawRegistrations())
+    # print(dispatcher.rawDispatchTable())
+PythonDispatcher calls C++ dispatcher under the hood to precompute dispatch table.
class PythonDispatcher:
    """Thin Python binding to the C++ dispatcher, used to demonstrate how
    dispatch-table precomputation works for a single test operator ``foo``
    registered under the private ``__test__`` namespace.

    See the module docstring for the meaning of the supported dispatch keys.
    """
    # NOTE: the per-method documentation previously lived in bare string
    # literals placed *above* each def; those are no-op statements invisible
    # to help()/__doc__, so they have been moved into real docstrings.
    namespace = "__test__"
    name = "foo"
    # One representative runtime key per use case (see module docstring).
    runtime_keys = [
        "CPU", "AutogradCPU",
        "FPGA", "AutogradOther",
        "XLA", "AutogradXLA",
        "Lazy", "AutogradLazy",
    ]
    # Alias keys may only be registered to from inside pytorch core.
    alias_keys = [
        "CompositeExplicitAutograd",
        "Autograd",
        "CompositeImplicitAutograd",
    ]
    supported_keys = runtime_keys + alias_keys

    def __init__(self):
        C._dispatch_check_invariants(self.name)  # type: ignore[attr-defined]
        self.ref = C._dispatch_library("FRAGMENT", self.namespace, "")
        self.ref.def_("foo(Tensor x) -> Tensor")

    def keys(self):
        """Return the list of dispatch keys supported by PythonDispatcher.

        You can register kernels to these keys.
        """
        return self.supported_keys

    def register(self, dispatchKeys):
        """Register auto-generated kernels (named ``fn_<key>``) to the target
        dispatch keys.

        Args:
            dispatchKeys (list[str]): dispatch keys to register a kernel to.
                You don't need to write the kernel yourself; a placeholder
                is generated and registered for you.

        Raises:
            RuntimeError: on duplicate keys, on unsupported keys, or on a
                combined CompositeImplicitAutograd + CompositeExplicitAutograd
                registration.
        """
        # Overriding is not supported and triggers a warning in C++ dispatcher.
        if len(set(dispatchKeys)) != len(dispatchKeys):
            raise RuntimeError(f"Overriding is not allowed but found duplicates in {dispatchKeys}.")
        # We currently forbid this in codegen instead of C++ dispatcher.
        if 'CompositeImplicitAutograd' in dispatchKeys and 'CompositeExplicitAutograd' in dispatchKeys:
            raise RuntimeError("Registration to both CompositeImplicitAutograd and CompositeExplicitAutograd is not allowed.")
        for key in dispatchKeys:
            if key not in self.supported_keys:
                raise RuntimeError(f"{key} is not supported, please select a dispatch key in {self.supported_keys}.")
            self.ref.impl_t_t("foo", dispatch=key, debug="fn_" + key)

    def _format_line(self, key, kernel):
        """Format one (key, kernel) table row."""
        return "{:<15} {}\n".format(key, kernel)

    def _format_header(self, header):
        """Format a table header followed by the column titles and rule."""
        s = f"""
{header}
"""
        s += self._format_line("key", "kernel")
        s += "---------------------------\n"
        return s

    def rawRegistrations(self):
        """Return raw registration info, for debugging only.

        Use registrations() for a simplified version.
        """
        return C._dispatch_dump("{}::{}".format(self.namespace, self.name))  # type: ignore[attr-defined]

    def rawDispatchTable(self):
        """Return the raw computed dispatch table, for debugging only.

        Use dispatchTable() for a simplified version.
        """
        return C._dispatch_dump_table("{}::{}".format(self.namespace, self.name))  # type: ignore[attr-defined]

    def registrations(self):
        """Return a table (str) of all registrations from users.

        Note this includes registrations to both runtime keys and alias keys.
        """
        output = self._format_header("Registered Kernels")
        state = self.rawRegistrations()
        state_entries = state.split('\n')
        for line in state_entries:
            first = line.split(":")[0]
            if any(first.startswith(k) for k in self.supported_keys):
                # dump line shape: "<key>: <kernel> :: <schema> ..." — take
                # the token after the key as the kernel name.
                kernel = line.split("::")[0].split(" ")[1]
                output += self._format_line(first, kernel)
        return output

    def dispatchTable(self):
        """Return the computed dispatch table (str).

        Note this only includes runtime keys; registrations to alias keys
        have been decoded to their mapped runtime keys.
        """
        output = self._format_header("Computed Dispatch Table")
        table = self.rawDispatchTable()
        table_entries = table.split('\n')
        # Collapse verbose fallback-kernel provenance down to "[".
        regex = re.compile(r"registered at .*FallbackKernel\.cpp.*(\[)")
        for line in table_entries:
            k = line.split(":")[0]
            if k in self.runtime_keys:
                entry = regex.sub('[', line)
                output += self._format_line(k, entry.split(": ")[1])
        return output