Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +1 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distro/distro.py +1403 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distro/py.typed +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/pygments/__init__.py +82 -0
- llava/lib/python3.10/site-packages/pip/_vendor/pygments/__main__.py +17 -0
- llava/lib/python3.10/site-packages/pip/_vendor/pygments/cmdline.py +668 -0
- llava/lib/python3.10/site-packages/pip/_vendor/pygments/formatter.py +129 -0
- llava/lib/python3.10/site-packages/pip/_vendor/pygments/lexer.py +963 -0
- llava/lib/python3.10/site-packages/pip/_vendor/pygments/modeline.py +43 -0
- llava/lib/python3.10/site-packages/pip/_vendor/pygments/regexopt.py +91 -0
- llava/lib/python3.10/site-packages/pip/_vendor/pygments/style.py +203 -0
- llava/lib/python3.10/site-packages/pip/_vendor/pygments/token.py +214 -0
- llava/lib/python3.10/site-packages/pip/_vendor/pygments/unistring.py +153 -0
- llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-310.pyc +3 -0
- llava/lib/python3.10/site-packages/pip/_vendor/tomli/__init__.py +8 -0
- llava/lib/python3.10/site-packages/pip/_vendor/tomli/__pycache__/__init__.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/tomli/__pycache__/_parser.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/tomli/__pycache__/_re.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/tomli/__pycache__/_types.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/tomli/_parser.py +770 -0
- llava/lib/python3.10/site-packages/pip/_vendor/tomli/_re.py +112 -0
- llava/lib/python3.10/site-packages/pip/_vendor/tomli/_types.py +10 -0
- llava/lib/python3.10/site-packages/pip/_vendor/tomli/py.typed +1 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/Numbers.py +47 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/Numbers.pyi +2 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/Primality.py +369 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/Primality.pyi +18 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/_IntegerBase.py +412 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/_IntegerBase.pyi +67 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/_IntegerCustom.py +162 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/_IntegerCustom.pyi +8 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/_IntegerGMP.py +799 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/_IntegerGMP.pyi +3 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/_IntegerNative.py +382 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/_IntegerNative.pyi +3 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/__init__.py +0 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/__pycache__/Numbers.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/__pycache__/Primality.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/__pycache__/_IntegerBase.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/__pycache__/_IntegerCustom.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/__pycache__/_IntegerGMP.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/__pycache__/_IntegerNative.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Math/__pycache__/__init__.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Protocol/DH.pyi +19 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Protocol/SecretSharing.pyi +22 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Signature/DSS.py +403 -0
- minigpt2/lib/python3.10/site-packages/Crypto/Signature/DSS.pyi +27 -0
.gitattributes
CHANGED
|
@@ -1355,3 +1355,4 @@ minigpt2/lib/python3.10/site-packages/Crypto/PublicKey/_curve448.abi3.so filter=
|
|
| 1355 |
minigpt2/lib/python3.10/site-packages/Crypto/PublicKey/_ec_ws.abi3.so filter=lfs diff=lfs merge=lfs -text
|
| 1356 |
minigpt2/lib/python3.10/site-packages/Crypto/PublicKey/_curve25519.abi3.so filter=lfs diff=lfs merge=lfs -text
|
| 1357 |
minigpt2/lib/python3.10/site-packages/Crypto/PublicKey/_ed25519.abi3.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 1355 |
minigpt2/lib/python3.10/site-packages/Crypto/PublicKey/_ec_ws.abi3.so filter=lfs diff=lfs merge=lfs -text
|
| 1356 |
minigpt2/lib/python3.10/site-packages/Crypto/PublicKey/_curve25519.abi3.so filter=lfs diff=lfs merge=lfs -text
|
| 1357 |
minigpt2/lib/python3.10/site-packages/Crypto/PublicKey/_ed25519.abi3.so filter=lfs diff=lfs merge=lfs -text
|
| 1358 |
+
llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
llava/lib/python3.10/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (894 Bytes). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-310.pyc
ADDED
|
Binary file (248 Bytes). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-310.pyc
ADDED
|
Binary file (42.1 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/distro/distro.py
ADDED
|
@@ -0,0 +1,1403 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python
|
| 2 |
+
# Copyright 2015-2021 Nir Cohen
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
|
| 16 |
+
"""
|
| 17 |
+
The ``distro`` package (``distro`` stands for Linux Distribution) provides
|
| 18 |
+
information about the Linux distribution it runs on, such as a reliable
|
| 19 |
+
machine-readable distro ID, or version information.
|
| 20 |
+
|
| 21 |
+
It is the recommended replacement for Python's original
|
| 22 |
+
:py:func:`platform.linux_distribution` function, but it provides much more
|
| 23 |
+
functionality. An alternative implementation became necessary because Python
|
| 24 |
+
3.5 deprecated this function, and Python 3.8 removed it altogether. Its
|
| 25 |
+
predecessor function :py:func:`platform.dist` was already deprecated since
|
| 26 |
+
Python 2.6 and removed in Python 3.8. Still, there are many cases in which
|
| 27 |
+
access to OS distribution information is needed. See `Python issue 1322
|
| 28 |
+
<https://bugs.python.org/issue1322>`_ for more information.
|
| 29 |
+
"""
|
| 30 |
+
|
| 31 |
+
import argparse
|
| 32 |
+
import json
|
| 33 |
+
import logging
|
| 34 |
+
import os
|
| 35 |
+
import re
|
| 36 |
+
import shlex
|
| 37 |
+
import subprocess
|
| 38 |
+
import sys
|
| 39 |
+
import warnings
|
| 40 |
+
from typing import (
|
| 41 |
+
Any,
|
| 42 |
+
Callable,
|
| 43 |
+
Dict,
|
| 44 |
+
Iterable,
|
| 45 |
+
Optional,
|
| 46 |
+
Sequence,
|
| 47 |
+
TextIO,
|
| 48 |
+
Tuple,
|
| 49 |
+
Type,
|
| 50 |
+
)
|
| 51 |
+
|
| 52 |
+
try:
|
| 53 |
+
from typing import TypedDict
|
| 54 |
+
except ImportError:
|
| 55 |
+
# Python 3.7
|
| 56 |
+
TypedDict = dict
|
| 57 |
+
|
| 58 |
+
__version__ = "1.9.0"
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class VersionDict(TypedDict):
|
| 62 |
+
major: str
|
| 63 |
+
minor: str
|
| 64 |
+
build_number: str
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
class InfoDict(TypedDict):
|
| 68 |
+
id: str
|
| 69 |
+
version: str
|
| 70 |
+
version_parts: VersionDict
|
| 71 |
+
like: str
|
| 72 |
+
codename: str
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc")
|
| 76 |
+
_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib")
|
| 77 |
+
_OS_RELEASE_BASENAME = "os-release"
|
| 78 |
+
|
| 79 |
+
#: Translation table for normalizing the "ID" attribute defined in os-release
|
| 80 |
+
#: files, for use by the :func:`distro.id` method.
|
| 81 |
+
#:
|
| 82 |
+
#: * Key: Value as defined in the os-release file, translated to lower case,
|
| 83 |
+
#: with blanks translated to underscores.
|
| 84 |
+
#:
|
| 85 |
+
#: * Value: Normalized value.
|
| 86 |
+
NORMALIZED_OS_ID = {
|
| 87 |
+
"ol": "oracle", # Oracle Linux
|
| 88 |
+
"opensuse-leap": "opensuse", # Newer versions of OpenSuSE report as opensuse-leap
|
| 89 |
+
}
|
| 90 |
+
|
| 91 |
+
#: Translation table for normalizing the "Distributor ID" attribute returned by
|
| 92 |
+
#: the lsb_release command, for use by the :func:`distro.id` method.
|
| 93 |
+
#:
|
| 94 |
+
#: * Key: Value as returned by the lsb_release command, translated to lower
|
| 95 |
+
#: case, with blanks translated to underscores.
|
| 96 |
+
#:
|
| 97 |
+
#: * Value: Normalized value.
|
| 98 |
+
NORMALIZED_LSB_ID = {
|
| 99 |
+
"enterpriseenterpriseas": "oracle", # Oracle Enterprise Linux 4
|
| 100 |
+
"enterpriseenterpriseserver": "oracle", # Oracle Linux 5
|
| 101 |
+
"redhatenterpriseworkstation": "rhel", # RHEL 6, 7 Workstation
|
| 102 |
+
"redhatenterpriseserver": "rhel", # RHEL 6, 7 Server
|
| 103 |
+
"redhatenterprisecomputenode": "rhel", # RHEL 6 ComputeNode
|
| 104 |
+
}
|
| 105 |
+
|
| 106 |
+
#: Translation table for normalizing the distro ID derived from the file name
|
| 107 |
+
#: of distro release files, for use by the :func:`distro.id` method.
|
| 108 |
+
#:
|
| 109 |
+
#: * Key: Value as derived from the file name of a distro release file,
|
| 110 |
+
#: translated to lower case, with blanks translated to underscores.
|
| 111 |
+
#:
|
| 112 |
+
#: * Value: Normalized value.
|
| 113 |
+
NORMALIZED_DISTRO_ID = {
|
| 114 |
+
"redhat": "rhel", # RHEL 6.x, 7.x
|
| 115 |
+
}
|
| 116 |
+
|
| 117 |
+
# Pattern for content of distro release file (reversed)
|
| 118 |
+
_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile(
|
| 119 |
+
r"(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)"
|
| 120 |
+
)
|
| 121 |
+
|
| 122 |
+
# Pattern for base file name of distro release file
|
| 123 |
+
_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$")
|
| 124 |
+
|
| 125 |
+
# Base file names to be looked up for if _UNIXCONFDIR is not readable.
|
| 126 |
+
_DISTRO_RELEASE_BASENAMES = [
|
| 127 |
+
"SuSE-release",
|
| 128 |
+
"altlinux-release",
|
| 129 |
+
"arch-release",
|
| 130 |
+
"base-release",
|
| 131 |
+
"centos-release",
|
| 132 |
+
"fedora-release",
|
| 133 |
+
"gentoo-release",
|
| 134 |
+
"mageia-release",
|
| 135 |
+
"mandrake-release",
|
| 136 |
+
"mandriva-release",
|
| 137 |
+
"mandrivalinux-release",
|
| 138 |
+
"manjaro-release",
|
| 139 |
+
"oracle-release",
|
| 140 |
+
"redhat-release",
|
| 141 |
+
"rocky-release",
|
| 142 |
+
"sl-release",
|
| 143 |
+
"slackware-version",
|
| 144 |
+
]
|
| 145 |
+
|
| 146 |
+
# Base file names to be ignored when searching for distro release file
|
| 147 |
+
_DISTRO_RELEASE_IGNORE_BASENAMES = (
|
| 148 |
+
"debian_version",
|
| 149 |
+
"lsb-release",
|
| 150 |
+
"oem-release",
|
| 151 |
+
_OS_RELEASE_BASENAME,
|
| 152 |
+
"system-release",
|
| 153 |
+
"plesk-release",
|
| 154 |
+
"iredmail-release",
|
| 155 |
+
"board-release",
|
| 156 |
+
"ec2_version",
|
| 157 |
+
)
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
def linux_distribution(full_distribution_name: bool = True) -> Tuple[str, str, str]:
|
| 161 |
+
"""
|
| 162 |
+
.. deprecated:: 1.6.0
|
| 163 |
+
|
| 164 |
+
:func:`distro.linux_distribution()` is deprecated. It should only be
|
| 165 |
+
used as a compatibility shim with Python's
|
| 166 |
+
:py:func:`platform.linux_distribution()`. Please use :func:`distro.id`,
|
| 167 |
+
:func:`distro.version` and :func:`distro.name` instead.
|
| 168 |
+
|
| 169 |
+
Return information about the current OS distribution as a tuple
|
| 170 |
+
``(id_name, version, codename)`` with items as follows:
|
| 171 |
+
|
| 172 |
+
* ``id_name``: If *full_distribution_name* is false, the result of
|
| 173 |
+
:func:`distro.id`. Otherwise, the result of :func:`distro.name`.
|
| 174 |
+
|
| 175 |
+
* ``version``: The result of :func:`distro.version`.
|
| 176 |
+
|
| 177 |
+
* ``codename``: The extra item (usually in parentheses) after the
|
| 178 |
+
os-release version number, or the result of :func:`distro.codename`.
|
| 179 |
+
|
| 180 |
+
The interface of this function is compatible with the original
|
| 181 |
+
:py:func:`platform.linux_distribution` function, supporting a subset of
|
| 182 |
+
its parameters.
|
| 183 |
+
|
| 184 |
+
The data it returns may not exactly be the same, because it uses more data
|
| 185 |
+
sources than the original function, and that may lead to different data if
|
| 186 |
+
the OS distribution is not consistent across multiple data sources it
|
| 187 |
+
provides (there are indeed such distributions ...).
|
| 188 |
+
|
| 189 |
+
Another reason for differences is the fact that the :func:`distro.id`
|
| 190 |
+
method normalizes the distro ID string to a reliable machine-readable value
|
| 191 |
+
for a number of popular OS distributions.
|
| 192 |
+
"""
|
| 193 |
+
warnings.warn(
|
| 194 |
+
"distro.linux_distribution() is deprecated. It should only be used as a "
|
| 195 |
+
"compatibility shim with Python's platform.linux_distribution(). Please use "
|
| 196 |
+
"distro.id(), distro.version() and distro.name() instead.",
|
| 197 |
+
DeprecationWarning,
|
| 198 |
+
stacklevel=2,
|
| 199 |
+
)
|
| 200 |
+
return _distro.linux_distribution(full_distribution_name)
|
| 201 |
+
|
| 202 |
+
|
| 203 |
+
def id() -> str:
|
| 204 |
+
"""
|
| 205 |
+
Return the distro ID of the current distribution, as a
|
| 206 |
+
machine-readable string.
|
| 207 |
+
|
| 208 |
+
For a number of OS distributions, the returned distro ID value is
|
| 209 |
+
*reliable*, in the sense that it is documented and that it does not change
|
| 210 |
+
across releases of the distribution.
|
| 211 |
+
|
| 212 |
+
This package maintains the following reliable distro ID values:
|
| 213 |
+
|
| 214 |
+
============== =========================================
|
| 215 |
+
Distro ID Distribution
|
| 216 |
+
============== =========================================
|
| 217 |
+
"ubuntu" Ubuntu
|
| 218 |
+
"debian" Debian
|
| 219 |
+
"rhel" RedHat Enterprise Linux
|
| 220 |
+
"centos" CentOS
|
| 221 |
+
"fedora" Fedora
|
| 222 |
+
"sles" SUSE Linux Enterprise Server
|
| 223 |
+
"opensuse" openSUSE
|
| 224 |
+
"amzn" Amazon Linux
|
| 225 |
+
"arch" Arch Linux
|
| 226 |
+
"buildroot" Buildroot
|
| 227 |
+
"cloudlinux" CloudLinux OS
|
| 228 |
+
"exherbo" Exherbo Linux
|
| 229 |
+
"gentoo" GenToo Linux
|
| 230 |
+
"ibm_powerkvm" IBM PowerKVM
|
| 231 |
+
"kvmibm" KVM for IBM z Systems
|
| 232 |
+
"linuxmint" Linux Mint
|
| 233 |
+
"mageia" Mageia
|
| 234 |
+
"mandriva" Mandriva Linux
|
| 235 |
+
"parallels" Parallels
|
| 236 |
+
"pidora" Pidora
|
| 237 |
+
"raspbian" Raspbian
|
| 238 |
+
"oracle" Oracle Linux (and Oracle Enterprise Linux)
|
| 239 |
+
"scientific" Scientific Linux
|
| 240 |
+
"slackware" Slackware
|
| 241 |
+
"xenserver" XenServer
|
| 242 |
+
"openbsd" OpenBSD
|
| 243 |
+
"netbsd" NetBSD
|
| 244 |
+
"freebsd" FreeBSD
|
| 245 |
+
"midnightbsd" MidnightBSD
|
| 246 |
+
"rocky" Rocky Linux
|
| 247 |
+
"aix" AIX
|
| 248 |
+
"guix" Guix System
|
| 249 |
+
"altlinux" ALT Linux
|
| 250 |
+
============== =========================================
|
| 251 |
+
|
| 252 |
+
If you have a need to get distros for reliable IDs added into this set,
|
| 253 |
+
or if you find that the :func:`distro.id` function returns a different
|
| 254 |
+
distro ID for one of the listed distros, please create an issue in the
|
| 255 |
+
`distro issue tracker`_.
|
| 256 |
+
|
| 257 |
+
**Lookup hierarchy and transformations:**
|
| 258 |
+
|
| 259 |
+
First, the ID is obtained from the following sources, in the specified
|
| 260 |
+
order. The first available and non-empty value is used:
|
| 261 |
+
|
| 262 |
+
* the value of the "ID" attribute of the os-release file,
|
| 263 |
+
|
| 264 |
+
* the value of the "Distributor ID" attribute returned by the lsb_release
|
| 265 |
+
command,
|
| 266 |
+
|
| 267 |
+
* the first part of the file name of the distro release file,
|
| 268 |
+
|
| 269 |
+
The so determined ID value then passes the following transformations,
|
| 270 |
+
before it is returned by this method:
|
| 271 |
+
|
| 272 |
+
* it is translated to lower case,
|
| 273 |
+
|
| 274 |
+
* blanks (which should not be there anyway) are translated to underscores,
|
| 275 |
+
|
| 276 |
+
* a normalization of the ID is performed, based upon
|
| 277 |
+
`normalization tables`_. The purpose of this normalization is to ensure
|
| 278 |
+
that the ID is as reliable as possible, even across incompatible changes
|
| 279 |
+
in the OS distributions. A common reason for an incompatible change is
|
| 280 |
+
the addition of an os-release file, or the addition of the lsb_release
|
| 281 |
+
command, with ID values that differ from what was previously determined
|
| 282 |
+
from the distro release file name.
|
| 283 |
+
"""
|
| 284 |
+
return _distro.id()
|
| 285 |
+
|
| 286 |
+
|
| 287 |
+
def name(pretty: bool = False) -> str:
|
| 288 |
+
"""
|
| 289 |
+
Return the name of the current OS distribution, as a human-readable
|
| 290 |
+
string.
|
| 291 |
+
|
| 292 |
+
If *pretty* is false, the name is returned without version or codename.
|
| 293 |
+
(e.g. "CentOS Linux")
|
| 294 |
+
|
| 295 |
+
If *pretty* is true, the version and codename are appended.
|
| 296 |
+
(e.g. "CentOS Linux 7.1.1503 (Core)")
|
| 297 |
+
|
| 298 |
+
**Lookup hierarchy:**
|
| 299 |
+
|
| 300 |
+
The name is obtained from the following sources, in the specified order.
|
| 301 |
+
The first available and non-empty value is used:
|
| 302 |
+
|
| 303 |
+
* If *pretty* is false:
|
| 304 |
+
|
| 305 |
+
- the value of the "NAME" attribute of the os-release file,
|
| 306 |
+
|
| 307 |
+
- the value of the "Distributor ID" attribute returned by the lsb_release
|
| 308 |
+
command,
|
| 309 |
+
|
| 310 |
+
- the value of the "<name>" field of the distro release file.
|
| 311 |
+
|
| 312 |
+
* If *pretty* is true:
|
| 313 |
+
|
| 314 |
+
- the value of the "PRETTY_NAME" attribute of the os-release file,
|
| 315 |
+
|
| 316 |
+
- the value of the "Description" attribute returned by the lsb_release
|
| 317 |
+
command,
|
| 318 |
+
|
| 319 |
+
- the value of the "<name>" field of the distro release file, appended
|
| 320 |
+
with the value of the pretty version ("<version_id>" and "<codename>"
|
| 321 |
+
fields) of the distro release file, if available.
|
| 322 |
+
"""
|
| 323 |
+
return _distro.name(pretty)
|
| 324 |
+
|
| 325 |
+
|
| 326 |
+
def version(pretty: bool = False, best: bool = False) -> str:
|
| 327 |
+
"""
|
| 328 |
+
Return the version of the current OS distribution, as a human-readable
|
| 329 |
+
string.
|
| 330 |
+
|
| 331 |
+
If *pretty* is false, the version is returned without codename (e.g.
|
| 332 |
+
"7.0").
|
| 333 |
+
|
| 334 |
+
If *pretty* is true, the codename in parenthesis is appended, if the
|
| 335 |
+
codename is non-empty (e.g. "7.0 (Maipo)").
|
| 336 |
+
|
| 337 |
+
Some distributions provide version numbers with different precisions in
|
| 338 |
+
the different sources of distribution information. Examining the different
|
| 339 |
+
sources in a fixed priority order does not always yield the most precise
|
| 340 |
+
version (e.g. for Debian 8.2, or CentOS 7.1).
|
| 341 |
+
|
| 342 |
+
Some other distributions may not provide this kind of information. In these
|
| 343 |
+
cases, an empty string would be returned. This behavior can be observed
|
| 344 |
+
with rolling releases distributions (e.g. Arch Linux).
|
| 345 |
+
|
| 346 |
+
The *best* parameter can be used to control the approach for the returned
|
| 347 |
+
version:
|
| 348 |
+
|
| 349 |
+
If *best* is false, the first non-empty version number in priority order of
|
| 350 |
+
the examined sources is returned.
|
| 351 |
+
|
| 352 |
+
If *best* is true, the most precise version number out of all examined
|
| 353 |
+
sources is returned.
|
| 354 |
+
|
| 355 |
+
**Lookup hierarchy:**
|
| 356 |
+
|
| 357 |
+
In all cases, the version number is obtained from the following sources.
|
| 358 |
+
If *best* is false, this order represents the priority order:
|
| 359 |
+
|
| 360 |
+
* the value of the "VERSION_ID" attribute of the os-release file,
|
| 361 |
+
* the value of the "Release" attribute returned by the lsb_release
|
| 362 |
+
command,
|
| 363 |
+
* the version number parsed from the "<version_id>" field of the first line
|
| 364 |
+
of the distro release file,
|
| 365 |
+
* the version number parsed from the "PRETTY_NAME" attribute of the
|
| 366 |
+
os-release file, if it follows the format of the distro release files.
|
| 367 |
+
* the version number parsed from the "Description" attribute returned by
|
| 368 |
+
the lsb_release command, if it follows the format of the distro release
|
| 369 |
+
files.
|
| 370 |
+
"""
|
| 371 |
+
return _distro.version(pretty, best)
|
| 372 |
+
|
| 373 |
+
|
| 374 |
+
def version_parts(best: bool = False) -> Tuple[str, str, str]:
    """Return the current OS distribution's version as a tuple
    ``(major, minor, build_number)``.

    The three items are the results of :func:`distro.major_version`,
    :func:`distro.minor_version` and :func:`distro.build_number`,
    respectively.

    For a description of the *best* parameter, see :func:`distro.version`.
    """
    return _distro.version_parts(best)


def major_version(best: bool = False) -> str:
    """Return the major version of the current OS distribution, if provided.

    The major version is the first part of the dot-separated version string;
    the empty string is returned when it is unavailable.

    For a description of the *best* parameter, see :func:`distro.version`.
    """
    return _distro.major_version(best)


def minor_version(best: bool = False) -> str:
    """Return the minor version of the current OS distribution, if provided.

    The minor version is the second part of the dot-separated version string;
    the empty string is returned when it is unavailable.

    For a description of the *best* parameter, see :func:`distro.version`.
    """
    return _distro.minor_version(best)


def build_number(best: bool = False) -> str:
    """Return the build number of the current OS distribution, if provided.

    The build number is the third part of the dot-separated version string;
    the empty string is returned when it is unavailable.

    For a description of the *best* parameter, see :func:`distro.version`.
    """
    return _distro.build_number(best)
|
| 428 |
+
|
| 429 |
+
|
| 430 |
+
def like() -> str:
    """Return a space-separated list of distro IDs of distributions that are
    closely related to the current one (e.g. distributions it derives from)
    with regard to packaging and programming interfaces.

    **Lookup hierarchy:**

    This item comes solely from the os-release file; see the "ID_LIKE"
    attribute in the `os-release man page
    <http://www.freedesktop.org/software/systemd/man/os-release.html>`_.
    """
    return _distro.like()


def codename() -> str:
    """Return the release codename of the current OS distribution, as a
    string, or the empty string when the distribution has none.

    The value is returned verbatim and is not always a true codename (for
    example, openSUSE reports "x86_64"); no special handling is applied.

    **Lookup hierarchy:**

    * the codename within the "VERSION" attribute of the os-release file, if
      provided,

    * the value of the "Codename" attribute returned by the lsb_release
      command,

    * the value of the "<codename>" field of the distro release file.
    """
    return _distro.codename()
|
| 469 |
+
|
| 470 |
+
|
| 471 |
+
def info(pretty: bool = False, best: bool = False) -> InfoDict:
    """Return machine-readable information about the current OS distribution
    as a dictionary, for example:

    .. sourcecode:: python

        {
            'id': 'rhel',
            'version': '7.0',
            'version_parts': {
                'major': '7',
                'minor': '0',
                'build_number': ''
            },
            'like': 'fedora',
            'codename': 'Maipo'
        }

    The structure and keys are fixed regardless of which data sources are
    available. The values are:

    * ``id``: the result of :func:`distro.id`.
    * ``version``: the result of :func:`distro.version`.
    * ``version_parts -> major``: the result of :func:`distro.major_version`.
    * ``version_parts -> minor``: the result of :func:`distro.minor_version`.
    * ``version_parts -> build_number``: the result of
      :func:`distro.build_number`.
    * ``like``: the result of :func:`distro.like`.
    * ``codename``: the result of :func:`distro.codename`.

    For a description of the *pretty* and *best* parameters, see
    :func:`distro.version`.
    """
    return _distro.info(pretty, best)
|
| 513 |
+
|
| 514 |
+
|
| 515 |
+
def os_release_info() -> Dict[str, str]:
    """Return the key-value pairs read from the os-release file data source
    of the current OS distribution.

    See `os-release file`_ for details about these information items.
    """
    return _distro.os_release_info()


def lsb_release_info() -> Dict[str, str]:
    """Return the key-value pairs read from the lsb_release command data
    source of the current OS distribution.

    See `lsb_release command output`_ for details about these information
    items.
    """
    return _distro.lsb_release_info()


def distro_release_info() -> Dict[str, str]:
    """Return the key-value pairs read from the distro release file data
    source of the current OS distribution.

    See `distro release file`_ for details about these information items.
    """
    return _distro.distro_release_info()


def uname_info() -> Dict[str, str]:
    """Return the key-value pairs read from the uname command data source of
    the current OS distribution.
    """
    return _distro.uname_info()
|
| 552 |
+
|
| 553 |
+
|
| 554 |
+
def os_release_attr(attribute: str) -> str:
    """Return one named information item from the os-release file data
    source of the current OS distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the item if it exists, otherwise the empty string.

    See `os-release file`_ for details about these information items.
    """
    return _distro.os_release_attr(attribute)


def lsb_release_attr(attribute: str) -> str:
    """Return one named information item from the lsb_release command output
    data source of the current OS distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the item if it exists, otherwise the empty string.

    See `lsb_release command output`_ for details about these information
    items.
    """
    return _distro.lsb_release_attr(attribute)


def distro_release_attr(attribute: str) -> str:
    """Return one named information item from the distro release file data
    source of the current OS distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the item if it exists, otherwise the empty string.

    See `distro release file`_ for details about these information items.
    """
    return _distro.distro_release_attr(attribute)


def uname_attr(attribute: str) -> str:
    """Return one named information item from the uname command output data
    source of the current OS distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the item if it exists, otherwise the empty string.
    """
    return _distro.uname_attr(attribute)
|
| 627 |
+
|
| 628 |
+
|
| 629 |
+
try:
    from functools import cached_property
except ImportError:
    # Fallback for Python < 3.8, which lacks functools.cached_property.
    class cached_property:  # type: ignore
        """A caching variant of @property.

        On first access the wrapped function is called and its result is
        stored under the function's name in the instance ``__dict__``, so
        later attribute lookups bypass this descriptor entirely.
        """

        def __init__(self, f: Callable[[Any], Any]) -> None:
            self._f = f
            self._fname = f.__name__

        def __get__(self, obj: Any, owner: Type[Any]) -> Any:
            # Class-level access is not supported; an instance is required.
            assert obj is not None, f"call {self._fname} on an instance"
            value = self._f(obj)
            obj.__dict__[self._fname] = value
            return value
|
| 647 |
+
|
| 648 |
+
|
| 649 |
+
class LinuxDistribution:
|
| 650 |
+
"""
|
| 651 |
+
Provides information about a OS distribution.
|
| 652 |
+
|
| 653 |
+
This package creates a private module-global instance of this class with
|
| 654 |
+
default initialization arguments, that is used by the
|
| 655 |
+
`consolidated accessor functions`_ and `single source accessor functions`_.
|
| 656 |
+
By using default initialization arguments, that module-global instance
|
| 657 |
+
returns data about the current OS distribution (i.e. the distro this
|
| 658 |
+
package runs on).
|
| 659 |
+
|
| 660 |
+
Normally, it is not necessary to create additional instances of this class.
|
| 661 |
+
However, in situations where control is needed over the exact data sources
|
| 662 |
+
that are used, instances of this class can be created with a specific
|
| 663 |
+
distro release file, or a specific os-release file, or without invoking the
|
| 664 |
+
lsb_release command.
|
| 665 |
+
"""
|
| 666 |
+
|
| 667 |
+
def __init__(
|
| 668 |
+
self,
|
| 669 |
+
include_lsb: Optional[bool] = None,
|
| 670 |
+
os_release_file: str = "",
|
| 671 |
+
distro_release_file: str = "",
|
| 672 |
+
include_uname: Optional[bool] = None,
|
| 673 |
+
root_dir: Optional[str] = None,
|
| 674 |
+
include_oslevel: Optional[bool] = None,
|
| 675 |
+
) -> None:
|
| 676 |
+
"""
|
| 677 |
+
The initialization method of this class gathers information from the
|
| 678 |
+
available data sources, and stores that in private instance attributes.
|
| 679 |
+
Subsequent access to the information items uses these private instance
|
| 680 |
+
attributes, so that the data sources are read only once.
|
| 681 |
+
|
| 682 |
+
Parameters:
|
| 683 |
+
|
| 684 |
+
* ``include_lsb`` (bool): Controls whether the
|
| 685 |
+
`lsb_release command output`_ is included as a data source.
|
| 686 |
+
|
| 687 |
+
If the lsb_release command is not available in the program execution
|
| 688 |
+
path, the data source for the lsb_release command will be empty.
|
| 689 |
+
|
| 690 |
+
* ``os_release_file`` (string): The path name of the
|
| 691 |
+
`os-release file`_ that is to be used as a data source.
|
| 692 |
+
|
| 693 |
+
An empty string (the default) will cause the default path name to
|
| 694 |
+
be used (see `os-release file`_ for details).
|
| 695 |
+
|
| 696 |
+
If the specified or defaulted os-release file does not exist, the
|
| 697 |
+
data source for the os-release file will be empty.
|
| 698 |
+
|
| 699 |
+
* ``distro_release_file`` (string): The path name of the
|
| 700 |
+
`distro release file`_ that is to be used as a data source.
|
| 701 |
+
|
| 702 |
+
An empty string (the default) will cause a default search algorithm
|
| 703 |
+
to be used (see `distro release file`_ for details).
|
| 704 |
+
|
| 705 |
+
If the specified distro release file does not exist, or if no default
|
| 706 |
+
distro release file can be found, the data source for the distro
|
| 707 |
+
release file will be empty.
|
| 708 |
+
|
| 709 |
+
* ``include_uname`` (bool): Controls whether uname command output is
|
| 710 |
+
included as a data source. If the uname command is not available in
|
| 711 |
+
the program execution path the data source for the uname command will
|
| 712 |
+
be empty.
|
| 713 |
+
|
| 714 |
+
* ``root_dir`` (string): The absolute path to the root directory to use
|
| 715 |
+
to find distro-related information files. Note that ``include_*``
|
| 716 |
+
parameters must not be enabled in combination with ``root_dir``.
|
| 717 |
+
|
| 718 |
+
* ``include_oslevel`` (bool): Controls whether (AIX) oslevel command
|
| 719 |
+
output is included as a data source. If the oslevel command is not
|
| 720 |
+
available in the program execution path the data source will be
|
| 721 |
+
empty.
|
| 722 |
+
|
| 723 |
+
Public instance attributes:
|
| 724 |
+
|
| 725 |
+
* ``os_release_file`` (string): The path name of the
|
| 726 |
+
`os-release file`_ that is actually used as a data source. The
|
| 727 |
+
empty string if no distro release file is used as a data source.
|
| 728 |
+
|
| 729 |
+
* ``distro_release_file`` (string): The path name of the
|
| 730 |
+
`distro release file`_ that is actually used as a data source. The
|
| 731 |
+
empty string if no distro release file is used as a data source.
|
| 732 |
+
|
| 733 |
+
* ``include_lsb`` (bool): The result of the ``include_lsb`` parameter.
|
| 734 |
+
This controls whether the lsb information will be loaded.
|
| 735 |
+
|
| 736 |
+
* ``include_uname`` (bool): The result of the ``include_uname``
|
| 737 |
+
parameter. This controls whether the uname information will
|
| 738 |
+
be loaded.
|
| 739 |
+
|
| 740 |
+
* ``include_oslevel`` (bool): The result of the ``include_oslevel``
|
| 741 |
+
parameter. This controls whether (AIX) oslevel information will be
|
| 742 |
+
loaded.
|
| 743 |
+
|
| 744 |
+
* ``root_dir`` (string): The result of the ``root_dir`` parameter.
|
| 745 |
+
The absolute path to the root directory to use to find distro-related
|
| 746 |
+
information files.
|
| 747 |
+
|
| 748 |
+
Raises:
|
| 749 |
+
|
| 750 |
+
* :py:exc:`ValueError`: Initialization parameters combination is not
|
| 751 |
+
supported.
|
| 752 |
+
|
| 753 |
+
* :py:exc:`OSError`: Some I/O issue with an os-release file or distro
|
| 754 |
+
release file.
|
| 755 |
+
|
| 756 |
+
* :py:exc:`UnicodeError`: A data source has unexpected characters or
|
| 757 |
+
uses an unexpected encoding.
|
| 758 |
+
"""
|
| 759 |
+
self.root_dir = root_dir
|
| 760 |
+
self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR
|
| 761 |
+
self.usr_lib_dir = (
|
| 762 |
+
os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR
|
| 763 |
+
)
|
| 764 |
+
|
| 765 |
+
if os_release_file:
|
| 766 |
+
self.os_release_file = os_release_file
|
| 767 |
+
else:
|
| 768 |
+
etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME)
|
| 769 |
+
usr_lib_os_release_file = os.path.join(
|
| 770 |
+
self.usr_lib_dir, _OS_RELEASE_BASENAME
|
| 771 |
+
)
|
| 772 |
+
|
| 773 |
+
# NOTE: The idea is to respect order **and** have it set
|
| 774 |
+
# at all times for API backwards compatibility.
|
| 775 |
+
if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile(
|
| 776 |
+
usr_lib_os_release_file
|
| 777 |
+
):
|
| 778 |
+
self.os_release_file = etc_dir_os_release_file
|
| 779 |
+
else:
|
| 780 |
+
self.os_release_file = usr_lib_os_release_file
|
| 781 |
+
|
| 782 |
+
self.distro_release_file = distro_release_file or "" # updated later
|
| 783 |
+
|
| 784 |
+
is_root_dir_defined = root_dir is not None
|
| 785 |
+
if is_root_dir_defined and (include_lsb or include_uname or include_oslevel):
|
| 786 |
+
raise ValueError(
|
| 787 |
+
"Including subprocess data sources from specific root_dir is disallowed"
|
| 788 |
+
" to prevent false information"
|
| 789 |
+
)
|
| 790 |
+
self.include_lsb = (
|
| 791 |
+
include_lsb if include_lsb is not None else not is_root_dir_defined
|
| 792 |
+
)
|
| 793 |
+
self.include_uname = (
|
| 794 |
+
include_uname if include_uname is not None else not is_root_dir_defined
|
| 795 |
+
)
|
| 796 |
+
self.include_oslevel = (
|
| 797 |
+
include_oslevel if include_oslevel is not None else not is_root_dir_defined
|
| 798 |
+
)
|
| 799 |
+
|
| 800 |
+
def __repr__(self) -> str:
|
| 801 |
+
"""Return repr of all info"""
|
| 802 |
+
return (
|
| 803 |
+
"LinuxDistribution("
|
| 804 |
+
"os_release_file={self.os_release_file!r}, "
|
| 805 |
+
"distro_release_file={self.distro_release_file!r}, "
|
| 806 |
+
"include_lsb={self.include_lsb!r}, "
|
| 807 |
+
"include_uname={self.include_uname!r}, "
|
| 808 |
+
"include_oslevel={self.include_oslevel!r}, "
|
| 809 |
+
"root_dir={self.root_dir!r}, "
|
| 810 |
+
"_os_release_info={self._os_release_info!r}, "
|
| 811 |
+
"_lsb_release_info={self._lsb_release_info!r}, "
|
| 812 |
+
"_distro_release_info={self._distro_release_info!r}, "
|
| 813 |
+
"_uname_info={self._uname_info!r}, "
|
| 814 |
+
"_oslevel_info={self._oslevel_info!r})".format(self=self)
|
| 815 |
+
)
|
| 816 |
+
|
| 817 |
+
def linux_distribution(
|
| 818 |
+
self, full_distribution_name: bool = True
|
| 819 |
+
) -> Tuple[str, str, str]:
|
| 820 |
+
"""
|
| 821 |
+
Return information about the OS distribution that is compatible
|
| 822 |
+
with Python's :func:`platform.linux_distribution`, supporting a subset
|
| 823 |
+
of its parameters.
|
| 824 |
+
|
| 825 |
+
For details, see :func:`distro.linux_distribution`.
|
| 826 |
+
"""
|
| 827 |
+
return (
|
| 828 |
+
self.name() if full_distribution_name else self.id(),
|
| 829 |
+
self.version(),
|
| 830 |
+
self._os_release_info.get("release_codename") or self.codename(),
|
| 831 |
+
)
|
| 832 |
+
|
| 833 |
+
def id(self) -> str:
|
| 834 |
+
"""Return the distro ID of the OS distribution, as a string.
|
| 835 |
+
|
| 836 |
+
For details, see :func:`distro.id`.
|
| 837 |
+
"""
|
| 838 |
+
|
| 839 |
+
def normalize(distro_id: str, table: Dict[str, str]) -> str:
|
| 840 |
+
distro_id = distro_id.lower().replace(" ", "_")
|
| 841 |
+
return table.get(distro_id, distro_id)
|
| 842 |
+
|
| 843 |
+
distro_id = self.os_release_attr("id")
|
| 844 |
+
if distro_id:
|
| 845 |
+
return normalize(distro_id, NORMALIZED_OS_ID)
|
| 846 |
+
|
| 847 |
+
distro_id = self.lsb_release_attr("distributor_id")
|
| 848 |
+
if distro_id:
|
| 849 |
+
return normalize(distro_id, NORMALIZED_LSB_ID)
|
| 850 |
+
|
| 851 |
+
distro_id = self.distro_release_attr("id")
|
| 852 |
+
if distro_id:
|
| 853 |
+
return normalize(distro_id, NORMALIZED_DISTRO_ID)
|
| 854 |
+
|
| 855 |
+
distro_id = self.uname_attr("id")
|
| 856 |
+
if distro_id:
|
| 857 |
+
return normalize(distro_id, NORMALIZED_DISTRO_ID)
|
| 858 |
+
|
| 859 |
+
return ""
|
| 860 |
+
|
| 861 |
+
def name(self, pretty: bool = False) -> str:
|
| 862 |
+
"""
|
| 863 |
+
Return the name of the OS distribution, as a string.
|
| 864 |
+
|
| 865 |
+
For details, see :func:`distro.name`.
|
| 866 |
+
"""
|
| 867 |
+
name = (
|
| 868 |
+
self.os_release_attr("name")
|
| 869 |
+
or self.lsb_release_attr("distributor_id")
|
| 870 |
+
or self.distro_release_attr("name")
|
| 871 |
+
or self.uname_attr("name")
|
| 872 |
+
)
|
| 873 |
+
if pretty:
|
| 874 |
+
name = self.os_release_attr("pretty_name") or self.lsb_release_attr(
|
| 875 |
+
"description"
|
| 876 |
+
)
|
| 877 |
+
if not name:
|
| 878 |
+
name = self.distro_release_attr("name") or self.uname_attr("name")
|
| 879 |
+
version = self.version(pretty=True)
|
| 880 |
+
if version:
|
| 881 |
+
name = f"{name} {version}"
|
| 882 |
+
return name or ""
|
| 883 |
+
|
| 884 |
+
def version(self, pretty: bool = False, best: bool = False) -> str:
|
| 885 |
+
"""
|
| 886 |
+
Return the version of the OS distribution, as a string.
|
| 887 |
+
|
| 888 |
+
For details, see :func:`distro.version`.
|
| 889 |
+
"""
|
| 890 |
+
versions = [
|
| 891 |
+
self.os_release_attr("version_id"),
|
| 892 |
+
self.lsb_release_attr("release"),
|
| 893 |
+
self.distro_release_attr("version_id"),
|
| 894 |
+
self._parse_distro_release_content(self.os_release_attr("pretty_name")).get(
|
| 895 |
+
"version_id", ""
|
| 896 |
+
),
|
| 897 |
+
self._parse_distro_release_content(
|
| 898 |
+
self.lsb_release_attr("description")
|
| 899 |
+
).get("version_id", ""),
|
| 900 |
+
self.uname_attr("release"),
|
| 901 |
+
]
|
| 902 |
+
if self.uname_attr("id").startswith("aix"):
|
| 903 |
+
# On AIX platforms, prefer oslevel command output.
|
| 904 |
+
versions.insert(0, self.oslevel_info())
|
| 905 |
+
elif self.id() == "debian" or "debian" in self.like().split():
|
| 906 |
+
# On Debian-like, add debian_version file content to candidates list.
|
| 907 |
+
versions.append(self._debian_version)
|
| 908 |
+
version = ""
|
| 909 |
+
if best:
|
| 910 |
+
# This algorithm uses the last version in priority order that has
|
| 911 |
+
# the best precision. If the versions are not in conflict, that
|
| 912 |
+
# does not matter; otherwise, using the last one instead of the
|
| 913 |
+
# first one might be considered a surprise.
|
| 914 |
+
for v in versions:
|
| 915 |
+
if v.count(".") > version.count(".") or version == "":
|
| 916 |
+
version = v
|
| 917 |
+
else:
|
| 918 |
+
for v in versions:
|
| 919 |
+
if v != "":
|
| 920 |
+
version = v
|
| 921 |
+
break
|
| 922 |
+
if pretty and version and self.codename():
|
| 923 |
+
version = f"{version} ({self.codename()})"
|
| 924 |
+
return version
|
| 925 |
+
|
| 926 |
+
def version_parts(self, best: bool = False) -> Tuple[str, str, str]:
|
| 927 |
+
"""
|
| 928 |
+
Return the version of the OS distribution, as a tuple of version
|
| 929 |
+
numbers.
|
| 930 |
+
|
| 931 |
+
For details, see :func:`distro.version_parts`.
|
| 932 |
+
"""
|
| 933 |
+
version_str = self.version(best=best)
|
| 934 |
+
if version_str:
|
| 935 |
+
version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?")
|
| 936 |
+
matches = version_regex.match(version_str)
|
| 937 |
+
if matches:
|
| 938 |
+
major, minor, build_number = matches.groups()
|
| 939 |
+
return major, minor or "", build_number or ""
|
| 940 |
+
return "", "", ""
|
| 941 |
+
|
| 942 |
+
def major_version(self, best: bool = False) -> str:
|
| 943 |
+
"""
|
| 944 |
+
Return the major version number of the current distribution.
|
| 945 |
+
|
| 946 |
+
For details, see :func:`distro.major_version`.
|
| 947 |
+
"""
|
| 948 |
+
return self.version_parts(best)[0]
|
| 949 |
+
|
| 950 |
+
def minor_version(self, best: bool = False) -> str:
|
| 951 |
+
"""
|
| 952 |
+
Return the minor version number of the current distribution.
|
| 953 |
+
|
| 954 |
+
For details, see :func:`distro.minor_version`.
|
| 955 |
+
"""
|
| 956 |
+
return self.version_parts(best)[1]
|
| 957 |
+
|
| 958 |
+
def build_number(self, best: bool = False) -> str:
|
| 959 |
+
"""
|
| 960 |
+
Return the build number of the current distribution.
|
| 961 |
+
|
| 962 |
+
For details, see :func:`distro.build_number`.
|
| 963 |
+
"""
|
| 964 |
+
return self.version_parts(best)[2]
|
| 965 |
+
|
| 966 |
+
def like(self) -> str:
|
| 967 |
+
"""
|
| 968 |
+
Return the IDs of distributions that are like the OS distribution.
|
| 969 |
+
|
| 970 |
+
For details, see :func:`distro.like`.
|
| 971 |
+
"""
|
| 972 |
+
return self.os_release_attr("id_like") or ""
|
| 973 |
+
|
| 974 |
+
def codename(self) -> str:
|
| 975 |
+
"""
|
| 976 |
+
Return the codename of the OS distribution.
|
| 977 |
+
|
| 978 |
+
For details, see :func:`distro.codename`.
|
| 979 |
+
"""
|
| 980 |
+
try:
|
| 981 |
+
# Handle os_release specially since distros might purposefully set
|
| 982 |
+
# this to empty string to have no codename
|
| 983 |
+
return self._os_release_info["codename"]
|
| 984 |
+
except KeyError:
|
| 985 |
+
return (
|
| 986 |
+
self.lsb_release_attr("codename")
|
| 987 |
+
or self.distro_release_attr("codename")
|
| 988 |
+
or ""
|
| 989 |
+
)
|
| 990 |
+
|
| 991 |
+
def info(self, pretty: bool = False, best: bool = False) -> InfoDict:
|
| 992 |
+
"""
|
| 993 |
+
Return certain machine-readable information about the OS
|
| 994 |
+
distribution.
|
| 995 |
+
|
| 996 |
+
For details, see :func:`distro.info`.
|
| 997 |
+
"""
|
| 998 |
+
return InfoDict(
|
| 999 |
+
id=self.id(),
|
| 1000 |
+
version=self.version(pretty, best),
|
| 1001 |
+
version_parts=VersionDict(
|
| 1002 |
+
major=self.major_version(best),
|
| 1003 |
+
minor=self.minor_version(best),
|
| 1004 |
+
build_number=self.build_number(best),
|
| 1005 |
+
),
|
| 1006 |
+
like=self.like(),
|
| 1007 |
+
codename=self.codename(),
|
| 1008 |
+
)
|
| 1009 |
+
|
| 1010 |
+
def os_release_info(self) -> Dict[str, str]:
|
| 1011 |
+
"""
|
| 1012 |
+
Return a dictionary containing key-value pairs for the information
|
| 1013 |
+
items from the os-release file data source of the OS distribution.
|
| 1014 |
+
|
| 1015 |
+
For details, see :func:`distro.os_release_info`.
|
| 1016 |
+
"""
|
| 1017 |
+
return self._os_release_info
|
| 1018 |
+
|
| 1019 |
+
def lsb_release_info(self) -> Dict[str, str]:
|
| 1020 |
+
"""
|
| 1021 |
+
Return a dictionary containing key-value pairs for the information
|
| 1022 |
+
items from the lsb_release command data source of the OS
|
| 1023 |
+
distribution.
|
| 1024 |
+
|
| 1025 |
+
For details, see :func:`distro.lsb_release_info`.
|
| 1026 |
+
"""
|
| 1027 |
+
return self._lsb_release_info
|
| 1028 |
+
|
| 1029 |
+
def distro_release_info(self) -> Dict[str, str]:
|
| 1030 |
+
"""
|
| 1031 |
+
Return a dictionary containing key-value pairs for the information
|
| 1032 |
+
items from the distro release file data source of the OS
|
| 1033 |
+
distribution.
|
| 1034 |
+
|
| 1035 |
+
For details, see :func:`distro.distro_release_info`.
|
| 1036 |
+
"""
|
| 1037 |
+
return self._distro_release_info
|
| 1038 |
+
|
| 1039 |
+
def uname_info(self) -> Dict[str, str]:
|
| 1040 |
+
"""
|
| 1041 |
+
Return a dictionary containing key-value pairs for the information
|
| 1042 |
+
items from the uname command data source of the OS distribution.
|
| 1043 |
+
|
| 1044 |
+
For details, see :func:`distro.uname_info`.
|
| 1045 |
+
"""
|
| 1046 |
+
return self._uname_info
|
| 1047 |
+
|
| 1048 |
+
def oslevel_info(self) -> str:
|
| 1049 |
+
"""
|
| 1050 |
+
Return AIX' oslevel command output.
|
| 1051 |
+
"""
|
| 1052 |
+
return self._oslevel_info
|
| 1053 |
+
|
| 1054 |
+
def os_release_attr(self, attribute: str) -> str:
|
| 1055 |
+
"""
|
| 1056 |
+
Return a single named information item from the os-release file data
|
| 1057 |
+
source of the OS distribution.
|
| 1058 |
+
|
| 1059 |
+
For details, see :func:`distro.os_release_attr`.
|
| 1060 |
+
"""
|
| 1061 |
+
return self._os_release_info.get(attribute, "")
|
| 1062 |
+
|
| 1063 |
+
def lsb_release_attr(self, attribute: str) -> str:
|
| 1064 |
+
"""
|
| 1065 |
+
Return a single named information item from the lsb_release command
|
| 1066 |
+
output data source of the OS distribution.
|
| 1067 |
+
|
| 1068 |
+
For details, see :func:`distro.lsb_release_attr`.
|
| 1069 |
+
"""
|
| 1070 |
+
return self._lsb_release_info.get(attribute, "")
|
| 1071 |
+
|
| 1072 |
+
def distro_release_attr(self, attribute: str) -> str:
|
| 1073 |
+
"""
|
| 1074 |
+
Return a single named information item from the distro release file
|
| 1075 |
+
data source of the OS distribution.
|
| 1076 |
+
|
| 1077 |
+
For details, see :func:`distro.distro_release_attr`.
|
| 1078 |
+
"""
|
| 1079 |
+
return self._distro_release_info.get(attribute, "")
|
| 1080 |
+
|
| 1081 |
+
def uname_attr(self, attribute: str) -> str:
|
| 1082 |
+
"""
|
| 1083 |
+
Return a single named information item from the uname command
|
| 1084 |
+
output data source of the OS distribution.
|
| 1085 |
+
|
| 1086 |
+
For details, see :func:`distro.uname_attr`.
|
| 1087 |
+
"""
|
| 1088 |
+
return self._uname_info.get(attribute, "")
|
| 1089 |
+
|
| 1090 |
+
@cached_property
|
| 1091 |
+
def _os_release_info(self) -> Dict[str, str]:
|
| 1092 |
+
"""
|
| 1093 |
+
Get the information items from the specified os-release file.
|
| 1094 |
+
|
| 1095 |
+
Returns:
|
| 1096 |
+
A dictionary containing all information items.
|
| 1097 |
+
"""
|
| 1098 |
+
if os.path.isfile(self.os_release_file):
|
| 1099 |
+
with open(self.os_release_file, encoding="utf-8") as release_file:
|
| 1100 |
+
return self._parse_os_release_content(release_file)
|
| 1101 |
+
return {}
|
| 1102 |
+
|
| 1103 |
+
@staticmethod
|
| 1104 |
+
def _parse_os_release_content(lines: TextIO) -> Dict[str, str]:
|
| 1105 |
+
"""
|
| 1106 |
+
Parse the lines of an os-release file.
|
| 1107 |
+
|
| 1108 |
+
Parameters:
|
| 1109 |
+
|
| 1110 |
+
* lines: Iterable through the lines in the os-release file.
|
| 1111 |
+
Each line must be a unicode string or a UTF-8 encoded byte
|
| 1112 |
+
string.
|
| 1113 |
+
|
| 1114 |
+
Returns:
|
| 1115 |
+
A dictionary containing all information items.
|
| 1116 |
+
"""
|
| 1117 |
+
props = {}
|
| 1118 |
+
lexer = shlex.shlex(lines, posix=True)
|
| 1119 |
+
lexer.whitespace_split = True
|
| 1120 |
+
|
| 1121 |
+
tokens = list(lexer)
|
| 1122 |
+
for token in tokens:
|
| 1123 |
+
# At this point, all shell-like parsing has been done (i.e.
|
| 1124 |
+
# comments processed, quotes and backslash escape sequences
|
| 1125 |
+
# processed, multi-line values assembled, trailing newlines
|
| 1126 |
+
# stripped, etc.), so the tokens are now either:
|
| 1127 |
+
# * variable assignments: var=value
|
| 1128 |
+
# * commands or their arguments (not allowed in os-release)
|
| 1129 |
+
# Ignore any tokens that are not variable assignments
|
| 1130 |
+
if "=" in token:
|
| 1131 |
+
k, v = token.split("=", 1)
|
| 1132 |
+
props[k.lower()] = v
|
| 1133 |
+
|
| 1134 |
+
if "version" in props:
|
| 1135 |
+
# extract release codename (if any) from version attribute
|
| 1136 |
+
match = re.search(r"\((\D+)\)|,\s*(\D+)", props["version"])
|
| 1137 |
+
if match:
|
| 1138 |
+
release_codename = match.group(1) or match.group(2)
|
| 1139 |
+
props["codename"] = props["release_codename"] = release_codename
|
| 1140 |
+
|
| 1141 |
+
if "version_codename" in props:
|
| 1142 |
+
# os-release added a version_codename field. Use that in
|
| 1143 |
+
# preference to anything else Note that some distros purposefully
|
| 1144 |
+
# do not have code names. They should be setting
|
| 1145 |
+
# version_codename=""
|
| 1146 |
+
props["codename"] = props["version_codename"]
|
| 1147 |
+
elif "ubuntu_codename" in props:
|
| 1148 |
+
# Same as above but a non-standard field name used on older Ubuntus
|
| 1149 |
+
props["codename"] = props["ubuntu_codename"]
|
| 1150 |
+
|
| 1151 |
+
return props
|
| 1152 |
+
|
| 1153 |
+
@cached_property
|
| 1154 |
+
def _lsb_release_info(self) -> Dict[str, str]:
|
| 1155 |
+
"""
|
| 1156 |
+
Get the information items from the lsb_release command output.
|
| 1157 |
+
|
| 1158 |
+
Returns:
|
| 1159 |
+
A dictionary containing all information items.
|
| 1160 |
+
"""
|
| 1161 |
+
if not self.include_lsb:
|
| 1162 |
+
return {}
|
| 1163 |
+
try:
|
| 1164 |
+
cmd = ("lsb_release", "-a")
|
| 1165 |
+
stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
|
| 1166 |
+
# Command not found or lsb_release returned error
|
| 1167 |
+
except (OSError, subprocess.CalledProcessError):
|
| 1168 |
+
return {}
|
| 1169 |
+
content = self._to_str(stdout).splitlines()
|
| 1170 |
+
return self._parse_lsb_release_content(content)
|
| 1171 |
+
|
| 1172 |
+
@staticmethod
|
| 1173 |
+
def _parse_lsb_release_content(lines: Iterable[str]) -> Dict[str, str]:
|
| 1174 |
+
"""
|
| 1175 |
+
Parse the output of the lsb_release command.
|
| 1176 |
+
|
| 1177 |
+
Parameters:
|
| 1178 |
+
|
| 1179 |
+
* lines: Iterable through the lines of the lsb_release output.
|
| 1180 |
+
Each line must be a unicode string or a UTF-8 encoded byte
|
| 1181 |
+
string.
|
| 1182 |
+
|
| 1183 |
+
Returns:
|
| 1184 |
+
A dictionary containing all information items.
|
| 1185 |
+
"""
|
| 1186 |
+
props = {}
|
| 1187 |
+
for line in lines:
|
| 1188 |
+
kv = line.strip("\n").split(":", 1)
|
| 1189 |
+
if len(kv) != 2:
|
| 1190 |
+
# Ignore lines without colon.
|
| 1191 |
+
continue
|
| 1192 |
+
k, v = kv
|
| 1193 |
+
props.update({k.replace(" ", "_").lower(): v.strip()})
|
| 1194 |
+
return props
|
| 1195 |
+
|
| 1196 |
+
@cached_property
|
| 1197 |
+
def _uname_info(self) -> Dict[str, str]:
|
| 1198 |
+
if not self.include_uname:
|
| 1199 |
+
return {}
|
| 1200 |
+
try:
|
| 1201 |
+
cmd = ("uname", "-rs")
|
| 1202 |
+
stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
|
| 1203 |
+
except OSError:
|
| 1204 |
+
return {}
|
| 1205 |
+
content = self._to_str(stdout).splitlines()
|
| 1206 |
+
return self._parse_uname_content(content)
|
| 1207 |
+
|
| 1208 |
+
@cached_property
|
| 1209 |
+
def _oslevel_info(self) -> str:
|
| 1210 |
+
if not self.include_oslevel:
|
| 1211 |
+
return ""
|
| 1212 |
+
try:
|
| 1213 |
+
stdout = subprocess.check_output("oslevel", stderr=subprocess.DEVNULL)
|
| 1214 |
+
except (OSError, subprocess.CalledProcessError):
|
| 1215 |
+
return ""
|
| 1216 |
+
return self._to_str(stdout).strip()
|
| 1217 |
+
|
| 1218 |
+
@cached_property
|
| 1219 |
+
def _debian_version(self) -> str:
|
| 1220 |
+
try:
|
| 1221 |
+
with open(
|
| 1222 |
+
os.path.join(self.etc_dir, "debian_version"), encoding="ascii"
|
| 1223 |
+
) as fp:
|
| 1224 |
+
return fp.readline().rstrip()
|
| 1225 |
+
except FileNotFoundError:
|
| 1226 |
+
return ""
|
| 1227 |
+
|
| 1228 |
+
@staticmethod
|
| 1229 |
+
def _parse_uname_content(lines: Sequence[str]) -> Dict[str, str]:
|
| 1230 |
+
if not lines:
|
| 1231 |
+
return {}
|
| 1232 |
+
props = {}
|
| 1233 |
+
match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip())
|
| 1234 |
+
if match:
|
| 1235 |
+
name, version = match.groups()
|
| 1236 |
+
|
| 1237 |
+
# This is to prevent the Linux kernel version from
|
| 1238 |
+
# appearing as the 'best' version on otherwise
|
| 1239 |
+
# identifiable distributions.
|
| 1240 |
+
if name == "Linux":
|
| 1241 |
+
return {}
|
| 1242 |
+
props["id"] = name.lower()
|
| 1243 |
+
props["name"] = name
|
| 1244 |
+
props["release"] = version
|
| 1245 |
+
return props
|
| 1246 |
+
|
| 1247 |
+
@staticmethod
|
| 1248 |
+
def _to_str(bytestring: bytes) -> str:
|
| 1249 |
+
encoding = sys.getfilesystemencoding()
|
| 1250 |
+
return bytestring.decode(encoding)
|
| 1251 |
+
|
| 1252 |
+
    @cached_property
    def _distro_release_info(self) -> Dict[str, str]:
        """
        Get the information items from the specified distro release file.

        When no file was specified, /etc (``self.etc_dir``) is searched for
        candidate ``*-release`` files; the first parseable candidate is
        remembered in ``self.distro_release_file`` as a side effect.

        Returns:
            A dictionary containing all information items.
        """
        if self.distro_release_file:
            # If it was specified, we use it and parse what we can, even if
            # its file name or content does not match the expected pattern.
            distro_info = self._parse_distro_release_file(self.distro_release_file)
            basename = os.path.basename(self.distro_release_file)
            # The file name pattern for user-specified distro release files
            # is somewhat more tolerant (compared to when searching for the
            # file), because we want to use what was specified as best as
            # possible.
            match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
        else:
            try:
                basenames = [
                    basename
                    for basename in os.listdir(self.etc_dir)
                    if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
                    and os.path.isfile(os.path.join(self.etc_dir, basename))
                ]
                # We sort for repeatability in cases where there are multiple
                # distro specific files; e.g. CentOS, Oracle, Enterprise all
                # containing `redhat-release` on top of their own.
                basenames.sort()
            except OSError:
                # This may occur when /etc is not readable but we can't be
                # sure about the *-release files. Check common entries of
                # /etc for information. If they turn out to not be there the
                # error is handled in `_parse_distro_release_file()`.
                basenames = _DISTRO_RELEASE_BASENAMES
            for basename in basenames:
                match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
                if match is None:
                    continue
                filepath = os.path.join(self.etc_dir, basename)
                distro_info = self._parse_distro_release_file(filepath)
                # The name is always present if the pattern matches.
                if "name" not in distro_info:
                    continue
                # Remember the winning candidate for later callers.
                self.distro_release_file = filepath
                break
            else:  # the loop didn't "break": no candidate.
                return {}

        if match is not None:
            # Group 1 of the basename pattern is the distro id part of the
            # file name (e.g. "centos" from "centos-release").
            distro_info["id"] = match.group(1)

        # CloudLinux < 7: manually enrich info with proper id.
        if "cloudlinux" in distro_info.get("name", "").lower():
            distro_info["id"] = "cloudlinux"

        return distro_info
|
| 1310 |
+
|
| 1311 |
+
def _parse_distro_release_file(self, filepath: str) -> Dict[str, str]:
|
| 1312 |
+
"""
|
| 1313 |
+
Parse a distro release file.
|
| 1314 |
+
|
| 1315 |
+
Parameters:
|
| 1316 |
+
|
| 1317 |
+
* filepath: Path name of the distro release file.
|
| 1318 |
+
|
| 1319 |
+
Returns:
|
| 1320 |
+
A dictionary containing all information items.
|
| 1321 |
+
"""
|
| 1322 |
+
try:
|
| 1323 |
+
with open(filepath, encoding="utf-8") as fp:
|
| 1324 |
+
# Only parse the first line. For instance, on SLES there
|
| 1325 |
+
# are multiple lines. We don't want them...
|
| 1326 |
+
return self._parse_distro_release_content(fp.readline())
|
| 1327 |
+
except OSError:
|
| 1328 |
+
# Ignore not being able to read a specific, seemingly version
|
| 1329 |
+
# related file.
|
| 1330 |
+
# See https://github.com/python-distro/distro/issues/162
|
| 1331 |
+
return {}
|
| 1332 |
+
|
| 1333 |
+
@staticmethod
|
| 1334 |
+
def _parse_distro_release_content(line: str) -> Dict[str, str]:
|
| 1335 |
+
"""
|
| 1336 |
+
Parse a line from a distro release file.
|
| 1337 |
+
|
| 1338 |
+
Parameters:
|
| 1339 |
+
* line: Line from the distro release file. Must be a unicode string
|
| 1340 |
+
or a UTF-8 encoded byte string.
|
| 1341 |
+
|
| 1342 |
+
Returns:
|
| 1343 |
+
A dictionary containing all information items.
|
| 1344 |
+
"""
|
| 1345 |
+
matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1])
|
| 1346 |
+
distro_info = {}
|
| 1347 |
+
if matches:
|
| 1348 |
+
# regexp ensures non-None
|
| 1349 |
+
distro_info["name"] = matches.group(3)[::-1]
|
| 1350 |
+
if matches.group(2):
|
| 1351 |
+
distro_info["version_id"] = matches.group(2)[::-1]
|
| 1352 |
+
if matches.group(1):
|
| 1353 |
+
distro_info["codename"] = matches.group(1)[::-1]
|
| 1354 |
+
elif line:
|
| 1355 |
+
distro_info["name"] = line.strip()
|
| 1356 |
+
return distro_info
|
| 1357 |
+
|
| 1358 |
+
|
| 1359 |
+
# Shared default instance; main() below falls back to this one when no
# --root-dir is given.
_distro = LinuxDistribution()
|
| 1360 |
+
|
| 1361 |
+
|
| 1362 |
+
def main() -> None:
    """Command-line entry point: print distro information for this system."""
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(logging.StreamHandler(sys.stdout))

    parser = argparse.ArgumentParser(description="OS distro info tool")
    parser.add_argument(
        "--json", "-j", help="Output in machine readable format", action="store_true"
    )
    parser.add_argument(
        "--root-dir",
        "-r",
        type=str,
        dest="root_dir",
        help="Path to the root filesystem directory (defaults to /)",
    )
    args = parser.parse_args()

    if args.root_dir:
        # Inspecting a foreign root: the command-based sources would report
        # on the running system, so only file-based sources are enabled.
        dist = LinuxDistribution(
            include_lsb=False,
            include_uname=False,
            include_oslevel=False,
            root_dir=args.root_dir,
        )
    else:
        dist = _distro

    if args.json:
        logger.info(json.dumps(dist.info(), indent=4, sort_keys=True))
        return

    logger.info("Name: %s", dist.name(pretty=True))
    logger.info("Version: %s", dist.version(pretty=True))
    logger.info("Codename: %s", dist.codename())
|
| 1400 |
+
|
| 1401 |
+
|
| 1402 |
+
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()
|
llava/lib/python3.10/site-packages/pip/_vendor/distro/py.typed
ADDED
|
File without changes
|
llava/lib/python3.10/site-packages/pip/_vendor/pygments/__init__.py
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Pygments
|
| 3 |
+
~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Pygments is a syntax highlighting package written in Python.
|
| 6 |
+
|
| 7 |
+
It is a generic syntax highlighter for general use in all kinds of software
|
| 8 |
+
such as forum systems, wikis or other applications that need to prettify
|
| 9 |
+
source code. Highlights are:
|
| 10 |
+
|
| 11 |
+
* a wide range of common languages and markup formats is supported
|
| 12 |
+
* special attention is paid to details, increasing quality by a fair amount
|
| 13 |
+
* support for new languages and formats are added easily
|
| 14 |
+
* a number of output formats, presently HTML, LaTeX, RTF, SVG, all image
|
| 15 |
+
formats that PIL supports, and ANSI sequences
|
| 16 |
+
* it is usable as a command-line tool and as a library
|
| 17 |
+
* ... and it highlights even Brainfuck!
|
| 18 |
+
|
| 19 |
+
The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``.
|
| 20 |
+
|
| 21 |
+
.. _Pygments master branch:
|
| 22 |
+
https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
|
| 23 |
+
|
| 24 |
+
:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
|
| 25 |
+
:license: BSD, see LICENSE for details.
|
| 26 |
+
"""
|
| 27 |
+
from io import StringIO, BytesIO
|
| 28 |
+
|
| 29 |
+
__version__ = '2.18.0'
|
| 30 |
+
__docformat__ = 'restructuredtext'
|
| 31 |
+
|
| 32 |
+
__all__ = ['lex', 'format', 'highlight']
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def lex(code, lexer):
    """
    Lex `code` with the `lexer` (must be a `Lexer` instance)
    and return an iterable of tokens. Currently, this only calls
    `lexer.get_tokens()`.
    """
    try:
        return lexer.get_tokens(code)
    except TypeError:
        from pip._vendor.pygments.lexer import RegexLexer

        # Heuristic to catch a common mistake: a lexer *class* was passed
        # instead of an instance of it.
        passed_a_class = isinstance(lexer, type) and issubclass(lexer, RegexLexer)
        if passed_a_class:
            raise TypeError('lex() argument must be a lexer instance, '
                            'not a class')
        raise
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def format(tokens, formatter, outfile=None):  # pylint: disable=redefined-builtin
    """
    Format ``tokens`` (an iterable of tokens) with the formatter ``formatter``
    (a `Formatter` instance).

    If ``outfile`` is given and a valid file object (an object with a
    ``write`` method), the result will be written to it, otherwise it
    is returned as a string.
    """
    try:
        if outfile:
            formatter.format(tokens, outfile)
        else:
            # Binary formatters (those declaring an ``encoding``) get a
            # byte buffer, text formatters a string buffer.
            buffer = BytesIO() if getattr(formatter, 'encoding', None) else StringIO()
            formatter.format(tokens, buffer)
            return buffer.getvalue()
    except TypeError:
        from pip._vendor.pygments.formatter import Formatter

        # Heuristic to catch a common mistake: a formatter *class* was
        # passed instead of an instance of it.
        if isinstance(formatter, type) and issubclass(formatter, Formatter):
            raise TypeError('format() argument must be a formatter instance, '
                            'not a class')
        raise
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def highlight(code, lexer, formatter, outfile=None):
    """
    This is the most high-level highlighting function. It combines `lex` and
    `format` in one function.
    """
    token_stream = lex(code, lexer)
    return format(token_stream, formatter, outfile)
|
llava/lib/python3.10/site-packages/pip/_vendor/pygments/__main__.py
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
    pygments.__main__
    ~~~~~~~~~~~~~~~~~

    Main entry point for ``python -m pygments``.

    :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import sys
from pip._vendor.pygments.cmdline import main

# Delegate straight to the command-line driver; a Ctrl-C during
# highlighting exits with status 1 instead of a traceback.
try:
    sys.exit(main(sys.argv))
except KeyboardInterrupt:
    sys.exit(1)
|
llava/lib/python3.10/site-packages/pip/_vendor/pygments/cmdline.py
ADDED
|
@@ -0,0 +1,668 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.cmdline
|
| 3 |
+
~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Command line interface.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import os
|
| 12 |
+
import sys
|
| 13 |
+
import shutil
|
| 14 |
+
import argparse
|
| 15 |
+
from textwrap import dedent
|
| 16 |
+
|
| 17 |
+
from pip._vendor.pygments import __version__, highlight
|
| 18 |
+
from pip._vendor.pygments.util import ClassNotFound, OptionError, docstring_headline, \
|
| 19 |
+
guess_decode, guess_decode_from_terminal, terminal_encoding, \
|
| 20 |
+
UnclosingTextIOWrapper
|
| 21 |
+
from pip._vendor.pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \
|
| 22 |
+
load_lexer_from_file, get_lexer_for_filename, find_lexer_class_for_filename
|
| 23 |
+
from pip._vendor.pygments.lexers.special import TextLexer
|
| 24 |
+
from pip._vendor.pygments.formatters.latex import LatexEmbeddedLexer, LatexFormatter
|
| 25 |
+
from pip._vendor.pygments.formatters import get_all_formatters, get_formatter_by_name, \
|
| 26 |
+
load_formatter_from_file, get_formatter_for_filename, find_formatter_class
|
| 27 |
+
from pip._vendor.pygments.formatters.terminal import TerminalFormatter
|
| 28 |
+
from pip._vendor.pygments.formatters.terminal256 import Terminal256Formatter, TerminalTrueColorFormatter
|
| 29 |
+
from pip._vendor.pygments.filters import get_all_filters, find_filter_class
|
| 30 |
+
from pip._vendor.pygments.styles import get_all_styles, get_style_by_name
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def _parse_options(o_strs):
|
| 34 |
+
opts = {}
|
| 35 |
+
if not o_strs:
|
| 36 |
+
return opts
|
| 37 |
+
for o_str in o_strs:
|
| 38 |
+
if not o_str.strip():
|
| 39 |
+
continue
|
| 40 |
+
o_args = o_str.split(',')
|
| 41 |
+
for o_arg in o_args:
|
| 42 |
+
o_arg = o_arg.strip()
|
| 43 |
+
try:
|
| 44 |
+
o_key, o_val = o_arg.split('=', 1)
|
| 45 |
+
o_key = o_key.strip()
|
| 46 |
+
o_val = o_val.strip()
|
| 47 |
+
except ValueError:
|
| 48 |
+
opts[o_arg] = True
|
| 49 |
+
else:
|
| 50 |
+
opts[o_key] = o_val
|
| 51 |
+
return opts
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def _parse_filters(f_strs):
|
| 55 |
+
filters = []
|
| 56 |
+
if not f_strs:
|
| 57 |
+
return filters
|
| 58 |
+
for f_str in f_strs:
|
| 59 |
+
if ':' in f_str:
|
| 60 |
+
fname, fopts = f_str.split(':', 1)
|
| 61 |
+
filters.append((fname, _parse_options([fopts])))
|
| 62 |
+
else:
|
| 63 |
+
filters.append((f_str, {}))
|
| 64 |
+
return filters
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def _print_help(what, name):
    """Print the docstring of the named lexer/formatter/filter.

    Returns 0 on success, 1 when the component cannot be resolved.
    """
    try:
        if what == 'lexer':
            cls = get_lexer_by_name(name)
            header = f"Help on the {cls.name} lexer:"
        elif what == 'formatter':
            cls = find_formatter_class(name)
            header = f"Help on the {cls.name} formatter:"
        elif what == 'filter':
            cls = find_filter_class(name)
            header = f"Help on the {name} filter:"
        else:
            # Unknown kinds print nothing and report success, mirroring the
            # original fall-through behavior.
            return 0
        print(header)
        print(dedent(cls.__doc__))
        return 0
    except (AttributeError, ValueError):
        print(f"{what} not found!", file=sys.stderr)
        return 1
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def _print_list(what):
    """Print a human-readable listing of all components of kind *what*.

    *what* is one of 'lexer', 'formatter', 'filter' or 'style'.
    """
    if what == 'lexer':
        print()
        print("Lexers:")
        print("~~~~~~~")

        rows = sorted(
            (', '.join(names) + ':', fullname,
             '(filenames ' + ', '.join(exts) + ')' if exts else '')
            for fullname, names, exts, _ in get_all_lexers()
        )
        for row in rows:
            print('* {}\n    {} {}'.format(*row))

    elif what == 'formatter':
        print()
        print("Formatters:")
        print("~~~~~~~~~~~")

        rows = sorted(
            (', '.join(cls.aliases) + ':', docstring_headline(cls),
             '(filenames ' + ', '.join(cls.filenames) + ')' if cls.filenames else '')
            for cls in get_all_formatters()
        )
        for row in rows:
            print('* {}\n    {} {}'.format(*row))

    elif what == 'filter':
        print()
        print("Filters:")
        print("~~~~~~~~")

        for name in get_all_filters():
            cls = find_filter_class(name)
            print("* " + name + ':')
            print(f"    {docstring_headline(cls)}")

    elif what == 'style':
        print()
        print("Styles:")
        print("~~~~~~~")

        for name in get_all_styles():
            cls = get_style_by_name(name)
            print("* " + name + ':')
            print(f"    {docstring_headline(cls)}")
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
def _print_list_as_json(requested_items):
    """Dump machine-readable metadata for the requested component kinds.

    Writes a single JSON object to stdout with one top-level key per
    requested kind ('lexers', 'formatters', 'filters', 'styles').
    """
    import json
    result = {}
    if 'lexer' in requested_items:
        result['lexers'] = {
            fullname: {
                'aliases': names,
                'filenames': filenames,
                'mimetypes': mimetypes,
            }
            for fullname, names, filenames, mimetypes in get_all_lexers()
        }

    if 'formatter' in requested_items:
        result['formatters'] = {
            cls.name: {
                'aliases': cls.aliases,
                'filenames': cls.filenames,
                'doc': docstring_headline(cls),
            }
            for cls in get_all_formatters()
        }

    if 'filter' in requested_items:
        result['filters'] = {
            name: {'doc': docstring_headline(find_filter_class(name))}
            for name in get_all_filters()
        }

    if 'style' in requested_items:
        result['styles'] = {
            name: {'doc': docstring_headline(get_style_by_name(name))}
            for name in get_all_styles()
        }

    json.dump(result, sys.stdout)
|
| 181 |
+
|
| 182 |
+
def main_inner(parser, argns):
    """Run the ``pygmentize`` command for parsed arguments *argns*.

    Returns a process exit code: 0 on success, 1 on runtime errors,
    2 on usage errors (help text is printed to stderr in that case).
    """
    if argns.help:
        parser.print_help()
        return 0

    if argns.V:
        print(f'Pygments version {__version__}, (c) 2006-2024 by Georg Brandl, Matthäus '
              'Chajdas and contributors.')
        return 0

    def is_only_option(opt):
        # True when *opt* is the only option that was actually supplied.
        return not any(v for (k, v) in vars(argns).items() if k != opt)

    # handle ``pygmentize -L``
    if argns.L is not None:
        arg_set = set()
        for k, v in vars(argns).items():
            if v:
                arg_set.add(k)

        arg_set.discard('L')
        arg_set.discard('json')

        # -L must not be combined with any other option.
        if arg_set:
            parser.print_help(sys.stderr)
            return 2

        # print version
        if not argns.json:
            main(['', '-V'])
        allowed_types = {'lexer', 'formatter', 'filter', 'style'}
        # Accept plural forms ("lexers") by stripping a trailing 's'.
        largs = [arg.rstrip('s') for arg in argns.L]
        if any(arg not in allowed_types for arg in largs):
            parser.print_help(sys.stderr)
            return 0
        if not largs:
            largs = allowed_types
        if not argns.json:
            for arg in largs:
                _print_list(arg)
        else:
            _print_list_as_json(largs)
        return 0

    # handle ``pygmentize -H``
    if argns.H:
        if not is_only_option('H'):
            parser.print_help(sys.stderr)
            return 2
        what, name = argns.H
        if what not in ('lexer', 'formatter', 'filter'):
            parser.print_help(sys.stderr)
            return 2
        return _print_help(what, name)

    # parse -O options
    parsed_opts = _parse_options(argns.O or [])

    # parse -P options
    for p_opt in argns.P or []:
        try:
            name, value = p_opt.split('=', 1)
        except ValueError:
            parsed_opts[p_opt] = True
        else:
            parsed_opts[name] = value

    # encodings
    inencoding = parsed_opts.get('inencoding', parsed_opts.get('encoding'))
    outencoding = parsed_opts.get('outencoding', parsed_opts.get('encoding'))

    # handle ``pygmentize -N``
    if argns.N:
        lexer = find_lexer_class_for_filename(argns.N)
        if lexer is None:
            lexer = TextLexer

        print(lexer.aliases[0])
        return 0

    # handle ``pygmentize -C``
    if argns.C:
        inp = sys.stdin.buffer.read()
        try:
            lexer = guess_lexer(inp, inencoding=inencoding)
        except ClassNotFound:
            lexer = TextLexer

        print(lexer.aliases[0])
        return 0

    # handle ``pygmentize -S``
    S_opt = argns.S
    a_opt = argns.a
    if S_opt is not None:
        f_opt = argns.f
        if not f_opt:
            parser.print_help(sys.stderr)
            return 2
        if argns.l or argns.INPUTFILE:
            parser.print_help(sys.stderr)
            return 2

        try:
            parsed_opts['style'] = S_opt
            fmter = get_formatter_by_name(f_opt, **parsed_opts)
        except ClassNotFound as err:
            print(err, file=sys.stderr)
            return 1

        print(fmter.get_style_defs(a_opt or ''))
        return 0

    # if no -S is given, -a is not allowed
    if argns.a is not None:
        parser.print_help(sys.stderr)
        return 2

    # parse -F options
    F_opts = _parse_filters(argns.F or [])

    # -x: allow custom (eXternal) lexers and formatters
    allow_custom_lexer_formatter = bool(argns.x)

    # select lexer
    lexer = None

    # given by name?
    lexername = argns.l
    if lexername:
        # custom lexer, located relative to user's cwd
        if allow_custom_lexer_formatter and '.py' in lexername:
            try:
                filename = None
                name = None
                if ':' in lexername:
                    filename, name = lexername.rsplit(':', 1)

                    if '.py' in name:
                        # This can happen on Windows: If the lexername is
                        # C:\lexer.py -- return to normal load path in that case
                        name = None

                if filename and name:
                    lexer = load_lexer_from_file(filename, name,
                                                 **parsed_opts)
                else:
                    lexer = load_lexer_from_file(lexername, **parsed_opts)
            except ClassNotFound as err:
                print('Error:', err, file=sys.stderr)
                return 1
        else:
            try:
                lexer = get_lexer_by_name(lexername, **parsed_opts)
            except (OptionError, ClassNotFound) as err:
                print('Error:', err, file=sys.stderr)
                return 1

    # read input code
    code = None

    if argns.INPUTFILE:
        if argns.s:
            print('Error: -s option not usable when input file specified',
                  file=sys.stderr)
            return 2

        infn = argns.INPUTFILE
        try:
            with open(infn, 'rb') as infp:
                code = infp.read()
        except Exception as err:
            print('Error: cannot read infile:', err, file=sys.stderr)
            return 1
        if not inencoding:
            code, inencoding = guess_decode(code)

        # do we have to guess the lexer?
        if not lexer:
            try:
                lexer = get_lexer_for_filename(infn, code, **parsed_opts)
            except ClassNotFound as err:
                if argns.g:
                    try:
                        lexer = guess_lexer(code, **parsed_opts)
                    except ClassNotFound:
                        lexer = TextLexer(**parsed_opts)
                else:
                    print('Error:', err, file=sys.stderr)
                    return 1
            except OptionError as err:
                print('Error:', err, file=sys.stderr)
                return 1

    elif not argns.s:  # treat stdin as full file (-s support is later)
        # read code from terminal, always in binary mode since we want to
        # decode ourselves and be tolerant with it
        code = sys.stdin.buffer.read()  # use .buffer to get a binary stream
        if not inencoding:
            code, inencoding = guess_decode_from_terminal(code, sys.stdin)
            # else the lexer will do the decoding
        if not lexer:
            try:
                lexer = guess_lexer(code, **parsed_opts)
            except ClassNotFound:
                lexer = TextLexer(**parsed_opts)

    else:  # -s option needs a lexer with -l
        if not lexer:
            print('Error: when using -s a lexer has to be selected with -l',
                  file=sys.stderr)
            return 2

    # process filters
    for fname, fopts in F_opts:
        try:
            lexer.add_filter(fname, **fopts)
        except ClassNotFound as err:
            print('Error:', err, file=sys.stderr)
            return 1

    # select formatter
    outfn = argns.o
    fmter = argns.f
    if fmter:
        # custom formatter, located relative to user's cwd
        if allow_custom_lexer_formatter and '.py' in fmter:
            try:
                filename = None
                name = None
                if ':' in fmter:
                    # Same logic as above for custom lexer
                    filename, name = fmter.rsplit(':', 1)

                    if '.py' in name:
                        name = None

                if filename and name:
                    fmter = load_formatter_from_file(filename, name,
                                                     **parsed_opts)
                else:
                    fmter = load_formatter_from_file(fmter, **parsed_opts)
            except ClassNotFound as err:
                print('Error:', err, file=sys.stderr)
                return 1
        else:
            try:
                fmter = get_formatter_by_name(fmter, **parsed_opts)
            except (OptionError, ClassNotFound) as err:
                print('Error:', err, file=sys.stderr)
                return 1

    if outfn:
        if not fmter:
            try:
                fmter = get_formatter_for_filename(outfn, **parsed_opts)
            except (OptionError, ClassNotFound) as err:
                print('Error:', err, file=sys.stderr)
                return 1
        try:
            outfile = open(outfn, 'wb')
        except Exception as err:
            print('Error: cannot open outfile:', err, file=sys.stderr)
            return 1
    else:
        if not fmter:
            # Pick the richest terminal formatter the environment supports.
            if os.environ.get('COLORTERM', '') in ('truecolor', '24bit'):
                fmter = TerminalTrueColorFormatter(**parsed_opts)
            elif '256' in os.environ.get('TERM', ''):
                fmter = Terminal256Formatter(**parsed_opts)
            else:
                fmter = TerminalFormatter(**parsed_opts)
        outfile = sys.stdout.buffer

    # determine output encoding if not explicitly selected
    if not outencoding:
        if outfn:
            # output file? use lexer encoding for now (can still be None)
            fmter.encoding = inencoding
        else:
            # else use terminal encoding
            fmter.encoding = terminal_encoding(sys.stdout)

    # provide coloring under Windows, if possible
    if not outfn and sys.platform in ('win32', 'cygwin') and \
       fmter.name in ('Terminal', 'Terminal256'):  # pragma: no cover
        # unfortunately colorama doesn't support binary streams on Py3
        outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding)
        fmter.encoding = None
        try:
            import colorama.initialise
        except ImportError:
            pass
        else:
            outfile = colorama.initialise.wrap_stream(
                outfile, convert=None, strip=None, autoreset=False, wrap=True)

    # When using the LaTeX formatter and the option `escapeinside` is
    # specified, we need a special lexer which collects escaped text
    # before running the chosen language lexer.
    escapeinside = parsed_opts.get('escapeinside', '')
    if len(escapeinside) == 2 and isinstance(fmter, LatexFormatter):
        left = escapeinside[0]
        right = escapeinside[1]
        lexer = LatexEmbeddedLexer(left, right, lexer)

    # ... and do it!
    if not argns.s:
        # process whole input as per normal...
        try:
            highlight(code, lexer, fmter, outfile)
        finally:
            if outfn:
                outfile.close()
        return 0
    else:
        # line by line processing of stdin (eg: for 'tail -f')...
        try:
            while 1:
                line = sys.stdin.buffer.readline()
                if not line:
                    break
                if not inencoding:
                    line = guess_decode_from_terminal(line, sys.stdin)[0]
                highlight(line, lexer, fmter, outfile)
                if hasattr(outfile, 'flush'):
                    outfile.flush()
            return 0
        except KeyboardInterrupt:  # pragma: no cover
            return 0
        finally:
            if outfn:
                outfile.close()
|
| 515 |
+
|
| 516 |
+
|
| 517 |
+
class HelpFormatter(argparse.HelpFormatter):
|
| 518 |
+
def __init__(self, prog, indent_increment=2, max_help_position=16, width=None):
|
| 519 |
+
if width is None:
|
| 520 |
+
try:
|
| 521 |
+
width = shutil.get_terminal_size().columns - 2
|
| 522 |
+
except Exception:
|
| 523 |
+
pass
|
| 524 |
+
argparse.HelpFormatter.__init__(self, prog, indent_increment,
|
| 525 |
+
max_help_position, width)
|
| 526 |
+
|
| 527 |
+
|
| 528 |
+
def main(args=sys.argv):
|
| 529 |
+
"""
|
| 530 |
+
Main command line entry point.
|
| 531 |
+
"""
|
| 532 |
+
desc = "Highlight an input file and write the result to an output file."
|
| 533 |
+
parser = argparse.ArgumentParser(description=desc, add_help=False,
|
| 534 |
+
formatter_class=HelpFormatter)
|
| 535 |
+
|
| 536 |
+
operation = parser.add_argument_group('Main operation')
|
| 537 |
+
lexersel = operation.add_mutually_exclusive_group()
|
| 538 |
+
lexersel.add_argument(
|
| 539 |
+
'-l', metavar='LEXER',
|
| 540 |
+
help='Specify the lexer to use. (Query names with -L.) If not '
|
| 541 |
+
'given and -g is not present, the lexer is guessed from the filename.')
|
| 542 |
+
lexersel.add_argument(
|
| 543 |
+
'-g', action='store_true',
|
| 544 |
+
help='Guess the lexer from the file contents, or pass through '
|
| 545 |
+
'as plain text if nothing can be guessed.')
|
| 546 |
+
operation.add_argument(
|
| 547 |
+
'-F', metavar='FILTER[:options]', action='append',
|
| 548 |
+
help='Add a filter to the token stream. (Query names with -L.) '
|
| 549 |
+
'Filter options are given after a colon if necessary.')
|
| 550 |
+
operation.add_argument(
|
| 551 |
+
'-f', metavar='FORMATTER',
|
| 552 |
+
help='Specify the formatter to use. (Query names with -L.) '
|
| 553 |
+
'If not given, the formatter is guessed from the output filename, '
|
| 554 |
+
'and defaults to the terminal formatter if the output is to the '
|
| 555 |
+
'terminal or an unknown file extension.')
|
| 556 |
+
operation.add_argument(
|
| 557 |
+
'-O', metavar='OPTION=value[,OPTION=value,...]', action='append',
|
| 558 |
+
help='Give options to the lexer and formatter as a comma-separated '
|
| 559 |
+
'list of key-value pairs. '
|
| 560 |
+
'Example: `-O bg=light,python=cool`.')
|
| 561 |
+
operation.add_argument(
|
| 562 |
+
'-P', metavar='OPTION=value', action='append',
|
| 563 |
+
help='Give a single option to the lexer and formatter - with this '
|
| 564 |
+
'you can pass options whose value contains commas and equal signs. '
|
| 565 |
+
'Example: `-P "heading=Pygments, the Python highlighter"`.')
|
| 566 |
+
operation.add_argument(
|
| 567 |
+
'-o', metavar='OUTPUTFILE',
|
| 568 |
+
help='Where to write the output. Defaults to standard output.')
|
| 569 |
+
|
| 570 |
+
operation.add_argument(
|
| 571 |
+
'INPUTFILE', nargs='?',
|
| 572 |
+
help='Where to read the input. Defaults to standard input.')
|
| 573 |
+
|
| 574 |
+
flags = parser.add_argument_group('Operation flags')
|
| 575 |
+
flags.add_argument(
|
| 576 |
+
'-v', action='store_true',
|
| 577 |
+
help='Print a detailed traceback on unhandled exceptions, which '
|
| 578 |
+
'is useful for debugging and bug reports.')
|
| 579 |
+
flags.add_argument(
|
| 580 |
+
'-s', action='store_true',
|
| 581 |
+
help='Process lines one at a time until EOF, rather than waiting to '
|
| 582 |
+
'process the entire file. This only works for stdin, only for lexers '
|
| 583 |
+
'with no line-spanning constructs, and is intended for streaming '
|
| 584 |
+
'input such as you get from `tail -f`. '
|
| 585 |
+
'Example usage: `tail -f sql.log | pygmentize -s -l sql`.')
|
| 586 |
+
flags.add_argument(
|
| 587 |
+
'-x', action='store_true',
|
| 588 |
+
help='Allow custom lexers and formatters to be loaded from a .py file '
|
| 589 |
+
'relative to the current working directory. For example, '
|
| 590 |
+
'`-l ./customlexer.py -x`. By default, this option expects a file '
|
| 591 |
+
'with a class named CustomLexer or CustomFormatter; you can also '
|
| 592 |
+
'specify your own class name with a colon (`-l ./lexer.py:MyLexer`). '
|
| 593 |
+
'Users should be very careful not to use this option with untrusted '
|
| 594 |
+
'files, because it will import and run them.')
|
| 595 |
+
flags.add_argument('--json', help='Output as JSON. This can '
|
| 596 |
+
'be only used in conjunction with -L.',
|
| 597 |
+
default=False,
|
| 598 |
+
action='store_true')
|
| 599 |
+
|
| 600 |
+
special_modes_group = parser.add_argument_group(
|
| 601 |
+
'Special modes - do not do any highlighting')
|
| 602 |
+
special_modes = special_modes_group.add_mutually_exclusive_group()
|
| 603 |
+
special_modes.add_argument(
|
| 604 |
+
'-S', metavar='STYLE -f formatter',
|
| 605 |
+
help='Print style definitions for STYLE for a formatter '
|
| 606 |
+
'given with -f. The argument given by -a is formatter '
|
| 607 |
+
'dependent.')
|
| 608 |
+
special_modes.add_argument(
|
| 609 |
+
'-L', nargs='*', metavar='WHAT',
|
| 610 |
+
help='List lexers, formatters, styles or filters -- '
|
| 611 |
+
'give additional arguments for the thing(s) you want to list '
|
| 612 |
+
'(e.g. "styles"), or omit them to list everything.')
|
| 613 |
+
special_modes.add_argument(
|
| 614 |
+
'-N', metavar='FILENAME',
|
| 615 |
+
help='Guess and print out a lexer name based solely on the given '
|
| 616 |
+
'filename. Does not take input or highlight anything. If no specific '
|
| 617 |
+
'lexer can be determined, "text" is printed.')
|
| 618 |
+
special_modes.add_argument(
|
| 619 |
+
'-C', action='store_true',
|
| 620 |
+
help='Like -N, but print out a lexer name based solely on '
|
| 621 |
+
'a given content from standard input.')
|
| 622 |
+
special_modes.add_argument(
|
| 623 |
+
'-H', action='store', nargs=2, metavar=('NAME', 'TYPE'),
|
| 624 |
+
help='Print detailed help for the object <name> of type <type>, '
|
| 625 |
+
'where <type> is one of "lexer", "formatter" or "filter".')
|
| 626 |
+
special_modes.add_argument(
|
| 627 |
+
'-V', action='store_true',
|
| 628 |
+
help='Print the package version.')
|
| 629 |
+
special_modes.add_argument(
|
| 630 |
+
'-h', '--help', action='store_true',
|
| 631 |
+
help='Print this help.')
|
| 632 |
+
special_modes_group.add_argument(
|
| 633 |
+
'-a', metavar='ARG',
|
| 634 |
+
help='Formatter-specific additional argument for the -S (print '
|
| 635 |
+
'style sheet) mode.')
|
| 636 |
+
|
| 637 |
+
argns = parser.parse_args(args[1:])
|
| 638 |
+
|
| 639 |
+
try:
|
| 640 |
+
return main_inner(parser, argns)
|
| 641 |
+
except BrokenPipeError:
|
| 642 |
+
# someone closed our stdout, e.g. by quitting a pager.
|
| 643 |
+
return 0
|
| 644 |
+
except Exception:
|
| 645 |
+
if argns.v:
|
| 646 |
+
print(file=sys.stderr)
|
| 647 |
+
print('*' * 65, file=sys.stderr)
|
| 648 |
+
print('An unhandled exception occurred while highlighting.',
|
| 649 |
+
file=sys.stderr)
|
| 650 |
+
print('Please report the whole traceback to the issue tracker at',
|
| 651 |
+
file=sys.stderr)
|
| 652 |
+
print('<https://github.com/pygments/pygments/issues>.',
|
| 653 |
+
file=sys.stderr)
|
| 654 |
+
print('*' * 65, file=sys.stderr)
|
| 655 |
+
print(file=sys.stderr)
|
| 656 |
+
raise
|
| 657 |
+
import traceback
|
| 658 |
+
info = traceback.format_exception(*sys.exc_info())
|
| 659 |
+
msg = info[-1].strip()
|
| 660 |
+
if len(info) >= 3:
|
| 661 |
+
# extract relevant file and position info
|
| 662 |
+
msg += '\n (f{})'.format(info[-2].split('\n')[0].strip()[1:])
|
| 663 |
+
print(file=sys.stderr)
|
| 664 |
+
print('*** Error while highlighting:', file=sys.stderr)
|
| 665 |
+
print(msg, file=sys.stderr)
|
| 666 |
+
print('*** If this is a bug you want to report, please rerun with -v.',
|
| 667 |
+
file=sys.stderr)
|
| 668 |
+
return 1
|
llava/lib/python3.10/site-packages/pip/_vendor/pygments/formatter.py
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.formatter
|
| 3 |
+
~~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Base formatter class.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import codecs
|
| 12 |
+
|
| 13 |
+
from pip._vendor.pygments.util import get_bool_opt
|
| 14 |
+
from pip._vendor.pygments.styles import get_style_by_name
|
| 15 |
+
|
| 16 |
+
__all__ = ['Formatter']
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def _lookup_style(style):
|
| 20 |
+
if isinstance(style, str):
|
| 21 |
+
return get_style_by_name(style)
|
| 22 |
+
return style
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class Formatter:
|
| 26 |
+
"""
|
| 27 |
+
Converts a token stream to text.
|
| 28 |
+
|
| 29 |
+
Formatters should have attributes to help selecting them. These
|
| 30 |
+
are similar to the corresponding :class:`~pygments.lexer.Lexer`
|
| 31 |
+
attributes.
|
| 32 |
+
|
| 33 |
+
.. autoattribute:: name
|
| 34 |
+
:no-value:
|
| 35 |
+
|
| 36 |
+
.. autoattribute:: aliases
|
| 37 |
+
:no-value:
|
| 38 |
+
|
| 39 |
+
.. autoattribute:: filenames
|
| 40 |
+
:no-value:
|
| 41 |
+
|
| 42 |
+
You can pass options as keyword arguments to the constructor.
|
| 43 |
+
All formatters accept these basic options:
|
| 44 |
+
|
| 45 |
+
``style``
|
| 46 |
+
The style to use, can be a string or a Style subclass
|
| 47 |
+
(default: "default"). Not used by e.g. the
|
| 48 |
+
TerminalFormatter.
|
| 49 |
+
``full``
|
| 50 |
+
Tells the formatter to output a "full" document, i.e.
|
| 51 |
+
a complete self-contained document. This doesn't have
|
| 52 |
+
any effect for some formatters (default: false).
|
| 53 |
+
``title``
|
| 54 |
+
If ``full`` is true, the title that should be used to
|
| 55 |
+
caption the document (default: '').
|
| 56 |
+
``encoding``
|
| 57 |
+
If given, must be an encoding name. This will be used to
|
| 58 |
+
convert the Unicode token strings to byte strings in the
|
| 59 |
+
output. If it is "" or None, Unicode strings will be written
|
| 60 |
+
to the output file, which most file-like objects do not
|
| 61 |
+
support (default: None).
|
| 62 |
+
``outencoding``
|
| 63 |
+
Overrides ``encoding`` if given.
|
| 64 |
+
|
| 65 |
+
"""
|
| 66 |
+
|
| 67 |
+
#: Full name for the formatter, in human-readable form.
|
| 68 |
+
name = None
|
| 69 |
+
|
| 70 |
+
#: A list of short, unique identifiers that can be used to lookup
|
| 71 |
+
#: the formatter from a list, e.g. using :func:`.get_formatter_by_name()`.
|
| 72 |
+
aliases = []
|
| 73 |
+
|
| 74 |
+
#: A list of fnmatch patterns that match filenames for which this
|
| 75 |
+
#: formatter can produce output. The patterns in this list should be unique
|
| 76 |
+
#: among all formatters.
|
| 77 |
+
filenames = []
|
| 78 |
+
|
| 79 |
+
#: If True, this formatter outputs Unicode strings when no encoding
|
| 80 |
+
#: option is given.
|
| 81 |
+
unicodeoutput = True
|
| 82 |
+
|
| 83 |
+
def __init__(self, **options):
|
| 84 |
+
"""
|
| 85 |
+
As with lexers, this constructor takes arbitrary optional arguments,
|
| 86 |
+
and if you override it, you should first process your own options, then
|
| 87 |
+
call the base class implementation.
|
| 88 |
+
"""
|
| 89 |
+
self.style = _lookup_style(options.get('style', 'default'))
|
| 90 |
+
self.full = get_bool_opt(options, 'full', False)
|
| 91 |
+
self.title = options.get('title', '')
|
| 92 |
+
self.encoding = options.get('encoding', None) or None
|
| 93 |
+
if self.encoding in ('guess', 'chardet'):
|
| 94 |
+
# can happen for e.g. pygmentize -O encoding=guess
|
| 95 |
+
self.encoding = 'utf-8'
|
| 96 |
+
self.encoding = options.get('outencoding') or self.encoding
|
| 97 |
+
self.options = options
|
| 98 |
+
|
| 99 |
+
def get_style_defs(self, arg=''):
|
| 100 |
+
"""
|
| 101 |
+
This method must return statements or declarations suitable to define
|
| 102 |
+
the current style for subsequent highlighted text (e.g. CSS classes
|
| 103 |
+
in the `HTMLFormatter`).
|
| 104 |
+
|
| 105 |
+
The optional argument `arg` can be used to modify the generation and
|
| 106 |
+
is formatter dependent (it is standardized because it can be given on
|
| 107 |
+
the command line).
|
| 108 |
+
|
| 109 |
+
This method is called by the ``-S`` :doc:`command-line option <cmdline>`,
|
| 110 |
+
the `arg` is then given by the ``-a`` option.
|
| 111 |
+
"""
|
| 112 |
+
return ''
|
| 113 |
+
|
| 114 |
+
def format(self, tokensource, outfile):
|
| 115 |
+
"""
|
| 116 |
+
This method must format the tokens from the `tokensource` iterable and
|
| 117 |
+
write the formatted version to the file object `outfile`.
|
| 118 |
+
|
| 119 |
+
Formatter options can control how exactly the tokens are converted.
|
| 120 |
+
"""
|
| 121 |
+
if self.encoding:
|
| 122 |
+
# wrap the outfile in a StreamWriter
|
| 123 |
+
outfile = codecs.lookup(self.encoding)[3](outfile)
|
| 124 |
+
return self.format_unencoded(tokensource, outfile)
|
| 125 |
+
|
| 126 |
+
# Allow writing Formatter[str] or Formatter[bytes]. That's equivalent to
|
| 127 |
+
# Formatter. This helps when using third-party type stubs from typeshed.
|
| 128 |
+
def __class_getitem__(cls, name):
|
| 129 |
+
return cls
|
llava/lib/python3.10/site-packages/pip/_vendor/pygments/lexer.py
ADDED
|
@@ -0,0 +1,963 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.lexer
|
| 3 |
+
~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Base lexer classes.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import re
|
| 12 |
+
import sys
|
| 13 |
+
import time
|
| 14 |
+
|
| 15 |
+
from pip._vendor.pygments.filter import apply_filters, Filter
|
| 16 |
+
from pip._vendor.pygments.filters import get_filter_by_name
|
| 17 |
+
from pip._vendor.pygments.token import Error, Text, Other, Whitespace, _TokenType
|
| 18 |
+
from pip._vendor.pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
|
| 19 |
+
make_analysator, Future, guess_decode
|
| 20 |
+
from pip._vendor.pygments.regexopt import regex_opt
|
| 21 |
+
|
| 22 |
+
__all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer',
|
| 23 |
+
'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this',
|
| 24 |
+
'default', 'words', 'line_re']
|
| 25 |
+
|
| 26 |
+
line_re = re.compile('.*?\n')
|
| 27 |
+
|
| 28 |
+
_encoding_map = [(b'\xef\xbb\xbf', 'utf-8'),
|
| 29 |
+
(b'\xff\xfe\0\0', 'utf-32'),
|
| 30 |
+
(b'\0\0\xfe\xff', 'utf-32be'),
|
| 31 |
+
(b'\xff\xfe', 'utf-16'),
|
| 32 |
+
(b'\xfe\xff', 'utf-16be')]
|
| 33 |
+
|
| 34 |
+
_default_analyse = staticmethod(lambda x: 0.0)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class LexerMeta(type):
|
| 38 |
+
"""
|
| 39 |
+
This metaclass automagically converts ``analyse_text`` methods into
|
| 40 |
+
static methods which always return float values.
|
| 41 |
+
"""
|
| 42 |
+
|
| 43 |
+
def __new__(mcs, name, bases, d):
|
| 44 |
+
if 'analyse_text' in d:
|
| 45 |
+
d['analyse_text'] = make_analysator(d['analyse_text'])
|
| 46 |
+
return type.__new__(mcs, name, bases, d)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class Lexer(metaclass=LexerMeta):
|
| 50 |
+
"""
|
| 51 |
+
Lexer for a specific language.
|
| 52 |
+
|
| 53 |
+
See also :doc:`lexerdevelopment`, a high-level guide to writing
|
| 54 |
+
lexers.
|
| 55 |
+
|
| 56 |
+
Lexer classes have attributes used for choosing the most appropriate
|
| 57 |
+
lexer based on various criteria.
|
| 58 |
+
|
| 59 |
+
.. autoattribute:: name
|
| 60 |
+
:no-value:
|
| 61 |
+
.. autoattribute:: aliases
|
| 62 |
+
:no-value:
|
| 63 |
+
.. autoattribute:: filenames
|
| 64 |
+
:no-value:
|
| 65 |
+
.. autoattribute:: alias_filenames
|
| 66 |
+
.. autoattribute:: mimetypes
|
| 67 |
+
:no-value:
|
| 68 |
+
.. autoattribute:: priority
|
| 69 |
+
|
| 70 |
+
Lexers included in Pygments should have two additional attributes:
|
| 71 |
+
|
| 72 |
+
.. autoattribute:: url
|
| 73 |
+
:no-value:
|
| 74 |
+
.. autoattribute:: version_added
|
| 75 |
+
:no-value:
|
| 76 |
+
|
| 77 |
+
Lexers included in Pygments may have additional attributes:
|
| 78 |
+
|
| 79 |
+
.. autoattribute:: _example
|
| 80 |
+
:no-value:
|
| 81 |
+
|
| 82 |
+
You can pass options to the constructor. The basic options recognized
|
| 83 |
+
by all lexers and processed by the base `Lexer` class are:
|
| 84 |
+
|
| 85 |
+
``stripnl``
|
| 86 |
+
Strip leading and trailing newlines from the input (default: True).
|
| 87 |
+
``stripall``
|
| 88 |
+
Strip all leading and trailing whitespace from the input
|
| 89 |
+
(default: False).
|
| 90 |
+
``ensurenl``
|
| 91 |
+
Make sure that the input ends with a newline (default: True). This
|
| 92 |
+
is required for some lexers that consume input linewise.
|
| 93 |
+
|
| 94 |
+
.. versionadded:: 1.3
|
| 95 |
+
|
| 96 |
+
``tabsize``
|
| 97 |
+
If given and greater than 0, expand tabs in the input (default: 0).
|
| 98 |
+
``encoding``
|
| 99 |
+
If given, must be an encoding name. This encoding will be used to
|
| 100 |
+
convert the input string to Unicode, if it is not already a Unicode
|
| 101 |
+
string (default: ``'guess'``, which uses a simple UTF-8 / Locale /
|
| 102 |
+
Latin1 detection. Can also be ``'chardet'`` to use the chardet
|
| 103 |
+
library, if it is installed.
|
| 104 |
+
``inencoding``
|
| 105 |
+
Overrides the ``encoding`` if given.
|
| 106 |
+
"""
|
| 107 |
+
|
| 108 |
+
#: Full name of the lexer, in human-readable form
|
| 109 |
+
name = None
|
| 110 |
+
|
| 111 |
+
#: A list of short, unique identifiers that can be used to look
|
| 112 |
+
#: up the lexer from a list, e.g., using `get_lexer_by_name()`.
|
| 113 |
+
aliases = []
|
| 114 |
+
|
| 115 |
+
#: A list of `fnmatch` patterns that match filenames which contain
|
| 116 |
+
#: content for this lexer. The patterns in this list should be unique among
|
| 117 |
+
#: all lexers.
|
| 118 |
+
filenames = []
|
| 119 |
+
|
| 120 |
+
#: A list of `fnmatch` patterns that match filenames which may or may not
|
| 121 |
+
#: contain content for this lexer. This list is used by the
|
| 122 |
+
#: :func:`.guess_lexer_for_filename()` function, to determine which lexers
|
| 123 |
+
#: are then included in guessing the correct one. That means that
|
| 124 |
+
#: e.g. every lexer for HTML and a template language should include
|
| 125 |
+
#: ``\*.html`` in this list.
|
| 126 |
+
alias_filenames = []
|
| 127 |
+
|
| 128 |
+
#: A list of MIME types for content that can be lexed with this lexer.
|
| 129 |
+
mimetypes = []
|
| 130 |
+
|
| 131 |
+
#: Priority, should multiple lexers match and no content is provided
|
| 132 |
+
priority = 0
|
| 133 |
+
|
| 134 |
+
#: URL of the language specification/definition. Used in the Pygments
|
| 135 |
+
#: documentation. Set to an empty string to disable.
|
| 136 |
+
url = None
|
| 137 |
+
|
| 138 |
+
#: Version of Pygments in which the lexer was added.
|
| 139 |
+
version_added = None
|
| 140 |
+
|
| 141 |
+
#: Example file name. Relative to the ``tests/examplefiles`` directory.
|
| 142 |
+
#: This is used by the documentation generator to show an example.
|
| 143 |
+
_example = None
|
| 144 |
+
|
| 145 |
+
def __init__(self, **options):
|
| 146 |
+
"""
|
| 147 |
+
This constructor takes arbitrary options as keyword arguments.
|
| 148 |
+
Every subclass must first process its own options and then call
|
| 149 |
+
the `Lexer` constructor, since it processes the basic
|
| 150 |
+
options like `stripnl`.
|
| 151 |
+
|
| 152 |
+
An example looks like this:
|
| 153 |
+
|
| 154 |
+
.. sourcecode:: python
|
| 155 |
+
|
| 156 |
+
def __init__(self, **options):
|
| 157 |
+
self.compress = options.get('compress', '')
|
| 158 |
+
Lexer.__init__(self, **options)
|
| 159 |
+
|
| 160 |
+
As these options must all be specifiable as strings (due to the
|
| 161 |
+
command line usage), there are various utility functions
|
| 162 |
+
available to help with that, see `Utilities`_.
|
| 163 |
+
"""
|
| 164 |
+
self.options = options
|
| 165 |
+
self.stripnl = get_bool_opt(options, 'stripnl', True)
|
| 166 |
+
self.stripall = get_bool_opt(options, 'stripall', False)
|
| 167 |
+
self.ensurenl = get_bool_opt(options, 'ensurenl', True)
|
| 168 |
+
self.tabsize = get_int_opt(options, 'tabsize', 0)
|
| 169 |
+
self.encoding = options.get('encoding', 'guess')
|
| 170 |
+
self.encoding = options.get('inencoding') or self.encoding
|
| 171 |
+
self.filters = []
|
| 172 |
+
for filter_ in get_list_opt(options, 'filters', ()):
|
| 173 |
+
self.add_filter(filter_)
|
| 174 |
+
|
| 175 |
+
def __repr__(self):
|
| 176 |
+
if self.options:
|
| 177 |
+
return f'<pygments.lexers.{self.__class__.__name__} with {self.options!r}>'
|
| 178 |
+
else:
|
| 179 |
+
return f'<pygments.lexers.{self.__class__.__name__}>'
|
| 180 |
+
|
| 181 |
+
def add_filter(self, filter_, **options):
|
| 182 |
+
"""
|
| 183 |
+
Add a new stream filter to this lexer.
|
| 184 |
+
"""
|
| 185 |
+
if not isinstance(filter_, Filter):
|
| 186 |
+
filter_ = get_filter_by_name(filter_, **options)
|
| 187 |
+
self.filters.append(filter_)
|
| 188 |
+
|
| 189 |
+
def analyse_text(text):
|
| 190 |
+
"""
|
| 191 |
+
A static method which is called for lexer guessing.
|
| 192 |
+
|
| 193 |
+
It should analyse the text and return a float in the range
|
| 194 |
+
from ``0.0`` to ``1.0``. If it returns ``0.0``, the lexer
|
| 195 |
+
will not be selected as the most probable one, if it returns
|
| 196 |
+
``1.0``, it will be selected immediately. This is used by
|
| 197 |
+
`guess_lexer`.
|
| 198 |
+
|
| 199 |
+
The `LexerMeta` metaclass automatically wraps this function so
|
| 200 |
+
that it works like a static method (no ``self`` or ``cls``
|
| 201 |
+
parameter) and the return value is automatically converted to
|
| 202 |
+
`float`. If the return value is an object that is boolean `False`
|
| 203 |
+
it's the same as if the return values was ``0.0``.
|
| 204 |
+
"""
|
| 205 |
+
|
| 206 |
+
def _preprocess_lexer_input(self, text):
|
| 207 |
+
"""Apply preprocessing such as decoding the input, removing BOM and normalizing newlines."""
|
| 208 |
+
|
| 209 |
+
if not isinstance(text, str):
|
| 210 |
+
if self.encoding == 'guess':
|
| 211 |
+
text, _ = guess_decode(text)
|
| 212 |
+
elif self.encoding == 'chardet':
|
| 213 |
+
try:
|
| 214 |
+
# pip vendoring note: this code is not reachable by pip,
|
| 215 |
+
# removed import of chardet to make it clear.
|
| 216 |
+
raise ImportError('chardet is not vendored by pip')
|
| 217 |
+
except ImportError as e:
|
| 218 |
+
raise ImportError('To enable chardet encoding guessing, '
|
| 219 |
+
'please install the chardet library '
|
| 220 |
+
'from http://chardet.feedparser.org/') from e
|
| 221 |
+
# check for BOM first
|
| 222 |
+
decoded = None
|
| 223 |
+
for bom, encoding in _encoding_map:
|
| 224 |
+
if text.startswith(bom):
|
| 225 |
+
decoded = text[len(bom):].decode(encoding, 'replace')
|
| 226 |
+
break
|
| 227 |
+
# no BOM found, so use chardet
|
| 228 |
+
if decoded is None:
|
| 229 |
+
enc = chardet.detect(text[:1024]) # Guess using first 1KB
|
| 230 |
+
decoded = text.decode(enc.get('encoding') or 'utf-8',
|
| 231 |
+
'replace')
|
| 232 |
+
text = decoded
|
| 233 |
+
else:
|
| 234 |
+
text = text.decode(self.encoding)
|
| 235 |
+
if text.startswith('\ufeff'):
|
| 236 |
+
text = text[len('\ufeff'):]
|
| 237 |
+
else:
|
| 238 |
+
if text.startswith('\ufeff'):
|
| 239 |
+
text = text[len('\ufeff'):]
|
| 240 |
+
|
| 241 |
+
# text now *is* a unicode string
|
| 242 |
+
text = text.replace('\r\n', '\n')
|
| 243 |
+
text = text.replace('\r', '\n')
|
| 244 |
+
if self.stripall:
|
| 245 |
+
text = text.strip()
|
| 246 |
+
elif self.stripnl:
|
| 247 |
+
text = text.strip('\n')
|
| 248 |
+
if self.tabsize > 0:
|
| 249 |
+
text = text.expandtabs(self.tabsize)
|
| 250 |
+
if self.ensurenl and not text.endswith('\n'):
|
| 251 |
+
text += '\n'
|
| 252 |
+
|
| 253 |
+
return text
|
| 254 |
+
|
| 255 |
+
def get_tokens(self, text, unfiltered=False):
    """
    This method is the basic interface of a lexer. It is called by
    the `highlight()` function. It must process the text and return an
    iterable of ``(tokentype, value)`` pairs from `text`.

    Normally, you don't need to override this method. The default
    implementation processes the options recognized by all lexers
    (`stripnl`, `stripall` and so on), and then yields all tokens
    from `get_tokens_unprocessed()`, with the ``index`` dropped.

    If `unfiltered` is set to `True`, the filtering mechanism is
    bypassed even if filters are defined.
    """
    text = self._preprocess_lexer_input(text)

    def streamer():
        # Drop the start index from the (index, tokentype, value) triples.
        for _, tokentype, value in self.get_tokens_unprocessed(text):
            yield tokentype, value

    token_stream = streamer()
    if unfiltered:
        return token_stream
    return apply_filters(token_stream, self.filters, self)
|
| 278 |
+
|
| 279 |
+
def get_tokens_unprocessed(self, text):
    """
    This method should process the text and return an iterable of
    ``(index, tokentype, value)`` tuples where ``index`` is the starting
    position of the token within the input text.

    It must be overridden by subclasses. It is recommended to
    implement it as a generator to maximize effectiveness.
    """
    # Abstract: every concrete lexer must provide its own tokenizer.
    raise NotImplementedError
|
| 289 |
+
|
| 290 |
+
|
| 291 |
+
class DelegatingLexer(Lexer):
    """
    This lexer takes two lexer as arguments. A root lexer and
    a language lexer. First everything is scanned using the language
    lexer, afterwards all ``Other`` tokens are lexed using the root
    lexer.

    The lexers from the ``template`` lexer package use this base lexer.
    """

    def __init__(self, _root_lexer, _language_lexer, _needle=Other, **options):
        # Instantiate both delegates with the same option set.
        self.root_lexer = _root_lexer(**options)
        self.language_lexer = _language_lexer(**options)
        self.needle = _needle
        Lexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        # Text belonging to the needle token type is collected into
        # ``buffered`` for the root lexer; everything else is remembered
        # (with its insertion point) so it can be spliced back in.
        buffered = ''
        insertions = []
        lng_buffer = []
        for index, token, value in self.language_lexer.get_tokens_unprocessed(text):
            if token is not self.needle:
                lng_buffer.append((index, token, value))
                continue
            if lng_buffer:
                insertions.append((len(buffered), lng_buffer))
                lng_buffer = []
            buffered += value
        if lng_buffer:
            insertions.append((len(buffered), lng_buffer))
        return do_insertions(insertions,
                             self.root_lexer.get_tokens_unprocessed(buffered))
|
| 323 |
+
|
| 324 |
+
|
| 325 |
+
# ------------------------------------------------------------------------------
|
| 326 |
+
# RegexLexer and ExtendedRegexLexer
|
| 327 |
+
#
|
| 328 |
+
|
| 329 |
+
|
| 330 |
+
class include(str):  # pylint: disable=invalid-name
    """
    Indicates that a state should include rules from another state.

    Behaves exactly like the state-name string it wraps; the subclass
    exists only so token preprocessing can recognize it by type.
    """
|
| 335 |
+
|
| 336 |
+
|
| 337 |
+
class _inherit:
|
| 338 |
+
"""
|
| 339 |
+
Indicates the a state should inherit from its superclass.
|
| 340 |
+
"""
|
| 341 |
+
def __repr__(self):
|
| 342 |
+
return 'inherit'
|
| 343 |
+
|
| 344 |
+
inherit = _inherit() # pylint: disable=invalid-name
|
| 345 |
+
|
| 346 |
+
|
| 347 |
+
class combined(tuple):  # pylint: disable=invalid-name
    """
    Marker tuple: a state combined on the fly from multiple states.
    """

    def __new__(cls, *args):
        # Pack the positional state names into the tuple itself.
        return tuple.__new__(cls, args)

    def __init__(self, *args):
        # Tuples are immutable; construction happened in __new__.
        pass
|
| 358 |
+
|
| 359 |
+
|
| 360 |
+
class _PseudoMatch:
|
| 361 |
+
"""
|
| 362 |
+
A pseudo match object constructed from a string.
|
| 363 |
+
"""
|
| 364 |
+
|
| 365 |
+
def __init__(self, start, text):
|
| 366 |
+
self._text = text
|
| 367 |
+
self._start = start
|
| 368 |
+
|
| 369 |
+
def start(self, arg=None):
|
| 370 |
+
return self._start
|
| 371 |
+
|
| 372 |
+
def end(self, arg=None):
|
| 373 |
+
return self._start + len(self._text)
|
| 374 |
+
|
| 375 |
+
def group(self, arg=None):
|
| 376 |
+
if arg:
|
| 377 |
+
raise IndexError('No such group')
|
| 378 |
+
return self._text
|
| 379 |
+
|
| 380 |
+
def groups(self):
|
| 381 |
+
return (self._text,)
|
| 382 |
+
|
| 383 |
+
def groupdict(self):
|
| 384 |
+
return {}
|
| 385 |
+
|
| 386 |
+
|
| 387 |
+
def bygroups(*args):
    """
    Callback that yields multiple actions for each group in the match.

    Each positional argument corresponds to one capture group (1-based);
    an argument may be ``None`` (skip the group), a plain token type, or
    another callback (which is invoked on a pseudo-match for the group).
    """
    def callback(lexer, match, ctx=None):
        for i, action in enumerate(args):
            if action is None:
                # Explicitly ignored group.
                continue
            elif type(action) is _TokenType:
                data = match.group(i + 1)
                # Empty (or non-participating) groups produce no token.
                if data:
                    yield match.start(i + 1), action, data
            else:
                # Nested callback: delegate with a pseudo-match covering
                # just this group's text.
                data = match.group(i + 1)
                if data is not None:
                    if ctx:
                        ctx.pos = match.start(i + 1)
                    for item in action(lexer,
                                       _PseudoMatch(match.start(i + 1), data), ctx):
                        if item:
                            yield item
        # Leave the context positioned after the whole match.
        if ctx:
            ctx.pos = match.end()
    return callback
|
| 411 |
+
|
| 412 |
+
|
| 413 |
+
class _This:
|
| 414 |
+
"""
|
| 415 |
+
Special singleton used for indicating the caller class.
|
| 416 |
+
Used by ``using``.
|
| 417 |
+
"""
|
| 418 |
+
|
| 419 |
+
this = _This()
|
| 420 |
+
|
| 421 |
+
|
| 422 |
+
def using(_other, **kwargs):
    """
    Callback that processes the match with a different lexer.

    The keyword arguments are forwarded to the lexer, except `state` which
    is handled separately.

    `state` specifies the state that the new lexer will start in, and can
    be an enumerable such as ('root', 'inline', 'string') or a simple
    string which is assumed to be on top of the root state.

    Note: For that to work, `_other` must not be an `ExtendedRegexLexer`.
    """
    # Arguments destined for get_tokens_unprocessed (currently only the
    # initial state stack), split out from the lexer constructor kwargs.
    gt_kwargs = {}
    if 'state' in kwargs:
        s = kwargs.pop('state')
        if isinstance(s, (list, tuple)):
            gt_kwargs['stack'] = s
        else:
            # A bare state name is stacked on top of 'root'.
            gt_kwargs['stack'] = ('root', s)

    if _other is this:
        def callback(lexer, match, ctx=None):
            # if keyword arguments are given the callback
            # function has to create a new lexer instance
            if kwargs:
                # XXX: cache that somehow
                kwargs.update(lexer.options)
                lx = lexer.__class__(**kwargs)
            else:
                # No extra options: reuse the calling lexer itself.
                lx = lexer
            s = match.start()
            # Re-offset sub-lexer indices so they are absolute positions.
            for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
                yield i + s, t, v
            if ctx:
                ctx.pos = match.end()
    else:
        def callback(lexer, match, ctx=None):
            # XXX: cache that somehow
            # The sub-lexer inherits the calling lexer's options.
            kwargs.update(lexer.options)
            lx = _other(**kwargs)

            s = match.start()
            # Re-offset sub-lexer indices so they are absolute positions.
            for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
                yield i + s, t, v
            if ctx:
                ctx.pos = match.end()
    return callback
|
| 470 |
+
|
| 471 |
+
|
| 472 |
+
class default:
    """
    Indicates a state or state action (e.g. #pop) to apply.
    For example default('#pop') is equivalent to ('', Token, '#pop')
    Note that state tuples may be used as well.

    .. versionadded:: 2.0
    """

    def __init__(self, state):
        # The transition to perform when no other rule in the state matched.
        self.state = state
|
| 482 |
+
|
| 483 |
+
|
| 484 |
+
class words(Future):
    """
    Indicates a list of literal words that is transformed into an optimized
    regex that matches any of the words.

    .. versionadded:: 2.0
    """
    def __init__(self, words, prefix='', suffix=''):
        # Literal strings to match, plus regex fragments glued around
        # the generated alternation.
        self.words = words
        self.prefix = prefix
        self.suffix = suffix

    def get(self):
        # Deferred (Future) construction: the optimized regex is only
        # built when token preprocessing asks for it.
        return regex_opt(self.words, prefix=self.prefix, suffix=self.suffix)
|
| 498 |
+
|
| 499 |
+
|
| 500 |
+
class RegexLexerMeta(LexerMeta):
    """
    Metaclass for RegexLexer, creates the self._tokens attribute from
    self.tokens on the first instantiation.
    """

    def _process_regex(cls, regex, rflags, state):
        """Preprocess the regular expression component of a token definition."""
        # `Future` values (e.g. ``words(...)``) build their pattern lazily.
        if isinstance(regex, Future):
            regex = regex.get()
        # Store the bound ``match`` method, not the pattern object.
        return re.compile(regex, rflags).match

    def _process_token(cls, token):
        """Preprocess the token component of a token definition."""
        assert type(token) is _TokenType or callable(token), \
            f'token type must be simple type or callable, not {token!r}'
        return token

    def _process_new_state(cls, new_state, unprocessed, processed):
        """Preprocess the state transition action of a token definition."""
        if isinstance(new_state, str):
            # an existing state
            if new_state == '#pop':
                return -1
            elif new_state in unprocessed:
                return (new_state,)
            elif new_state == '#push':
                return new_state
            elif new_state[:5] == '#pop:':
                # '#pop:n' pops n states at once, encoded as -n.
                return -int(new_state[5:])
            else:
                assert False, f'unknown new state {new_state!r}'
        elif isinstance(new_state, combined):
            # combine a new state from existing ones
            tmp_state = '_tmp_%d' % cls._tmpname
            cls._tmpname += 1
            itokens = []
            for istate in new_state:
                assert istate != new_state, f'circular state ref {istate!r}'
                itokens.extend(cls._process_state(unprocessed,
                                                  processed, istate))
            processed[tmp_state] = itokens
            return (tmp_state,)
        elif isinstance(new_state, tuple):
            # push more than one state
            for istate in new_state:
                assert (istate in unprocessed or
                        istate in ('#pop', '#push')), \
                    'unknown new state ' + istate
            return new_state
        else:
            assert False, f'unknown new state def {new_state!r}'

    def _process_state(cls, unprocessed, processed, state):
        """Preprocess a single state definition."""
        assert isinstance(state, str), f"wrong state name {state!r}"
        assert state[0] != '#', f"invalid state name {state!r}"
        if state in processed:
            # Already done (or in progress for recursive includes).
            return processed[state]
        tokens = processed[state] = []
        rflags = cls.flags
        for tdef in unprocessed[state]:
            if isinstance(tdef, include):
                # it's a state reference
                assert tdef != state, f"circular state reference {state!r}"
                tokens.extend(cls._process_state(unprocessed, processed,
                                                 str(tdef)))
                continue
            if isinstance(tdef, _inherit):
                # should be processed already, but may not in the case of:
                # 1. the state has no counterpart in any parent
                # 2. the state includes more than one 'inherit'
                continue
            if isinstance(tdef, default):
                # A default transition: the empty pattern always matches.
                new_state = cls._process_new_state(tdef.state, unprocessed, processed)
                tokens.append((re.compile('').match, None, new_state))
                continue

            assert type(tdef) is tuple, f"wrong rule def {tdef!r}"

            try:
                rex = cls._process_regex(tdef[0], rflags, state)
            except Exception as err:
                raise ValueError(f"uncompilable regex {tdef[0]!r} in state {state!r} of {cls!r}: {err}") from err

            token = cls._process_token(tdef[1])

            if len(tdef) == 2:
                # No state transition given.
                new_state = None
            else:
                new_state = cls._process_new_state(tdef[2],
                                                   unprocessed, processed)

            tokens.append((rex, token, new_state))
        return tokens

    def process_tokendef(cls, name, tokendefs=None):
        """Preprocess a dictionary of token definitions."""
        processed = cls._all_tokens[name] = {}
        tokendefs = tokendefs or cls.tokens[name]
        for state in list(tokendefs):
            cls._process_state(tokendefs, processed, state)
        return processed

    def get_tokendefs(cls):
        """
        Merge tokens from superclasses in MRO order, returning a single tokendef
        dictionary.

        Any state that is not defined by a subclass will be inherited
        automatically. States that *are* defined by subclasses will, by
        default, override that state in the superclass. If a subclass wishes to
        inherit definitions from a superclass, it can use the special value
        "inherit", which will cause the superclass' state definition to be
        included at that point in the state.
        """
        tokens = {}
        inheritable = {}
        for c in cls.__mro__:
            toks = c.__dict__.get('tokens', {})

            for state, items in toks.items():
                curitems = tokens.get(state)
                if curitems is None:
                    # N.b. because this is assigned by reference, sufficiently
                    # deep hierarchies are processed incrementally (e.g. for
                    # A(B), B(C), C(RegexLexer), B will be premodified so X(B)
                    # will not see any inherits in B).
                    tokens[state] = items
                    try:
                        inherit_ndx = items.index(inherit)
                    except ValueError:
                        continue
                    inheritable[state] = inherit_ndx
                    continue

                inherit_ndx = inheritable.pop(state, None)
                if inherit_ndx is None:
                    continue

                # Replace the "inherit" value with the items
                curitems[inherit_ndx:inherit_ndx+1] = items
                try:
                    # N.b. this is the index in items (that is, the superclass
                    # copy), so offset required when storing below.
                    new_inh_ndx = items.index(inherit)
                except ValueError:
                    pass
                else:
                    inheritable[state] = inherit_ndx + new_inh_ndx

        return tokens

    def __call__(cls, *args, **kwds):
        """Instantiate cls after preprocessing its token definitions."""
        # Token preprocessing happens once per class, on first instantiation.
        if '_tokens' not in cls.__dict__:
            cls._all_tokens = {}
            cls._tmpname = 0
            if hasattr(cls, 'token_variants') and cls.token_variants:
                # don't process yet
                pass
            else:
                cls._tokens = cls.process_tokendef('', cls.get_tokendefs())

        return type.__call__(cls, *args, **kwds)
|
| 665 |
+
|
| 666 |
+
|
| 667 |
+
class RegexLexer(Lexer, metaclass=RegexLexerMeta):
    """
    Base for simple stateful regular expression-based lexers.
    Simplifies the lexing process so that you need only
    provide a list of states and regular expressions.
    """

    #: Flags for compiling the regular expressions.
    #: Defaults to MULTILINE.
    flags = re.MULTILINE

    #: At all time there is a stack of states. Initially, the stack contains
    #: a single state 'root'. The top of the stack is called "the current state".
    #:
    #: Dict of ``{'state': [(regex, tokentype, new_state), ...], ...}``
    #:
    #: ``new_state`` can be omitted to signify no state transition.
    #: If ``new_state`` is a string, it is pushed on the stack. This ensure
    #: the new current state is ``new_state``.
    #: If ``new_state`` is a tuple of strings, all of those strings are pushed
    #: on the stack and the current state will be the last element of the list.
    #: ``new_state`` can also be ``combined('state1', 'state2', ...)``
    #: to signify a new, anonymous state combined from the rules of two
    #: or more existing ones.
    #: Furthermore, it can be '#pop' to signify going back one step in
    #: the state stack, or '#push' to push the current state on the stack
    #: again. Note that if you push while in a combined state, the combined
    #: state itself is pushed, and not only the state in which the rule is
    #: defined.
    #:
    #: The tuple can also be replaced with ``include('state')``, in which
    #: case the rules from the state named by the string are included in the
    #: current one.
    tokens = {}

    def get_tokens_unprocessed(self, text, stack=('root',)):
        """
        Split ``text`` into (tokentype, text) pairs.

        ``stack`` is the initial stack (default: ``['root']``)
        """
        pos = 0
        tokendefs = self._tokens
        statestack = list(stack)
        statetokens = tokendefs[statestack[-1]]
        while 1:
            # Try each rule of the current state in order; first match wins.
            for rexmatch, action, new_state in statetokens:
                m = rexmatch(text, pos)
                if m:
                    if action is not None:
                        if type(action) is _TokenType:
                            yield pos, action, m.group()
                        else:
                            # Callback actions yield their own triples.
                            yield from action(self, m)
                    pos = m.end()
                    if new_state is not None:
                        # state transition
                        if isinstance(new_state, tuple):
                            for state in new_state:
                                if state == '#pop':
                                    if len(statestack) > 1:
                                        statestack.pop()
                                elif state == '#push':
                                    statestack.append(statestack[-1])
                                else:
                                    statestack.append(state)
                        elif isinstance(new_state, int):
                            # pop, but keep at least one state on the stack
                            # (random code leading to unexpected pops should
                            # not allow exceptions)
                            if abs(new_state) >= len(statestack):
                                del statestack[1:]
                            else:
                                del statestack[new_state:]
                        elif new_state == '#push':
                            statestack.append(statestack[-1])
                        else:
                            assert False, f"wrong state def: {new_state!r}"
                        statetokens = tokendefs[statestack[-1]]
                    break
            else:
                # We are here only if all state tokens have been considered
                # and there was not a match on any of them.
                try:
                    if text[pos] == '\n':
                        # at EOL, reset state to "root"
                        statestack = ['root']
                        statetokens = tokendefs['root']
                        yield pos, Whitespace, '\n'
                        pos += 1
                        continue
                    # No rule matched: emit a one-character Error token.
                    yield pos, Error, text[pos]
                    pos += 1
                except IndexError:
                    # End of text reached.
                    break
|
| 762 |
+
|
| 763 |
+
|
| 764 |
+
class LexerContext:
    """
    A helper object that holds lexer position data.
    """

    def __init__(self, text, pos, stack=None, end=None):
        self.text = text
        self.pos = pos
        # An explicit end of 0 is treated like "no end given".
        self.end = end or len(text)  # end=0 not supported ;-)
        self.stack = stack or ['root']

    def __repr__(self):
        return f'LexerContext({self.text!r}, {self.pos!r}, {self.stack!r})'
|
| 777 |
+
|
| 778 |
+
|
| 779 |
+
class ExtendedRegexLexer(RegexLexer):
    """
    A RegexLexer that uses a context object to store its state.
    """

    def get_tokens_unprocessed(self, text=None, context=None):
        """
        Split ``text`` into (tokentype, text) pairs.
        If ``context`` is given, use this lexer context instead.
        """
        tokendefs = self._tokens
        if not context:
            ctx = LexerContext(text, 0)
            statetokens = tokendefs['root']
        else:
            # Resume from the supplied context's position and stack.
            ctx = context
            statetokens = tokendefs[ctx.stack[-1]]
            text = ctx.text
        while 1:
            for rexmatch, action, new_state in statetokens:
                m = rexmatch(text, ctx.pos, ctx.end)
                if m:
                    if action is not None:
                        if type(action) is _TokenType:
                            yield ctx.pos, action, m.group()
                            ctx.pos = m.end()
                        else:
                            yield from action(self, m, ctx)
                            if not new_state:
                                # altered the state stack?
                                statetokens = tokendefs[ctx.stack[-1]]
                            # CAUTION: callback must set ctx.pos!
                    if new_state is not None:
                        # state transition
                        if isinstance(new_state, tuple):
                            for state in new_state:
                                if state == '#pop':
                                    if len(ctx.stack) > 1:
                                        ctx.stack.pop()
                                elif state == '#push':
                                    ctx.stack.append(ctx.stack[-1])
                                else:
                                    ctx.stack.append(state)
                        elif isinstance(new_state, int):
                            # see RegexLexer for why this check is made
                            if abs(new_state) >= len(ctx.stack):
                                del ctx.stack[1:]
                            else:
                                del ctx.stack[new_state:]
                        elif new_state == '#push':
                            ctx.stack.append(ctx.stack[-1])
                        else:
                            assert False, f"wrong state def: {new_state!r}"
                        statetokens = tokendefs[ctx.stack[-1]]
                    break
            else:
                # No rule matched at the current position.
                try:
                    if ctx.pos >= ctx.end:
                        break
                    if text[ctx.pos] == '\n':
                        # at EOL, reset state to "root"
                        ctx.stack = ['root']
                        statetokens = tokendefs['root']
                        yield ctx.pos, Text, '\n'
                        ctx.pos += 1
                        continue
                    yield ctx.pos, Error, text[ctx.pos]
                    ctx.pos += 1
                except IndexError:
                    break
|
| 849 |
+
|
| 850 |
+
|
| 851 |
+
def do_insertions(insertions, tokens):
    """
    Helper for lexers which must combine the results of several
    sublexers.

    ``insertions`` is a list of ``(index, itokens)`` pairs.
    Each ``itokens`` iterable should be inserted at position
    ``index`` into the token stream given by the ``tokens``
    argument.

    The result is a combined token stream.

    TODO: clean up the code here.
    """
    insertions = iter(insertions)
    try:
        index, itokens = next(insertions)
    except StopIteration:
        # no insertions
        yield from tokens
        return

    # realpos tracks the position in the *combined* output stream;
    # it is initialized from the first token's index below.
    realpos = None
    insleft = True

    # iterate over the token stream where we want to insert
    # the tokens from the insertion list.
    for i, t, v in tokens:
        # first iteration. store the position of first item
        if realpos is None:
            realpos = i
        oldi = 0
        # Split the current token wherever insertion points fall inside it.
        while insleft and i + len(v) >= index:
            tmpval = v[oldi:index - i]
            if tmpval:
                yield realpos, t, tmpval
                realpos += len(tmpval)
            for it_index, it_token, it_value in itokens:
                yield realpos, it_token, it_value
                realpos += len(it_value)
            oldi = index - i
            try:
                index, itokens = next(insertions)
            except StopIteration:
                insleft = False
                break  # not strictly necessary
        # Emit whatever remains of the current token after the last split.
        if oldi < len(v):
            yield realpos, t, v[oldi:]
            realpos += len(v) - oldi

    # leftover tokens
    while insleft:
        # no normal tokens, set realpos to zero
        realpos = realpos or 0
        for p, t, v in itokens:
            yield realpos, t, v
            realpos += len(v)
        try:
            index, itokens = next(insertions)
        except StopIteration:
            insleft = False
            break  # not strictly necessary
|
| 913 |
+
|
| 914 |
+
|
| 915 |
+
class ProfilingRegexLexerMeta(RegexLexerMeta):
    """Metaclass for ProfilingRegexLexer, collects regex timing info."""

    def _process_regex(cls, regex, rflags, state):
        # Expand ``words(...)`` to its optimized alternation so the
        # profiled pattern is the real compiled source.
        if isinstance(regex, words):
            rex = regex_opt(regex.words, prefix=regex.prefix,
                            suffix=regex.suffix)
        else:
            rex = regex
        compiled = re.compile(rex, rflags)

        def match_func(text, pos, endpos=sys.maxsize):
            # Accumulate [call count, total seconds] per (state, regex)
            # in the top-most profiling frame.
            info = cls._prof_data[-1].setdefault((state, rex), [0, 0.0])
            t0 = time.time()
            res = compiled.match(text, pos, endpos)
            t1 = time.time()
            info[0] += 1
            info[1] += t1 - t0
            return res
        return match_func
|
| 935 |
+
|
| 936 |
+
|
| 937 |
+
class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta):
    """Drop-in replacement for RegexLexer that does profiling of its regexes."""

    # Stack of per-run profiling dicts (see metaclass match_func).
    _prof_data = []
    _prof_sort_index = 4  # defaults to time per call

    def get_tokens_unprocessed(self, text, stack=('root',)):
        # this needs to be a stack, since using(this) will produce nested calls
        self.__class__._prof_data.append({})
        yield from RegexLexer.get_tokens_unprocessed(self, text, stack)
        rawdata = self.__class__._prof_data.pop()
        # Build rows of (state, truncated regex, ncalls, total ms, ms/call),
        # sorted by the configured column.
        data = sorted(((s, repr(r).strip('u\'').replace('\\\\', '\\')[:65],
                        n, 1000 * t, 1000 * t / n)
                       for ((s, r), (n, t)) in rawdata.items()),
                      key=lambda x: x[self._prof_sort_index],
                      reverse=True)
        sum_total = sum(x[3] for x in data)

        print()
        print('Profiling result for %s lexing %d chars in %.3f ms' %
              (self.__class__.__name__, len(text), sum_total))
        print('=' * 110)
        print('%-20s %-64s ncalls  tottime  percall' % ('state', 'regex'))
        print('-' * 110)
        for d in data:
            print('%-20s %-65s %5d %8.4f %8.4f' % d)
        print('=' * 110)
llava/lib/python3.10/site-packages/pip/_vendor/pygments/modeline.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.modeline
|
| 3 |
+
~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
A simple modeline parser (based on pymodeline).
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import re
|
| 12 |
+
|
| 13 |
+
__all__ = ['get_filetype_from_buffer']
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
# Matches vi/vim/ex modelines such as ``vim: set ft=python:`` and
# captures the declared filetype.
modeline_re = re.compile(r'''
    (?: vi | vim | ex ) (?: [<=>]? \d* )? :
    .* (?: ft | filetype | syn | syntax ) = ( [^:\s]+ )
''', re.VERBOSE)


def get_filetype_from_line(l):  # noqa: E741
    """Return the filetype named by a modeline in *l*, or None."""
    match = modeline_re.search(l)
    if match:
        return match.group(1)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def get_filetype_from_buffer(buf, max_lines=5):
    """
    Scan the buffer for modelines and return filetype if one is found.
    """
    lines = buf.splitlines()
    # Modelines usually sit at the very end of a file: walk the last
    # max_lines lines bottom-up first.
    for line in lines[-1:-max_lines-1:-1]:
        found = get_filetype_from_line(line)
        if found:
            return found
    # Then check the top of the buffer, from line max_lines down to line 0.
    for idx in range(max_lines, -1, -1):
        if idx < len(lines):
            found = get_filetype_from_line(lines[idx])
            if found:
                return found

    return None
|
llava/lib/python3.10/site-packages/pip/_vendor/pygments/regexopt.py
ADDED
|
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.regexopt
|
| 3 |
+
~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
An algorithm that generates optimized regexes for matching long lists of
|
| 6 |
+
literal strings.
|
| 7 |
+
|
| 8 |
+
:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
|
| 9 |
+
:license: BSD, see LICENSE for details.
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
import re
|
| 13 |
+
from re import escape
|
| 14 |
+
from os.path import commonprefix
|
| 15 |
+
from itertools import groupby
|
| 16 |
+
from operator import itemgetter
|
| 17 |
+
|
| 18 |
+
# Characters that must be backslash-escaped inside a regex character class.
CS_ESCAPE = re.compile(r'[\[\^\\\-\]]')
# Key function extracting the first element (used for grouping strings).
FIRST_ELEMENT = itemgetter(0)


def make_charset(letters):
    """Build a regex character class matching any of *letters*."""
    escaped = CS_ESCAPE.sub(lambda m: '\\' + m.group(), ''.join(letters))
    return '[' + escaped + ']'
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def regex_opt_inner(strings, open_paren):
    """Return a regex that matches any string in the sorted list of strings."""
    close_paren = open_paren and ')' or ''
    # print strings, repr(open_paren)
    if not strings:
        # print '-> nothing left'
        return ''
    first = strings[0]
    if len(strings) == 1:
        # print '-> only 1 string'
        return open_paren + escape(first) + close_paren
    if not first:
        # An empty string in the (sorted) list makes the whole
        # alternation optional.
        # print '-> first string empty'
        return open_paren + regex_opt_inner(strings[1:], '(?:') \
            + '?' + close_paren
    if len(first) == 1:
        # multiple one-char strings? make a charset
        oneletter = []
        rest = []
        for s in strings:
            if len(s) == 1:
                oneletter.append(s)
            else:
                rest.append(s)
        if len(oneletter) > 1:  # do we have more than one oneletter string?
            if rest:
                # print '-> 1-character + rest'
                return open_paren + regex_opt_inner(rest, '') + '|' \
                    + make_charset(oneletter) + close_paren
            # print '-> only 1-character'
            return open_paren + make_charset(oneletter) + close_paren
    prefix = commonprefix(strings)
    if prefix:
        plen = len(prefix)
        # we have a prefix for all strings
        # print '-> prefix:', prefix
        return open_paren + escape(prefix) \
            + regex_opt_inner([s[plen:] for s in strings], '(?:') \
            + close_paren
    # is there a suffix?
    strings_rev = [s[::-1] for s in strings]
    suffix = commonprefix(strings_rev)
    if suffix:
        slen = len(suffix)
        # print '-> suffix:', suffix[::-1]
        return open_paren \
            + regex_opt_inner(sorted(s[:-slen] for s in strings), '(?:') \
            + escape(suffix[::-1]) + close_paren
    # recurse on common 1-string prefixes
    # print '-> last resort'
    # Split the list into runs that share (or don't share) the first
    # character with the first string, and alternate between them.
    return open_paren + \
        '|'.join(regex_opt_inner(list(group[1]), '')
                 for group in groupby(strings, lambda s: s[0] == first[0])) \
        + close_paren
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def regex_opt(strings, prefix='', suffix=''):
    """Return a regex source string that matches any string in *strings*.

    The strings to match must be literal strings, not regexes; they are
    regex-escaped internally.  *prefix* and *suffix* are prepended and
    appended verbatim to the generated pattern.
    """
    # Sorting groups strings sharing common prefixes next to each other,
    # which lets regex_opt_inner factor them into compact alternations.
    return prefix + regex_opt_inner(sorted(strings), '(') + suffix
|
llava/lib/python3.10/site-packages/pip/_vendor/pygments/style.py
ADDED
|
@@ -0,0 +1,203 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
    pygments.style
    ~~~~~~~~~~~~~~

    Basic style object.

    :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

from pip._vendor.pygments.token import Token, STANDARD_TYPES

# Default mapping of ansixxx to RGB colors.
# Values are bare 6-digit hex strings (no leading '#').
_ansimap = {
    # dark
    'ansiblack': '000000',
    'ansired': '7f0000',
    'ansigreen': '007f00',
    'ansiyellow': '7f7fe0',
    'ansiblue': '00007f',
    'ansimagenta': '7f007f',
    'ansicyan': '007f7f',
    'ansigray': 'e5e5e5',
    # normal
    'ansibrightblack': '555555',
    'ansibrightred': 'ff0000',
    'ansibrightgreen': '00ff00',
    'ansibrightyellow': 'ffff00',
    'ansibrightblue': '0000ff',
    'ansibrightmagenta': 'ff00ff',
    'ansibrightcyan': '00ffff',
    'ansiwhite': 'ffffff',
}
# mapping of deprecated #ansixxx colors to new color names
# (kept so styles written against the old spelling keep working)
_deprecated_ansicolors = {
    # dark
    '#ansiblack': 'ansiblack',
    '#ansidarkred': 'ansired',
    '#ansidarkgreen': 'ansigreen',
    '#ansibrown': 'ansiyellow',
    '#ansidarkblue': 'ansiblue',
    '#ansipurple': 'ansimagenta',
    '#ansiteal': 'ansicyan',
    '#ansilightgray': 'ansigray',
    # normal
    '#ansidarkgray': 'ansibrightblack',
    '#ansired': 'ansibrightred',
    '#ansigreen': 'ansibrightgreen',
    '#ansiyellow': 'ansibrightyellow',
    '#ansiblue': 'ansibrightblue',
    '#ansifuchsia': 'ansibrightmagenta',
    '#ansiturquoise': 'ansibrightcyan',
    '#ansiwhite': 'ansiwhite',
}
# Set of all recognized (non-deprecated) ansi color names.
ansicolors = set(_ansimap)
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
class StyleMeta(type):
    # Metaclass for Style subclasses: at class-creation time it expands the
    # sparse ``styles`` mapping into a fully-resolved ``_styles`` table
    # (inheritance applied, every standard token present), and gives the
    # class itself dict-like behavior (iteration, len, style lookup).

    def __new__(mcs, name, bases, dct):
        obj = type.__new__(mcs, name, bases, dct)
        # Ensure every standard token type has at least an (empty) entry so
        # inheritance resolution below visits all of them.
        for token in STANDARD_TYPES:
            if token not in obj.styles:
                obj.styles[token] = ''

        def colorformat(text):
            # Normalize a color spec: an ansi color name, a '#rrggbb' or
            # '#rgb' hex color (returned without the '#', shorthand
            # expanded), the empty string, or a CSS var()/calc() expression
            # passed through unchanged.  Anything else is a style bug.
            if text in ansicolors:
                return text
            if text[0:1] == '#':
                col = text[1:]
                if len(col) == 6:
                    return col
                elif len(col) == 3:
                    # expand shorthand #abc to aabbcc
                    return col[0] * 2 + col[1] * 2 + col[2] * 2
            elif text == '':
                return ''
            elif text.startswith('var') or text.startswith('calc'):
                return text
            assert False, f"wrong color format {text!r}"

        _styles = obj._styles = {}

        for ttype in obj.styles:
            # token.split() yields the path root -> token, so parents are
            # always resolved before their children.
            for token in ttype.split():
                if token in _styles:
                    continue
                # ndef layout: [color, bold, italic, underline, bgcolor,
                #               border, roman, sans, mono]
                ndef = _styles.get(token.parent, None)
                styledefs = obj.styles.get(token, '').split()
                if not ndef or token is None:
                    # no parent definition: start from a blank style
                    ndef = ['', 0, 0, 0, '', '', 0, 0, 0]
                elif 'noinherit' in styledefs and token is not Token:
                    # 'noinherit': restart from the root Token style
                    ndef = _styles[Token][:]
                else:
                    # copy the parent's resolved style, then override below
                    ndef = ndef[:]
                _styles[token] = ndef
                for styledef in obj.styles.get(token, '').split():
                    if styledef == 'noinherit':
                        pass
                    elif styledef == 'bold':
                        ndef[1] = 1
                    elif styledef == 'nobold':
                        ndef[1] = 0
                    elif styledef == 'italic':
                        ndef[2] = 1
                    elif styledef == 'noitalic':
                        ndef[2] = 0
                    elif styledef == 'underline':
                        ndef[3] = 1
                    elif styledef == 'nounderline':
                        ndef[3] = 0
                    elif styledef[:3] == 'bg:':
                        ndef[4] = colorformat(styledef[3:])
                    elif styledef[:7] == 'border:':
                        ndef[5] = colorformat(styledef[7:])
                    elif styledef == 'roman':
                        ndef[6] = 1
                    elif styledef == 'sans':
                        ndef[7] = 1
                    elif styledef == 'mono':
                        ndef[8] = 1
                    else:
                        # bare word: a foreground color spec
                        ndef[0] = colorformat(styledef)

        return obj

    def style_for_token(cls, token):
        # Return the resolved style for *token* as a plain dict, translating
        # ansi color names (including deprecated '#ansixxx' spellings) to
        # their RGB values and reporting the ansi name separately.
        t = cls._styles[token]
        ansicolor = bgansicolor = None
        color = t[0]
        if color in _deprecated_ansicolors:
            color = _deprecated_ansicolors[color]
        if color in ansicolors:
            ansicolor = color
            color = _ansimap[color]
        bgcolor = t[4]
        if bgcolor in _deprecated_ansicolors:
            bgcolor = _deprecated_ansicolors[bgcolor]
        if bgcolor in ansicolors:
            bgansicolor = bgcolor
            bgcolor = _ansimap[bgcolor]

        return {
            'color': color or None,
            'bold': bool(t[1]),
            'italic': bool(t[2]),
            'underline': bool(t[3]),
            'bgcolor': bgcolor or None,
            'border': t[5] or None,
            'roman': bool(t[6]) or None,
            'sans': bool(t[7]) or None,
            'mono': bool(t[8]) or None,
            'ansicolor': ansicolor,
            'bgansicolor': bgansicolor,
        }

    def list_styles(cls):
        # Iterating the class yields (token, style-dict) pairs; materialize.
        return list(cls)

    def styles_token(cls, ttype):
        # True if a resolved style entry exists for *ttype*.
        return ttype in cls._styles

    def __iter__(cls):
        # Yield (token, style-dict) for every resolved token.
        for token in cls._styles:
            yield token, cls.style_for_token(token)

    def __len__(cls):
        return len(cls._styles)
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
class Style(metaclass=StyleMeta):
    """Base class for styles.

    Subclasses override the class attributes below; the StyleMeta metaclass
    resolves the sparse ``styles`` mapping into a full per-token table at
    class-creation time.
    """

    #: overall background color (``None`` means transparent)
    background_color = '#ffffff'

    #: highlight background color
    highlight_color = '#ffffcc'

    #: line number font color
    line_number_color = 'inherit'

    #: line number background color
    line_number_background_color = 'transparent'

    #: special line number font color
    line_number_special_color = '#000000'

    #: special line number background color
    line_number_special_background_color = '#ffffc0'

    #: Style definitions for individual token types.
    styles = {}

    #: user-friendly style name (used when selecting the style, so this
    # should be all-lowercase, no spaces, hyphens)
    name = 'unnamed'

    #: alternative names under which the style can be selected
    aliases = []

    # Attribute for lexers defined within Pygments. If set
    # to True, the style is not shown in the style gallery
    # on the website. This is intended for language-specific
    # styles.
    web_style_gallery_exclude = False
|
llava/lib/python3.10/site-packages/pip/_vendor/pygments/token.py
ADDED
|
@@ -0,0 +1,214 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.token
|
| 3 |
+
~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Basic token types and the standard tokens.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class _TokenType(tuple):
    # A token-type node.  Instances are singletons arranged in a tree;
    # subtypes are created lazily by attribute access (see __getattr__)
    # and each node is the tuple of attribute names on its path from the
    # root, e.g. Token.Literal.String == ('Literal', 'String').
    parent = None

    def split(self):
        # Return the path from the root token down to self, inclusive,
        # root first.
        buf = []
        node = self
        while node is not None:
            buf.append(node)
            node = node.parent
        buf.reverse()
        return buf

    def __init__(self, *args):
        # no need to call super.__init__
        self.subtypes = set()

    def __contains__(self, val):
        # A token "contains" itself and all of its subtypes: a subtype's
        # tuple starts with its ancestor's tuple.
        return self is val or (
            type(val) is self.__class__ and
            val[:len(self)] == self
        )

    def __getattr__(self, val):
        # Only capitalized names create subtypes; everything else falls
        # back to normal attribute lookup (and may raise AttributeError).
        if not val or not val[0].isupper():
            return tuple.__getattribute__(self, val)
        # Create the subtype once and cache it via setattr, so later
        # accesses bypass __getattr__ and return the same singleton.
        new = _TokenType(self + (val,))
        setattr(self, val, new)
        self.subtypes.add(new)
        new.parent = self
        return new

    def __repr__(self):
        return 'Token' + (self and '.' or '') + '.'.join(self)

    def __copy__(self):
        # These instances are supposed to be singletons
        return self

    def __deepcopy__(self, memo):
        # These instances are supposed to be singletons
        return self
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
# Root of the token-type tree; every other token is a (lazily created)
# subtype of this singleton.
Token = _TokenType()

# Special token types
Text = Token.Text
Whitespace = Text.Whitespace
Escape = Token.Escape
Error = Token.Error
# Text that doesn't belong to this lexer (e.g. HTML in PHP)
Other = Token.Other

# Common token types for source code
Keyword = Token.Keyword
Name = Token.Name
Literal = Token.Literal
String = Literal.String
Number = Literal.Number
Punctuation = Token.Punctuation
Operator = Token.Operator
Comment = Token.Comment

# Generic types for non-source code
Generic = Token.Generic

# String and some others are not direct children of Token.
# alias them:
Token.Token = Token
Token.String = String
Token.Number = Number
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def is_token_subtype(ttype, other):
    """
    Return True if ``ttype`` is a subtype of ``other``.

    Kept for backwards compatibility; prefer writing the containment test
    ``ttype in other`` directly.
    """
    # Membership delegates to _TokenType.__contains__, which treats a token
    # as containing itself and every one of its subtypes.
    contained = ttype in other
    return contained
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def string_to_tokentype(s):
    """
    Convert a string into a token type::

        >>> string_to_tokentype('String.Double')
        Token.Literal.String.Double
        >>> string_to_tokentype('Token.Literal.Number')
        Token.Literal.Number
        >>> string_to_tokentype('')
        Token

    Tokens that are already tokens are returned unchanged:

        >>> string_to_tokentype(String)
        Token.Literal.String
    """
    # Already a token type: pass it through unchanged.
    if isinstance(s, _TokenType):
        return s
    # The empty string maps to the root token.
    if not s:
        return Token
    # Walk the dotted segments from the root; attribute access creates any
    # missing subtype on the fly (see _TokenType.__getattr__).
    current = Token
    for segment in s.split('.'):
        current = getattr(current, segment)
    return current
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
# Map standard token types to short names, used in CSS class naming.
# If you add a new item, please be sure to run this file to perform
# a consistency check for duplicate values.
STANDARD_TYPES = {
    Token: '',

    Text: '',
    Whitespace: 'w',
    Escape: 'esc',
    Error: 'err',
    Other: 'x',

    Keyword: 'k',
    Keyword.Constant: 'kc',
    Keyword.Declaration: 'kd',
    Keyword.Namespace: 'kn',
    Keyword.Pseudo: 'kp',
    Keyword.Reserved: 'kr',
    Keyword.Type: 'kt',

    Name: 'n',
    Name.Attribute: 'na',
    Name.Builtin: 'nb',
    Name.Builtin.Pseudo: 'bp',
    Name.Class: 'nc',
    Name.Constant: 'no',
    Name.Decorator: 'nd',
    Name.Entity: 'ni',
    Name.Exception: 'ne',
    Name.Function: 'nf',
    Name.Function.Magic: 'fm',
    Name.Property: 'py',
    Name.Label: 'nl',
    Name.Namespace: 'nn',
    Name.Other: 'nx',
    Name.Tag: 'nt',
    Name.Variable: 'nv',
    Name.Variable.Class: 'vc',
    Name.Variable.Global: 'vg',
    Name.Variable.Instance: 'vi',
    Name.Variable.Magic: 'vm',

    Literal: 'l',
    Literal.Date: 'ld',

    String: 's',
    String.Affix: 'sa',
    String.Backtick: 'sb',
    String.Char: 'sc',
    String.Delimiter: 'dl',
    String.Doc: 'sd',
    String.Double: 's2',
    String.Escape: 'se',
    String.Heredoc: 'sh',
    String.Interpol: 'si',
    String.Other: 'sx',
    String.Regex: 'sr',
    String.Single: 's1',
    String.Symbol: 'ss',

    Number: 'm',
    Number.Bin: 'mb',
    Number.Float: 'mf',
    Number.Hex: 'mh',
    Number.Integer: 'mi',
    Number.Integer.Long: 'il',
    Number.Oct: 'mo',

    Operator: 'o',
    Operator.Word: 'ow',

    Punctuation: 'p',
    Punctuation.Marker: 'pm',

    Comment: 'c',
    Comment.Hashbang: 'ch',
    Comment.Multiline: 'cm',
    Comment.Preproc: 'cp',
    Comment.PreprocFile: 'cpf',
    Comment.Single: 'c1',
    Comment.Special: 'cs',

    Generic: 'g',
    Generic.Deleted: 'gd',
    Generic.Emph: 'ge',
    Generic.Error: 'gr',
    Generic.Heading: 'gh',
    Generic.Inserted: 'gi',
    Generic.Output: 'go',
    Generic.Prompt: 'gp',
    Generic.Strong: 'gs',
    Generic.Subheading: 'gu',
    Generic.EmphStrong: 'ges',
    Generic.Traceback: 'gt',
}
|
llava/lib/python3.10/site-packages/pip/_vendor/pygments/unistring.py
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.unistring
|
| 3 |
+
~~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Strings of all Unicode characters of a certain category.
|
| 6 |
+
Used for matching in Unicode-aware languages. Run to regenerate.
|
| 7 |
+
|
| 8 |
+
Inspired by chartypes_create.py from the MoinMoin project.
|
| 9 |
+
|
| 10 |
+
:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
|
| 11 |
+
:license: BSD, see LICENSE for details.
|
| 12 |
+
"""
|
| 13 |
+
|
| 14 |
+
Cc = '\x00-\x1f\x7f-\x9f'
|
| 15 |
+
|
| 16 |
+
Cf = '\xad\u0600-\u0605\u061c\u06dd\u070f\u08e2\u180e\u200b-\u200f\u202a-\u202e\u2060-\u2064\u2066-\u206f\ufeff\ufff9-\ufffb\U000110bd\U000110cd\U0001bca0-\U0001bca3\U0001d173-\U0001d17a\U000e0001\U000e0020-\U000e007f'
|
| 17 |
+
|
| 18 |
+
Cn = '\u0378-\u0379\u0380-\u0383\u038b\u038d\u03a2\u0530\u0557-\u0558\u058b-\u058c\u0590\u05c8-\u05cf\u05eb-\u05ee\u05f5-\u05ff\u061d\u070e\u074b-\u074c\u07b2-\u07bf\u07fb-\u07fc\u082e-\u082f\u083f\u085c-\u085d\u085f\u086b-\u089f\u08b5\u08be-\u08d2\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09c5-\u09c6\u09c9-\u09ca\u09cf-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09ff-\u0a00\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a50\u0a52-\u0a58\u0a5d\u0a5f-\u0a65\u0a77-\u0a80\u0a84\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0acf\u0ad1-\u0adf\u0ae4-\u0ae5\u0af2-\u0af8\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34\u0b3a-\u0b3b\u0b45-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b64-\u0b65\u0b78-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bcf\u0bd1-\u0bd6\u0bd8-\u0be5\u0bfb-\u0bff\u0c0d\u0c11\u0c29\u0c3a-\u0c3c\u0c45\u0c49\u0c4e-\u0c54\u0c57\u0c5b-\u0c5f\u0c64-\u0c65\u0c70-\u0c77\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbb\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce4-\u0ce5\u0cf0\u0cf3-\u0cff\u0d04\u0d0d\u0d11\u0d45\u0d49\u0d50-\u0d53\u0d64-\u0d65\u0d80-\u0d81\u0d84\u0d97-\u0d99\u0db2\u0dbc\u0dbe-\u0dbf\u0dc7-\u0dc9\u0dcb-\u0dce\u0dd5\u0dd7\u0de0-\u0de5\u0df0-\u0df1\u0df5-\u0e00\u0e3b-\u0e3e\u0e5c-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0edb\u0ee0-\u0eff\u0f48\u0f6d-\u0f70\u0f98\u0fbd\u0fcd\u0fdb-\u0fff\u10c6\u10c8-\u10cc\u10ce-\u10cf\u1249\u124e-\u124f\u1257\u1259\u125e-\u125f\u1289\u128e-\u128f\u12b1\u12b6-\u12b7\u12bf\u12c1\u12c6-\u12c7\u12d7\u1311\u1316-\u1317\u135b-\u135c\u137d-\u137f\u139a-\u139f\u13f6-\u13f7\u13fe-\u13ff\u169d-\u169f\u16f9-\u16ff\u170d\u1715-\u171f\u1737-\u173f\u1754-\u175f\u176d\u1771\u1774-\u177f\u17de-\u17df\u17ea-\u17ef\u17f
a-\u17ff\u180f\u181a-\u181f\u1879-\u187f\u18ab-\u18af\u18f6-\u18ff\u191f\u192c-\u192f\u193c-\u193f\u1941-\u1943\u196e-\u196f\u1975-\u197f\u19ac-\u19af\u19ca-\u19cf\u19db-\u19dd\u1a1c-\u1a1d\u1a5f\u1a7d-\u1a7e\u1a8a-\u1a8f\u1a9a-\u1a9f\u1aae-\u1aaf\u1abf-\u1aff\u1b4c-\u1b4f\u1b7d-\u1b7f\u1bf4-\u1bfb\u1c38-\u1c3a\u1c4a-\u1c4c\u1c89-\u1c8f\u1cbb-\u1cbc\u1cc8-\u1ccf\u1cfa-\u1cff\u1dfa\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fc5\u1fd4-\u1fd5\u1fdc\u1ff0-\u1ff1\u1ff5\u1fff\u2065\u2072-\u2073\u208f\u209d-\u209f\u20c0-\u20cf\u20f1-\u20ff\u218c-\u218f\u2427-\u243f\u244b-\u245f\u2b74-\u2b75\u2b96-\u2b97\u2bc9\u2bff\u2c2f\u2c5f\u2cf4-\u2cf8\u2d26\u2d28-\u2d2c\u2d2e-\u2d2f\u2d68-\u2d6e\u2d71-\u2d7e\u2d97-\u2d9f\u2da7\u2daf\u2db7\u2dbf\u2dc7\u2dcf\u2dd7\u2ddf\u2e4f-\u2e7f\u2e9a\u2ef4-\u2eff\u2fd6-\u2fef\u2ffc-\u2fff\u3040\u3097-\u3098\u3100-\u3104\u3130\u318f\u31bb-\u31bf\u31e4-\u31ef\u321f\u32ff\u4db6-\u4dbf\u9ff0-\u9fff\ua48d-\ua48f\ua4c7-\ua4cf\ua62c-\ua63f\ua6f8-\ua6ff\ua7ba-\ua7f6\ua82c-\ua82f\ua83a-\ua83f\ua878-\ua87f\ua8c6-\ua8cd\ua8da-\ua8df\ua954-\ua95e\ua97d-\ua97f\ua9ce\ua9da-\ua9dd\ua9ff\uaa37-\uaa3f\uaa4e-\uaa4f\uaa5a-\uaa5b\uaac3-\uaada\uaaf7-\uab00\uab07-\uab08\uab0f-\uab10\uab17-\uab1f\uab27\uab2f\uab66-\uab6f\uabee-\uabef\uabfa-\uabff\ud7a4-\ud7af\ud7c7-\ud7ca\ud7fc-\ud7ff\ufa6e-\ufa6f\ufada-\ufaff\ufb07-\ufb12\ufb18-\ufb1c\ufb37\ufb3d\ufb3f\ufb42\ufb45\ufbc2-\ufbd2\ufd40-\ufd4f\ufd90-\ufd91\ufdc8-\ufdef\ufdfe-\ufdff\ufe1a-\ufe1f\ufe53\ufe67\ufe6c-\ufe6f\ufe75\ufefd-\ufefe\uff00\uffbf-\uffc1\uffc8-\uffc9\uffd0-\uffd1\uffd8-\uffd9\uffdd-\uffdf\uffe7\uffef-\ufff8\ufffe-\uffff\U0001000c\U00010027\U0001003b\U0001003e\U0001004e-\U0001004f\U0001005e-\U0001007f\U000100fb-\U000100ff\U00010103-\U00010106\U00010134-\U00010136\U0001018f\U0001019c-\U0001019f\U000101a1-\U000101cf\U000101fe-\U0001027f\U0001029d-\U0001029f\U000102d1-\U000102df\U000102fc-\U000102ff\U00010324-\U0001032c\U0001034b-\U0001034f\U0001037b-\U000
1037f\U0001039e\U000103c4-\U000103c7\U000103d6-\U000103ff\U0001049e-\U0001049f\U000104aa-\U000104af\U000104d4-\U000104d7\U000104fc-\U000104ff\U00010528-\U0001052f\U00010564-\U0001056e\U00010570-\U000105ff\U00010737-\U0001073f\U00010756-\U0001075f\U00010768-\U000107ff\U00010806-\U00010807\U00010809\U00010836\U00010839-\U0001083b\U0001083d-\U0001083e\U00010856\U0001089f-\U000108a6\U000108b0-\U000108df\U000108f3\U000108f6-\U000108fa\U0001091c-\U0001091e\U0001093a-\U0001093e\U00010940-\U0001097f\U000109b8-\U000109bb\U000109d0-\U000109d1\U00010a04\U00010a07-\U00010a0b\U00010a14\U00010a18\U00010a36-\U00010a37\U00010a3b-\U00010a3e\U00010a49-\U00010a4f\U00010a59-\U00010a5f\U00010aa0-\U00010abf\U00010ae7-\U00010aea\U00010af7-\U00010aff\U00010b36-\U00010b38\U00010b56-\U00010b57\U00010b73-\U00010b77\U00010b92-\U00010b98\U00010b9d-\U00010ba8\U00010bb0-\U00010bff\U00010c49-\U00010c7f\U00010cb3-\U00010cbf\U00010cf3-\U00010cf9\U00010d28-\U00010d2f\U00010d3a-\U00010e5f\U00010e7f-\U00010eff\U00010f28-\U00010f2f\U00010f5a-\U00010fff\U0001104e-\U00011051\U00011070-\U0001107e\U000110c2-\U000110cc\U000110ce-\U000110cf\U000110e9-\U000110ef\U000110fa-\U000110ff\U00011135\U00011147-\U0001114f\U00011177-\U0001117f\U000111ce-\U000111cf\U000111e0\U000111f5-\U000111ff\U00011212\U0001123f-\U0001127f\U00011287\U00011289\U0001128e\U0001129e\U000112aa-\U000112af\U000112eb-\U000112ef\U000112fa-\U000112ff\U00011304\U0001130d-\U0001130e\U00011311-\U00011312\U00011329\U00011331\U00011334\U0001133a\U00011345-\U00011346\U00011349-\U0001134a\U0001134e-\U0001134f\U00011351-\U00011356\U00011358-\U0001135c\U00011364-\U00011365\U0001136d-\U0001136f\U00011375-\U000113ff\U0001145a\U0001145c\U0001145f-\U0001147f\U000114c8-\U000114cf\U000114da-\U0001157f\U000115b6-\U000115b7\U000115de-\U000115ff\U00011645-\U0001164f\U0001165a-\U0001165f\U0001166d-\U0001167f\U000116b8-\U000116bf\U000116ca-\U000116ff\U0001171b-\U0001171c\U0001172c-\U0001172f\U00011740-\U000117ff\U0001183c-\U0001189f\U000118f3-\U000118fe\U00011900-
\U000119ff\U00011a48-\U00011a4f\U00011a84-\U00011a85\U00011aa3-\U00011abf\U00011af9-\U00011bff\U00011c09\U00011c37\U00011c46-\U00011c4f\U00011c6d-\U00011c6f\U00011c90-\U00011c91\U00011ca8\U00011cb7-\U00011cff\U00011d07\U00011d0a\U00011d37-\U00011d39\U00011d3b\U00011d3e\U00011d48-\U00011d4f\U00011d5a-\U00011d5f\U00011d66\U00011d69\U00011d8f\U00011d92\U00011d99-\U00011d9f\U00011daa-\U00011edf\U00011ef9-\U00011fff\U0001239a-\U000123ff\U0001246f\U00012475-\U0001247f\U00012544-\U00012fff\U0001342f-\U000143ff\U00014647-\U000167ff\U00016a39-\U00016a3f\U00016a5f\U00016a6a-\U00016a6d\U00016a70-\U00016acf\U00016aee-\U00016aef\U00016af6-\U00016aff\U00016b46-\U00016b4f\U00016b5a\U00016b62\U00016b78-\U00016b7c\U00016b90-\U00016e3f\U00016e9b-\U00016eff\U00016f45-\U00016f4f\U00016f7f-\U00016f8e\U00016fa0-\U00016fdf\U00016fe2-\U00016fff\U000187f2-\U000187ff\U00018af3-\U0001afff\U0001b11f-\U0001b16f\U0001b2fc-\U0001bbff\U0001bc6b-\U0001bc6f\U0001bc7d-\U0001bc7f\U0001bc89-\U0001bc8f\U0001bc9a-\U0001bc9b\U0001bca4-\U0001cfff\U0001d0f6-\U0001d0ff\U0001d127-\U0001d128\U0001d1e9-\U0001d1ff\U0001d246-\U0001d2df\U0001d2f4-\U0001d2ff\U0001d357-\U0001d35f\U0001d379-\U0001d3ff\U0001d455\U0001d49d\U0001d4a0-\U0001d4a1\U0001d4a3-\U0001d4a4\U0001d4a7-\U0001d4a8\U0001d4ad\U0001d4ba\U0001d4bc\U0001d4c4\U0001d506\U0001d50b-\U0001d50c\U0001d515\U0001d51d\U0001d53a\U0001d53f\U0001d545\U0001d547-\U0001d549\U0001d551\U0001d6a6-\U0001d6a7\U0001d7cc-\U0001d7cd\U0001da8c-\U0001da9a\U0001daa0\U0001dab0-\U0001dfff\U0001e007\U0001e019-\U0001e01a\U0001e022\U0001e025\U0001e02b-\U0001e7ff\U0001e8c5-\U0001e8c6\U0001e8d7-\U0001e8ff\U0001e94b-\U0001e94f\U0001e95a-\U0001e95d\U0001e960-\U0001ec70\U0001ecb5-\U0001edff\U0001ee04\U0001ee20\U0001ee23\U0001ee25-\U0001ee26\U0001ee28\U0001ee33\U0001ee38\U0001ee3a\U0001ee3c-\U0001ee41\U0001ee43-\U0001ee46\U0001ee48\U0001ee4a\U0001ee4c\U0001ee50\U0001ee53\U0001ee55-\U0001ee56\U0001ee58\U0001ee5a\U0001ee5c\U0001ee5e\U0001ee60\U0001ee63\U0001ee65-\U0001ee66\U0001ee6b\U0001ee73
\U0001ee78\U0001ee7d\U0001ee7f\U0001ee8a\U0001ee9c-\U0001eea0\U0001eea4\U0001eeaa\U0001eebc-\U0001eeef\U0001eef2-\U0001efff\U0001f02c-\U0001f02f\U0001f094-\U0001f09f\U0001f0af-\U0001f0b0\U0001f0c0\U0001f0d0\U0001f0f6-\U0001f0ff\U0001f10d-\U0001f10f\U0001f16c-\U0001f16f\U0001f1ad-\U0001f1e5\U0001f203-\U0001f20f\U0001f23c-\U0001f23f\U0001f249-\U0001f24f\U0001f252-\U0001f25f\U0001f266-\U0001f2ff\U0001f6d5-\U0001f6df\U0001f6ed-\U0001f6ef\U0001f6fa-\U0001f6ff\U0001f774-\U0001f77f\U0001f7d9-\U0001f7ff\U0001f80c-\U0001f80f\U0001f848-\U0001f84f\U0001f85a-\U0001f85f\U0001f888-\U0001f88f\U0001f8ae-\U0001f8ff\U0001f90c-\U0001f90f\U0001f93f\U0001f971-\U0001f972\U0001f977-\U0001f979\U0001f97b\U0001f9a3-\U0001f9af\U0001f9ba-\U0001f9bf\U0001f9c3-\U0001f9cf\U0001fa00-\U0001fa5f\U0001fa6e-\U0001ffff\U0002a6d7-\U0002a6ff\U0002b735-\U0002b73f\U0002b81e-\U0002b81f\U0002cea2-\U0002ceaf\U0002ebe1-\U0002f7ff\U0002fa1e-\U000e0000\U000e0002-\U000e001f\U000e0080-\U000e00ff\U000e01f0-\U000effff\U000ffffe-\U000fffff\U0010fffe-\U0010ffff'
|
| 19 |
+
|
| 20 |
+
Co = '\ue000-\uf8ff\U000f0000-\U000ffffd\U00100000-\U0010fffd'
|
| 21 |
+
|
| 22 |
+
Cs = '\ud800-\udbff\\\udc00\udc01-\udfff'
|
| 23 |
+
|
| 24 |
+
Ll = 'a-z\xb5\xdf-\xf6\xf8-\xff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137-\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148-\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e-\u0180\u0183\u0185\u0188\u018c-\u018d\u0192\u0195\u0199-\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa-\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9-\u01ba\u01bd-\u01bf\u01c6\u01c9\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc-\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef-\u01f0\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233-\u0239\u023c\u023f-\u0240\u0242\u0247\u0249\u024b\u024d\u024f-\u0293\u0295-\u02af\u0371\u0373\u0377\u037b-\u037d\u0390\u03ac-\u03ce\u03d0-\u03d1\u03d5-\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef-\u03f3\u03f5\u03f8\u03fb-\u03fc\u0430-\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce-\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0527\u0529\u052b\u052d\u052f\u0560-\u0588\u10d0-\u10fa\u10fd-\u10ff\u13f8-\u13fd\u1c80-\u1c88\u1d00-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1e01\u1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\
u1e29\u1e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95-\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff-\u1f07\u1f10-\u1f15\u1f20-\u1f27\u1f30-\u1f37\u1f40-\u1f45\u1f50-\u1f57\u1f60-\u1f67\u1f70-\u1f7d\u1f80-\u1f87\u1f90-\u1f97\u1fa0-\u1fa7\u1fb0-\u1fb4\u1fb6-\u1fb7\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fc7\u1fd0-\u1fd3\u1fd6-\u1fd7\u1fe0-\u1fe7\u1ff2-\u1ff4\u1ff6-\u1ff7\u210a\u210e-\u210f\u2113\u212f\u2134\u2139\u213c-\u213d\u2146-\u2149\u214e\u2184\u2c30-\u2c5e\u2c61\u2c65-\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73-\u2c74\u2c76-\u2c7b\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cbf\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3-\u2ce4\u2cec\u2cee\u2cf3\u2d00-\u2d25\u2d27\u2d2d\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua661\ua663\ua665\ua667\ua669\ua66b\ua66d\ua681\ua683\ua685\ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua697\ua699\ua69b\ua723\ua725\ua727\ua729\ua72b\ua72d\ua72f-\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f\ua771-\ua778\ua77a\ua77c\ua77f\ua781\ua783\ua785\ua787\ua78c\ua78e\ua791\ua793-\ua795\ua797\ua799\ua79b\ua79d\ua79f\ua7a1\ua7a3\ua7a5\ua7a7
\ua7a9\ua7af\ua7b5\ua7b7\ua7b9\ua7fa\uab30-\uab5a\uab60-\uab65\uab70-\uabbf\ufb00-\ufb06\ufb13-\ufb17\uff41-\uff5a\U00010428-\U0001044f\U000104d8-\U000104fb\U00010cc0-\U00010cf2\U000118c0-\U000118df\U00016e60-\U00016e7f\U0001d41a-\U0001d433\U0001d44e-\U0001d454\U0001d456-\U0001d467\U0001d482-\U0001d49b\U0001d4b6-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d4cf\U0001d4ea-\U0001d503\U0001d51e-\U0001d537\U0001d552-\U0001d56b\U0001d586-\U0001d59f\U0001d5ba-\U0001d5d3\U0001d5ee-\U0001d607\U0001d622-\U0001d63b\U0001d656-\U0001d66f\U0001d68a-\U0001d6a5\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6e1\U0001d6fc-\U0001d714\U0001d716-\U0001d71b\U0001d736-\U0001d74e\U0001d750-\U0001d755\U0001d770-\U0001d788\U0001d78a-\U0001d78f\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7c9\U0001d7cb\U0001e922-\U0001e943'
|
| 25 |
+
|
| 26 |
+
Lm = '\u02b0-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5-\u06e6\u07f4-\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78-\u1c7d\u1d2c-\u1d6a\u1d78\u1d9b-\u1dbf\u2071\u207f\u2090-\u209c\u2c7c-\u2c7d\u2d6f\u2e2f\u3005\u3031-\u3035\u303b\u309d-\u309e\u30fc-\u30fe\ua015\ua4f8-\ua4fd\ua60c\ua67f\ua69c-\ua69d\ua717-\ua71f\ua770\ua788\ua7f8-\ua7f9\ua9cf\ua9e6\uaa70\uaadd\uaaf3-\uaaf4\uab5c-\uab5f\uff70\uff9e-\uff9f\U00016b40-\U00016b43\U00016f93-\U00016f9f\U00016fe0-\U00016fe1'
|
| 27 |
+
|
| 28 |
+
Lo = '\xaa\xba\u01bb\u01c0-\u01c3\u0294\u05d0-\u05ea\u05ef-\u05f2\u0620-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u0800-\u0815\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0972-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2-\u0eb3\u0ebd\u0ec0-\u0ec4\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u1100-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16f1-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17dc\u1820-\u1842\u1844-\u18
78\u1880-\u1884\u1887-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u2135-\u2138\u2d30-\u2d67\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3006\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua014\ua016-\ua48c\ua4d0-\ua4f7\ua500-\ua60b\ua610-\ua61f\ua62a-\ua62b\ua66e\ua6a0-\ua6e5\ua78f\ua7f7\ua7fb-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9e0-\ua9e4\ua9e7-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa6f\uaa71-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadc\uaae0-\uaaea\uaaf2\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff66-\uff6f\uff71-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U00010340\U00010342-\U00010349\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U00010450-\U0001049d\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860
-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016f00-\U00016f44\U00016f50\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U00
01bc90-\U0001bc99\U0001e800-\U0001e8c4\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d'
|
| 29 |
+
|
| 30 |
+
Lt = '\u01c5\u01c8\u01cb\u01f2\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fbc\u1fcc\u1ffc'
|
| 31 |
+
|
| 32 |
+
Lu = 'A-Z\xc0-\xd6\xd8-\xde\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u037f\u0386\u0388-\u038a\u038c\u038e-\u038f\u0391-\u03a1\u03a3-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0526\u0528\u052a\u052c\u052e\u0531-\u0556\u10a0-\u10c5\u10c7\u10cd\u13a0-\u13f5\u1c90-\u1cba\u1cbd-\u1cbf\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e2
4\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59\u1f5b\u1f5d\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c70\u2c72\u2c75\u2c7e-\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ceb\u2ced\u2cf2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua660\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua698\ua69a\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\ua78d\ua790\ua792\ua796\ua798\ua79a\ua79c\ua79e\ua7a0\ua7a2\ua7a4\ua7a6\ua7a8\ua7aa-\ua7ae\ua7b0-\ua7b4\ua7b6\ua7b8\uff21-\uff3a\U00010400-\U00010427\U000104b0-\U000104d3
\U00010c80-\U00010cb2\U000118a0-\U000118bf\U00016e40-\U00016e5f\U0001d400-\U0001d419\U0001d434-\U0001d44d\U0001d468-\U0001d481\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b5\U0001d4d0-\U0001d4e9\U0001d504-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d538-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d56c-\U0001d585\U0001d5a0-\U0001d5b9\U0001d5d4-\U0001d5ed\U0001d608-\U0001d621\U0001d63c-\U0001d655\U0001d670-\U0001d689\U0001d6a8-\U0001d6c0\U0001d6e2-\U0001d6fa\U0001d71c-\U0001d734\U0001d756-\U0001d76e\U0001d790-\U0001d7a8\U0001d7ca\U0001e900-\U0001e921'
|
| 33 |
+
|
| 34 |
+
Mc = '\u0903\u093b\u093e-\u0940\u0949-\u094c\u094e-\u094f\u0982-\u0983\u09be-\u09c0\u09c7-\u09c8\u09cb-\u09cc\u09d7\u0a03\u0a3e-\u0a40\u0a83\u0abe-\u0ac0\u0ac9\u0acb-\u0acc\u0b02-\u0b03\u0b3e\u0b40\u0b47-\u0b48\u0b4b-\u0b4c\u0b57\u0bbe-\u0bbf\u0bc1-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcc\u0bd7\u0c01-\u0c03\u0c41-\u0c44\u0c82-\u0c83\u0cbe\u0cc0-\u0cc4\u0cc7-\u0cc8\u0cca-\u0ccb\u0cd5-\u0cd6\u0d02-\u0d03\u0d3e-\u0d40\u0d46-\u0d48\u0d4a-\u0d4c\u0d57\u0d82-\u0d83\u0dcf-\u0dd1\u0dd8-\u0ddf\u0df2-\u0df3\u0f3e-\u0f3f\u0f7f\u102b-\u102c\u1031\u1038\u103b-\u103c\u1056-\u1057\u1062-\u1064\u1067-\u106d\u1083-\u1084\u1087-\u108c\u108f\u109a-\u109c\u17b6\u17be-\u17c5\u17c7-\u17c8\u1923-\u1926\u1929-\u192b\u1930-\u1931\u1933-\u1938\u1a19-\u1a1a\u1a55\u1a57\u1a61\u1a63-\u1a64\u1a6d-\u1a72\u1b04\u1b35\u1b3b\u1b3d-\u1b41\u1b43-\u1b44\u1b82\u1ba1\u1ba6-\u1ba7\u1baa\u1be7\u1bea-\u1bec\u1bee\u1bf2-\u1bf3\u1c24-\u1c2b\u1c34-\u1c35\u1ce1\u1cf2-\u1cf3\u1cf7\u302e-\u302f\ua823-\ua824\ua827\ua880-\ua881\ua8b4-\ua8c3\ua952-\ua953\ua983\ua9b4-\ua9b5\ua9ba-\ua9bb\ua9bd-\ua9c0\uaa2f-\uaa30\uaa33-\uaa34\uaa4d\uaa7b\uaa7d\uaaeb\uaaee-\uaaef\uaaf5\uabe3-\uabe4\uabe6-\uabe7\uabe9-\uabea\uabec\U00011000\U00011002\U00011082\U000110b0-\U000110b2\U000110b7-\U000110b8\U0001112c\U00011145-\U00011146\U00011182\U000111b3-\U000111b5\U000111bf-\U000111c0\U0001122c-\U0001122e\U00011232-\U00011233\U00011235\U000112e0-\U000112e2\U00011302-\U00011303\U0001133e-\U0001133f\U00011341-\U00011344\U00011347-\U00011348\U0001134b-\U0001134d\U00011357\U00011362-\U00011363\U00011435-\U00011437\U00011440-\U00011441\U00011445\U000114b0-\U000114b2\U000114b9\U000114bb-\U000114be\U000114c1\U000115af-\U000115b1\U000115b8-\U000115bb\U000115be\U00011630-\U00011632\U0001163b-\U0001163c\U0001163e\U000116ac\U000116ae-\U000116af\U000116b6\U00011720-\U00011721\U00011726\U0001182c-\U0001182e\U00011838\U00011a39\U00011a57-\U00011a58\U00011a97\U00011c2f\U00011c3e\U00011ca9\U00011cb1\U00011cb4\U00011d8a-\U00011d8e\U00011d93-\U00011d94\U00011d96\U
00011ef5-\U00011ef6\U00016f51-\U00016f7e\U0001d165-\U0001d166\U0001d16d-\U0001d172'
|
| 35 |
+
|
| 36 |
+
Me = '\u0488-\u0489\u1abe\u20dd-\u20e0\u20e2-\u20e4\ua670-\ua672'
|
| 37 |
+
|
| 38 |
+
Mn = '\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u0610-\u061a\u064b-\u065f\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7-\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u07fd\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08d3-\u08e1\u08e3-\u0902\u093a\u093c\u0941-\u0948\u094d\u0951-\u0957\u0962-\u0963\u0981\u09bc\u09c1-\u09c4\u09cd\u09e2-\u09e3\u09fe\u0a01-\u0a02\u0a3c\u0a41-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a70-\u0a71\u0a75\u0a81-\u0a82\u0abc\u0ac1-\u0ac5\u0ac7-\u0ac8\u0acd\u0ae2-\u0ae3\u0afa-\u0aff\u0b01\u0b3c\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b62-\u0b63\u0b82\u0bc0\u0bcd\u0c00\u0c04\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c62-\u0c63\u0c81\u0cbc\u0cbf\u0cc6\u0ccc-\u0ccd\u0ce2-\u0ce3\u0d00-\u0d01\u0d3b-\u0d3c\u0d41-\u0d44\u0d4d\u0d62-\u0d63\u0dca\u0dd2-\u0dd4\u0dd6\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb-\u0ebc\u0ec8-\u0ecd\u0f18-\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86-\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039-\u103a\u103d-\u103e\u1058-\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085-\u1086\u108d\u109d\u135d-\u135f\u1712-\u1714\u1732-\u1734\u1752-\u1753\u1772-\u1773\u17b4-\u17b5\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u1885-\u1886\u18a9\u1920-\u1922\u1927-\u1928\u1932\u1939-\u193b\u1a17-\u1a18\u1a1b\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1ab0-\u1abd\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80-\u1b81\u1ba2-\u1ba5\u1ba8-\u1ba9\u1bab-\u1bad\u1be6\u1be8-\u1be9\u1bed\u1bef-\u1bf1\u1c2c-\u1c33\u1c36-\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1cf4\u1cf8-\u1cf9\u1dc0-\u1df9\u1dfb-\u1dff\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302d\u3099-\u309a\ua66f\ua674-\ua67d\ua69e-\ua69f\ua6f0-\ua6f1\ua802\ua806\ua80b\ua825-\ua826\ua8c4-\ua8c5\ua8e0-\ua8f1\ua8ff\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\ua9e5\uaa29-\uaa2e\uaa31-
\uaa32\uaa35-\uaa36\uaa43\uaa4c\uaa7c\uaab0\uaab2-\uaab4\uaab7-\uaab8\uaabe-\uaabf\uaac1\uaaec-\uaaed\uaaf6\uabe5\uabe8\uabed\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\U000101fd\U000102e0\U00010376-\U0001037a\U00010a01-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a0f\U00010a38-\U00010a3a\U00010a3f\U00010ae5-\U00010ae6\U00010d24-\U00010d27\U00010f46-\U00010f50\U00011001\U00011038-\U00011046\U0001107f-\U00011081\U000110b3-\U000110b6\U000110b9-\U000110ba\U00011100-\U00011102\U00011127-\U0001112b\U0001112d-\U00011134\U00011173\U00011180-\U00011181\U000111b6-\U000111be\U000111c9-\U000111cc\U0001122f-\U00011231\U00011234\U00011236-\U00011237\U0001123e\U000112df\U000112e3-\U000112ea\U00011300-\U00011301\U0001133b-\U0001133c\U00011340\U00011366-\U0001136c\U00011370-\U00011374\U00011438-\U0001143f\U00011442-\U00011444\U00011446\U0001145e\U000114b3-\U000114b8\U000114ba\U000114bf-\U000114c0\U000114c2-\U000114c3\U000115b2-\U000115b5\U000115bc-\U000115bd\U000115bf-\U000115c0\U000115dc-\U000115dd\U00011633-\U0001163a\U0001163d\U0001163f-\U00011640\U000116ab\U000116ad\U000116b0-\U000116b5\U000116b7\U0001171d-\U0001171f\U00011722-\U00011725\U00011727-\U0001172b\U0001182f-\U00011837\U00011839-\U0001183a\U00011a01-\U00011a0a\U00011a33-\U00011a38\U00011a3b-\U00011a3e\U00011a47\U00011a51-\U00011a56\U00011a59-\U00011a5b\U00011a8a-\U00011a96\U00011a98-\U00011a99\U00011c30-\U00011c36\U00011c38-\U00011c3d\U00011c3f\U00011c92-\U00011ca7\U00011caa-\U00011cb0\U00011cb2-\U00011cb3\U00011cb5-\U00011cb6\U00011d31-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f-\U00011d45\U00011d47\U00011d90-\U00011d91\U00011d95\U00011d97\U00011ef3-\U00011ef4\U00016af0-\U00016af4\U00016b30-\U00016b36\U00016f8f-\U00016f92\U0001bc9d-\U0001bc9e\U0001d167-\U0001d169\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e
023-\U0001e024\U0001e026-\U0001e02a\U0001e8d0-\U0001e8d6\U0001e944-\U0001e94a\U000e0100-\U000e01ef'
|
| 39 |
+
|
| 40 |
+
Nd = '0-9\u0660-\u0669\u06f0-\u06f9\u07c0-\u07c9\u0966-\u096f\u09e6-\u09ef\u0a66-\u0a6f\u0ae6-\u0aef\u0b66-\u0b6f\u0be6-\u0bef\u0c66-\u0c6f\u0ce6-\u0cef\u0d66-\u0d6f\u0de6-\u0def\u0e50-\u0e59\u0ed0-\u0ed9\u0f20-\u0f29\u1040-\u1049\u1090-\u1099\u17e0-\u17e9\u1810-\u1819\u1946-\u194f\u19d0-\u19d9\u1a80-\u1a89\u1a90-\u1a99\u1b50-\u1b59\u1bb0-\u1bb9\u1c40-\u1c49\u1c50-\u1c59\ua620-\ua629\ua8d0-\ua8d9\ua900-\ua909\ua9d0-\ua9d9\ua9f0-\ua9f9\uaa50-\uaa59\uabf0-\uabf9\uff10-\uff19\U000104a0-\U000104a9\U00010d30-\U00010d39\U00011066-\U0001106f\U000110f0-\U000110f9\U00011136-\U0001113f\U000111d0-\U000111d9\U000112f0-\U000112f9\U00011450-\U00011459\U000114d0-\U000114d9\U00011650-\U00011659\U000116c0-\U000116c9\U00011730-\U00011739\U000118e0-\U000118e9\U00011c50-\U00011c59\U00011d50-\U00011d59\U00011da0-\U00011da9\U00016a60-\U00016a69\U00016b50-\U00016b59\U0001d7ce-\U0001d7ff\U0001e950-\U0001e959'
|
| 41 |
+
|
| 42 |
+
Nl = '\u16ee-\u16f0\u2160-\u2182\u2185-\u2188\u3007\u3021-\u3029\u3038-\u303a\ua6e6-\ua6ef\U00010140-\U00010174\U00010341\U0001034a\U000103d1-\U000103d5\U00012400-\U0001246e'
|
| 43 |
+
|
| 44 |
+
No = '\xb2-\xb3\xb9\xbc-\xbe\u09f4-\u09f9\u0b72-\u0b77\u0bf0-\u0bf2\u0c78-\u0c7e\u0d58-\u0d5e\u0d70-\u0d78\u0f2a-\u0f33\u1369-\u137c\u17f0-\u17f9\u19da\u2070\u2074-\u2079\u2080-\u2089\u2150-\u215f\u2189\u2460-\u249b\u24ea-\u24ff\u2776-\u2793\u2cfd\u3192-\u3195\u3220-\u3229\u3248-\u324f\u3251-\u325f\u3280-\u3289\u32b1-\u32bf\ua830-\ua835\U00010107-\U00010133\U00010175-\U00010178\U0001018a-\U0001018b\U000102e1-\U000102fb\U00010320-\U00010323\U00010858-\U0001085f\U00010879-\U0001087f\U000108a7-\U000108af\U000108fb-\U000108ff\U00010916-\U0001091b\U000109bc-\U000109bd\U000109c0-\U000109cf\U000109d2-\U000109ff\U00010a40-\U00010a48\U00010a7d-\U00010a7e\U00010a9d-\U00010a9f\U00010aeb-\U00010aef\U00010b58-\U00010b5f\U00010b78-\U00010b7f\U00010ba9-\U00010baf\U00010cfa-\U00010cff\U00010e60-\U00010e7e\U00010f1d-\U00010f26\U00010f51-\U00010f54\U00011052-\U00011065\U000111e1-\U000111f4\U0001173a-\U0001173b\U000118ea-\U000118f2\U00011c5a-\U00011c6c\U00016b5b-\U00016b61\U00016e80-\U00016e96\U0001d2e0-\U0001d2f3\U0001d360-\U0001d378\U0001e8c7-\U0001e8cf\U0001ec71-\U0001ecab\U0001ecad-\U0001ecaf\U0001ecb1-\U0001ecb4\U0001f100-\U0001f10c'
|
| 45 |
+
|
| 46 |
+
Pc = '_\u203f-\u2040\u2054\ufe33-\ufe34\ufe4d-\ufe4f\uff3f'
|
| 47 |
+
|
| 48 |
+
Pd = '\\-\u058a\u05be\u1400\u1806\u2010-\u2015\u2e17\u2e1a\u2e3a-\u2e3b\u2e40\u301c\u3030\u30a0\ufe31-\ufe32\ufe58\ufe63\uff0d'
|
| 49 |
+
|
| 50 |
+
Pe = ')\\]}\u0f3b\u0f3d\u169c\u2046\u207e\u208e\u2309\u230b\u232a\u2769\u276b\u276d\u276f\u2771\u2773\u2775\u27c6\u27e7\u27e9\u27eb\u27ed\u27ef\u2984\u2986\u2988\u298a\u298c\u298e\u2990\u2992\u2994\u2996\u2998\u29d9\u29db\u29fd\u2e23\u2e25\u2e27\u2e29\u3009\u300b\u300d\u300f\u3011\u3015\u3017\u3019\u301b\u301e-\u301f\ufd3e\ufe18\ufe36\ufe38\ufe3a\ufe3c\ufe3e\ufe40\ufe42\ufe44\ufe48\ufe5a\ufe5c\ufe5e\uff09\uff3d\uff5d\uff60\uff63'
|
| 51 |
+
|
| 52 |
+
Pf = '\xbb\u2019\u201d\u203a\u2e03\u2e05\u2e0a\u2e0d\u2e1d\u2e21'
|
| 53 |
+
|
| 54 |
+
Pi = '\xab\u2018\u201b-\u201c\u201f\u2039\u2e02\u2e04\u2e09\u2e0c\u2e1c\u2e20'
|
| 55 |
+
|
| 56 |
+
Po = "!-#%-'*,.-/:-;?-@\\\\\xa1\xa7\xb6-\xb7\xbf\u037e\u0387\u055a-\u055f\u0589\u05c0\u05c3\u05c6\u05f3-\u05f4\u0609-\u060a\u060c-\u060d\u061b\u061e-\u061f\u066a-\u066d\u06d4\u0700-\u070d\u07f7-\u07f9\u0830-\u083e\u085e\u0964-\u0965\u0970\u09fd\u0a76\u0af0\u0c84\u0df4\u0e4f\u0e5a-\u0e5b\u0f04-\u0f12\u0f14\u0f85\u0fd0-\u0fd4\u0fd9-\u0fda\u104a-\u104f\u10fb\u1360-\u1368\u166d-\u166e\u16eb-\u16ed\u1735-\u1736\u17d4-\u17d6\u17d8-\u17da\u1800-\u1805\u1807-\u180a\u1944-\u1945\u1a1e-\u1a1f\u1aa0-\u1aa6\u1aa8-\u1aad\u1b5a-\u1b60\u1bfc-\u1bff\u1c3b-\u1c3f\u1c7e-\u1c7f\u1cc0-\u1cc7\u1cd3\u2016-\u2017\u2020-\u2027\u2030-\u2038\u203b-\u203e\u2041-\u2043\u2047-\u2051\u2053\u2055-\u205e\u2cf9-\u2cfc\u2cfe-\u2cff\u2d70\u2e00-\u2e01\u2e06-\u2e08\u2e0b\u2e0e-\u2e16\u2e18-\u2e19\u2e1b\u2e1e-\u2e1f\u2e2a-\u2e2e\u2e30-\u2e39\u2e3c-\u2e3f\u2e41\u2e43-\u2e4e\u3001-\u3003\u303d\u30fb\ua4fe-\ua4ff\ua60d-\ua60f\ua673\ua67e\ua6f2-\ua6f7\ua874-\ua877\ua8ce-\ua8cf\ua8f8-\ua8fa\ua8fc\ua92e-\ua92f\ua95f\ua9c1-\ua9cd\ua9de-\ua9df\uaa5c-\uaa5f\uaade-\uaadf\uaaf0-\uaaf1\uabeb\ufe10-\ufe16\ufe19\ufe30\ufe45-\ufe46\ufe49-\ufe4c\ufe50-\ufe52\ufe54-\ufe57\ufe5f-\ufe61\ufe68\ufe6a-\ufe6b\uff01-\uff03\uff05-\uff07\uff0a\uff0c\uff0e-\uff0f\uff1a-\uff1b\uff1f-\uff20\uff3c\uff61\uff64-\uff65\U00010100-\U00010102\U0001039f\U000103d0\U0001056f\U00010857\U0001091f\U0001093f\U00010a50-\U00010a58\U00010a7f\U00010af0-\U00010af6\U00010b39-\U00010b3f\U00010b99-\U00010b9c\U00010f55-\U00010f59\U00011047-\U0001104d\U000110bb-\U000110bc\U000110be-\U000110c1\U00011140-\U00011143\U00011174-\U00011175\U000111c5-\U000111c8\U000111cd\U000111db\U000111dd-\U000111df\U00011238-\U0001123d\U000112a9\U0001144b-\U0001144f\U0001145b\U0001145d\U000114c6\U000115c1-\U000115d7\U00011641-\U00011643\U00011660-\U0001166c\U0001173c-\U0001173e\U0001183b\U00011a3f-\U00011a46\U00011a9a-\U00011a9c\U00011a9e-\U00011aa2\U00011c41-\U00011c45\U00011c70-\U00011c71\U00011ef7-\U00011ef8\U00012470-\U00012474\U00016a6e-\U00016a6f\U00016af5\U00016b37-\U
00016b3b\U00016b44\U00016e97-\U00016e9a\U0001bc9f\U0001da87-\U0001da8b\U0001e95e-\U0001e95f"
|
| 57 |
+
|
| 58 |
+
Ps = '(\\[{\u0f3a\u0f3c\u169b\u201a\u201e\u2045\u207d\u208d\u2308\u230a\u2329\u2768\u276a\u276c\u276e\u2770\u2772\u2774\u27c5\u27e6\u27e8\u27ea\u27ec\u27ee\u2983\u2985\u2987\u2989\u298b\u298d\u298f\u2991\u2993\u2995\u2997\u29d8\u29da\u29fc\u2e22\u2e24\u2e26\u2e28\u2e42\u3008\u300a\u300c\u300e\u3010\u3014\u3016\u3018\u301a\u301d\ufd3f\ufe17\ufe35\ufe37\ufe39\ufe3b\ufe3d\ufe3f\ufe41\ufe43\ufe47\ufe59\ufe5b\ufe5d\uff08\uff3b\uff5b\uff5f\uff62'
|
| 59 |
+
|
| 60 |
+
Sc = '$\xa2-\xa5\u058f\u060b\u07fe-\u07ff\u09f2-\u09f3\u09fb\u0af1\u0bf9\u0e3f\u17db\u20a0-\u20bf\ua838\ufdfc\ufe69\uff04\uffe0-\uffe1\uffe5-\uffe6\U0001ecb0'
|
| 61 |
+
|
| 62 |
+
Sk = '\\^`\xa8\xaf\xb4\xb8\u02c2-\u02c5\u02d2-\u02df\u02e5-\u02eb\u02ed\u02ef-\u02ff\u0375\u0384-\u0385\u1fbd\u1fbf-\u1fc1\u1fcd-\u1fcf\u1fdd-\u1fdf\u1fed-\u1fef\u1ffd-\u1ffe\u309b-\u309c\ua700-\ua716\ua720-\ua721\ua789-\ua78a\uab5b\ufbb2-\ufbc1\uff3e\uff40\uffe3\U0001f3fb-\U0001f3ff'
|
| 63 |
+
|
| 64 |
+
Sm = '+<->|~\xac\xb1\xd7\xf7\u03f6\u0606-\u0608\u2044\u2052\u207a-\u207c\u208a-\u208c\u2118\u2140-\u2144\u214b\u2190-\u2194\u219a-\u219b\u21a0\u21a3\u21a6\u21ae\u21ce-\u21cf\u21d2\u21d4\u21f4-\u22ff\u2320-\u2321\u237c\u239b-\u23b3\u23dc-\u23e1\u25b7\u25c1\u25f8-\u25ff\u266f\u27c0-\u27c4\u27c7-\u27e5\u27f0-\u27ff\u2900-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2aff\u2b30-\u2b44\u2b47-\u2b4c\ufb29\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe9-\uffec\U0001d6c1\U0001d6db\U0001d6fb\U0001d715\U0001d735\U0001d74f\U0001d76f\U0001d789\U0001d7a9\U0001d7c3\U0001eef0-\U0001eef1'
|
| 65 |
+
|
| 66 |
+
So = '\xa6\xa9\xae\xb0\u0482\u058d-\u058e\u060e-\u060f\u06de\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0d4f\u0d79\u0f01-\u0f03\u0f13\u0f15-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcc\u0fce-\u0fcf\u0fd5-\u0fd8\u109e-\u109f\u1390-\u1399\u1940\u19de-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2117\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u214a\u214c-\u214d\u214f\u218a-\u218b\u2195-\u2199\u219c-\u219f\u21a1-\u21a2\u21a4-\u21a5\u21a7-\u21ad\u21af-\u21cd\u21d0-\u21d1\u21d3\u21d5-\u21f3\u2300-\u2307\u230c-\u231f\u2322-\u2328\u232b-\u237b\u237d-\u239a\u23b4-\u23db\u23e2-\u2426\u2440-\u244a\u249c-\u24e9\u2500-\u25b6\u25b8-\u25c0\u25c2-\u25f7\u2600-\u266e\u2670-\u2767\u2794-\u27bf\u2800-\u28ff\u2b00-\u2b2f\u2b45-\u2b46\u2b4d-\u2b73\u2b76-\u2b95\u2b98-\u2bc8\u2bca-\u2bfe\u2ce5-\u2cea\u2e80-\u2e99\u2e9b-\u2ef3\u2f00-\u2fd5\u2ff0-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3247\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u32fe\u3300-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ua836-\ua837\ua839\uaa77-\uaa79\ufdfd\uffe4\uffe8\uffed-\uffee\ufffc-\ufffd\U00010137-\U0001013f\U00010179-\U00010189\U0001018c-\U0001018e\U00010190-\U0001019b\U000101a0\U000101d0-\U000101fc\U00010877-\U00010878\U00010ac8\U0001173f\U00016b3c-\U00016b3f\U00016b45\U0001bc9c\U0001d000-\U0001d0f5\U0001d100-\U0001d126\U0001d129-\U0001d164\U0001d16a-\U0001d16c\U0001d183-\U0001d184\U0001d18c-\U0001d1a9\U0001d1ae-\U0001d1e8\U0001d200-\U0001d241\U0001d245\U0001d300-\U0001d356\U0001d800-\U0001d9ff\U0001da37-\U0001da3a\U0001da6d-\U0001da74\U0001da76-\U0001da83\U0001da85-\U0001da86\U0001ecac\U0001f000-\U0001f02b\U0001f030-\U0001f093\U0001f0a0-\U0001f0ae\U0001f0b1-\U0001f0bf\U0001f0c1-\U0001f0cf\U0001f0d1-\U0001f0f5\U0001f110-\U0001f16b\U0001f170-\U0001f1ac\U0001f1e6-\U0001f202\U0001f210-\U0001f23b\U0001f240-\U0001f248\U0001f250-\U0001f251\U0001f260-\U0001f265
\U0001f300-\U0001f3fa\U0001f400-\U0001f6d4\U0001f6e0-\U0001f6ec\U0001f6f0-\U0001f6f9\U0001f700-\U0001f773\U0001f780-\U0001f7d8\U0001f800-\U0001f80b\U0001f810-\U0001f847\U0001f850-\U0001f859\U0001f860-\U0001f887\U0001f890-\U0001f8ad\U0001f900-\U0001f90b\U0001f910-\U0001f93e\U0001f940-\U0001f970\U0001f973-\U0001f976\U0001f97a\U0001f97c-\U0001f9a2\U0001f9b0-\U0001f9b9\U0001f9c0-\U0001f9c2\U0001f9d0-\U0001f9ff\U0001fa60-\U0001fa6d'
|
| 67 |
+
|
| 68 |
+
Zl = '\u2028'
|
| 69 |
+
|
| 70 |
+
Zp = '\u2029'
|
| 71 |
+
|
| 72 |
+
Zs = ' \xa0\u1680\u2000-\u200a\u202f\u205f\u3000'
|
| 73 |
+
|
| 74 |
+
xid_continue = '0-9A-Z_a-z\xaa\xb5\xb7\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0300-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u0483-\u0487\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u05d0-\u05ea\u05ef-\u05f2\u0610-\u061a\u0620-\u0669\u066e-\u06d3\u06d5-\u06dc\u06df-\u06e8\u06ea-\u06fc\u06ff\u0710-\u074a\u074d-\u07b1\u07c0-\u07f5\u07fa\u07fd\u0800-\u082d\u0840-\u085b\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u08d3-\u08e1\u08e3-\u0963\u0966-\u096f\u0971-\u0983\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bc-\u09c4\u09c7-\u09c8\u09cb-\u09ce\u09d7\u09dc-\u09dd\u09df-\u09e3\u09e6-\u09f1\u09fc\u09fe\u0a01-\u0a03\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a3c\u0a3e-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a59-\u0a5c\u0a5e\u0a66-\u0a75\u0a81-\u0a83\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abc-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ad0\u0ae0-\u0ae3\u0ae6-\u0aef\u0af9-\u0aff\u0b01-\u0b03\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3c-\u0b44\u0b47-\u0b48\u0b4b-\u0b4d\u0b56-\u0b57\u0b5c-\u0b5d\u0b5f-\u0b63\u0b66-\u0b6f\u0b71\u0b82-\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd0\u0bd7\u0be6-\u0bef\u0c00-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c58-\u0c5a\u0c60-\u0c63\u0c66-\u0c6f\u0c80-\u0c83\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbc-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5-\u0cd6\u0cde\u0ce0-\u0ce3\u0ce6-\u0cef\u0cf1-\u0cf2\u0d00-\u0d03\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d44\u0d46-\u0d48\u0d4a-\u0d4e\u0d54-\u0d57\u0d5f-\u0d63\u0d66-\u0d6f\u0d7a-\u0d7f\u0d82-\u0d83\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0
dbb\u0dbd\u0dc0-\u0dc6\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2-\u0df3\u0e01-\u0e3a\u0e40-\u0e4e\u0e50-\u0e59\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb9\u0ebb-\u0ebd\u0ec0-\u0ec4\u0ec6\u0ec8-\u0ecd\u0ed0-\u0ed9\u0edc-\u0edf\u0f00\u0f18-\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e-\u0f47\u0f49-\u0f6c\u0f71-\u0f84\u0f86-\u0f97\u0f99-\u0fbc\u0fc6\u1000-\u1049\u1050-\u109d\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u135d-\u135f\u1369-\u1371\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176c\u176e-\u1770\u1772-\u1773\u1780-\u17d3\u17d7\u17dc-\u17dd\u17e0-\u17e9\u180b-\u180d\u1810-\u1819\u1820-\u1878\u1880-\u18aa\u18b0-\u18f5\u1900-\u191e\u1920-\u192b\u1930-\u193b\u1946-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u19d0-\u19da\u1a00-\u1a1b\u1a20-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1aa7\u1ab0-\u1abd\u1b00-\u1b4b\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1bf3\u1c00-\u1c37\u1c40-\u1c49\u1c4d-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1cd0-\u1cd2\u1cd4-\u1cf9\u1d00-\u1df9\u1dfb-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u203f-\u2040\u2054\u2071\u207f\u2090-\u209c\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d7f-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\
u2dd8-\u2dde\u2de0-\u2dff\u3005-\u3007\u3021-\u302f\u3031-\u3035\u3038-\u303c\u3041-\u3096\u3099-\u309a\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua62b\ua640-\ua66f\ua674-\ua67d\ua67f-\ua6f1\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua827\ua840-\ua873\ua880-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f7\ua8fb\ua8fd-\ua92d\ua930-\ua953\ua960-\ua97c\ua980-\ua9c0\ua9cf-\ua9d9\ua9e0-\ua9fe\uaa00-\uaa36\uaa40-\uaa4d\uaa50-\uaa59\uaa60-\uaa76\uaa7a-\uaac2\uaadb-\uaadd\uaae0-\uaaef\uaaf2-\uaaf6\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabea\uabec-\uabed\uabf0-\uabf9\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe00-\ufe0f\ufe20-\ufe2f\ufe33-\ufe34\ufe4d-\ufe4f\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff10-\uff19\uff21-\uff3a\uff3f\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U000101fd\U00010280-\U0001029c\U000102a0-\U000102d0\U000102e0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U0001037a\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104a0-\U000104a9\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b
7\U000109be-\U000109bf\U00010a00-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a38-\U00010a3a\U00010a3f\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae6\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d27\U00010d30-\U00010d39\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f50\U00011000-\U00011046\U00011066-\U0001106f\U0001107f-\U000110ba\U000110d0-\U000110e8\U000110f0-\U000110f9\U00011100-\U00011134\U00011136-\U0001113f\U00011144-\U00011146\U00011150-\U00011173\U00011176\U00011180-\U000111c4\U000111c9-\U000111cc\U000111d0-\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U00011237\U0001123e\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112ea\U000112f0-\U000112f9\U00011300-\U00011303\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133b-\U00011344\U00011347-\U00011348\U0001134b-\U0001134d\U00011350\U00011357\U0001135d-\U00011363\U00011366-\U0001136c\U00011370-\U00011374\U00011400-\U0001144a\U00011450-\U00011459\U0001145e\U00011480-\U000114c5\U000114c7\U000114d0-\U000114d9\U00011580-\U000115b5\U000115b8-\U000115c0\U000115d8-\U000115dd\U00011600-\U00011640\U00011644\U00011650-\U00011659\U00011680-\U000116b7\U000116c0-\U000116c9\U00011700-\U0001171a\U0001171d-\U0001172b\U00011730-\U00011739\U00011800-\U0001183a\U000118a0-\U000118e9\U000118ff\U00011a00-\U00011a3e\U00011a47\U00011a50-\U00011a83\U00011a86-\U00011a99\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c36\U00011c38-\U00011c40\U00011c50-\U00011c59\U00011c72-\U00011c8f\U00011c92-\U00011ca7\U00011ca9-\U00011cb6\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f-\U00011d47\U00011d50-\U00011d59\
U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d8e\U00011d90-\U00011d91\U00011d93-\U00011d98\U00011da0-\U00011da9\U00011ee0-\U00011ef6\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016a60-\U00016a69\U00016ad0-\U00016aed\U00016af0-\U00016af4\U00016b00-\U00016b36\U00016b40-\U00016b43\U00016b50-\U00016b59\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50-\U00016f7e\U00016f8f-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001bc9d-\U0001bc9e\U0001d165-\U0001d169\U0001d16d-\U0001d172\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001d7ce-\U0001d7ff\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e800-\U0001e8c4\U0001e8d0-\U0001e8d6\U0001e900-\U0001e94a\U0001e950-\U0001e959\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U
0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d\U000e0100-\U000e01ef'
|
| 75 |
+
|
| 76 |
+
xid_start = 'A-Z_a-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u05d0-\u05ea\u05ef-\u05f2\u0620-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06e5-\u06e6\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4-\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e40-\u0e46\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u
12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1878\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a-\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-
\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U0
0011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118a0-\U000118df\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b40-\U00016b43\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50\U00016f93-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001e800-\U0001e8c4\U0001e900-\U0001e943\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001e
e39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d'
|
| 77 |
+
|
| 78 |
+
# Names of all Unicode general categories exposed as module-level range
# strings above; combine()/allexcept() look these up via globals().
cats = ['Cc', 'Cf', 'Cn', 'Co', 'Cs', 'Ll', 'Lm', 'Lo', 'Lt', 'Lu', 'Mc', 'Me', 'Mn', 'Nd', 'Nl', 'No', 'Pc', 'Pd', 'Pe', 'Pf', 'Pi', 'Po', 'Ps', 'Sc', 'Sk', 'Sm', 'So', 'Zl', 'Zp', 'Zs']

# Generated from unidata 11.0.0
|
| 81 |
+
|
| 82 |
+
def combine(*args):
    """Concatenate the character-range strings of the named categories."""
    parts = [globals()[name] for name in args]
    return ''.join(parts)
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def allexcept(*args):
    """Concatenate the range strings of every category except those named."""
    remaining = list(cats)
    for excluded in args:
        remaining.remove(excluded)
    return ''.join(globals()[name] for name in remaining)
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def _handle_runs(char_list): # pragma: no cover
|
| 94 |
+
buf = []
|
| 95 |
+
for c in char_list:
|
| 96 |
+
if len(c) == 1:
|
| 97 |
+
if buf and buf[-1][1] == chr(ord(c)-1):
|
| 98 |
+
buf[-1] = (buf[-1][0], c)
|
| 99 |
+
else:
|
| 100 |
+
buf.append((c, c))
|
| 101 |
+
else:
|
| 102 |
+
buf.append((c, c))
|
| 103 |
+
for a, b in buf:
|
| 104 |
+
if a == b:
|
| 105 |
+
yield a
|
| 106 |
+
else:
|
| 107 |
+
yield f'{a}-{b}'
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
if __name__ == '__main__':  # pragma: no cover
    import unicodedata

    # Map category name -> list of (possibly escaped) characters in it.
    # The two synthetic identifier categories are seeded explicitly so they
    # exist even before any character is classified.
    categories = {'xid_start': [], 'xid_continue': []}

    # Read this module's own source so the regenerated tables can be spliced
    # in between the untouched header and footer.
    with open(__file__, encoding='utf-8') as fp:
        content = fp.read()

    header = content[:content.find('Cc =')]
    footer = content[content.find("def combine("):]

    for code in range(0x110000):
        c = chr(code)
        cat = unicodedata.category(c)
        if ord(c) == 0xdc00:
            # Hack to avoid combining this combining with the preceding high
            # surrogate, 0xdbff, when doing a repr.
            c = '\\' + c
        elif ord(c) in (0x2d, 0x5b, 0x5c, 0x5d, 0x5e):
            # Escape regex metachars.
            c = '\\' + c
        categories.setdefault(cat, []).append(c)
        # XID_START and XID_CONTINUE are special categories used for matching
        # identifiers in Python 3.
        if c.isidentifier():
            categories['xid_start'].append(c)
        if ('a' + c).isidentifier():
            categories['xid_continue'].append(c)

    # Rewrite the module in place: header, regenerated tables, footer.
    with open(__file__, 'w', encoding='utf-8') as fp:
        fp.write(header)

        for cat in sorted(categories):
            val = ''.join(_handle_runs(categories[cat]))
            fp.write(f'{cat} = {val!a}\n\n')

        # 'cats' lists only the real general categories, not the two
        # synthetic identifier sets written above.
        cats = sorted(categories)
        cats.remove('xid_start')
        cats.remove('xid_continue')
        fp.write(f'cats = {cats!r}\n\n')

        fp.write(f'# Generated from unidata {unicodedata.unidata_version}\n\n')

        fp.write(footer)
|
llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-310.pyc
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0ff3d3e6200be60c96f362da98d34bb401818ad2d194e25153493b08c269fd9f
|
| 3 |
+
size 360033
|
llava/lib/python3.10/site-packages/pip/_vendor/tomli/__init__.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# SPDX-License-Identifier: MIT
# SPDX-FileCopyrightText: 2021 Taneli Hukkinen
# Licensed to PSF under a Contributor Agreement.

"""Public API of the tomli TOML parser, re-exported from the private parser module."""

__all__ = ("loads", "load", "TOMLDecodeError")
__version__ = "2.2.1"  # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT

from ._parser import TOMLDecodeError, load, loads
|
llava/lib/python3.10/site-packages/pip/_vendor/tomli/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (304 Bytes). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/tomli/__pycache__/_parser.cpython-310.pyc
ADDED
|
Binary file (18.6 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/tomli/__pycache__/_re.cpython-310.pyc
ADDED
|
Binary file (2.97 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/tomli/__pycache__/_types.cpython-310.pyc
ADDED
|
Binary file (304 Bytes). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/tomli/_parser.py
ADDED
|
@@ -0,0 +1,770 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# SPDX-License-Identifier: MIT
|
| 2 |
+
# SPDX-FileCopyrightText: 2021 Taneli Hukkinen
|
| 3 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 4 |
+
|
| 5 |
+
from __future__ import annotations
|
| 6 |
+
|
| 7 |
+
from collections.abc import Iterable
|
| 8 |
+
import string
|
| 9 |
+
import sys
|
| 10 |
+
from types import MappingProxyType
|
| 11 |
+
from typing import IO, Any, Final, NamedTuple
|
| 12 |
+
import warnings
|
| 13 |
+
|
| 14 |
+
from ._re import (
|
| 15 |
+
RE_DATETIME,
|
| 16 |
+
RE_LOCALTIME,
|
| 17 |
+
RE_NUMBER,
|
| 18 |
+
match_to_datetime,
|
| 19 |
+
match_to_localtime,
|
| 20 |
+
match_to_number,
|
| 21 |
+
)
|
| 22 |
+
from ._types import Key, ParseFloat, Pos
|
| 23 |
+
|
| 24 |
+
# Inline tables/arrays are implemented using recursion. Pathologically
# nested documents cause pure Python to raise RecursionError (which is OK),
# but mypyc binary wheels will crash unrecoverably (not OK). According to
# mypyc docs this will be fixed in the future:
# https://mypyc.readthedocs.io/en/latest/differences_from_python.html#stack-overflows
# Before mypyc's fix is in, recursion needs to be limited by this library.
# Choosing `sys.getrecursionlimit()` as maximum inline table/array nesting
# level, as it allows more nesting than pure Python, but still seems a far
# lower number than where mypyc binaries crash.
MAX_INLINE_NESTING: Final = sys.getrecursionlimit()

# ASCII control characters: 0x00-0x1F plus DEL (0x7F).
ASCII_CTRL: Final = frozenset(chr(i) for i in range(32)) | frozenset(chr(127))

# Neither of these sets include quotation mark or backslash. They are
# currently handled as separate cases in the parser functions.
ILLEGAL_BASIC_STR_CHARS: Final = ASCII_CTRL - frozenset("\t")
ILLEGAL_MULTILINE_BASIC_STR_CHARS: Final = ASCII_CTRL - frozenset("\t\n")

ILLEGAL_LITERAL_STR_CHARS: Final = ILLEGAL_BASIC_STR_CHARS
ILLEGAL_MULTILINE_LITERAL_STR_CHARS: Final = ILLEGAL_MULTILINE_BASIC_STR_CHARS

ILLEGAL_COMMENT_CHARS: Final = ILLEGAL_BASIC_STR_CHARS

# TOML whitespace is only space and tab; newline is a statement terminator.
TOML_WS: Final = frozenset(" \t")
TOML_WS_AND_NEWLINE: Final = TOML_WS | frozenset("\n")
# Bare keys may contain ASCII letters, digits, hyphen and underscore;
# quoted keys additionally start with '"' or "'".
BARE_KEY_CHARS: Final = frozenset(string.ascii_letters + string.digits + "-_")
KEY_INITIAL_CHARS: Final = BARE_KEY_CHARS | frozenset("\"'")
HEXDIGIT_CHARS: Final = frozenset(string.hexdigits)

# Single-letter backslash escape sequences and their replacement characters.
BASIC_STR_ESCAPE_REPLACEMENTS: Final = MappingProxyType(
    {
        "\\b": "\u0008",  # backspace
        "\\t": "\u0009",  # tab
        "\\n": "\u000A",  # linefeed
        "\\f": "\u000C",  # form feed
        "\\r": "\u000D",  # carriage return
        '\\"': "\u0022",  # quote
        "\\\\": "\u005C",  # backslash
    }
)
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
# Used as a class object, never instantiated: TOMLDecodeError.__init__ tests
# `arg is not DEPRECATED_DEFAULT` to detect arguments the caller supplied.
class DEPRECATED_DEFAULT:
    """Sentinel to be used as default arg during deprecation
    period of TOMLDecodeError's free-form arguments."""
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class TOMLDecodeError(ValueError):
    """An error raised if a document is not valid TOML.

    Adds the following attributes to ValueError:
    msg: The unformatted error message
    doc: The TOML document being parsed
    pos: The index of doc where parsing failed
    lineno: The line corresponding to pos
    colno: The column corresponding to pos
    """

    def __init__(
        self,
        msg: str | type[DEPRECATED_DEFAULT] = DEPRECATED_DEFAULT,
        doc: str | type[DEPRECATED_DEFAULT] = DEPRECATED_DEFAULT,
        pos: Pos | type[DEPRECATED_DEFAULT] = DEPRECATED_DEFAULT,
        *args: Any,
    ):
        # Deprecated free-form path: any extra positional args, or a
        # wrongly-typed msg/doc/pos, means the caller used the old
        # ValueError-style constructor.
        if (
            args
            or not isinstance(msg, str)
            or not isinstance(doc, str)
            or not isinstance(pos, int)
        ):
            warnings.warn(
                "Free-form arguments for TOMLDecodeError are deprecated. "
                "Please set 'msg' (str), 'doc' (str) and 'pos' (int) arguments only.",
                DeprecationWarning,
                stacklevel=2,
            )
            # Re-assemble exactly the positional args the caller passed
            # (skipping unset sentinels, preserving order) and forward them
            # to ValueError unchanged. No msg/doc/pos attributes are set on
            # this path.
            if pos is not DEPRECATED_DEFAULT:
                args = pos, *args
            if doc is not DEPRECATED_DEFAULT:
                args = doc, *args
            if msg is not DEPRECATED_DEFAULT:
                args = msg, *args
            ValueError.__init__(self, *args)
            return

        # Derive 1-based line/column of `pos` within `doc`.
        lineno = doc.count("\n", 0, pos) + 1
        if lineno == 1:
            colno = pos + 1
        else:
            colno = pos - doc.rindex("\n", 0, pos)

        # A position at/past the end of the document gets a friendlier label
        # than a line/column pair.
        if pos >= len(doc):
            coord_repr = "end of document"
        else:
            coord_repr = f"line {lineno}, column {colno}"
        errmsg = f"{msg} (at {coord_repr})"
        ValueError.__init__(self, errmsg)

        self.msg = msg
        self.doc = doc
        self.pos = pos
        self.lineno = lineno
        self.colno = colno
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def load(__fp: IO[bytes], *, parse_float: ParseFloat = float) -> dict[str, Any]:
    """Parse TOML from a binary file object."""
    raw = __fp.read()
    try:
        text = raw.decode()
    except AttributeError:
        # A text-mode file yields `str`, which has no .decode(); surface a
        # clearer TypeError instead of the raw AttributeError.
        raise TypeError(
            "File must be opened in binary mode, e.g. use `open('foo.toml', 'rb')`"
        ) from None
    return loads(text, parse_float=parse_float)
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
def loads(__s: str, *, parse_float: ParseFloat = float) -> dict[str, Any]:  # noqa: C901
    """Parse TOML from a string."""

    # The spec allows converting "\r\n" to "\n", even in string
    # literals. Let's do so to simplify parsing.
    try:
        src = __s.replace("\r\n", "\n")
    except (AttributeError, TypeError):
        raise TypeError(
            f"Expected str object, not '{type(__s).__qualname__}'"
        ) from None
    pos = 0
    out = Output(NestedDict(), Flags())
    header: Key = ()  # namespace of the table the current statements belong to
    parse_float = make_safe_parse_float(parse_float)

    # Parse one statement at a time
    # (typically means one line in TOML source)
    while True:
        # 1. Skip line leading whitespace
        pos = skip_chars(src, pos, TOML_WS)

        # 2. Parse rules. Expect one of the following:
        # - end of file
        # - end of line
        # - comment
        # - key/value pair
        # - append dict to list (and move to its namespace)
        # - create dict (and move to its namespace)
        # Skip trailing whitespace when applicable.
        try:
            char = src[pos]
        except IndexError:
            break
        if char == "\n":
            pos += 1
            continue
        if char in KEY_INITIAL_CHARS:
            pos = key_value_rule(src, pos, out, header, parse_float)
            pos = skip_chars(src, pos, TOML_WS)
        elif char == "[":
            # Distinguish "[[array-of-tables]]" from "[table]" by peeking
            # one character ahead.
            try:
                second_char: str | None = src[pos + 1]
            except IndexError:
                second_char = None
            out.flags.finalize_pending()
            if second_char == "[":
                pos, header = create_list_rule(src, pos, out)
            else:
                pos, header = create_dict_rule(src, pos, out)
            pos = skip_chars(src, pos, TOML_WS)
        elif char != "#":
            raise TOMLDecodeError("Invalid statement", src, pos)

        # 3. Skip comment
        pos = skip_comment(src, pos)

        # 4. Expect end of line or end of file
        try:
            char = src[pos]
        except IndexError:
            break
        if char != "\n":
            raise TOMLDecodeError(
                "Expected newline or end of document after a statement", src, pos
            )
        pos += 1

    return out.data.dict
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
class Flags:
    """Flags that map to parsed keys/namespaces."""

    # Marks an immutable namespace (inline array or inline table).
    FROZEN: Final = 0
    # Marks a nest that has been explicitly created and can no longer
    # be opened using the "[table]" syntax.
    EXPLICIT_NEST: Final = 1

    def __init__(self) -> None:
        # Tree of flag nodes, keyed by key parts.
        self._flags: dict[str, dict] = {}
        # Flags queued by add_pending(), applied by finalize_pending().
        self._pending_flags: set[tuple[Key, int]] = set()

    @staticmethod
    def _new_node() -> dict:
        """Return an empty node for the flag tree."""
        return {"flags": set(), "recursive_flags": set(), "nested": {}}

    def add_pending(self, key: Key, flag: int) -> None:
        self._pending_flags.add((key, flag))

    def finalize_pending(self) -> None:
        for pending_key, pending_flag in self._pending_flags:
            self.set(pending_key, pending_flag, recursive=False)
        self._pending_flags.clear()

    def unset_all(self, key: Key) -> None:
        node = self._flags
        for part in key[:-1]:
            if part not in node:
                return
            node = node[part]["nested"]
        node.pop(key[-1], None)

    def set(self, key: Key, flag: int, *, recursive: bool) -> None:  # noqa: A003
        node = self._flags
        parents, stem = key[:-1], key[-1]
        for part in parents:
            node = node.setdefault(part, self._new_node())["nested"]
        entry = node.setdefault(stem, self._new_node())
        entry["recursive_flags" if recursive else "flags"].add(flag)

    def is_(self, key: Key, flag: int) -> bool:
        if not key:
            return False  # document root has no flags
        node = self._flags
        for part in key[:-1]:
            if part not in node:
                return False
            entry = node[part]
            # A recursive flag on any ancestor applies to the whole subtree.
            if flag in entry["recursive_flags"]:
                return True
            node = entry["nested"]
        stem = key[-1]
        if stem not in node:
            return False
        entry = node[stem]
        return flag in entry["flags"] or flag in entry["recursive_flags"]
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
class NestedDict:
    """Mutable tree of nested dicts holding the parsed TOML document."""

    def __init__(self) -> None:
        # The parsed content of the TOML document
        self.dict: dict[str, Any] = {}

    def get_or_create_nest(
        self,
        key: Key,
        *,
        access_lists: bool = True,
    ) -> dict:
        """Walk `key`, creating empty tables as needed, and return the nest.

        With `access_lists=True`, a list encountered on the path resolves
        to its last element (the most recent array-of-tables item).
        Raises KeyError when the path runs into a non-table value.
        """
        node: Any = self.dict
        for part in key:
            node = node.setdefault(part, {})
            if access_lists and isinstance(node, list):
                node = node[-1]
            if not isinstance(node, dict):
                raise KeyError("There is no nest behind this key")
        return node

    def append_nest_to_list(self, key: Key) -> None:
        """Append a fresh empty table to the list at `key`, creating the list if absent.

        Raises KeyError when `key` already holds a non-list value.
        """
        parent = self.get_or_create_nest(key[:-1])
        stem = key[-1]
        if stem not in parent:
            parent[stem] = [{}]
            return
        target = parent[stem]
        if not isinstance(target, list):
            raise KeyError("An object other than list found behind this key")
        target.append({})
|
| 303 |
+
|
| 304 |
+
|
| 305 |
+
class Output(NamedTuple):
    """Parser state: the document tree plus its namespace bookkeeping."""

    data: NestedDict  # the parsed document being built
    flags: Flags  # mutability/explicitness flags for parsed namespaces
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
def skip_chars(src: str, pos: Pos, chars: Iterable[str]) -> Pos:
|
| 311 |
+
try:
|
| 312 |
+
while src[pos] in chars:
|
| 313 |
+
pos += 1
|
| 314 |
+
except IndexError:
|
| 315 |
+
pass
|
| 316 |
+
return pos
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
def skip_until(
    src: str,
    pos: Pos,
    expect: str,
    *,
    error_on: frozenset[str],
    error_on_eof: bool,
) -> Pos:
    """Return the index where `expect` next occurs at or after `pos`.

    If `expect` is not found: raise TOMLDecodeError when `error_on_eof`,
    otherwise return len(src). If any character from `error_on` occurs
    before `expect`, raise TOMLDecodeError pointing at the first one.
    """
    try:
        new_pos = src.index(expect, pos)
    except ValueError:
        new_pos = len(src)
        if error_on_eof:
            raise TOMLDecodeError(f"Expected {expect!r}", src, new_pos) from None

    if not error_on.isdisjoint(src[pos:new_pos]):
        # Slow path only when an illegal char is known to be present:
        # walk forward to pinpoint it for the error position.
        while src[pos] not in error_on:
            pos += 1
        raise TOMLDecodeError(f"Found invalid character {src[pos]!r}", src, pos)
    return new_pos
|
| 339 |
+
|
| 340 |
+
|
| 341 |
+
def skip_comment(src: str, pos: Pos) -> Pos:
    """If a '#' comment starts at `pos`, skip to the next newline (or EOF)."""
    if src.startswith("#", pos):
        return skip_until(
            src, pos + 1, "\n", error_on=ILLEGAL_COMMENT_CHARS, error_on_eof=False
        )
    return pos
|
| 351 |
+
|
| 352 |
+
|
| 353 |
+
def skip_comments_and_array_ws(src: str, pos: Pos) -> Pos:
    """Skip any mix of whitespace, newlines and comments; return the new position."""
    prev_pos = -1
    # Loop until a full pass makes no progress.
    while pos != prev_pos:
        prev_pos = pos
        pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE)
        pos = skip_comment(src, pos)
    return pos
|
| 360 |
+
|
| 361 |
+
|
| 362 |
+
def create_dict_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]:
    """Parse a standard table header ``[key]``; `pos` points at the ``[``.

    Creates the table in `out` and returns the position after the closing
    ``]`` plus the parsed (dotted) key. Raises TOMLDecodeError on a
    redeclared table or a missing ``]``.
    """
    pos += 1  # Skip "["
    pos = skip_chars(src, pos, TOML_WS)
    pos, key = parse_key(src, pos)

    # A table may be declared only once, and never inside a frozen
    # (inline) namespace.
    if out.flags.is_(key, Flags.EXPLICIT_NEST) or out.flags.is_(key, Flags.FROZEN):
        raise TOMLDecodeError(f"Cannot declare {key} twice", src, pos)
    out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False)
    try:
        out.data.get_or_create_nest(key)
    except KeyError:
        raise TOMLDecodeError("Cannot overwrite a value", src, pos) from None

    if not src.startswith("]", pos):
        raise TOMLDecodeError(
            "Expected ']' at the end of a table declaration", src, pos
        )
    return pos + 1, key
|
| 380 |
+
|
| 381 |
+
|
| 382 |
+
def create_list_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]:
    """Parse an array-of-tables header ``[[key]]``; `pos` points at the first ``[``.

    Appends a new empty table to the array at `key` in `out` and returns
    the position after the closing ``]]`` plus the parsed (dotted) key.
    """
    pos += 2  # Skip "[["
    pos = skip_chars(src, pos, TOML_WS)
    pos, key = parse_key(src, pos)

    if out.flags.is_(key, Flags.FROZEN):
        raise TOMLDecodeError(f"Cannot mutate immutable namespace {key}", src, pos)
    # Free the namespace now that it points to another empty list item...
    out.flags.unset_all(key)
    # ...but this key precisely is still prohibited from table declaration
    out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False)
    try:
        out.data.append_nest_to_list(key)
    except KeyError:
        raise TOMLDecodeError("Cannot overwrite a value", src, pos) from None

    if not src.startswith("]]", pos):
        raise TOMLDecodeError(
            "Expected ']]' at the end of an array declaration", src, pos
        )
    return pos + 2, key
|
| 403 |
+
|
| 404 |
+
|
| 405 |
+
def key_value_rule(
    src: str, pos: Pos, out: Output, header: Key, parse_float: ParseFloat
) -> Pos:
    """Parse one ``key = value`` line belonging to the table named by `header`.

    Stores the value into `out` under the absolute key ``header + key``
    and returns the position after the value. Enforces that dotted keys
    neither redefine explicitly declared tables nor mutate frozen
    (inline) namespaces.
    """
    pos, key, value = parse_key_value_pair(src, pos, parse_float, nest_lvl=0)
    key_parent, key_stem = key[:-1], key[-1]
    abs_key_parent = header + key_parent

    # All intermediate namespaces created by a dotted key (header + key[:1],
    # header + key[:2], ...), excluding the full key itself.
    relative_path_cont_keys = (header + key[:i] for i in range(1, len(key)))
    for cont_key in relative_path_cont_keys:
        # Check that dotted key syntax does not redefine an existing table
        if out.flags.is_(cont_key, Flags.EXPLICIT_NEST):
            raise TOMLDecodeError(f"Cannot redefine namespace {cont_key}", src, pos)
        # Containers in the relative path can't be opened with the table syntax or
        # dotted key/value syntax in following table sections.
        out.flags.add_pending(cont_key, Flags.EXPLICIT_NEST)

    if out.flags.is_(abs_key_parent, Flags.FROZEN):
        raise TOMLDecodeError(
            f"Cannot mutate immutable namespace {abs_key_parent}", src, pos
        )

    try:
        nest = out.data.get_or_create_nest(abs_key_parent)
    except KeyError:
        raise TOMLDecodeError("Cannot overwrite a value", src, pos) from None
    if key_stem in nest:
        raise TOMLDecodeError("Cannot overwrite a value", src, pos)
    # Mark inline table and array namespaces recursively immutable
    if isinstance(value, (dict, list)):
        out.flags.set(header + key, Flags.FROZEN, recursive=True)
    nest[key_stem] = value
    return pos
|
| 437 |
+
|
| 438 |
+
|
| 439 |
+
def parse_key_value_pair(
    src: str, pos: Pos, parse_float: ParseFloat, nest_lvl: int
) -> tuple[Pos, Key, Any]:
    """Parse one ``key = value`` pair starting at `pos`.

    Returns the position after the value, the parsed (dotted) key, and
    the parsed value. Raises TOMLDecodeError when the '=' is missing.
    """
    pos, key = parse_key(src, pos)
    if not src.startswith("=", pos):
        raise TOMLDecodeError("Expected '=' after a key in a key/value pair", src, pos)
    pos = skip_chars(src, pos + 1, TOML_WS)
    pos, value = parse_value(src, pos, parse_float, nest_lvl)
    return pos, key, value
|
| 453 |
+
|
| 454 |
+
|
| 455 |
+
def parse_key(src: str, pos: Pos) -> tuple[Pos, Key]:
    """Parse a (possibly dotted) key and return it as a tuple of its parts.

    Leaves `pos` just past the key and any trailing whitespace.
    """
    pos, part = parse_key_part(src, pos)
    parts = [part]
    pos = skip_chars(src, pos, TOML_WS)
    while src.startswith(".", pos):
        # Consume the dot and the whitespace around the next part.
        pos = skip_chars(src, pos + 1, TOML_WS)
        pos, part = parse_key_part(src, pos)
        parts.append(part)
        pos = skip_chars(src, pos, TOML_WS)
    return pos, tuple(parts)
|
| 471 |
+
|
| 472 |
+
|
| 473 |
+
def parse_key_part(src: str, pos: Pos) -> tuple[Pos, str]:
    """Parse a single component of a dotted key: bare, 'literal' or "basic"."""
    # Slicing yields "" at EOF, which matches none of the branches below.
    char = src[pos : pos + 1]
    if char in BARE_KEY_CHARS:
        start = pos
        pos = skip_chars(src, pos, BARE_KEY_CHARS)
        return pos, src[start:pos]
    if char == "'":
        return parse_literal_str(src, pos)
    if char == '"':
        return parse_one_line_basic_str(src, pos)
    raise TOMLDecodeError("Invalid initial character for a key part", src, pos)
|
| 487 |
+
|
| 488 |
+
|
| 489 |
+
def parse_one_line_basic_str(src: str, pos: Pos) -> tuple[Pos, str]:
    """Parse a single-line basic ("...") string; `pos` is at the opening quote."""
    pos += 1  # skip the opening '"'
    return parse_basic_str(src, pos, multiline=False)
|
| 492 |
+
|
| 493 |
+
|
| 494 |
+
def parse_array(
    src: str, pos: Pos, parse_float: ParseFloat, nest_lvl: int
) -> tuple[Pos, list]:
    """Parse an array value; `pos` points at the opening ``[``.

    Returns the position after the closing ``]`` and the parsed list.
    Allows a trailing comma; raises TOMLDecodeError on a missing
    separator or an unclosed array.
    """
    pos += 1  # skip opening "["
    array: list = []

    pos = skip_comments_and_array_ws(src, pos)
    if src.startswith("]", pos):
        return pos + 1, array  # empty array
    while True:
        pos, val = parse_value(src, pos, parse_float, nest_lvl)
        array.append(val)
        pos = skip_comments_and_array_ws(src, pos)

        c = src[pos : pos + 1]  # "" at EOF -> falls into "Unclosed array"
        if c == "]":
            return pos + 1, array
        if c != ",":
            raise TOMLDecodeError("Unclosed array", src, pos)
        pos += 1  # skip ","

        # A trailing comma directly before "]" is legal.
        pos = skip_comments_and_array_ws(src, pos)
        if src.startswith("]", pos):
            return pos + 1, array
|
| 518 |
+
|
| 519 |
+
|
| 520 |
+
def parse_inline_table(
    src: str, pos: Pos, parse_float: ParseFloat, nest_lvl: int
) -> tuple[Pos, dict]:
    """Parse an inline table ``{...}``; `pos` points at the opening brace.

    Returns the position after the closing ``}`` and the parsed dict.
    Uses a local `NestedDict`/`Flags` pair so inline-table rules
    (no duplicate keys, no mutating already-parsed nested values) are
    enforced independently of the surrounding document.
    """
    pos += 1  # skip "{"
    nested_dict = NestedDict()
    flags = Flags()

    pos = skip_chars(src, pos, TOML_WS)
    if src.startswith("}", pos):
        return pos + 1, nested_dict.dict  # empty inline table
    while True:
        pos, key, value = parse_key_value_pair(src, pos, parse_float, nest_lvl)
        key_parent, key_stem = key[:-1], key[-1]
        if flags.is_(key, Flags.FROZEN):
            raise TOMLDecodeError(f"Cannot mutate immutable namespace {key}", src, pos)
        try:
            # access_lists=False: dotted keys may not traverse arrays here.
            nest = nested_dict.get_or_create_nest(key_parent, access_lists=False)
        except KeyError:
            raise TOMLDecodeError("Cannot overwrite a value", src, pos) from None
        if key_stem in nest:
            raise TOMLDecodeError(f"Duplicate inline table key {key_stem!r}", src, pos)
        nest[key_stem] = value
        pos = skip_chars(src, pos, TOML_WS)
        c = src[pos : pos + 1]  # "" at EOF -> "Unclosed inline table"
        if c == "}":
            return pos + 1, nested_dict.dict
        if c != ",":
            raise TOMLDecodeError("Unclosed inline table", src, pos)
        # Dict/list values become recursively immutable once parsed.
        if isinstance(value, (dict, list)):
            flags.set(key, Flags.FROZEN, recursive=True)
        pos += 1  # skip ","
        pos = skip_chars(src, pos, TOML_WS)
|
| 552 |
+
|
| 553 |
+
|
| 554 |
+
def parse_basic_str_escape(
    src: str, pos: Pos, *, multiline: bool = False
) -> tuple[Pos, str]:
    """Parse one backslash escape; `pos` points at the backslash.

    Returns the position after the escape and its replacement text.
    In multiline mode, a backslash followed by only whitespace up to a
    newline is a "line ending backslash" that swallows all following
    whitespace (and contributes an empty string).
    """
    escape_id = src[pos : pos + 2]
    pos += 2
    if multiline and escape_id in {"\\ ", "\\\t", "\\\n"}:
        # Skip whitespace until next non-whitespace character or end of
        # the doc. Error if non-whitespace is found before newline.
        if escape_id != "\\\n":
            pos = skip_chars(src, pos, TOML_WS)
            try:
                char = src[pos]
            except IndexError:
                return pos, ""
            if char != "\n":
                raise TOMLDecodeError("Unescaped '\\' in a string", src, pos)
            pos += 1
        pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE)
        return pos, ""
    if escape_id == "\\u":
        return parse_hex_char(src, pos, 4)  # \uXXXX
    if escape_id == "\\U":
        return parse_hex_char(src, pos, 8)  # \UXXXXXXXX
    try:
        return pos, BASIC_STR_ESCAPE_REPLACEMENTS[escape_id]
    except KeyError:
        raise TOMLDecodeError("Unescaped '\\' in a string", src, pos) from None
|
| 581 |
+
|
| 582 |
+
|
| 583 |
+
def parse_basic_str_escape_multiline(src: str, pos: Pos) -> tuple[Pos, str]:
    """Multiline variant of `parse_basic_str_escape` (allows line-ending backslash)."""
    return parse_basic_str_escape(src, pos, multiline=True)
|
| 585 |
+
|
| 586 |
+
|
| 587 |
+
def parse_hex_char(src: str, pos: Pos, hex_len: int) -> tuple[Pos, str]:
    """Decode the hex-digit body of a \\u/\\U escape of `hex_len` digits.

    Returns the position after the digits and the decoded character.
    Raises TOMLDecodeError on short/invalid hex or a non-scalar codepoint.
    """
    hex_str = src[pos : pos + hex_len]
    if len(hex_str) != hex_len or any(c not in HEXDIGIT_CHARS for c in hex_str):
        raise TOMLDecodeError("Invalid hex value", src, pos)
    end = pos + hex_len
    codepoint = int(hex_str, 16)
    # Surrogates (U+D800..U+DFFF) are not valid scalar values.
    if not is_unicode_scalar_value(codepoint):
        raise TOMLDecodeError(
            "Escaped character is not a Unicode scalar value", src, end
        )
    return end, chr(codepoint)
|
| 598 |
+
|
| 599 |
+
|
| 600 |
+
def parse_literal_str(src: str, pos: Pos) -> tuple[Pos, str]:
    """Parse a single-line literal ('...') string; `pos` is at the opening quote.

    Literal strings have no escapes; the text is taken verbatim.
    """
    pos += 1  # Skip starting apostrophe
    start_pos = pos
    pos = skip_until(
        src, pos, "'", error_on=ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True
    )
    return pos + 1, src[start_pos:pos]  # Skip ending apostrophe
|
| 607 |
+
|
| 608 |
+
|
| 609 |
+
def parse_multiline_str(src: str, pos: Pos, *, literal: bool) -> tuple[Pos, str]:
    """Parse a multiline string; `pos` points at the opening triple quote.

    `literal=True` parses ''' strings (no escapes); otherwise \"\"\"
    strings with escape processing. Returns the position after the
    closing delimiter and the string value.
    """
    pos += 3  # skip the opening delimiter
    if src.startswith("\n", pos):
        pos += 1  # a newline right after the opening delimiter is trimmed

    if literal:
        delim = "'"
        end_pos = skip_until(
            src,
            pos,
            "'''",
            error_on=ILLEGAL_MULTILINE_LITERAL_STR_CHARS,
            error_on_eof=True,
        )
        result = src[pos:end_pos]
        pos = end_pos + 3
    else:
        delim = '"'
        pos, result = parse_basic_str(src, pos, multiline=True)

    # Add at maximum two extra apostrophes/quotes if the end sequence
    # is 4 or 5 chars long instead of just 3.
    if not src.startswith(delim, pos):
        return pos, result
    pos += 1
    if not src.startswith(delim, pos):
        return pos, result + delim
    pos += 1
    return pos, result + (delim * 2)
|
| 638 |
+
|
| 639 |
+
|
| 640 |
+
def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> tuple[Pos, str]:
    """Parse a basic string body; `pos` is just past the opening quote(s).

    Accumulates escape-free runs with slicing so the common path stays
    fast. Returns the position after the closing quote(s) and the
    decoded string. Raises TOMLDecodeError on illegal characters or an
    unterminated string.
    """
    if multiline:
        error_on = ILLEGAL_MULTILINE_BASIC_STR_CHARS
        parse_escapes = parse_basic_str_escape_multiline
    else:
        error_on = ILLEGAL_BASIC_STR_CHARS
        parse_escapes = parse_basic_str_escape
    result = ""
    start_pos = pos  # start of the current literal (escape-free) run
    while True:
        try:
            char = src[pos]
        except IndexError:
            raise TOMLDecodeError("Unterminated string", src, pos) from None
        if char == '"':
            if not multiline:
                return pos + 1, result + src[start_pos:pos]
            if src.startswith('"""', pos):
                return pos + 3, result + src[start_pos:pos]
            pos += 1  # a lone '"' inside a multiline string is literal text
            continue
        if char == "\\":
            result += src[start_pos:pos]  # flush the pending literal run
            pos, parsed_escape = parse_escapes(src, pos)
            result += parsed_escape
            start_pos = pos
            continue
        if char in error_on:
            raise TOMLDecodeError(f"Illegal character {char!r}", src, pos)
        pos += 1
|
| 670 |
+
|
| 671 |
+
|
| 672 |
+
def parse_value(  # noqa: C901
    src: str, pos: Pos, parse_float: ParseFloat, nest_lvl: int
) -> tuple[Pos, Any]:
    """Parse a single TOML value starting at `pos`.

    Dispatches on the first character to the appropriate sub-parser and
    returns (position after the value, Python value). `nest_lvl` counts
    inline array/table nesting and guards against unbounded recursion.
    Raises TOMLDecodeError when no value form matches.
    """
    if nest_lvl > MAX_INLINE_NESTING:
        # Pure Python should have raised RecursionError already.
        # This ensures mypyc binaries eventually do the same.
        raise RecursionError(  # pragma: no cover
            "TOML inline arrays/tables are nested more than the allowed"
            f" {MAX_INLINE_NESTING} levels"
        )

    try:
        char: str | None = src[pos]
    except IndexError:
        char = None  # EOF: fall through to "Invalid value"

    # IMPORTANT: order conditions based on speed of checking and likelihood

    # Basic strings
    if char == '"':
        if src.startswith('"""', pos):
            return parse_multiline_str(src, pos, literal=False)
        return parse_one_line_basic_str(src, pos)

    # Literal strings
    if char == "'":
        if src.startswith("'''", pos):
            return parse_multiline_str(src, pos, literal=True)
        return parse_literal_str(src, pos)

    # Booleans
    if char == "t":
        if src.startswith("true", pos):
            return pos + 4, True
    if char == "f":
        if src.startswith("false", pos):
            return pos + 5, False

    # Arrays
    if char == "[":
        return parse_array(src, pos, parse_float, nest_lvl + 1)

    # Inline tables
    if char == "{":
        return parse_inline_table(src, pos, parse_float, nest_lvl + 1)

    # Dates and times
    datetime_match = RE_DATETIME.match(src, pos)
    if datetime_match:
        try:
            datetime_obj = match_to_datetime(datetime_match)
        except ValueError as e:
            # e.g. "2021-02-30" matches the regex but is not a real date
            raise TOMLDecodeError("Invalid date or datetime", src, pos) from e
        return datetime_match.end(), datetime_obj
    localtime_match = RE_LOCALTIME.match(src, pos)
    if localtime_match:
        return localtime_match.end(), match_to_localtime(localtime_match)

    # Integers and "normal" floats.
    # The regex will greedily match any type starting with a decimal
    # char, so needs to be located after handling of dates and times.
    number_match = RE_NUMBER.match(src, pos)
    if number_match:
        return number_match.end(), match_to_number(number_match, parse_float)

    # Special floats
    first_three = src[pos : pos + 3]
    if first_three in {"inf", "nan"}:
        return pos + 3, parse_float(first_three)
    first_four = src[pos : pos + 4]
    if first_four in {"-inf", "+inf", "-nan", "+nan"}:
        return pos + 4, parse_float(first_four)

    raise TOMLDecodeError("Invalid value", src, pos)
|
| 746 |
+
|
| 747 |
+
|
| 748 |
+
def is_unicode_scalar_value(codepoint: int) -> bool:
    """Return True if `codepoint` is a Unicode scalar value.

    That is, any codepoint in [0, U+10FFFF] that is not a surrogate
    (U+D800..U+DFFF).
    """
    return 0 <= codepoint < 0xD800 or 0xDFFF < codepoint <= 0x10FFFF
|
| 750 |
+
|
| 751 |
+
|
| 752 |
+
def make_safe_parse_float(parse_float: ParseFloat) -> ParseFloat:
    """A decorator to make `parse_float` safe.

    `parse_float` must not return dicts or lists, because these types
    would be mixed with parsed TOML tables and arrays, thus confusing
    the parser. The returned decorated callable raises `ValueError`
    instead of returning illegal types.
    """
    if parse_float is float:
        # The built-in `float` can never return containers; skip the wrapper.
        return float

    def safe_parse_float(float_str: str) -> Any:
        parsed = parse_float(float_str)
        if isinstance(parsed, (dict, list)):
            raise ValueError("parse_float must not return dicts or lists")
        return parsed

    return safe_parse_float
|
llava/lib/python3.10/site-packages/pip/_vendor/tomli/_re.py
ADDED
|
@@ -0,0 +1,112 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# SPDX-License-Identifier: MIT
|
| 2 |
+
# SPDX-FileCopyrightText: 2021 Taneli Hukkinen
|
| 3 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 4 |
+
|
| 5 |
+
from __future__ import annotations
|
| 6 |
+
|
| 7 |
+
from datetime import date, datetime, time, timedelta, timezone, tzinfo
|
| 8 |
+
from functools import lru_cache
|
| 9 |
+
import re
|
| 10 |
+
from typing import Any, Final
|
| 11 |
+
|
| 12 |
+
from ._types import ParseFloat
|
| 13 |
+
|
| 14 |
+
# Time-of-day pattern shared by RE_LOCALTIME and RE_DATETIME.
# Groups: hour, minute, second, optional fractional seconds (<= 6 digits
# captured; extra digits are matched but discarded). E.g.
# - 00:32:00.999999
# - 00:32:00
_TIME_RE_STR: Final = (
    r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?"
)

# Matches all TOML number forms: 0x/0b/0o prefixed integers, and decimal
# integers/floats. The named group "floatpart" is non-empty exactly when
# the match has a fractional and/or exponent part (i.e. is a float).
RE_NUMBER: Final = re.compile(
    r"""
0
(?:
    x[0-9A-Fa-f](?:_?[0-9A-Fa-f])*   # hex
    |
    b[01](?:_?[01])*                 # bin
    |
    o[0-7](?:_?[0-7])*               # oct
)
|
[+-]?(?:0|[1-9](?:_?[0-9])*)         # dec, integer part
(?P<floatpart>
    (?:\.[0-9](?:_?[0-9])*)?         # optional fractional part
    (?:[eE][+-]?[0-9](?:_?[0-9])*)?  # optional exponent part
)
""",
    flags=re.VERBOSE,
)
RE_LOCALTIME: Final = re.compile(_TIME_RE_STR)
# Matches a date with an optional time and an optional UTC ("Z") or
# numeric time offset; see match_to_datetime for group handling.
RE_DATETIME: Final = re.compile(
    rf"""
([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])  # date, e.g. 1988-10-27
(?:
    [Tt ]
    {_TIME_RE_STR}
    (?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))?  # optional time offset
)?
""",
    flags=re.VERBOSE,
)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def match_to_datetime(match: re.Match) -> datetime | date:
    """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`.

    Returns a `date` when the match has no time component, otherwise a
    `datetime` (timezone-aware when the match carried a "Z" or numeric
    offset, naive for a TOML local date-time).

    Raises ValueError if the match does not correspond to a valid date
    or datetime.
    """
    (
        year_str,
        month_str,
        day_str,
        hour_str,
        minute_str,
        sec_str,
        micros_str,
        zulu_time,
        offset_sign_str,
        offset_hour_str,
        offset_minute_str,
    ) = match.groups()
    year, month, day = int(year_str), int(month_str), int(day_str)
    if hour_str is None:
        return date(year, month, day)  # date-only value
    hour, minute, sec = int(hour_str), int(minute_str), int(sec_str)
    # Right-pad the captured fraction to microseconds ("25" -> 250000).
    micros = int(micros_str.ljust(6, "0")) if micros_str else 0
    if offset_sign_str:
        tz: tzinfo | None = cached_tz(
            offset_hour_str, offset_minute_str, offset_sign_str
        )
    elif zulu_time:
        tz = timezone.utc
    else:  # local date-time
        tz = None
    return datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz)
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
# Unbounded cache is safe: inputs only ever come from strings that
# matched RE_DATETIME, so there are at most
# 24 (hours) * 60 (minutes) * 2 (offset direction) = 2880 distinct keys.
@lru_cache(maxsize=None)
def cached_tz(hour_str: str, minute_str: str, sign_str: str) -> timezone:
    """Build (and memoize) a fixed-offset timezone from matched offset strings."""
    offset = timedelta(hours=int(hour_str), minutes=int(minute_str))
    if sign_str != "+":
        offset = -offset
    return timezone(offset)
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def match_to_localtime(match: re.Match) -> time:
    """Convert a `RE_LOCALTIME` match to a naive `datetime.time`."""
    hh, mm, ss, frac = match.groups()
    # Right-pad the captured fraction to microseconds ("25" -> 250000).
    micros = int(frac.ljust(6, "0")) if frac else 0
    return time(int(hh), int(mm), int(ss), micros)
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
def match_to_number(match: re.Match, parse_float: ParseFloat) -> Any:
    """Convert an `RE_NUMBER` match to an int or, if it has a float part, a float."""
    text = match.group()
    if match.group("floatpart"):
        return parse_float(text)
    # Base 0 lets int() honor the 0x/0o/0b prefixes matched by the regex.
    return int(text, 0)
|
llava/lib/python3.10/site-packages/pip/_vendor/tomli/_types.py
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# SPDX-License-Identifier: MIT
|
| 2 |
+
# SPDX-FileCopyrightText: 2021 Taneli Hukkinen
|
| 3 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 4 |
+
|
| 5 |
+
from typing import Any, Callable, Tuple
|
| 6 |
+
|
| 7 |
+
# Type aliases shared across the tomli package.
ParseFloat = Callable[[str], Any]  # callable turning a TOML float literal into a value
Key = Tuple[str, ...]  # a (possibly dotted) key as a tuple of its parts
Pos = int  # index into the source string
|
llava/lib/python3.10/site-packages/pip/_vendor/tomli/py.typed
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# Marker file for PEP 561
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/Numbers.py
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ===================================================================
|
| 2 |
+
#
|
| 3 |
+
# Copyright (c) 2014, Legrandin <helderijs@gmail.com>
|
| 4 |
+
# All rights reserved.
|
| 5 |
+
#
|
| 6 |
+
# Redistribution and use in source and binary forms, with or without
|
| 7 |
+
# modification, are permitted provided that the following conditions
|
| 8 |
+
# are met:
|
| 9 |
+
#
|
| 10 |
+
# 1. Redistributions of source code must retain the above copyright
|
| 11 |
+
# notice, this list of conditions and the following disclaimer.
|
| 12 |
+
# 2. Redistributions in binary form must reproduce the above copyright
|
| 13 |
+
# notice, this list of conditions and the following disclaimer in
|
| 14 |
+
# the documentation and/or other materials provided with the
|
| 15 |
+
# distribution.
|
| 16 |
+
#
|
| 17 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 18 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 19 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
| 20 |
+
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
| 21 |
+
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
| 22 |
+
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
| 23 |
+
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
| 24 |
+
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
| 25 |
+
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
| 26 |
+
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
| 27 |
+
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 28 |
+
# POSSIBILITY OF SUCH DAMAGE.
|
| 29 |
+
# ===================================================================
|
| 30 |
+
|
| 31 |
+
__all__ = ["Integer"]

import os

# Backend selection for the `Integer` arbitrary-precision type.
# Tried in order: _IntegerGMP, then _IntegerCustom, then _IntegerNative
# as the final fallback.
try:
    if os.getenv("PYCRYPTODOME_DISABLE_GMP"):
        # Env var lets users opt out of the GMP backend entirely.
        raise ImportError()

    from Crypto.Math._IntegerGMP import IntegerGMP as Integer
    from Crypto.Math._IntegerGMP import implementation as _implementation
except (ImportError, OSError, AttributeError):
    try:
        from Crypto.Math._IntegerCustom import IntegerCustom as Integer
        from Crypto.Math._IntegerCustom import implementation as _implementation
    except (ImportError, OSError):
        # Final fallback; this backend exposes no `implementation` info.
        from Crypto.Math._IntegerNative import IntegerNative as Integer
        _implementation = {}
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/Numbers.pyi
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from Crypto.Math._IntegerBase import IntegerBase as Integer
|
| 2 |
+
__all__ = ['Integer']
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/Primality.py
ADDED
|
@@ -0,0 +1,369 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ===================================================================
|
| 2 |
+
#
|
| 3 |
+
# Copyright (c) 2014, Legrandin <helderijs@gmail.com>
|
| 4 |
+
# All rights reserved.
|
| 5 |
+
#
|
| 6 |
+
# Redistribution and use in source and binary forms, with or without
|
| 7 |
+
# modification, are permitted provided that the following conditions
|
| 8 |
+
# are met:
|
| 9 |
+
#
|
| 10 |
+
# 1. Redistributions of source code must retain the above copyright
|
| 11 |
+
# notice, this list of conditions and the following disclaimer.
|
| 12 |
+
# 2. Redistributions in binary form must reproduce the above copyright
|
| 13 |
+
# notice, this list of conditions and the following disclaimer in
|
| 14 |
+
# the documentation and/or other materials provided with the
|
| 15 |
+
# distribution.
|
| 16 |
+
#
|
| 17 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 18 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 19 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
| 20 |
+
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
| 21 |
+
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
| 22 |
+
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
| 23 |
+
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
| 24 |
+
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
| 25 |
+
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
| 26 |
+
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
| 27 |
+
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 28 |
+
# POSSIBILITY OF SUCH DAMAGE.
|
| 29 |
+
# ===================================================================
|
| 30 |
+
|
| 31 |
+
"""Functions to create and test prime numbers.
|
| 32 |
+
|
| 33 |
+
:undocumented: __package__
|
| 34 |
+
"""
|
| 35 |
+
|
| 36 |
+
from Crypto import Random
|
| 37 |
+
from Crypto.Math.Numbers import Integer
|
| 38 |
+
|
| 39 |
+
from Crypto.Util.py3compat import iter_range
|
| 40 |
+
|
| 41 |
+
COMPOSITE = 0
|
| 42 |
+
PROBABLY_PRIME = 1
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def miller_rabin_test(candidate, iterations, randfunc=None):
    """Perform a Miller-Rabin primality test on an integer.

    The test follows Section C.3.1 of `FIPS PUB 186-4`__.

    :Parameters:
      candidate : integer
        The number to test for primality.
      iterations : integer
        The maximum number of iterations to perform before
        declaring a candidate a probable prime.
      randfunc : callable
        An RNG function where bases are taken from.

    :Returns:
      ``Primality.COMPOSITE`` or ``Primality.PROBABLY_PRIME``.

    .. __: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf
    """

    if not isinstance(candidate, Integer):
        candidate = Integer(candidate)

    # Trivial cases: the small primes we special-case, and even numbers.
    if candidate in (1, 2, 3, 5):
        return PROBABLY_PRIME
    if candidate.is_even():
        return COMPOSITE

    one = Integer(1)
    minus_one = Integer(candidate - 1)

    if randfunc is None:
        randfunc = Random.new().read

    # Steps 1-2: decompose candidate - 1 as (2 ** exp2) * odd_part,
    # with odd_part odd.
    odd_part = Integer(minus_one)
    exp2 = 0
    while odd_part.is_even():
        odd_part >>= 1
        exp2 += 1

    # Skip step 3

    # Step 4: run up to `iterations` rounds with independent random bases.
    for _ in iter_range(iterations):

        # Step 4.1-2: draw a base in [2, candidate - 2], redrawing in the
        # (impossible by construction, but defensive) case it is 1 or n-1.
        while True:
            base = Integer.random_range(min_inclusive=2,
                                        max_inclusive=candidate - 2,
                                        randfunc=randfunc)
            if base not in (one, minus_one):
                break
        assert(2 <= base <= candidate - 2)

        # Step 4.3-4.4: base**odd_part mod candidate; 1 or n-1 means this
        # round gives no evidence of compositeness.
        z = pow(base, odd_part, candidate)
        if z in (one, minus_one):
            continue

        # Step 4.5: square repeatedly, looking for n-1 before hitting 1.
        found_minus_one = False
        for _ in iter_range(1, exp2):
            z = pow(z, 2, candidate)
            if z == one:
                return COMPOSITE
            if z == minus_one:
                found_minus_one = True
                break
        if not found_minus_one:
            return COMPOSITE

    # Step 5: no round found a witness of compositeness.
    return PROBABLY_PRIME
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def lucas_test(candidate):
    """Perform a Lucas primality test on an integer.

    The test is specified in Section C.3.3 of `FIPS PUB 186-4`__.

    :Parameters:
      candidate : integer
        The number to test for primality.

    :Returns:
      ``Primality.COMPOSITE`` or ``Primality.PROBABLY_PRIME``.

    .. __: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf
    """

    if not isinstance(candidate, Integer):
        candidate = Integer(candidate)

    # Step 1
    if candidate in (1, 2, 3, 5):
        return PROBABLY_PRIME
    # A perfect square would make every Jacobi symbol 0 or 1, so the
    # search for D below would never terminate; rule it out up front.
    if candidate.is_even() or candidate.is_perfect_square():
        return COMPOSITE

    # Step 2
    def alternate():
        # Yield the sequence 5, -7, 9, -11, 13, ... of candidate
        # discriminants D prescribed by FIPS 186-4.
        value = 5
        while True:
            yield value
            if value > 0:
                value += 2
            else:
                value -= 2
            value = -value

    for D in alternate():
        if candidate in (D, -D):
            continue
        js = Integer.jacobi_symbol(D, candidate)
        if js == 0:
            # gcd(D, candidate) > 1, so candidate has a non-trivial factor.
            return COMPOSITE
        if js == -1:
            break
    # Found D. P=1 and Q=(1-D)/4 (note that Q is guaranteed to be an integer)

    # Step 3
    # This is \delta(n) = n - jacobi(D/n)
    K = candidate + 1
    # Step 4
    r = K.size_in_bits() - 1
    # Step 5
    # U_1=1 and V_1=P
    # U_temp/V_temp are scratch Integers reused across iterations to
    # avoid allocations; all updates below mutate in place.
    U_i = Integer(1)
    V_i = Integer(1)
    U_temp = Integer(0)
    V_temp = Integer(0)
    # Step 6
    # Left-to-right binary ladder over the bits of K, computing the
    # Lucas sequence terms U_K and V_K modulo candidate.
    for i in iter_range(r - 1, -1, -1):
        # Square
        # U_temp = U_i * V_i % candidate
        U_temp.set(U_i)
        U_temp *= V_i
        U_temp %= candidate
        # V_temp = (((V_i ** 2 + (U_i ** 2 * D)) * K) >> 1) % candidate
        V_temp.set(U_i)
        V_temp *= U_i
        V_temp *= D
        V_temp.multiply_accumulate(V_i, V_i)
        # Modular halving: candidate is odd, so adding it to an odd value
        # makes the sum even before the shift.
        if V_temp.is_odd():
            V_temp += candidate
        V_temp >>= 1
        V_temp %= candidate
        # Multiply
        if K.get_bit(i):
            # U_i = (((U_temp + V_temp) * K) >> 1) % candidate
            U_i.set(U_temp)
            U_i += V_temp
            if U_i.is_odd():
                U_i += candidate
            U_i >>= 1
            U_i %= candidate
            # V_i = (((V_temp + U_temp * D) * K) >> 1) % candidate
            V_i.set(V_temp)
            V_i.multiply_accumulate(U_temp, D)
            if V_i.is_odd():
                V_i += candidate
            V_i >>= 1
            V_i %= candidate
        else:
            U_i.set(U_temp)
            V_i.set(V_temp)
    # Step 7
    # candidate is a Lucas probable prime iff U_{n+1} == 0 (mod n).
    if U_i == 0:
        return PROBABLY_PRIME
    return COMPOSITE
|
| 214 |
+
|
| 215 |
+
|
| 216 |
+
from Crypto.Util.number import sieve_base as _sieve_base_large
## The optimal number of small primes to use for the sieve
## is probably dependent on the platform and the candidate size
# Keep only the first 100 small primes (as a set, for O(1) membership).
_sieve_base = set(_sieve_base_large[:100])
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
def test_probable_prime(candidate, randfunc=None):
    """Test if a number is prime.

    A number is qualified as prime if it passes a certain
    number of Miller-Rabin tests (dependent on the size
    of the number, but such that probability of a false
    positive is less than 10^-30) and a single Lucas test.

    For instance, a 1024-bit candidate will need to pass
    4 Miller-Rabin tests.

    :Parameters:
      candidate : integer
        The number to test for primality.
      randfunc : callable
        The routine to draw random bytes from to select Miller-Rabin bases.
    :Returns:
      ``PROBABLY_PRIME`` if the number is prime with very high probability.
      ``COMPOSITE`` if the number is a composite.
      For efficiency reasons, ``COMPOSITE`` is also returned for small primes.
    """

    if randfunc is None:
        randfunc = Random.new().read

    if not isinstance(candidate, Integer):
        candidate = Integer(candidate)

    # First, check trial division by the smallest primes
    if int(candidate) in _sieve_base:
        return PROBABLY_PRIME
    try:
        # This must be an explicit loop: the previous code used map(),
        # which is lazy in Python 3, so the divisibility checks were
        # never actually executed and the sieve was a no-op.
        for small_prime in _sieve_base:
            candidate.fail_if_divisible_by(small_prime)
    except ValueError:
        return COMPOSITE

    # These are the number of Miller-Rabin iterations s.t. p(k, t) < 1E-30,
    # with p(k, t) being the probability that a randomly chosen k-bit number
    # is composite but still survives t MR iterations.
    mr_ranges = ((220, 30), (280, 20), (390, 15), (512, 10),
                 (620, 7), (740, 6), (890, 5), (1200, 4),
                 (1700, 3), (3700, 2))

    bit_size = candidate.size_in_bits()
    # Pick the iteration count for the first range the candidate fits in;
    # candidates larger than all ranges only need a single iteration.
    mr_iterations = next((iters for max_k, iters in mr_ranges
                          if bit_size < max_k), 1)

    if miller_rabin_test(candidate, mr_iterations,
                         randfunc=randfunc) == COMPOSITE:
        return COMPOSITE
    if lucas_test(candidate) == COMPOSITE:
        return COMPOSITE
    return PROBABLY_PRIME
|
| 278 |
+
|
| 279 |
+
|
| 280 |
+
def generate_probable_prime(**kwargs):
    """Generate a random probable prime.

    The prime will not have any specific properties
    (e.g. it will not be a *strong* prime).

    Random numbers are evaluated for primality until one
    passes all tests, consisting of a certain number of
    Miller-Rabin tests with random bases followed by
    a single Lucas test.

    The number of Miller-Rabin iterations is chosen such that
    the probability that the output number is a non-prime is
    less than 1E-30 (roughly 2^{-100}).

    This approach is compliant to `FIPS PUB 186-4`__.

    :Keywords:
      exact_bits : integer
        The desired size in bits of the probable prime.
        It must be at least 160.
      randfunc : callable
        An RNG function where candidate primes are taken from.
      prime_filter : callable
        A function that takes an Integer as parameter and returns
        True if the number can be passed to further primality tests,
        False if it should be immediately discarded.

    :Return:
      A probable prime in the range 2^exact_bits > p > 2^(exact_bits-1).

    :Raises ValueError:
      If unknown keywords are passed, ``exact_bits`` is missing,
      or ``exact_bits`` is smaller than 160.

    .. __: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf
    """

    exact_bits = kwargs.pop("exact_bits", None)
    randfunc = kwargs.pop("randfunc", None)
    prime_filter = kwargs.pop("prime_filter", lambda x: True)
    if kwargs:
        # str() is required: concatenating dict_keys to a str raises
        # TypeError, which masked the intended ValueError.
        raise ValueError("Unknown parameters: " + str(kwargs.keys()))

    if exact_bits is None:
        raise ValueError("Missing exact_bits parameter")
    if exact_bits < 160:
        raise ValueError("Prime number is not big enough.")

    if randfunc is None:
        randfunc = Random.new().read

    result = COMPOSITE
    while result == COMPOSITE:
        # Force the candidate to be odd before testing it.
        candidate = Integer.random(exact_bits=exact_bits,
                                   randfunc=randfunc) | 1
        if not prime_filter(candidate):
            continue
        result = test_probable_prime(candidate, randfunc)
    return candidate
|
| 336 |
+
|
| 337 |
+
|
| 338 |
+
def generate_probable_safe_prime(**kwargs):
    """Generate a random, probable safe prime.

    A safe prime is a prime p such that (p - 1) / 2 is also prime.
    Note this operation is much slower than generating a simple prime.

    :Keywords:
      exact_bits : integer
        The desired size in bits of the probable safe prime.
      randfunc : callable
        An RNG function where candidate primes are taken from.

    :Return:
      A probable safe prime in the range
      2^exact_bits > p > 2^(exact_bits-1).

    :Raises ValueError:
      If unknown keywords are passed or ``exact_bits`` is missing.
    """

    exact_bits = kwargs.pop("exact_bits", None)
    randfunc = kwargs.pop("randfunc", None)
    if kwargs:
        # str() is required: concatenating dict_keys to a str raises
        # TypeError, which masked the intended ValueError.
        raise ValueError("Unknown parameters: " + str(kwargs.keys()))

    if exact_bits is None:
        # Check explicitly (as generate_probable_prime does) instead of
        # letting "exact_bits - 1" below fail with an opaque TypeError.
        raise ValueError("Missing exact_bits parameter")

    if randfunc is None:
        randfunc = Random.new().read

    result = COMPOSITE
    while result == COMPOSITE:
        # Generate q, then test whether p = 2q + 1 is also prime.
        q = generate_probable_prime(exact_bits=exact_bits - 1, randfunc=randfunc)
        candidate = q * 2 + 1
        if candidate.size_in_bits() != exact_bits:
            continue
        result = test_probable_prime(candidate, randfunc=randfunc)
    return candidate
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/Primality.pyi
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Type stubs for Crypto.Math.Primality.

from typing import Callable, Optional, Union, Set

PrimeResult = int

COMPOSITE: PrimeResult
PROBABLY_PRIME: PrimeResult

def miller_rabin_test(candidate: int, iterations: int, randfunc: Optional[Callable[[int],bytes]]=None) -> PrimeResult: ...
def lucas_test(candidate: int) -> PrimeResult: ...
_sieve_base: Set[int]
def test_probable_prime(candidate: int, randfunc: Optional[Callable[[int],bytes]]=None) -> PrimeResult: ...
def generate_probable_prime(*,
                            exact_bits: int = ...,
                            randfunc: Callable[[int],bytes] = ...,
                            prime_filter: Callable[[int],bool] = ...) -> int: ...
def generate_probable_safe_prime(*,
                                 exact_bits: int = ...,
                                 randfunc: Callable[[int],bytes] = ...) -> int: ...
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/_IntegerBase.py
ADDED
|
@@ -0,0 +1,412 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ===================================================================
|
| 2 |
+
#
|
| 3 |
+
# Copyright (c) 2018, Helder Eijs <helderijs@gmail.com>
|
| 4 |
+
# All rights reserved.
|
| 5 |
+
#
|
| 6 |
+
# Redistribution and use in source and binary forms, with or without
|
| 7 |
+
# modification, are permitted provided that the following conditions
|
| 8 |
+
# are met:
|
| 9 |
+
#
|
| 10 |
+
# 1. Redistributions of source code must retain the above copyright
|
| 11 |
+
# notice, this list of conditions and the following disclaimer.
|
| 12 |
+
# 2. Redistributions in binary form must reproduce the above copyright
|
| 13 |
+
# notice, this list of conditions and the following disclaimer in
|
| 14 |
+
# the documentation and/or other materials provided with the
|
| 15 |
+
# distribution.
|
| 16 |
+
#
|
| 17 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 18 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 19 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
| 20 |
+
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
| 21 |
+
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
| 22 |
+
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
| 23 |
+
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
| 24 |
+
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
| 25 |
+
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
| 26 |
+
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
| 27 |
+
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 28 |
+
# POSSIBILITY OF SUCH DAMAGE.
|
| 29 |
+
# ===================================================================
|
| 30 |
+
|
| 31 |
+
import abc
|
| 32 |
+
|
| 33 |
+
from Crypto.Util.py3compat import iter_range, bord, bchr, ABC
|
| 34 |
+
|
| 35 |
+
from Crypto import Random
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class IntegerBase(ABC):
    """Abstract interface for arbitrary-size integers.

    Concrete arithmetic backends subclass this and implement the
    abstract operations. The concrete methods defined here
    (``_tonelli_shanks``, ``random``, ``random_range``) are shared
    by all backends.
    """

    # Conversions
    @abc.abstractmethod
    def __int__(self):
        pass

    @abc.abstractmethod
    def __str__(self):
        pass

    @abc.abstractmethod
    def __repr__(self):
        pass

    @abc.abstractmethod
    def to_bytes(self, block_size=0, byteorder='big'):
        pass

    @staticmethod
    @abc.abstractmethod
    def from_bytes(byte_string, byteorder='big'):
        pass

    # Relations
    @abc.abstractmethod
    def __eq__(self, term):
        pass

    @abc.abstractmethod
    def __ne__(self, term):
        pass

    @abc.abstractmethod
    def __lt__(self, term):
        pass

    @abc.abstractmethod
    def __le__(self, term):
        pass

    @abc.abstractmethod
    def __gt__(self, term):
        pass

    @abc.abstractmethod
    def __ge__(self, term):
        pass

    @abc.abstractmethod
    def __nonzero__(self):
        pass
    # Python 3 truthiness hook; __nonzero__ is the Python 2 name.
    __bool__ = __nonzero__

    @abc.abstractmethod
    def is_negative(self):
        pass

    # Arithmetic operations
    @abc.abstractmethod
    def __add__(self, term):
        pass

    @abc.abstractmethod
    def __sub__(self, term):
        pass

    @abc.abstractmethod
    def __mul__(self, factor):
        pass

    @abc.abstractmethod
    def __floordiv__(self, divisor):
        pass

    @abc.abstractmethod
    def __mod__(self, divisor):
        pass

    @abc.abstractmethod
    def inplace_pow(self, exponent, modulus=None):
        pass

    @abc.abstractmethod
    def __pow__(self, exponent, modulus=None):
        pass

    @abc.abstractmethod
    def __abs__(self):
        pass

    @abc.abstractmethod
    def sqrt(self, modulus=None):
        pass

    @abc.abstractmethod
    def __iadd__(self, term):
        pass

    @abc.abstractmethod
    def __isub__(self, term):
        pass

    @abc.abstractmethod
    def __imul__(self, term):
        pass

    @abc.abstractmethod
    def __imod__(self, term):
        pass

    # Boolean/bit operations
    @abc.abstractmethod
    def __and__(self, term):
        pass

    @abc.abstractmethod
    def __or__(self, term):
        pass

    @abc.abstractmethod
    def __rshift__(self, pos):
        pass

    @abc.abstractmethod
    def __irshift__(self, pos):
        pass

    @abc.abstractmethod
    def __lshift__(self, pos):
        pass

    @abc.abstractmethod
    def __ilshift__(self, pos):
        pass

    @abc.abstractmethod
    def get_bit(self, n):
        pass

    # Extra
    @abc.abstractmethod
    def is_odd(self):
        pass

    @abc.abstractmethod
    def is_even(self):
        pass

    @abc.abstractmethod
    def size_in_bits(self):
        pass

    @abc.abstractmethod
    def size_in_bytes(self):
        pass

    @abc.abstractmethod
    def is_perfect_square(self):
        pass

    @abc.abstractmethod
    def fail_if_divisible_by(self, small_prime):
        pass

    @abc.abstractmethod
    def multiply_accumulate(self, a, b):
        pass

    @abc.abstractmethod
    def set(self, source):
        pass

    @abc.abstractmethod
    def inplace_inverse(self, modulus):
        pass

    @abc.abstractmethod
    def inverse(self, modulus):
        pass

    @abc.abstractmethod
    def gcd(self, term):
        pass

    @abc.abstractmethod
    def lcm(self, term):
        pass

    @staticmethod
    @abc.abstractmethod
    def jacobi_symbol(a, n):
        pass

    @staticmethod
    def _tonelli_shanks(n, p):
        """Tonelli-Shanks algorithm for computing the square root
        of n modulo a prime p.

        n must be in the range [0..p-1].
        p must be odd (the algorithm assumes an odd prime modulus).

        The return value r is the square root of n modulo p. If non-zero,
        another solution will also exist (p-r).

        Note we cannot assume that p is really a prime: if it's not,
        we can either raise an exception or return the correct value.

        :Raises ValueError:
          If no square root exists, or p is detected not to be prime.
        """

        # See https://rosettacode.org/wiki/Tonelli-Shanks_algorithm

        if n in (0, 1):
            return n

        # For p = 3 (mod 4) the root has a closed form.
        if p % 4 == 3:
            root = pow(n, (p + 1) // 4, p)
            if pow(root, 2, p) != n:
                raise ValueError("Cannot compute square root")
            return root

        # Decompose p - 1 as (2 ** s) * q, with q odd.
        s = 1
        q = (p - 1) // 2
        while not (q & 1):
            s += 1
            q >>= 1

        # Find a quadratic non-residue z via Euler's criterion.
        z = n.__class__(2)
        while True:
            euler = pow(z, (p - 1) // 2, p)
            if euler == 1:
                z += 1
                continue
            if euler == p - 1:
                break
            # Most probably p is not a prime
            raise ValueError("Cannot compute square root")

        m = s
        c = pow(z, q, p)
        t = pow(n, q, p)
        r = pow(n, (q + 1) // 2, p)

        while t != 1:
            for i in iter_range(0, m):
                if pow(t, 2**i, p) == 1:
                    break
            if i == m:
                raise ValueError("Cannot compute square root of %d mod %d" % (n, p))
            b = pow(c, 2**(m - i - 1), p)
            m = i
            c = b**2 % p
            t = (t * b**2) % p
            r = (r * b) % p

        # Final verification also catches non-prime p that slipped through.
        if pow(r, 2, p) != n:
            raise ValueError("Cannot compute square root")

        return r

    @classmethod
    def random(cls, **kwargs):
        """Generate a random natural integer of a certain size.

        :Keywords:
          exact_bits : positive integer
            The length in bits of the resulting random Integer number.
            The number is guaranteed to fulfil the relation:

                2^bits > result >= 2^(bits - 1)

          max_bits : positive integer
            The maximum length in bits of the resulting random Integer number.
            The number is guaranteed to fulfil the relation:

                2^bits > result >= 0

          randfunc : callable
            A function that returns a random byte string. The length of the
            byte string is passed as parameter. Optional.
            If not provided (or ``None``), randomness is read from the system RNG.

        :Return: a Integer object
        """

        exact_bits = kwargs.pop("exact_bits", None)
        max_bits = kwargs.pop("max_bits", None)
        randfunc = kwargs.pop("randfunc", None)

        # Reject typos in keyword names, consistently with random_range().
        if kwargs:
            raise ValueError("Unknown keywords: " + str(kwargs.keys()))

        if randfunc is None:
            randfunc = Random.new().read

        if exact_bits is None and max_bits is None:
            raise ValueError("Either 'exact_bits' or 'max_bits' must be specified")

        if exact_bits is not None and max_bits is not None:
            raise ValueError("'exact_bits' and 'max_bits' are mutually exclusive")

        bits = exact_bits or max_bits
        bytes_needed = ((bits - 1) // 8) + 1
        # Number of bits that actually belong to the most significant byte.
        significant_bits_msb = 8 - (bytes_needed * 8 - bits)
        msb = bord(randfunc(1)[0])
        if exact_bits is not None:
            # Force the top bit so the result has exactly `bits` bits.
            msb |= 1 << (significant_bits_msb - 1)
        # Mask off bits above the requested size.
        msb &= (1 << significant_bits_msb) - 1

        return cls.from_bytes(bchr(msb) + randfunc(bytes_needed - 1))

    @classmethod
    def random_range(cls, **kwargs):
        """Generate a random integer within a given interval.

        :Keywords:
          min_inclusive : integer
            The lower end of the interval (inclusive).
          max_inclusive : integer
            The higher end of the interval (inclusive).
          max_exclusive : integer
            The higher end of the interval (exclusive).
          randfunc : callable
            A function that returns a random byte string. The length of the
            byte string is passed as parameter. Optional.
            If not provided (or ``None``), randomness is read from the system RNG.
        :Returns:
            An Integer randomly taken in the given interval.
        """

        min_inclusive = kwargs.pop("min_inclusive", None)
        max_inclusive = kwargs.pop("max_inclusive", None)
        max_exclusive = kwargs.pop("max_exclusive", None)
        randfunc = kwargs.pop("randfunc", None)

        if kwargs:
            # kwargs.keys() must be *called*: the previous code passed the
            # bound method itself to str(), producing a useless message.
            raise ValueError("Unknown keywords: " + str(kwargs.keys()))
        if None not in (max_inclusive, max_exclusive):
            raise ValueError("max_inclusive and max_exclusive cannot be both"
                             " specified")
        if max_exclusive is not None:
            max_inclusive = max_exclusive - 1
        if None in (min_inclusive, max_inclusive):
            raise ValueError("Missing keyword to identify the interval")

        if randfunc is None:
            randfunc = Random.new().read

        # Rejection sampling over [0, norm_maximum], then shift back.
        norm_maximum = max_inclusive - min_inclusive
        bits_needed = cls(norm_maximum).size_in_bits()

        norm_candidate = -1
        while not 0 <= norm_candidate <= norm_maximum:
            norm_candidate = cls.random(
                    max_bits=bits_needed,
                    randfunc=randfunc
                    )
        return norm_candidate + min_inclusive

    @staticmethod
    @abc.abstractmethod
    def _mult_modulo_bytes(term1, term2, modulus):
        """Multiply two integers, take the modulo, and encode as big endian.
        This specialized method is used for RSA decryption.

        Args:
          term1 : integer
            The first term of the multiplication, non-negative.
          term2 : integer
            The second term of the multiplication, non-negative.
          modulus: integer
            The modulus, a positive odd number.
        :Returns:
          A byte string, with the result of the modular multiplication
          encoded in big endian mode.
          It is as long as the modulus would be, with zero padding
          on the left if needed.
        """
        pass
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/_IntegerBase.pyi
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Type stubs for Crypto.Math._IntegerBase.

from typing import Optional, Union, Callable

# An RNG function: takes a byte count, returns that many random bytes
# (previously mistyped as Callable[[int], int]; randfunc results are
# concatenated as byte strings, see IntegerBase.random).
RandFunc = Callable[[int], bytes]

class IntegerBase:

    def __init__(self, value: Union[IntegerBase, int]): ...

    def __int__(self) -> int: ...
    def __str__(self) -> str: ...
    def __repr__(self) -> str: ...
    def to_bytes(self, block_size: Optional[int]=0, byteorder: str= ...) -> bytes: ...
    @staticmethod
    def from_bytes(byte_string: bytes, byteorder: Optional[str] = ...) -> IntegerBase: ...
    def __eq__(self, term: object) -> bool: ...
    def __ne__(self, term: object) -> bool: ...
    def __lt__(self, term: Union[IntegerBase, int]) -> bool: ...
    def __le__(self, term: Union[IntegerBase, int]) -> bool: ...
    def __gt__(self, term: Union[IntegerBase, int]) -> bool: ...
    def __ge__(self, term: Union[IntegerBase, int]) -> bool: ...
    def __nonzero__(self) -> bool: ...
    def is_negative(self) -> bool: ...
    def __add__(self, term: Union[IntegerBase, int]) -> IntegerBase: ...
    def __sub__(self, term: Union[IntegerBase, int]) -> IntegerBase: ...
    def __mul__(self, term: Union[IntegerBase, int]) -> IntegerBase: ...
    def __floordiv__(self, divisor: Union[IntegerBase, int]) -> IntegerBase: ...
    def __mod__(self, divisor: Union[IntegerBase, int]) -> IntegerBase: ...
    def inplace_pow(self, exponent: int, modulus: Optional[Union[IntegerBase, int]]=None) -> IntegerBase: ...
    def __pow__(self, exponent: int, modulus: Optional[int]) -> IntegerBase: ...
    def __abs__(self) -> IntegerBase: ...
    def sqrt(self, modulus: Optional[int]) -> IntegerBase: ...
    def __iadd__(self, term: Union[IntegerBase, int]) -> IntegerBase: ...
    def __isub__(self, term: Union[IntegerBase, int]) -> IntegerBase: ...
    def __imul__(self, term: Union[IntegerBase, int]) -> IntegerBase: ...
    def __imod__(self, divisor: Union[IntegerBase, int]) -> IntegerBase: ...
    def __and__(self, term: Union[IntegerBase, int]) -> IntegerBase: ...
    def __or__(self, term: Union[IntegerBase, int]) -> IntegerBase: ...
    def __rshift__(self, pos: Union[IntegerBase, int]) -> IntegerBase: ...
    def __irshift__(self, pos: Union[IntegerBase, int]) -> IntegerBase: ...
    def __lshift__(self, pos: Union[IntegerBase, int]) -> IntegerBase: ...
    def __ilshift__(self, pos: Union[IntegerBase, int]) -> IntegerBase: ...
    def get_bit(self, n: int) -> bool: ...
    def is_odd(self) -> bool: ...
    def is_even(self) -> bool: ...
    def size_in_bits(self) -> int: ...
    def size_in_bytes(self) -> int: ...
    def is_perfect_square(self) -> bool: ...
    def fail_if_divisible_by(self, small_prime: Union[IntegerBase, int]) -> None: ...
    def multiply_accumulate(self, a: Union[IntegerBase, int], b: Union[IntegerBase, int]) -> IntegerBase: ...
    def set(self, source: Union[IntegerBase, int]) -> IntegerBase: ...
    def inplace_inverse(self, modulus: Union[IntegerBase, int]) -> IntegerBase: ...
    def inverse(self, modulus: Union[IntegerBase, int]) -> IntegerBase: ...
    def gcd(self, term: Union[IntegerBase, int]) -> IntegerBase: ...
    def lcm(self, term: Union[IntegerBase, int]) -> IntegerBase: ...
    @staticmethod
    def jacobi_symbol(a: Union[IntegerBase, int], n: Union[IntegerBase, int]) -> IntegerBase: ...
    @staticmethod
    def _tonelli_shanks(n: Union[IntegerBase, int], p: Union[IntegerBase, int]) -> IntegerBase : ...
    @classmethod
    def random(cls, **kwargs: Union[int,RandFunc]) -> IntegerBase : ...
    @classmethod
    def random_range(cls, **kwargs: Union[int,RandFunc]) -> IntegerBase : ...
    @staticmethod
    def _mult_modulo_bytes(term1: Union[IntegerBase, int],
                           term2: Union[IntegerBase, int],
                           modulus: Union[IntegerBase, int]) -> bytes: ...
|
| 67 |
+
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/_IntegerCustom.py
ADDED
|
@@ -0,0 +1,162 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ===================================================================
|
| 2 |
+
#
|
| 3 |
+
# Copyright (c) 2018, Helder Eijs <helderijs@gmail.com>
|
| 4 |
+
# All rights reserved.
|
| 5 |
+
#
|
| 6 |
+
# Redistribution and use in source and binary forms, with or without
|
| 7 |
+
# modification, are permitted provided that the following conditions
|
| 8 |
+
# are met:
|
| 9 |
+
#
|
| 10 |
+
# 1. Redistributions of source code must retain the above copyright
|
| 11 |
+
# notice, this list of conditions and the following disclaimer.
|
| 12 |
+
# 2. Redistributions in binary form must reproduce the above copyright
|
| 13 |
+
# notice, this list of conditions and the following disclaimer in
|
| 14 |
+
# the documentation and/or other materials provided with the
|
| 15 |
+
# distribution.
|
| 16 |
+
#
|
| 17 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 18 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 19 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
| 20 |
+
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
| 21 |
+
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
| 22 |
+
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
| 23 |
+
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
| 24 |
+
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
| 25 |
+
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
| 26 |
+
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
| 27 |
+
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 28 |
+
# POSSIBILITY OF SUCH DAMAGE.
|
| 29 |
+
# ===================================================================
|
| 30 |
+
|
| 31 |
+
from ._IntegerNative import IntegerNative
|
| 32 |
+
|
| 33 |
+
from Crypto.Util.number import long_to_bytes, bytes_to_long
|
| 34 |
+
|
| 35 |
+
from Crypto.Util._raw_api import (load_pycryptodome_raw_lib,
|
| 36 |
+
create_string_buffer,
|
| 37 |
+
get_raw_buffer, backend,
|
| 38 |
+
c_size_t, c_ulonglong)
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
from Crypto.Random.random import getrandbits
|
| 42 |
+
|
| 43 |
+
# C declarations for the Montgomery modular-arithmetic extension.
# Both entry points return 0 on success, non-zero on failure.
c_defs = """
int monty_pow(uint8_t *out,
              const uint8_t *base,
              const uint8_t *exp,
              const uint8_t *modulus,
              size_t len,
              uint64_t seed);

int monty_multiply(uint8_t *out,
                   const uint8_t *term1,
                   const uint8_t *term2,
                   const uint8_t *modulus,
                   size_t len);
"""


# Handle to the compiled extension exposing the declarations above.
_raw_montgomery = load_pycryptodome_raw_lib("Crypto.Math._modexp", c_defs)

# Advertise which math backend this module provides.
implementation = {"library": "custom", "api": backend}
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class IntegerCustom(IntegerNative):
    """Arbitrary-precision integer backed by native Python ints, with
    modular exponentiation and multiplication delegated to the
    pycryptodome Montgomery C extension when its preconditions hold."""

    @staticmethod
    def from_bytes(byte_string, byteorder='big'):
        """Build an ``IntegerCustom`` from a byte string.

        :param byte_string: the encoded (non-negative) value.
        :param byteorder: ``'big'`` (default) or ``'little'``.
        :raises ValueError: if ``byteorder`` is neither of the two.
        """
        if byteorder == 'little':
            # Normalize to big-endian before decoding.
            byte_string = bytearray(byte_string)
            byte_string.reverse()
        elif byteorder != 'big':
            raise ValueError("Incorrect byteorder")
        return IntegerCustom(bytes_to_long(byte_string))

    def inplace_pow(self, exponent, modulus=None):
        """Raise this value to ``exponent`` (optionally mod ``modulus``)
        in place and return ``self``.

        :raises ValueError: on a negative exponent or negative modulus.
        :raises ZeroDivisionError: if ``modulus`` is zero.
        """
        exp_value = int(exponent)
        if exp_value < 0:
            raise ValueError("Exponent must not be negative")

        if modulus is None:
            # Plain exponentiation, no reduction.
            self._value = pow(self._value, exp_value)
            return self

        mod_value = int(modulus)
        if mod_value < 0:
            raise ValueError("Modulus must be positive")
        if mod_value == 0:
            raise ZeroDivisionError("Modulus cannot be zero")

        # The Montgomery C code requires an odd modulus;
        # fall back to Python's pow() otherwise.
        if mod_value % 2 == 0:
            self._value = pow(self._value, exp_value, mod_value)
            return self

        # The C code also requires base < modulus.
        if self._value >= mod_value:
            self._value %= mod_value

        # All three operands are serialized to the same fixed width.
        max_len = len(long_to_bytes(max(self._value, exp_value, mod_value)))
        base_b = long_to_bytes(self._value, max_len)
        exp_b = long_to_bytes(exp_value, max_len)
        modulus_b = long_to_bytes(mod_value, max_len)

        out = create_string_buffer(max_len)
        error = _raw_montgomery.monty_pow(out,
                                          base_b,
                                          exp_b,
                                          modulus_b,
                                          c_size_t(max_len),
                                          c_ulonglong(getrandbits(64)))
        if error:
            raise ValueError("monty_pow failed with error: %d" % error)

        self._value = bytes_to_long(get_raw_buffer(out))
        return self

    @staticmethod
    def _mult_modulo_bytes(term1, term2, modulus):
        """Return ``(term1 * term2) % modulus`` encoded as a big-endian
        byte string as long as the modulus.

        :raises ValueError: if the modulus is negative or even.
        :raises ZeroDivisionError: if the modulus is zero.
        """
        mod_value = int(modulus)
        if mod_value < 0:
            raise ValueError("Modulus must be positive")
        if mod_value == 0:
            raise ZeroDivisionError("Modulus cannot be zero")

        # The Montgomery C code requires an odd modulus.
        if mod_value % 2 == 0:
            raise ValueError("Odd modulus is required")

        # The C code requires both terms in [0, modulus).
        if not (0 <= term1 < mod_value):
            term1 %= mod_value
        if not (0 <= term2 < mod_value):
            term2 %= mod_value

        modulus_b = long_to_bytes(mod_value)
        numbers_len = len(modulus_b)
        term1_b = long_to_bytes(term1, numbers_len)
        term2_b = long_to_bytes(term2, numbers_len)
        out = create_string_buffer(numbers_len)

        error = _raw_montgomery.monty_multiply(out,
                                               term1_b,
                                               term2_b,
                                               modulus_b,
                                               c_size_t(numbers_len))
        if error:
            raise ValueError("monty_multiply failed with error: %d" % error)

        return get_raw_buffer(out)
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/_IntegerCustom.pyi
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any
|
| 2 |
+
|
| 3 |
+
from ._IntegerNative import IntegerNative
|
| 4 |
+
|
| 5 |
+
# Loosely-typed handle to the Montgomery C extension; its real type is
# backend-dependent (cffi vs ctypes), so the stub leaves it as Any.
_raw_montgomery = Any


class IntegerCustom(IntegerNative):
    # The concrete class only overrides implementation details; its public
    # interface is inherited unchanged from IntegerNative.
    ...
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/_IntegerGMP.py
ADDED
|
@@ -0,0 +1,799 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ===================================================================
|
| 2 |
+
#
|
| 3 |
+
# Copyright (c) 2014, Legrandin <helderijs@gmail.com>
|
| 4 |
+
# All rights reserved.
|
| 5 |
+
#
|
| 6 |
+
# Redistribution and use in source and binary forms, with or without
|
| 7 |
+
# modification, are permitted provided that the following conditions
|
| 8 |
+
# are met:
|
| 9 |
+
#
|
| 10 |
+
# 1. Redistributions of source code must retain the above copyright
|
| 11 |
+
# notice, this list of conditions and the following disclaimer.
|
| 12 |
+
# 2. Redistributions in binary form must reproduce the above copyright
|
| 13 |
+
# notice, this list of conditions and the following disclaimer in
|
| 14 |
+
# the documentation and/or other materials provided with the
|
| 15 |
+
# distribution.
|
| 16 |
+
#
|
| 17 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 18 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 19 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
| 20 |
+
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
| 21 |
+
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
| 22 |
+
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
| 23 |
+
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
| 24 |
+
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
| 25 |
+
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
| 26 |
+
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
| 27 |
+
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 28 |
+
# POSSIBILITY OF SUCH DAMAGE.
|
| 29 |
+
# ===================================================================
|
| 30 |
+
|
| 31 |
+
import sys
|
| 32 |
+
import struct
|
| 33 |
+
|
| 34 |
+
from Crypto.Util.py3compat import is_native_int
|
| 35 |
+
|
| 36 |
+
from Crypto.Util._raw_api import (backend, load_lib,
|
| 37 |
+
c_ulong, c_size_t, c_uint8_ptr)
|
| 38 |
+
|
| 39 |
+
from ._IntegerBase import IntegerBase
|
| 40 |
+
|
| 41 |
+
gmp_defs = """typedef unsigned long UNIX_ULONG;
|
| 42 |
+
typedef struct { int a; int b; void *c; } MPZ;
|
| 43 |
+
typedef MPZ mpz_t[1];
|
| 44 |
+
typedef UNIX_ULONG mp_bitcnt_t;
|
| 45 |
+
|
| 46 |
+
void __gmpz_init (mpz_t x);
|
| 47 |
+
void __gmpz_init_set (mpz_t rop, const mpz_t op);
|
| 48 |
+
void __gmpz_init_set_ui (mpz_t rop, UNIX_ULONG op);
|
| 49 |
+
|
| 50 |
+
UNIX_ULONG __gmpz_get_ui (const mpz_t op);
|
| 51 |
+
void __gmpz_set (mpz_t rop, const mpz_t op);
|
| 52 |
+
void __gmpz_set_ui (mpz_t rop, UNIX_ULONG op);
|
| 53 |
+
void __gmpz_add (mpz_t rop, const mpz_t op1, const mpz_t op2);
|
| 54 |
+
void __gmpz_add_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2);
|
| 55 |
+
void __gmpz_sub_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2);
|
| 56 |
+
void __gmpz_addmul (mpz_t rop, const mpz_t op1, const mpz_t op2);
|
| 57 |
+
void __gmpz_addmul_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2);
|
| 58 |
+
void __gmpz_submul_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2);
|
| 59 |
+
void __gmpz_import (mpz_t rop, size_t count, int order, size_t size,
|
| 60 |
+
int endian, size_t nails, const void *op);
|
| 61 |
+
void * __gmpz_export (void *rop, size_t *countp, int order,
|
| 62 |
+
size_t size,
|
| 63 |
+
int endian, size_t nails, const mpz_t op);
|
| 64 |
+
size_t __gmpz_sizeinbase (const mpz_t op, int base);
|
| 65 |
+
void __gmpz_sub (mpz_t rop, const mpz_t op1, const mpz_t op2);
|
| 66 |
+
void __gmpz_mul (mpz_t rop, const mpz_t op1, const mpz_t op2);
|
| 67 |
+
void __gmpz_mul_ui (mpz_t rop, const mpz_t op1, UNIX_ULONG op2);
|
| 68 |
+
int __gmpz_cmp (const mpz_t op1, const mpz_t op2);
|
| 69 |
+
void __gmpz_powm (mpz_t rop, const mpz_t base, const mpz_t exp, const
|
| 70 |
+
mpz_t mod);
|
| 71 |
+
void __gmpz_powm_ui (mpz_t rop, const mpz_t base, UNIX_ULONG exp,
|
| 72 |
+
const mpz_t mod);
|
| 73 |
+
void __gmpz_pow_ui (mpz_t rop, const mpz_t base, UNIX_ULONG exp);
|
| 74 |
+
void __gmpz_sqrt(mpz_t rop, const mpz_t op);
|
| 75 |
+
void __gmpz_mod (mpz_t r, const mpz_t n, const mpz_t d);
|
| 76 |
+
void __gmpz_neg (mpz_t rop, const mpz_t op);
|
| 77 |
+
void __gmpz_abs (mpz_t rop, const mpz_t op);
|
| 78 |
+
void __gmpz_and (mpz_t rop, const mpz_t op1, const mpz_t op2);
|
| 79 |
+
void __gmpz_ior (mpz_t rop, const mpz_t op1, const mpz_t op2);
|
| 80 |
+
void __gmpz_clear (mpz_t x);
|
| 81 |
+
void __gmpz_tdiv_q_2exp (mpz_t q, const mpz_t n, mp_bitcnt_t b);
|
| 82 |
+
void __gmpz_fdiv_q (mpz_t q, const mpz_t n, const mpz_t d);
|
| 83 |
+
void __gmpz_mul_2exp (mpz_t rop, const mpz_t op1, mp_bitcnt_t op2);
|
| 84 |
+
int __gmpz_tstbit (const mpz_t op, mp_bitcnt_t bit_index);
|
| 85 |
+
int __gmpz_perfect_square_p (const mpz_t op);
|
| 86 |
+
int __gmpz_jacobi (const mpz_t a, const mpz_t b);
|
| 87 |
+
void __gmpz_gcd (mpz_t rop, const mpz_t op1, const mpz_t op2);
|
| 88 |
+
UNIX_ULONG __gmpz_gcd_ui (mpz_t rop, const mpz_t op1,
|
| 89 |
+
UNIX_ULONG op2);
|
| 90 |
+
void __gmpz_lcm (mpz_t rop, const mpz_t op1, const mpz_t op2);
|
| 91 |
+
int __gmpz_invert (mpz_t rop, const mpz_t op1, const mpz_t op2);
|
| 92 |
+
int __gmpz_divisible_p (const mpz_t n, const mpz_t d);
|
| 93 |
+
int __gmpz_divisible_ui_p (const mpz_t n, UNIX_ULONG d);
|
| 94 |
+
|
| 95 |
+
size_t __gmpz_size (const mpz_t op);
|
| 96 |
+
UNIX_ULONG __gmpz_getlimbn (const mpz_t op, size_t n);
|
| 97 |
+
"""
|
| 98 |
+
|
| 99 |
+
if sys.platform == "win32":
|
| 100 |
+
raise ImportError("Not using GMP on Windows")
|
| 101 |
+
|
| 102 |
+
lib = load_lib("gmp", gmp_defs)
|
| 103 |
+
implementation = {"library": "gmp", "api": backend}
|
| 104 |
+
|
| 105 |
+
if hasattr(lib, "__mpir_version"):
|
| 106 |
+
raise ImportError("MPIR library detected")
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
# Lazy creation of GMP methods
|
| 110 |
+
# Lazy resolution of GMP entry points: the first access to an attribute
# such as "mpz_add" looks up the library symbol "__gmpz_add" and caches it
# on the instance, so __getattr__ runs at most once per function.
class _GMP(object):

    def __getattr__(self, name):
        """Resolve ``mpz_*``/``gmp_*`` names to ``__gmpz_*``/``__gmp_*``
        library symbols, caching the bound function on the instance."""
        for prefix, mangled in (("mpz_", "__gmpz_"), ("gmp_", "__gmp_")):
            if name.startswith(prefix):
                func_name = mangled + name[len(prefix):]
                break
        else:
            raise AttributeError("Attribute %s is invalid" % name)
        func = getattr(lib, func_name)
        # Cache so subsequent lookups hit the instance dict directly.
        setattr(self, name, func)
        return func


_gmp = _GMP()
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
# In order to create a function that returns a pointer to
|
| 128 |
+
# a new MPZ structure, we need to break the abstraction
|
| 129 |
+
# and know exactly what ffi backend we have
|
| 130 |
+
# In order to create a function that returns a pointer to a new MPZ
# structure, we need to break the abstraction and know exactly which
# FFI backend is active.
if implementation["api"] == "ctypes":
    from ctypes import Structure, c_int, c_void_p, byref

    class _MPZ(Structure):
        # Mirrors GMP's __mpz_struct layout (alloc count, size, limb ptr).
        _fields_ = [('_mp_alloc', c_int),
                    ('_mp_size', c_int),
                    ('_mp_d', c_void_p)]

    def new_mpz():
        """Return a pointer to a freshly allocated, uninitialized MPZ."""
        return byref(_MPZ())

    # ctypes defaults to c_int return values; limbs are full machine words.
    _gmp.mpz_getlimbn.restype = c_ulong

else:
    # We are using CFFI
    from Crypto.Util._raw_api import ffi

    def new_mpz():
        """Return a pointer to a freshly allocated, uninitialized MPZ."""
        return ffi.new("MPZ*")
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
# Size of a native word
|
| 152 |
+
_sys_bits = 8 * struct.calcsize("P")
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
class IntegerGMP(IntegerBase):
|
| 156 |
+
"""A fast, arbitrary precision integer"""
|
| 157 |
+
|
| 158 |
+
_zero_mpz_p = new_mpz()
|
| 159 |
+
_gmp.mpz_init_set_ui(_zero_mpz_p, c_ulong(0))
|
| 160 |
+
|
| 161 |
+
def __init__(self, value):
    """Initialize the integer to the given value.

    ``value`` may be a native int or another ``IntegerGMP``; floats are
    rejected explicitly and anything else raises ``NotImplementedError``.
    """
    self._mpz_p = new_mpz()
    # Tracks whether mpz_init was called, so __del__ knows to mpz_clear.
    self._initialized = False

    if isinstance(value, float):
        raise ValueError("A floating point type is not a natural number")

    if is_native_int(value):
        _gmp.mpz_init(self._mpz_p)
        self._initialized = True
        if value == 0:
            return

        # Feed the value into GMP 32 bits at a time, most-significant
        # chunk first, via a temporary mpz.
        tmp = new_mpz()
        _gmp.mpz_init(tmp)
        try:
            positive = value >= 0
            magnitude = abs(value)
            chunks = (magnitude.bit_length() - 1) // 32 + 1
            for chunk in reversed(range(chunks)):
                _gmp.mpz_set_ui(tmp,
                                c_ulong(0xFFFFFFFF & (magnitude >> (chunk * 32))))
                _gmp.mpz_mul_2exp(tmp, tmp, c_ulong(chunk * 32))
                _gmp.mpz_add(self._mpz_p, self._mpz_p, tmp)
        finally:
            _gmp.mpz_clear(tmp)

        if not positive:
            _gmp.mpz_neg(self._mpz_p, self._mpz_p)

    elif isinstance(value, IntegerGMP):
        _gmp.mpz_init_set(self._mpz_p, value._mpz_p)
        self._initialized = True
    else:
        raise NotImplementedError

# Conversions
def __int__(self):
    """Convert back to a native Python int, 32 bits at a time."""
    tmp = new_mpz()
    _gmp.mpz_init_set(tmp, self._mpz_p)

    try:
        value = 0
        shift = 0
        while _gmp.mpz_cmp(tmp, self._zero_mpz_p) != 0:
            lsb = _gmp.mpz_get_ui(tmp) & 0xFFFFFFFF
            value |= lsb << shift
            _gmp.mpz_tdiv_q_2exp(tmp, tmp, c_ulong(32))
            shift += 32
    finally:
        _gmp.mpz_clear(tmp)

    # The loop above accumulates the magnitude; restore the sign here.
    if self < 0:
        value = -value
    return int(value)

def __str__(self):
    return str(int(self))

def __repr__(self):
    return "Integer(%s)" % str(self)

# Only Python 2.x
def __hex__(self):
    return hex(int(self))

# Only Python 3.x
def __index__(self):
    return int(self)
|
| 235 |
+
|
| 236 |
+
def to_bytes(self, block_size=0, byteorder='big'):
    """Convert the number into a byte string.

    This method encodes the number in network order and prepends
    as many zero bytes as required. It only works for non-negative
    values.

    :Parameters:
      block_size : integer
        The exact size the output byte string must have.
        If zero, the string has the minimal length.
      byteorder : string
        'big' for big-endian integers (default), 'little' for litte-endian.
    :Returns:
      A byte string.
    :Raise ValueError:
      If the value is negative or if ``block_size`` is
      provided and the length of the byte string would exceed it.
    """

    if self < 0:
        raise ValueError("Conversion only valid for non-negative numbers")

    num_limbs = _gmp.mpz_size(self._mpz_p)
    if _sys_bits == 32:
        spchar = "L"
        num_limbs = max(1, num_limbs, (block_size + 3) // 4)
    elif _sys_bits == 64:
        spchar = "Q"
        num_limbs = max(1, num_limbs, (block_size + 7) // 8)
    else:
        raise ValueError("Unknown limb size")

    # mpz_getlimbn returns 0 past the actual limb count, so reading extra
    # (high) limbs just yields leading zero words.
    limbs = [_gmp.mpz_getlimbn(self._mpz_p, i)
             for i in reversed(range(num_limbs))]
    result = struct.pack(">" + spchar * num_limbs, *limbs)

    cutoff_len = len(result) - block_size
    if block_size == 0:
        # Minimal encoding: strip leading zero bytes.
        result = result.lstrip(b'\x00')
    elif cutoff_len > 0:
        # Too long: the excess prefix must be all zeros or it won't fit.
        if result[:cutoff_len] != b'\x00' * cutoff_len:
            raise ValueError("Number is too big to convert to "
                             "byte string of prescribed length")
        result = result[cutoff_len:]
    elif cutoff_len < 0:
        # Too short: left-pad with zeros up to block_size.
        result = b'\x00' * (-cutoff_len) + result

    if byteorder == 'little':
        result = result[::-1]
    elif byteorder != 'big':
        raise ValueError("Incorrect byteorder")

    # Zero encodes as a single zero byte, never as the empty string.
    return result if result else b'\x00'

@staticmethod
def from_bytes(byte_string, byteorder='big'):
    """Convert a byte string into a number.

    :Parameters:
      byte_string : byte string
        The input number, encoded in network order.
        It can only be non-negative.
      byteorder : string
        'big' for big-endian integers (default), 'little' for litte-endian.

    :Return:
      The ``Integer`` object carrying the same value as the input.
    """
    result = IntegerGMP(0)
    if byteorder == 'little':
        # Normalize to big-endian before importing.
        byte_string = bytearray(byte_string)
        byte_string.reverse()
    elif byteorder != 'big':
        raise ValueError("Incorrect byteorder")
    _gmp.mpz_import(
            result._mpz_p,
            c_size_t(len(byte_string)),  # Amount of words to read
            1,                           # Big endian
            c_size_t(1),                 # Each word is 1 byte long
            0,                           # Endianess within a word - not relevant
            c_size_t(0),                 # No nails
            c_uint8_ptr(byte_string))
    return result
|
| 327 |
+
|
| 328 |
+
# Relations
|
| 329 |
+
# Relations
def _apply_and_return(self, func, term):
    """Coerce *term* to IntegerGMP and apply *func* to the two mpz values."""
    if not isinstance(term, IntegerGMP):
        term = IntegerGMP(term)
    return func(self._mpz_p, term._mpz_p)

def _compare(self, term):
    """Three-way comparison against *term* via mpz_cmp."""
    return self._apply_and_return(_gmp.mpz_cmp, term)

def __eq__(self, term):
    # Values of unrelated types never compare equal.
    if not (isinstance(term, IntegerGMP) or is_native_int(term)):
        return False
    return self._compare(term) == 0

def __ne__(self, term):
    if not (isinstance(term, IntegerGMP) or is_native_int(term)):
        return True
    return self._compare(term) != 0

def __lt__(self, term):
    return self._compare(term) < 0

def __le__(self, term):
    return self._compare(term) <= 0

def __gt__(self, term):
    return self._compare(term) > 0

def __ge__(self, term):
    return self._compare(term) >= 0

def __nonzero__(self):
    return _gmp.mpz_cmp(self._mpz_p, self._zero_mpz_p) != 0
__bool__ = __nonzero__

def is_negative(self):
    return _gmp.mpz_cmp(self._mpz_p, self._zero_mpz_p) < 0

# Arithmetic operations
def _binary_op(self, gmp_func, term):
    """Shared body of __add__/__sub__/__mul__: coerce, apply, wrap.

    Returns ``NotImplemented`` when *term* cannot be converted, so Python
    can try the reflected operation on the other operand.
    """
    result = IntegerGMP(0)
    if not isinstance(term, IntegerGMP):
        try:
            term = IntegerGMP(term)
        except NotImplementedError:
            return NotImplemented
    gmp_func(result._mpz_p, self._mpz_p, term._mpz_p)
    return result

def __add__(self, term):
    return self._binary_op(_gmp.mpz_add, term)

def __sub__(self, term):
    return self._binary_op(_gmp.mpz_sub, term)

def __mul__(self, term):
    return self._binary_op(_gmp.mpz_mul, term)
|
| 399 |
+
|
| 400 |
+
def __floordiv__(self, divisor):
|
| 401 |
+
if not isinstance(divisor, IntegerGMP):
|
| 402 |
+
divisor = IntegerGMP(divisor)
|
| 403 |
+
if _gmp.mpz_cmp(divisor._mpz_p,
|
| 404 |
+
self._zero_mpz_p) == 0:
|
| 405 |
+
raise ZeroDivisionError("Division by zero")
|
| 406 |
+
result = IntegerGMP(0)
|
| 407 |
+
_gmp.mpz_fdiv_q(result._mpz_p,
|
| 408 |
+
self._mpz_p,
|
| 409 |
+
divisor._mpz_p)
|
| 410 |
+
return result
|
| 411 |
+
|
| 412 |
+
def __mod__(self, divisor):
|
| 413 |
+
if not isinstance(divisor, IntegerGMP):
|
| 414 |
+
divisor = IntegerGMP(divisor)
|
| 415 |
+
comp = _gmp.mpz_cmp(divisor._mpz_p,
|
| 416 |
+
self._zero_mpz_p)
|
| 417 |
+
if comp == 0:
|
| 418 |
+
raise ZeroDivisionError("Division by zero")
|
| 419 |
+
if comp < 0:
|
| 420 |
+
raise ValueError("Modulus must be positive")
|
| 421 |
+
result = IntegerGMP(0)
|
| 422 |
+
_gmp.mpz_mod(result._mpz_p,
|
| 423 |
+
self._mpz_p,
|
| 424 |
+
divisor._mpz_p)
|
| 425 |
+
return result
|
| 426 |
+
|
| 427 |
+
    def inplace_pow(self, exponent, modulus=None):
        """Raise this number to *exponent*, in place, optionally modulo *modulus*.

        Without a modulus the exponent is capped at 256 to bound the
        size of the result; with a modulus arbitrary non-negative
        exponents are accepted.  Returns self.
        """

        if modulus is None:
            if exponent < 0:
                raise ValueError("Exponent must not be negative")

            # Normal exponentiation
            if exponent > 256:
                raise ValueError("Exponent is too big")
            _gmp.mpz_pow_ui(self._mpz_p,
                            self._mpz_p,  # Base
                            c_ulong(int(exponent))
                            )
        else:
            # Modular exponentiation
            if not isinstance(modulus, IntegerGMP):
                modulus = IntegerGMP(modulus)
            if not modulus:
                raise ZeroDivisionError("Division by zero")
            if modulus.is_negative():
                raise ValueError("Modulus must be positive")
            if is_native_int(exponent):
                if exponent < 0:
                    raise ValueError("Exponent must not be negative")
                if exponent < 65536:
                    # Small exponent: use the unsigned-long fast path.
                    _gmp.mpz_powm_ui(self._mpz_p,
                                     self._mpz_p,
                                     c_ulong(exponent),
                                     modulus._mpz_p)
                    return self
                exponent = IntegerGMP(exponent)
            elif exponent.is_negative():
                raise ValueError("Exponent must not be negative")
            _gmp.mpz_powm(self._mpz_p,
                          self._mpz_p,
                          exponent._mpz_p,
                          modulus._mpz_p)
        return self
|
| 465 |
+
|
| 466 |
+
    def __pow__(self, exponent, modulus=None):
        """Return ``self ** exponent`` (optionally mod *modulus*) as a new IntegerGMP."""
        # Copy first so the in-place helper does not mutate self.
        result = IntegerGMP(self)
        return result.inplace_pow(exponent, modulus)
|
| 469 |
+
|
| 470 |
+
    def __abs__(self):
        """Return the absolute value as a new IntegerGMP."""
        result = IntegerGMP(0)
        _gmp.mpz_abs(result._mpz_p, self._mpz_p)
        return result
|
| 474 |
+
|
| 475 |
+
    def sqrt(self, modulus=None):
        """Return the largest Integer that does not
        exceed the square root.

        With *modulus* given, return instead a modular square root,
        computed with the inherited Tonelli-Shanks helper.
        """

        if modulus is None:
            if self < 0:
                raise ValueError("Square root of negative value")
            result = IntegerGMP(0)
            _gmp.mpz_sqrt(result._mpz_p,
                          self._mpz_p)
        else:
            if modulus <= 0:
                raise ValueError("Modulus must be positive")
            modulus = int(modulus)
            result = IntegerGMP(self._tonelli_shanks(int(self) % modulus, modulus))

        return result
|
| 492 |
+
|
| 493 |
+
    def __iadd__(self, term):
        """Add *term* to this number in place and return self."""
        if is_native_int(term):
            # Fast paths: small terms map directly onto the
            # unsigned-long GMP primitives, avoiding an mpz allocation.
            if 0 <= term < 65536:
                _gmp.mpz_add_ui(self._mpz_p,
                                self._mpz_p,
                                c_ulong(term))
                return self
            if -65535 < term < 0:
                _gmp.mpz_sub_ui(self._mpz_p,
                                self._mpz_p,
                                c_ulong(-term))
                return self
            term = IntegerGMP(term)
        _gmp.mpz_add(self._mpz_p,
                     self._mpz_p,
                     term._mpz_p)
        return self
|
| 510 |
+
|
| 511 |
+
    def __isub__(self, term):
        """Subtract *term* from this number in place and return self."""
        if is_native_int(term):
            # Small-term fast paths (see __iadd__); note the operation
            # is mirrored: subtracting a negative term becomes add_ui.
            if 0 <= term < 65536:
                _gmp.mpz_sub_ui(self._mpz_p,
                                self._mpz_p,
                                c_ulong(term))
                return self
            if -65535 < term < 0:
                _gmp.mpz_add_ui(self._mpz_p,
                                self._mpz_p,
                                c_ulong(-term))
                return self
            term = IntegerGMP(term)
        _gmp.mpz_sub(self._mpz_p,
                     self._mpz_p,
                     term._mpz_p)
        return self
|
| 528 |
+
|
| 529 |
+
    def __imul__(self, term):
        """Multiply this number by *term* in place and return self."""
        if is_native_int(term):
            if 0 <= term < 65536:
                _gmp.mpz_mul_ui(self._mpz_p,
                                self._mpz_p,
                                c_ulong(term))
                return self
            if -65535 < term < 0:
                # Multiply by |term| then negate, since mpz_mul_ui only
                # takes an unsigned factor.
                _gmp.mpz_mul_ui(self._mpz_p,
                                self._mpz_p,
                                c_ulong(-term))
                _gmp.mpz_neg(self._mpz_p, self._mpz_p)
                return self
            term = IntegerGMP(term)
        _gmp.mpz_mul(self._mpz_p,
                     self._mpz_p,
                     term._mpz_p)
        return self
|
| 547 |
+
|
| 548 |
+
    def __imod__(self, divisor):
        """Reduce this number modulo *divisor* in place and return self.

        Raises ZeroDivisionError for a zero divisor and ValueError for
        a negative one.
        """
        if not isinstance(divisor, IntegerGMP):
            divisor = IntegerGMP(divisor)
        comp = _gmp.mpz_cmp(divisor._mpz_p,
                            divisor._zero_mpz_p)
        if comp == 0:
            raise ZeroDivisionError("Division by zero")
        if comp < 0:
            raise ValueError("Modulus must be positive")
        _gmp.mpz_mod(self._mpz_p,
                     self._mpz_p,
                     divisor._mpz_p)
        return self
|
| 561 |
+
|
| 562 |
+
# Boolean/bit operations
|
| 563 |
+
    def __and__(self, term):
        """Return the bitwise AND of self and *term* as a new IntegerGMP."""
        result = IntegerGMP(0)
        if not isinstance(term, IntegerGMP):
            term = IntegerGMP(term)
        _gmp.mpz_and(result._mpz_p,
                     self._mpz_p,
                     term._mpz_p)
        return result
|
| 571 |
+
|
| 572 |
+
    def __or__(self, term):
        """Return the bitwise OR of self and *term* as a new IntegerGMP."""
        result = IntegerGMP(0)
        if not isinstance(term, IntegerGMP):
            term = IntegerGMP(term)
        _gmp.mpz_ior(result._mpz_p,
                     self._mpz_p,
                     term._mpz_p)
        return result
|
| 580 |
+
|
| 581 |
+
    def __rshift__(self, pos):
        """Return ``self >> pos`` as a new IntegerGMP.

        Shifts beyond 65536 bits short-circuit to the arithmetic limit
        (-1 for negative values, 0 otherwise).  NOTE(review): in that
        case a plain int, not an IntegerGMP, is returned.
        """
        result = IntegerGMP(0)
        if pos < 0:
            raise ValueError("negative shift count")
        if pos > 65536:
            if self < 0:
                return -1
            else:
                return 0
        _gmp.mpz_tdiv_q_2exp(result._mpz_p,
                             self._mpz_p,
                             c_ulong(int(pos)))
        return result
|
| 594 |
+
|
| 595 |
+
    def __irshift__(self, pos):
        """Shift this number right by *pos* bits in place and return self.

        NOTE(review): for shifts beyond 65536 bits a plain int (-1 or 0)
        is returned instead of self, mirroring __rshift__.
        """
        if pos < 0:
            raise ValueError("negative shift count")
        if pos > 65536:
            if self < 0:
                return -1
            else:
                return 0
        _gmp.mpz_tdiv_q_2exp(self._mpz_p,
                             self._mpz_p,
                             c_ulong(int(pos)))
        return self
|
| 607 |
+
|
| 608 |
+
    def __lshift__(self, pos):
        """Return ``self << pos`` as a new IntegerGMP.

        The shift count is limited to [0, 65536) to bound memory use.
        """
        result = IntegerGMP(0)
        if not 0 <= pos < 65536:
            raise ValueError("Incorrect shift count")
        _gmp.mpz_mul_2exp(result._mpz_p,
                          self._mpz_p,
                          c_ulong(int(pos)))
        return result
|
| 616 |
+
|
| 617 |
+
    def __ilshift__(self, pos):
        """Shift this number left by *pos* bits in place and return self."""
        if not 0 <= pos < 65536:
            raise ValueError("Incorrect shift count")
        _gmp.mpz_mul_2exp(self._mpz_p,
                          self._mpz_p,
                          c_ulong(int(pos)))
        return self
|
| 624 |
+
|
| 625 |
+
    def get_bit(self, n):
        """Return True if the n-th bit is set to 1.
        Bit 0 is the least significant."""

        if self < 0:
            raise ValueError("no bit representation for negative values")
        if n < 0:
            raise ValueError("negative bit count")
        if n > 65536:
            # Bits beyond the supported range are treated as zero.
            return 0
        return bool(_gmp.mpz_tstbit(self._mpz_p,
                                    c_ulong(int(n))))
|
| 637 |
+
|
| 638 |
+
# Extra
|
| 639 |
+
    def is_odd(self):
        """Return True if the number is odd (least significant bit set)."""
        return _gmp.mpz_tstbit(self._mpz_p, 0) == 1
|
| 641 |
+
|
| 642 |
+
    def is_even(self):
        """Return True if the number is even (least significant bit clear)."""
        return _gmp.mpz_tstbit(self._mpz_p, 0) == 0
|
| 644 |
+
|
| 645 |
+
    def size_in_bits(self):
        """Return the minimum number of bits that can encode the number."""

        if self < 0:
            raise ValueError("Conversion only valid for non-negative numbers")
        # NOTE(review): mpz_sizeinbase(0, 2) is 1, so zero reports 1 bit.
        return _gmp.mpz_sizeinbase(self._mpz_p, 2)
|
| 651 |
+
|
| 652 |
+
    def size_in_bytes(self):
        """Return the minimum number of bytes that can encode the number."""
        return (self.size_in_bits() - 1) // 8 + 1
|
| 655 |
+
|
| 656 |
+
    def is_perfect_square(self):
        """Return True if the number is the square of an integer."""
        return _gmp.mpz_perfect_square_p(self._mpz_p) != 0
|
| 658 |
+
|
| 659 |
+
    def fail_if_divisible_by(self, small_prime):
        """Raise an exception if the small prime is a divisor."""

        if is_native_int(small_prime):
            # Small divisors use the unsigned-long fast path.
            if 0 < small_prime < 65536:
                if _gmp.mpz_divisible_ui_p(self._mpz_p,
                                           c_ulong(small_prime)):
                    raise ValueError("The value is composite")
                return
            small_prime = IntegerGMP(small_prime)
        if _gmp.mpz_divisible_p(self._mpz_p,
                                small_prime._mpz_p):
            raise ValueError("The value is composite")
|
| 672 |
+
|
| 673 |
+
    def multiply_accumulate(self, a, b):
        """Increment the number by the product of a and b."""

        if not isinstance(a, IntegerGMP):
            a = IntegerGMP(a)
        if is_native_int(b):
            # Small-factor fast paths; a negative factor turns the
            # add-multiply into a subtract-multiply of |b|.
            if 0 < b < 65536:
                _gmp.mpz_addmul_ui(self._mpz_p,
                                   a._mpz_p,
                                   c_ulong(b))
                return self
            if -65535 < b < 0:
                _gmp.mpz_submul_ui(self._mpz_p,
                                   a._mpz_p,
                                   c_ulong(-b))
                return self
            b = IntegerGMP(b)
        _gmp.mpz_addmul(self._mpz_p,
                        a._mpz_p,
                        b._mpz_p)
        return self
|
| 694 |
+
|
| 695 |
+
    def set(self, source):
        """Set the Integer to have the given value"""

        if not isinstance(source, IntegerGMP):
            source = IntegerGMP(source)
        _gmp.mpz_set(self._mpz_p,
                     source._mpz_p)
        return self
|
| 703 |
+
|
| 704 |
+
    def inplace_inverse(self, modulus):
        """Compute the inverse of this number in the ring of
        modulo integers.

        Raise an exception if no inverse exists.
        """

        if not isinstance(modulus, IntegerGMP):
            modulus = IntegerGMP(modulus)

        comp = _gmp.mpz_cmp(modulus._mpz_p,
                            self._zero_mpz_p)
        if comp == 0:
            raise ZeroDivisionError("Modulus cannot be zero")
        if comp < 0:
            raise ValueError("Modulus must be positive")

        # mpz_invert returns non-zero on success, 0 when no inverse exists.
        result = _gmp.mpz_invert(self._mpz_p,
                                 self._mpz_p,
                                 modulus._mpz_p)
        if not result:
            raise ValueError("No inverse value can be computed")
        return self
|
| 727 |
+
|
| 728 |
+
    def inverse(self, modulus):
        """Return the modular inverse of this number, as a new IntegerGMP."""
        result = IntegerGMP(self)
        result.inplace_inverse(modulus)
        return result
|
| 732 |
+
|
| 733 |
+
    def gcd(self, term):
        """Compute the greatest common denominator between this
        number and another term."""

        result = IntegerGMP(0)
        if is_native_int(term):
            # NOTE(review): upper bound is 65535 here, not 65536 as in
            # the other small-int fast paths — confirm intentional.
            if 0 < term < 65535:
                _gmp.mpz_gcd_ui(result._mpz_p,
                                self._mpz_p,
                                c_ulong(term))
                return result
            term = IntegerGMP(term)
        _gmp.mpz_gcd(result._mpz_p, self._mpz_p, term._mpz_p)
        return result
|
| 747 |
+
|
| 748 |
+
    def lcm(self, term):
        """Compute the least common multiplier between this
        number and another term."""

        result = IntegerGMP(0)
        if not isinstance(term, IntegerGMP):
            term = IntegerGMP(term)
        _gmp.mpz_lcm(result._mpz_p, self._mpz_p, term._mpz_p)
        return result
|
| 757 |
+
|
| 758 |
+
    @staticmethod
    def jacobi_symbol(a, n):
        """Compute the Jacobi symbol (a|n).

        *n* must be a positive odd integer; returns -1, 0, or 1.
        """

        if not isinstance(a, IntegerGMP):
            a = IntegerGMP(a)
        if not isinstance(n, IntegerGMP):
            n = IntegerGMP(n)
        if n <= 0 or n.is_even():
            raise ValueError("n must be positive odd for the Jacobi symbol")
        return _gmp.mpz_jacobi(a._mpz_p, n._mpz_p)
|
| 769 |
+
|
| 770 |
+
    @staticmethod
    def _mult_modulo_bytes(term1, term2, modulus):
        """Return ``(term1 * term2) % modulus`` encoded as a big-endian
        byte string padded to the byte length of *modulus*.

        The modulus must be a positive odd integer.
        """
        if not isinstance(term1, IntegerGMP):
            term1 = IntegerGMP(term1)
        if not isinstance(term2, IntegerGMP):
            term2 = IntegerGMP(term2)
        if not isinstance(modulus, IntegerGMP):
            modulus = IntegerGMP(modulus)

        if modulus < 0:
            raise ValueError("Modulus must be positive")
        if modulus == 0:
            raise ZeroDivisionError("Modulus cannot be zero")
        if (modulus & 1) == 0:
            raise ValueError("Odd modulus is required")

        product = (term1 * term2) % modulus
        return product.to_bytes(modulus.size_in_bytes())
|
| 788 |
+
|
| 789 |
+
# Clean-up
|
| 790 |
+
    # Clean-up
    def __del__(self):
        """Release the underlying GMP integer.

        Guarded because __del__ may run during interpreter shutdown or
        after a partially failed __init__, when attributes are missing.
        """
        try:
            if self._mpz_p is not None:
                if self._initialized:
                    _gmp.mpz_clear(self._mpz_p)
                self._mpz_p = None
        except AttributeError:
            pass
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/_IntegerGMP.pyi
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from ._IntegerBase import IntegerBase

# Type stub: IntegerGMP implements exactly the IntegerBase interface,
# so no additional declarations are needed here.
class IntegerGMP(IntegerBase):
    pass
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/_IntegerNative.py
ADDED
|
@@ -0,0 +1,382 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ===================================================================
|
| 2 |
+
#
|
| 3 |
+
# Copyright (c) 2014, Legrandin <helderijs@gmail.com>
|
| 4 |
+
# All rights reserved.
|
| 5 |
+
#
|
| 6 |
+
# Redistribution and use in source and binary forms, with or without
|
| 7 |
+
# modification, are permitted provided that the following conditions
|
| 8 |
+
# are met:
|
| 9 |
+
#
|
| 10 |
+
# 1. Redistributions of source code must retain the above copyright
|
| 11 |
+
# notice, this list of conditions and the following disclaimer.
|
| 12 |
+
# 2. Redistributions in binary form must reproduce the above copyright
|
| 13 |
+
# notice, this list of conditions and the following disclaimer in
|
| 14 |
+
# the documentation and/or other materials provided with the
|
| 15 |
+
# distribution.
|
| 16 |
+
#
|
| 17 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 18 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 19 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
| 20 |
+
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
| 21 |
+
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
| 22 |
+
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
| 23 |
+
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
| 24 |
+
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
| 25 |
+
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
| 26 |
+
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
| 27 |
+
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 28 |
+
# POSSIBILITY OF SUCH DAMAGE.
|
| 29 |
+
# ===================================================================
|
| 30 |
+
|
| 31 |
+
from ._IntegerBase import IntegerBase
|
| 32 |
+
|
| 33 |
+
from Crypto.Util.number import long_to_bytes, bytes_to_long, inverse, GCD
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class IntegerNative(IntegerBase):
    """A class to model a natural integer (including zero)"""

    def __init__(self, value):
        # Floats are rejected outright; truncation would hide bugs.
        if isinstance(value, float):
            raise ValueError("A floating point type is not a natural number")
        try:
            # Copy-construct from another IntegerNative-like object.
            self._value = value._value
        except AttributeError:
            self._value = value

    # Conversions
    def __int__(self):
        return self._value

    def __str__(self):
        return str(int(self))

    def __repr__(self):
        return "Integer(%s)" % str(self)

    # Only Python 2.x
    def __hex__(self):
        return hex(self._value)

    # Only Python 3.x
    def __index__(self):
        return int(self._value)

    def to_bytes(self, block_size=0, byteorder='big'):
        """Encode the (non-negative) number as a byte string of
        *block_size* bytes (0 = minimal length)."""
        if self._value < 0:
            raise ValueError("Conversion only valid for non-negative numbers")
        result = long_to_bytes(self._value, block_size)
        if len(result) > block_size > 0:
            raise ValueError("Value too large to encode")
        if byteorder == 'big':
            pass
        elif byteorder == 'little':
            result = bytearray(result)
            result.reverse()
            result = bytes(result)
        else:
            raise ValueError("Incorrect byteorder")
        return result

    @classmethod
    def from_bytes(cls, byte_string, byteorder='big'):
        """Alternate constructor: decode a byte string into an integer."""
        if byteorder == 'big':
            pass
        elif byteorder == 'little':
            byte_string = bytearray(byte_string)
            byte_string.reverse()
        else:
            raise ValueError("Incorrect byteorder")
        return cls(bytes_to_long(byte_string))

    # Relations
    def __eq__(self, term):
        if term is None:
            return False
        return self._value == int(term)

    def __ne__(self, term):
        return not self.__eq__(term)

    def __lt__(self, term):
        return self._value < int(term)

    def __le__(self, term):
        return self.__lt__(term) or self.__eq__(term)

    def __gt__(self, term):
        return not self.__le__(term)

    def __ge__(self, term):
        return not self.__lt__(term)

    def __nonzero__(self):
        return self._value != 0
    __bool__ = __nonzero__

    def is_negative(self):
        return self._value < 0

    # Arithmetic operations
    def __add__(self, term):
        try:
            return self.__class__(self._value + int(term))
        except (ValueError, AttributeError, TypeError):
            # Let Python try the reflected operation.
            return NotImplemented

    def __sub__(self, term):
        try:
            return self.__class__(self._value - int(term))
        except (ValueError, AttributeError, TypeError):
            return NotImplemented

    def __mul__(self, factor):
        try:
            return self.__class__(self._value * int(factor))
        except (ValueError, AttributeError, TypeError):
            return NotImplemented

    def __floordiv__(self, divisor):
        return self.__class__(self._value // int(divisor))

    def __mod__(self, divisor):
        divisor_value = int(divisor)
        if divisor_value < 0:
            raise ValueError("Modulus must be positive")
        return self.__class__(self._value % divisor_value)

    def inplace_pow(self, exponent, modulus=None):
        """Raise to *exponent* in place, optionally mod *modulus*; return self."""
        exp_value = int(exponent)
        if exp_value < 0:
            raise ValueError("Exponent must not be negative")

        if modulus is not None:
            mod_value = int(modulus)
            if mod_value < 0:
                raise ValueError("Modulus must be positive")
            if mod_value == 0:
                raise ZeroDivisionError("Modulus cannot be zero")
        else:
            mod_value = None
        self._value = pow(self._value, exp_value, mod_value)
        return self

    def __pow__(self, exponent, modulus=None):
        result = self.__class__(self)
        return result.inplace_pow(exponent, modulus)

    def __abs__(self):
        # NOTE(review): returns a plain int, unlike the other operators
        # which wrap results in self.__class__ — confirm intentional.
        return abs(self._value)

    def sqrt(self, modulus=None):
        """Return the integer square root, or a modular square root
        when *modulus* is given."""

        value = self._value
        if modulus is None:
            if value < 0:
                raise ValueError("Square root of negative value")
            # http://stackoverflow.com/questions/15390807/integer-square-root-in-python
            # Newton's method on integers.
            x = value
            y = (x + 1) // 2
            while y < x:
                x = y
                y = (x + value // x) // 2
            result = x
        else:
            if modulus <= 0:
                raise ValueError("Modulus must be positive")
            result = self._tonelli_shanks(self % modulus, modulus)

        return self.__class__(result)

    def __iadd__(self, term):
        self._value += int(term)
        return self

    def __isub__(self, term):
        self._value -= int(term)
        return self

    def __imul__(self, term):
        self._value *= int(term)
        return self

    def __imod__(self, term):
        modulus = int(term)
        if modulus == 0:
            raise ZeroDivisionError("Division by zero")
        if modulus < 0:
            raise ValueError("Modulus must be positive")
        self._value %= modulus
        return self

    # Boolean/bit operations
    def __and__(self, term):
        return self.__class__(self._value & int(term))

    def __or__(self, term):
        return self.__class__(self._value | int(term))

    def __rshift__(self, pos):
        try:
            return self.__class__(self._value >> int(pos))
        except OverflowError:
            # Shift count too large for the platform: clamp to the
            # arithmetic limit of the shift.
            if self._value >= 0:
                return 0
            else:
                return -1

    def __irshift__(self, pos):
        try:
            self._value >>= int(pos)
        except OverflowError:
            if self._value >= 0:
                return 0
            else:
                return -1
        return self

    def __lshift__(self, pos):
        try:
            return self.__class__(self._value << int(pos))
        except OverflowError:
            raise ValueError("Incorrect shift count")

    def __ilshift__(self, pos):
        try:
            self._value <<= int(pos)
        except OverflowError:
            raise ValueError("Incorrect shift count")
        return self

    def get_bit(self, n):
        """Return the n-th bit (0 = least significant) as 0 or 1."""
        if self._value < 0:
            raise ValueError("no bit representation for negative values")
        try:
            try:
                # n may itself be an Integer-like object.
                result = (self._value >> n._value) & 1
                if n._value < 0:
                    raise ValueError("negative bit count")
            except AttributeError:
                result = (self._value >> n) & 1
                if n < 0:
                    raise ValueError("negative bit count")
        except OverflowError:
            # Bits beyond the representable shift range are zero.
            result = 0
        return result

    # Extra
    def is_odd(self):
        return (self._value & 1) == 1

    def is_even(self):
        return (self._value & 1) == 0

    def size_in_bits(self):
        """Return the minimum number of bits that can encode the number."""

        if self._value < 0:
            raise ValueError("Conversion only valid for non-negative numbers")

        if self._value == 0:
            return 1

        return self._value.bit_length()

    def size_in_bytes(self):
        """Return the minimum number of bytes that can encode the number."""
        return (self.size_in_bits() - 1) // 8 + 1

    def is_perfect_square(self):
        """Return True if the number is the square of an integer."""
        if self._value < 0:
            return False
        if self._value in (0, 1):
            return True

        # Newton's method converging on the integer square root.
        x = self._value // 2
        square_x = x ** 2

        while square_x > self._value:
            x = (square_x + self._value) // (2 * x)
            square_x = x ** 2

        return self._value == x ** 2

    def fail_if_divisible_by(self, small_prime):
        """Raise ValueError if *small_prime* divides this number."""
        if (self._value % int(small_prime)) == 0:
            raise ValueError("Value is composite")

    def multiply_accumulate(self, a, b):
        """Increment the number by the product of a and b; return self."""
        self._value += int(a) * int(b)
        return self

    def set(self, source):
        # NOTE(review): unlike IntegerGMP.set, this does not return self.
        self._value = int(source)

    def inplace_inverse(self, modulus):
        """Replace the value with its inverse modulo *modulus*; return self."""
        self._value = inverse(self._value, int(modulus))
        return self

    def inverse(self, modulus):
        result = self.__class__(self)
        result.inplace_inverse(modulus)
        return result

    def gcd(self, term):
        """Return the greatest common divisor with *term*."""
        return self.__class__(GCD(abs(self._value), abs(int(term))))

    def lcm(self, term):
        """Return the least common multiple with *term* (0 if either is 0)."""
        term = int(term)
        if self._value == 0 or term == 0:
            return self.__class__(0)
        return self.__class__(abs((self._value * term) // self.gcd(term)._value))

    @staticmethod
    def jacobi_symbol(a, n):
        """Compute the Jacobi symbol (a|n) for positive odd n.

        Implements the standard binary algorithm (steps follow the
        usual textbook numbering); recursion applies quadratic
        reciprocity.
        """
        a = int(a)
        n = int(n)

        if n <= 0:
            raise ValueError("n must be a positive integer")

        if (n & 1) == 0:
            raise ValueError("n must be odd for the Jacobi symbol")

        # Step 1
        a = a % n
        # Step 2
        if a == 1 or n == 1:
            return 1
        # Step 3
        if a == 0:
            return 0
        # Step 4
        e = 0
        a1 = a
        while (a1 & 1) == 0:
            a1 >>= 1
            e += 1
        # Step 5
        if (e & 1) == 0:
            s = 1
        elif n % 8 in (1, 7):
            s = 1
        else:
            s = -1
        # Step 6
        if n % 4 == 3 and a1 % 4 == 3:
            s = -s
        # Step 7
        n1 = n % a1
        # Step 8
        return s * IntegerNative.jacobi_symbol(n1, a1)

    @staticmethod
    def _mult_modulo_bytes(term1, term2, modulus):
        """Return ``(term1 * term2) % modulus`` as a big-endian byte
        string padded to the byte length of *modulus* (odd, positive)."""
        if modulus < 0:
            raise ValueError("Modulus must be positive")
        if modulus == 0:
            raise ZeroDivisionError("Modulus cannot be zero")
        if (modulus & 1) == 0:
            raise ValueError("Odd modulus is required")

        number_len = len(long_to_bytes(modulus))
        return long_to_bytes((term1 * term2) % modulus, number_len)
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/_IntegerNative.pyi
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from ._IntegerBase import IntegerBase

# Type stub: IntegerNative implements exactly the IntegerBase interface,
# so no additional declarations are needed here.
class IntegerNative(IntegerBase):
    pass
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/__init__.py
ADDED
|
File without changes
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/__pycache__/Numbers.cpython-310.pyc
ADDED
|
Binary file (633 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/__pycache__/Primality.cpython-310.pyc
ADDED
|
Binary file (7.44 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/__pycache__/_IntegerBase.cpython-310.pyc
ADDED
|
Binary file (11.4 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/__pycache__/_IntegerCustom.cpython-310.pyc
ADDED
|
Binary file (2.97 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/__pycache__/_IntegerGMP.cpython-310.pyc
ADDED
|
Binary file (20.9 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/__pycache__/_IntegerNative.cpython-310.pyc
ADDED
|
Binary file (10.7 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/Crypto/Math/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (165 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/Crypto/Protocol/DH.pyi
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import TypedDict, Callable, TypeVar, Generic
from typing_extensions import Unpack, NotRequired

from Crypto.PublicKey.ECC import EccKey

# T is the type produced by the caller-supplied KDF.
T = TypeVar('T')

class RequestParams(TypedDict, Generic[T]):
    # Key-derivation function applied to the raw shared secret.
    kdf: Callable[[bytes|bytearray|memoryview], T]
    static_priv: NotRequired[EccKey]
    static_pub: NotRequired[EccKey]
    eph_priv: NotRequired[EccKey]
    eph_pub: NotRequired[EccKey]

def import_x25519_public_key(encoded: bytes) -> EccKey: ...
def import_x25519_private_key(encoded: bytes) -> EccKey: ...
def import_x448_public_key(encoded: bytes) -> EccKey: ...
def import_x448_private_key(encoded: bytes) -> EccKey: ...
def key_agreement(**kwargs: Unpack[RequestParams[T]]) -> T: ...
|
minigpt2/lib/python3.10/site-packages/Crypto/Protocol/SecretSharing.pyi
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Union, List, Tuple, Optional

# GF(2^128) arithmetic helpers used by Shamir secret sharing.
def _mult_gf2(f1: int, f2: int) -> int : ...
def _div_gf2(a: int, b: int) -> int : ...

class _Element(object):
    # Irreducible polynomial defining the field.
    irr_poly: int
    def __init__(self, encoded_value: Union[int, bytes]) -> None: ...
    def __eq__(self, other) -> bool: ...
    def __int__(self) -> int: ...
    def encode(self) -> bytes: ...
    def __mul__(self, factor: int) -> _Element: ...
    def __add__(self, term: _Element) -> _Element: ...
    def inverse(self) -> _Element: ...
    def __pow__(self, exponent) -> _Element: ...

class Shamir(object):
    @staticmethod
    def split(k: int, n: int, secret: bytes, ssss: Optional[bool]) -> List[Tuple[int, bytes]]: ...
    @staticmethod
    def combine(shares: List[Tuple[int, bytes]], ssss: Optional[bool]) -> bytes: ...
|
| 22 |
+
|
minigpt2/lib/python3.10/site-packages/Crypto/Signature/DSS.py
ADDED
|
@@ -0,0 +1,403 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Signature/DSS.py : DSS.py
|
| 3 |
+
#
|
| 4 |
+
# ===================================================================
|
| 5 |
+
#
|
| 6 |
+
# Copyright (c) 2014, Legrandin <helderijs@gmail.com>
|
| 7 |
+
# All rights reserved.
|
| 8 |
+
#
|
| 9 |
+
# Redistribution and use in source and binary forms, with or without
|
| 10 |
+
# modification, are permitted provided that the following conditions
|
| 11 |
+
# are met:
|
| 12 |
+
#
|
| 13 |
+
# 1. Redistributions of source code must retain the above copyright
|
| 14 |
+
# notice, this list of conditions and the following disclaimer.
|
| 15 |
+
# 2. Redistributions in binary form must reproduce the above copyright
|
| 16 |
+
# notice, this list of conditions and the following disclaimer in
|
| 17 |
+
# the documentation and/or other materials provided with the
|
| 18 |
+
# distribution.
|
| 19 |
+
#
|
| 20 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 21 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 22 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
| 23 |
+
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
| 24 |
+
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
| 25 |
+
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
| 26 |
+
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
| 27 |
+
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
| 28 |
+
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
| 29 |
+
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
| 30 |
+
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 31 |
+
# POSSIBILITY OF SUCH DAMAGE.
|
| 32 |
+
# ===================================================================
|
| 33 |
+
|
| 34 |
+
from Crypto.Util.asn1 import DerSequence
|
| 35 |
+
from Crypto.Util.number import long_to_bytes
|
| 36 |
+
from Crypto.Math.Numbers import Integer
|
| 37 |
+
|
| 38 |
+
from Crypto.Hash import HMAC
|
| 39 |
+
from Crypto.PublicKey.ECC import EccKey
|
| 40 |
+
from Crypto.PublicKey.DSA import DsaKey
|
| 41 |
+
|
| 42 |
+
__all__ = ['DssSigScheme', 'new']
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class DssSigScheme(object):
    """A (EC)DSA signature object.
    Do not instantiate directly.
    Use :func:`Crypto.Signature.DSS.new`.
    """

    def __init__(self, key, encoding, order):
        """Create a new Digital Signature Standard (DSS) object.

        Do not instantiate this object directly,
        use `Crypto.Signature.DSS.new` instead.
        """

        # key:      DSA or ECC key (public half for verify, private for sign)
        # encoding: 'binary' (raw r||s) or 'der' (ASN.1 DER SEQUENCE)
        # order:    the (sub)group order q
        self._key = key
        self._encoding = encoding
        self._order = order

        self._order_bits = self._order.size_in_bits()
        # Bytes needed to encode one value modulo the order (ceil division).
        self._order_bytes = (self._order_bits - 1) // 8 + 1

    def can_sign(self):
        """Return ``True`` if this signature object can be used
        for signing messages."""

        return self._key.has_private()

    def _compute_nonce(self, msg_hash):
        # Subclasses choose how the per-signature nonce k is produced
        # (random for FIPS mode, HMAC-derived for deterministic mode).
        raise NotImplementedError("To be provided by subclasses")

    def _valid_hash(self, msg_hash):
        # Subclasses decide which hash algorithms are acceptable.
        raise NotImplementedError("To be provided by subclasses")

    def sign(self, msg_hash):
        """Compute the DSA/ECDSA signature of a message.

        Args:
          msg_hash (hash object):
            The hash that was carried out over the message.
            The object belongs to the :mod:`Crypto.Hash` package.
            Under mode ``'fips-186-3'``, the hash must be a FIPS
            approved secure hash (SHA-2 or SHA-3).

        :return: The signature as ``bytes``
        :raise ValueError: if the hash algorithm is incompatible to the (EC)DSA key
        :raise TypeError: if the (EC)DSA key has no private half
        """

        if not self._key.has_private():
            raise TypeError("Private key is needed to sign")

        if not self._valid_hash(msg_hash):
            raise ValueError("Hash is not sufficiently strong")

        # Generate the nonce k (critical!)
        nonce = self._compute_nonce(msg_hash)

        # Perform signature using the raw API.
        # Only the leftmost order_bytes of the digest contribute to z.
        z = Integer.from_bytes(msg_hash.digest()[:self._order_bytes])
        sig_pair = self._key._sign(z, nonce)

        # Encode the signature into a single byte string
        if self._encoding == 'binary':
            # Fixed-width big-endian r || s
            output = b"".join([long_to_bytes(x, self._order_bytes)
                               for x in sig_pair])
        else:
            # Dss-sig  ::=  SEQUENCE {
            #   r   INTEGER,
            #   s   INTEGER
            # }
            # Ecdsa-Sig-Value  ::=  SEQUENCE {
            #   r   INTEGER,
            #   s   INTEGER
            # }
            output = DerSequence(sig_pair).encode()

        return output

    def verify(self, msg_hash, signature):
        """Check if a certain (EC)DSA signature is authentic.

        Args:
          msg_hash (hash object):
            The hash that was carried out over the message.
            This is an object belonging to the :mod:`Crypto.Hash` module.
            Under mode ``'fips-186-3'``, the hash must be a FIPS
            approved secure hash (SHA-2 or SHA-3).

          signature (``bytes``):
            The signature that needs to be validated.

        :raise ValueError: if the signature is not authentic
        """

        if not self._valid_hash(msg_hash):
            raise ValueError("Hash is not sufficiently strong")

        if self._encoding == 'binary':
            # Fixed-width encoding: exactly two order-sized integers.
            if len(signature) != (2 * self._order_bytes):
                raise ValueError("The signature is not authentic (length)")
            r_prime, s_prime = [Integer.from_bytes(x)
                                for x in (signature[:self._order_bytes],
                                          signature[self._order_bytes:])]
        else:
            # DER encoding: strict parse of SEQUENCE { r INTEGER, s INTEGER }.
            try:
                der_seq = DerSequence().decode(signature, strict=True)
            except (ValueError, IndexError):
                raise ValueError("The signature is not authentic (DER)")
            if len(der_seq) != 2 or not der_seq.hasOnlyInts():
                raise ValueError("The signature is not authentic (DER content)")
            r_prime, s_prime = Integer(der_seq[0]), Integer(der_seq[1])

        # Both components must lie strictly between 0 and the group order.
        if not (0 < r_prime < self._order) or not (0 < s_prime < self._order):
            raise ValueError("The signature is not authentic (d)")

        z = Integer.from_bytes(msg_hash.digest()[:self._order_bytes])
        result = self._key._verify(z, (r_prime, s_prime))
        if not result:
            raise ValueError("The signature is not authentic")
        # Make PyCrypto code to fail
        # (deliberately falsy: legacy callers that test the return value
        # instead of catching ValueError fail closed rather than open)
        return False
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
class DeterministicDsaSigScheme(DssSigScheme):
    """Deterministic (EC)DSA (RFC 6979): the nonce ``k`` is derived from
    the private key and the message digest via HMAC, so signing needs no
    random number generator."""

    # Also applicable to ECDSA

    def __init__(self, key, encoding, order, private_key):
        super(DeterministicDsaSigScheme, self).__init__(key, encoding, order)
        # Private scalar (DSA x / ECC d), mixed into the HMAC deriving k.
        self._private_key = private_key

    def _bits2int(self, bstr):
        """See 2.3.2 in RFC6979"""

        result = Integer.from_bytes(bstr)
        q_len = self._order.size_in_bits()
        b_len = len(bstr) * 8
        if b_len > q_len:
            # Only keep leftmost q_len bits
            result >>= (b_len - q_len)
        return result

    def _int2octets(self, int_mod_q):
        """See 2.3.3 in RFC6979"""

        assert 0 < int_mod_q < self._order
        return long_to_bytes(int_mod_q, self._order_bytes)

    def _bits2octets(self, bstr):
        """See 2.3.4 in RFC6979"""

        # Reduce modulo q (a single conditional subtraction suffices here).
        z1 = self._bits2int(bstr)
        if z1 < self._order:
            z2 = z1
        else:
            z2 = z1 - self._order
        return self._int2octets(z2)

    def _compute_nonce(self, mhash):
        """Generate k in a deterministic way"""

        # See section 3.2 in RFC6979.txt
        # Step a
        h1 = mhash.digest()
        # Step b
        mask_v = b'\x01' * mhash.digest_size
        # Step c
        nonce_k = b'\x00' * mhash.digest_size

        for int_oct in (b'\x00', b'\x01'):
            # Step d/f
            nonce_k = HMAC.new(nonce_k,
                               mask_v + int_oct +
                               self._int2octets(self._private_key) +
                               self._bits2octets(h1), mhash).digest()
            # Step e/g
            mask_v = HMAC.new(nonce_k, mask_v, mhash).digest()

        nonce = -1
        while not (0 < nonce < self._order):
            # Step h.C (second part)
            if nonce != -1:
                # Previous candidate was out of range: re-key and retry.
                nonce_k = HMAC.new(nonce_k, mask_v + b'\x00',
                                   mhash).digest()
                mask_v = HMAC.new(nonce_k, mask_v, mhash).digest()

            # Step h.A
            mask_t = b""

            # Step h.B
            while len(mask_t) < self._order_bytes:
                mask_v = HMAC.new(nonce_k, mask_v, mhash).digest()
                mask_t += mask_v

            # Step h.C (first part)
            nonce = self._bits2int(mask_t)
        return nonce

    def _valid_hash(self, msg_hash):
        # RFC 6979 places no restriction on the hash function.
        return True
|
| 243 |
+
|
| 244 |
+
|
| 245 |
+
class FipsDsaSigScheme(DssSigScheme):
    """DSA signature scheme in FIPS 186-3 mode: a fresh random nonce is
    drawn from the RNG for every signature."""

    #: (L, N) pairs -- bit lengths of the modulus ``p`` and of the subgroup
    #: order ``q`` -- that FIPS 186-3 permits.  Security strengths follow
    #: Table 2 of NIST SP 800-57 (rev3).
    _fips_186_3_L_N = (
        (1024, 160),  # 80 bits  (SHA-1 or stronger)
        (2048, 224),  # 112 bits (SHA-224 or stronger)
        (2048, 256),  # 128 bits (SHA-256 or stronger)
        (3072, 256)   # 128 bits (SHA-256 or stronger)
    )

    def __init__(self, key, encoding, order, randfunc):
        super(FipsDsaSigScheme, self).__init__(key, encoding, order)
        self._randfunc = randfunc

        # Reject key sizes that FIPS 186-3 does not allow.
        modulus_bits = Integer(key.p).size_in_bits()
        if (modulus_bits, self._order_bits) not in self._fips_186_3_L_N:
            raise ValueError("L/N (%d, %d) is not compliant to FIPS 186-3"
                             % (modulus_bits, self._order_bits))

    def _compute_nonce(self, msg_hash):
        # The digest plays no role: k is uniform at random in [1, q).
        return Integer.random_range(min_inclusive=1,
                                    max_exclusive=self._order,
                                    randfunc=self._randfunc)

    def _valid_hash(self, msg_hash):
        """Verify that SHA-1, SHA-2 or SHA-3 are used"""
        oid = msg_hash.oid
        # SHA-1 has its own OID; all SHA-2/SHA-3 variants share one prefix.
        return oid == "1.3.14.3.2.26" or oid.startswith("2.16.840.1.101.3.4.2.")
|
| 277 |
+
|
| 278 |
+
|
| 279 |
+
class FipsEcDsaSigScheme(DssSigScheme):
    """ECDSA signature scheme in FIPS 186-3 mode (randomized nonce)."""

    def __init__(self, key, encoding, order, randfunc):
        super(FipsEcDsaSigScheme, self).__init__(key, encoding, order)
        self._randfunc = randfunc

    def _compute_nonce(self, msg_hash):
        # The digest plays no role: k is uniform at random in
        # [1, curve order).
        return Integer.random_range(min_inclusive=1,
                                    max_exclusive=self._key._curve.order,
                                    randfunc=self._randfunc)

    def _valid_hash(self, msg_hash):
        """Verify that the strength of the hash matches or exceeds
        the strength of the EC. We fail if the hash is too weak."""

        # Fix: dropped the dead local ``modulus_bits`` that the original
        # computed from self._key.pointQ.size_in_bits() and never used.

        # SHS: SHA-2, SHA-3, truncated SHA-512
        sha224 = ("2.16.840.1.101.3.4.2.4", "2.16.840.1.101.3.4.2.7", "2.16.840.1.101.3.4.2.5")
        sha256 = ("2.16.840.1.101.3.4.2.1", "2.16.840.1.101.3.4.2.8", "2.16.840.1.101.3.4.2.6")
        sha384 = ("2.16.840.1.101.3.4.2.2", "2.16.840.1.101.3.4.2.9")
        sha512 = ("2.16.840.1.101.3.4.2.3", "2.16.840.1.101.3.4.2.10")
        shs = sha224 + sha256 + sha384 + sha512

        # EAFP: objects without an .oid attribute are not approved hashes.
        try:
            result = msg_hash.oid in shs
        except AttributeError:
            result = False
        return result
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
def new(key, mode, encoding='binary', randfunc=None):
    """Create a signature object :class:`DssSigScheme` that
    can perform (EC)DSA signature or verification.

    .. note::
        Refer to `NIST SP 800 Part 1 Rev 4`_ (or newer release) for an
        overview of the recommended key lengths.

    Args:
      key (:class:`Crypto.PublicKey.DSA` or :class:`Crypto.PublicKey.ECC`):
        The key to use for computing the signature (*private* keys only)
        or for verifying one.
        For DSA keys, let ``L`` and ``N`` be the bit lengths of the modulus ``p``
        and of ``q``: the pair ``(L,N)`` must appear in the following list,
        in compliance to section 4.2 of `FIPS 186-4`_:

        - (1024, 160) *legacy only; do not create new signatures with this*
        - (2048, 224) *deprecated; do not create new signatures with this*
        - (2048, 256)
        - (3072, 256)

        For ECC, only keys over P-224, P-256, P-384, and P-521 are accepted.

      mode (string):
        The parameter can take these values:

        - ``'fips-186-3'``. The signature generation is randomized and carried out
          according to `FIPS 186-3`_: the nonce ``k`` is taken from the RNG.
        - ``'deterministic-rfc6979'``. The signature generation is not
          randomized. See RFC6979_.

      encoding (string):
        How the signature is encoded. This value determines the output of
        :meth:`sign` and the input to :meth:`verify`.

        The following values are accepted:

        - ``'binary'`` (default), the signature is the raw concatenation
          of ``r`` and ``s``. It is defined in the IEEE P.1363 standard.
          For DSA, the size in bytes of the signature is ``N/4`` bytes
          (e.g. 64 for ``N=256``).
          For ECDSA, the signature is always twice the length of a point
          coordinate (e.g. 64 bytes for P-256).

        - ``'der'``, the signature is a ASN.1 DER SEQUENCE
          with two INTEGERs (``r`` and ``s``). It is defined in RFC3279_.
          The size of the signature is variable.

      randfunc (callable):
        A function that returns random ``bytes``, of a given length.
        If omitted, the internal RNG is used.
        Only applicable for the *'fips-186-3'* mode.

    .. _FIPS 186-3: http://csrc.nist.gov/publications/fips/fips186-3/fips_186-3.pdf
    .. _FIPS 186-4: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf
    .. _NIST SP 800 Part 1 Rev 4: http://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-57pt1r4.pdf
    .. _RFC6979: http://tools.ietf.org/html/rfc6979
    .. _RFC3279: https://tools.ietf.org/html/rfc3279#section-2.2.2
    """

    # The goal of the 'mode' parameter is to avoid to
    # have the current version of the standard as default.
    #
    # Over time, such version will be superseded by (for instance)
    # FIPS 186-4 and it will be odd to have -3 as default.

    if encoding not in ('binary', 'der'):
        raise ValueError("Unknown encoding '%s'" % encoding)

    # Derive the group order and the attribute holding the private scalar.
    if isinstance(key, EccKey):
        if not key.curve.startswith("NIST"):
            raise ValueError("ECC key is not on a NIST P curve")
        order = key._curve.order
        private_key_attr = 'd'
    elif isinstance(key, DsaKey):
        order = Integer(key.q)
        private_key_attr = 'x'
    else:
        raise ValueError("Unsupported key type " + str(type(key)))

    private_key = getattr(key, private_key_attr) if key.has_private() else None

    if mode == 'deterministic-rfc6979':
        return DeterministicDsaSigScheme(key, encoding, order, private_key)
    if mode == 'fips-186-3':
        scheme = FipsEcDsaSigScheme if isinstance(key, EccKey) else FipsDsaSigScheme
        return scheme(key, encoding, order, randfunc)
    raise ValueError("Unknown DSS mode '%s'" % mode)
|
minigpt2/lib/python3.10/site-packages/Crypto/Signature/DSS.pyi
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Union, Optional, Callable
|
| 2 |
+
from typing_extensions import Protocol
|
| 3 |
+
|
| 4 |
+
from Crypto.PublicKey.DSA import DsaKey
|
| 5 |
+
from Crypto.PublicKey.ECC import EccKey
|
| 6 |
+
|
| 7 |
+
class Hash(Protocol):
    # Structural type for the digest objects accepted by sign()/verify():
    # any object exposing a digest() method qualifies.
    def digest(self) -> bytes: ...

__all__ = ['new']
|
| 11 |
+
|
| 12 |
+
class DssSigScheme:
    # Base (EC)DSA signature scheme; obtain instances via new(), never
    # by direct construction.
    # NOTE(review): at runtime ``order`` is a Crypto.Math.Numbers.Integer
    # rather than a plain int -- confirm before tightening the annotation.
    def __init__(self, key: Union[DsaKey, EccKey], encoding: str, order: int) -> None: ...
    def can_sign(self) -> bool: ...
    def sign(self, msg_hash: Hash) -> bytes: ...
    def verify(self, msg_hash: Hash, signature: bytes) -> bool: ...
|
| 17 |
+
|
| 18 |
+
class DeterministicDsaSigScheme(DssSigScheme):
    # RFC 6979 deterministic mode: the nonce is derived from the private
    # key and the digest, so no RNG is involved.
    def __init__(self, key, encoding, order, private_key) -> None: ...
|
| 20 |
+
|
| 21 |
+
class FipsDsaSigScheme(DssSigScheme):
    # FIPS 186-3 randomized mode for DSA keys; randfunc supplies the nonce.
    def __init__(self, key: DsaKey, encoding: str, order: int, randfunc: Callable) -> None: ...
|
| 23 |
+
|
| 24 |
+
class FipsEcDsaSigScheme(DssSigScheme):
    # FIPS 186-3 randomized mode for ECC keys; randfunc supplies the nonce.
    def __init__(self, key: EccKey, encoding: str, order: int, randfunc: Callable) -> None: ...
|
| 26 |
+
|
| 27 |
+
# ``encoding`` accepts only 'binary' or 'der'; passing None raises
# ValueError at runtime, so the parameter type is ``str``, not
# ``Optional[str]``.
def new(key: Union[DsaKey, EccKey], mode: str, encoding: str = 'binary', randfunc: Optional[Callable] = None) -> Union[DeterministicDsaSigScheme, FipsDsaSigScheme, FipsEcDsaSigScheme]: ...
|