Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .gitattributes +1 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/__init__.py +46 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/__main__.py +4 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/__init__.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/__main__.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/api.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/cd.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/constant.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/legacy.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/md.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/models.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/utils.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/version.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/api.py +668 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/cd.py +395 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/cli/__init__.py +6 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/cli/__main__.py +320 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/constant.py +1997 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/legacy.py +65 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/md.cpython-310-x86_64-linux-gnu.so +0 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/md.py +628 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/md__mypyc.cpython-310-x86_64-linux-gnu.so +3 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/models.py +359 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/py.typed +0 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/utils.py +421 -0
- minigpt2/lib/python3.10/site-packages/charset_normalizer/version.py +6 -0
- minigpt2/lib/python3.10/site-packages/idna/__pycache__/compat.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.4.127.dist-info/INSTALLER +1 -0
- minigpt2/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.4.127.dist-info/License.txt +1568 -0
- minigpt2/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.4.127.dist-info/METADATA +35 -0
- minigpt2/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.4.127.dist-info/RECORD +54 -0
- minigpt2/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.4.127.dist-info/REQUESTED +0 -0
- minigpt2/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.4.127.dist-info/WHEEL +5 -0
- minigpt2/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.4.127.dist-info/top_level.txt +1 -0
- minigpt2/lib/python3.10/site-packages/shellingham/__pycache__/_core.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/shellingham/__pycache__/nt.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/shellingham/_core.py +11 -0
- minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/_optical_flow.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/cifar.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/cityscapes.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/coco.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/eurosat.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/fakedata.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/food101.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/kinetics.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/lsun.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/mnist.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/moving_mnist.cpython-310.pyc +0 -0
.gitattributes
CHANGED
|
@@ -1362,3 +1362,4 @@ minigpt2/lib/python3.10/site-packages/tokenizers/tokenizers.cpython-310-x86_64-l
|
|
| 1362 |
minigpt2/lib/python3.10/site-packages/torchvision/image.so filter=lfs diff=lfs merge=lfs -text
|
| 1363 |
minigpt2/lib/python3.10/site-packages/torchvision/_C.so filter=lfs diff=lfs merge=lfs -text
|
| 1364 |
llava_next/bin/python3.1 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 1362 |
minigpt2/lib/python3.10/site-packages/torchvision/image.so filter=lfs diff=lfs merge=lfs -text
|
| 1363 |
minigpt2/lib/python3.10/site-packages/torchvision/_C.so filter=lfs diff=lfs merge=lfs -text
|
| 1364 |
llava_next/bin/python3.1 filter=lfs diff=lfs merge=lfs -text
|
| 1365 |
+
minigpt2/lib/python3.10/site-packages/charset_normalizer/md__mypyc.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/__init__.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
"""
Charset-Normalizer
~~~~~~~~~~~~~~
The Real First Universal Charset Detector.
A library that helps you read text from an unknown charset encoding.
Motivated by chardet, This package is trying to resolve the issue by taking a new approach.
All IANA character set names for which the Python core library provides codecs are supported.

Basic usage:
>>> from charset_normalizer import from_bytes
>>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8'))
>>> best_guess = results.best()
>>> str(best_guess)
'Bсеки човек има право на образование. Oбразованието!'

Others methods and usages are available - see the full documentation
at <https://github.com/Ousret/charset_normalizer>.
:copyright: (c) 2021 by Ahmed TAHRI
:license: MIT, see LICENSE for more details.
"""
import logging

from .api import from_bytes, from_fp, from_path, is_binary
from .legacy import detect
from .models import CharsetMatch, CharsetMatches
from .utils import set_logging_handler
from .version import VERSION, __version__

# Names re-exported as the package's public API.
__all__ = (
    "from_fp",
    "from_path",
    "from_bytes",
    "is_binary",
    "detect",
    "CharsetMatch",
    "CharsetMatches",
    "__version__",
    "VERSION",
    "set_logging_handler",
)

# Attach a NullHandler to the top level logger by default
# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library

logging.getLogger("charset_normalizer").addHandler(logging.NullHandler())
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/__main__.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Entry point for `python -m charset_normalizer`; delegates to the CLI.
from .cli import cli_detect

if __name__ == "__main__":
    cli_detect()
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (1.57 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/__main__.cpython-310.pyc
ADDED
|
Binary file (254 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/api.cpython-310.pyc
ADDED
|
Binary file (11.7 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/cd.cpython-310.pyc
ADDED
|
Binary file (9.65 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/constant.cpython-310.pyc
ADDED
|
Binary file (30.4 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/legacy.cpython-310.pyc
ADDED
|
Binary file (2.18 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/md.cpython-310.pyc
ADDED
|
Binary file (15.9 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/models.cpython-310.pyc
ADDED
|
Binary file (12 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/utils.cpython-310.pyc
ADDED
|
Binary file (8.91 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/__pycache__/version.cpython-310.pyc
ADDED
|
Binary file (256 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/api.py
ADDED
|
@@ -0,0 +1,668 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
from os import PathLike
from typing import BinaryIO, List, Optional, Set, Union

from .cd import (
    coherence_ratio,
    encoding_languages,
    mb_encoding_languages,
    merge_coherence_ratios,
)
from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE
from .md import mess_ratio
from .models import CharsetMatch, CharsetMatches
from .utils import (
    any_specified_encoding,
    cut_sequence_chunks,
    iana_name,
    identify_sig_or_bom,
    is_cp_similar,
    is_multi_byte_encoding,
    should_strip_sig_or_bom,
)

# Will most likely be controversial
# logging.addLevelName(TRACE, "TRACE")
# Module-level logger shared by the detection functions below. The
# StreamHandler is only attached while a call runs with explain=True.
logger = logging.getLogger("charset_normalizer")
explain_handler = logging.StreamHandler()
explain_handler.setFormatter(
    logging.Formatter("%(asctime)s | %(levelname)s | %(message)s")
)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def from_bytes(
    sequences: Union[bytes, bytearray],
    steps: int = 5,
    chunk_size: int = 512,
    threshold: float = 0.2,
    cp_isolation: Optional[List[str]] = None,
    cp_exclusion: Optional[List[str]] = None,
    preemptive_behaviour: bool = True,
    explain: bool = False,
    language_threshold: float = 0.1,
    enable_fallback: bool = True,
) -> CharsetMatches:
    """
    Given a raw bytes sequence, return the best possibles charset usable to render str objects.
    If there is no results, it is a strong indicator that the source is binary/not text.
    By default, the process will extract 5 blocks of 512o each to assess the mess and coherence of a given sequence.
    And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will.

    The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page
    but never take it for granted. Can improve the performance.

    You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that
    purpose.

    This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32.
    By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain'
    toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging.
    Custom logging format and handler can be set manually.
    """

    if not isinstance(sequences, (bytearray, bytes)):
        raise TypeError(
            "Expected object of type bytes or bytearray, got: {0}".format(
                type(sequences)
            )
        )

    # With explain=True, temporarily attach the module StreamHandler and
    # drop the level to TRACE; restored before every return below.
    if explain:
        previous_logger_level: int = logger.level
        logger.addHandler(explain_handler)
        logger.setLevel(TRACE)

    length: int = len(sequences)

    # Empty payload: no detection possible, report utf_8 with zero chaos.
    if length == 0:
        logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.")
        if explain:
            logger.removeHandler(explain_handler)
            logger.setLevel(previous_logger_level or logging.WARNING)
        return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")])

    # Normalize user-supplied code page lists to IANA names (or empty lists).
    if cp_isolation is not None:
        logger.log(
            TRACE,
            "cp_isolation is set. use this flag for debugging purpose. "
            "limited list of encoding allowed : %s.",
            ", ".join(cp_isolation),
        )
        cp_isolation = [iana_name(cp, False) for cp in cp_isolation]
    else:
        cp_isolation = []

    if cp_exclusion is not None:
        logger.log(
            TRACE,
            "cp_exclusion is set. use this flag for debugging purpose. "
            "limited list of encoding excluded : %s.",
            ", ".join(cp_exclusion),
        )
        cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion]
    else:
        cp_exclusion = []

    # Shrink the sampling plan when the payload is smaller than steps*chunk_size.
    if length <= (chunk_size * steps):
        logger.log(
            TRACE,
            "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.",
            steps,
            chunk_size,
            length,
        )
        steps = 1
        chunk_size = length

    if steps > 1 and length / steps < chunk_size:
        chunk_size = int(length / steps)

    is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE
    is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE

    if is_too_small_sequence:
        logger.log(
            TRACE,
            "Trying to detect encoding from a tiny portion of ({}) byte(s).".format(
                length
            ),
        )
    elif is_too_large_sequence:
        logger.log(
            TRACE,
            "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format(
                length
            ),
        )

    prioritized_encodings: List[str] = []

    # Preemptive pass: honor an encoding declared inside the payload itself
    # (e.g. an XML/HTML declaration) by testing it first.
    specified_encoding: Optional[str] = (
        any_specified_encoding(sequences) if preemptive_behaviour else None
    )

    if specified_encoding is not None:
        prioritized_encodings.append(specified_encoding)
        logger.log(
            TRACE,
            "Detected declarative mark in sequence. Priority +1 given for %s.",
            specified_encoding,
        )

    tested: Set[str] = set()
    tested_but_hard_failure: List[str] = []
    tested_but_soft_failure: List[str] = []

    # Fallback candidates retained in case no encoding passes the chaos probe.
    fallback_ascii: Optional[CharsetMatch] = None
    fallback_u8: Optional[CharsetMatch] = None
    fallback_specified: Optional[CharsetMatch] = None

    results: CharsetMatches = CharsetMatches()

    early_stop_results: CharsetMatches = CharsetMatches()

    sig_encoding, sig_payload = identify_sig_or_bom(sequences)

    if sig_encoding is not None:
        prioritized_encodings.append(sig_encoding)
        logger.log(
            TRACE,
            "Detected a SIG or BOM mark on first %i byte(s). Priority +1 given for %s.",
            len(sig_payload),
            sig_encoding,
        )

    prioritized_encodings.append("ascii")

    if "utf_8" not in prioritized_encodings:
        prioritized_encodings.append("utf_8")

    # Main probe loop: prioritized candidates first, then every IANA-supported
    # code page, deduplicated through `tested`.
    for encoding_iana in prioritized_encodings + IANA_SUPPORTED:
        if cp_isolation and encoding_iana not in cp_isolation:
            continue

        if cp_exclusion and encoding_iana in cp_exclusion:
            continue

        if encoding_iana in tested:
            continue

        tested.add(encoding_iana)

        decoded_payload: Optional[str] = None
        bom_or_sig_available: bool = sig_encoding == encoding_iana
        strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom(
            encoding_iana
        )

        if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available:
            logger.log(
                TRACE,
                "Encoding %s won't be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.",
                encoding_iana,
            )
            continue
        if encoding_iana in {"utf_7"} and not bom_or_sig_available:
            logger.log(
                TRACE,
                "Encoding %s won't be tested as-is because detection is unreliable without BOM/SIG.",
                encoding_iana,
            )
            continue

        try:
            is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana)
        except (ModuleNotFoundError, ImportError):
            logger.log(
                TRACE,
                "Encoding %s does not provide an IncrementalDecoder",
                encoding_iana,
            )
            continue

        # First hard test: the payload (or a 500 kB prefix for large single
        # byte inputs) must decode at all under this code page.
        try:
            if is_too_large_sequence and is_multi_byte_decoder is False:
                str(
                    (
                        sequences[: int(50e4)]
                        if strip_sig_or_bom is False
                        else sequences[len(sig_payload) : int(50e4)]
                    ),
                    encoding=encoding_iana,
                )
            else:
                decoded_payload = str(
                    (
                        sequences
                        if strip_sig_or_bom is False
                        else sequences[len(sig_payload) :]
                    ),
                    encoding=encoding_iana,
                )
        except (UnicodeDecodeError, LookupError) as e:
            if not isinstance(e, LookupError):
                logger.log(
                    TRACE,
                    "Code page %s does not fit given bytes sequence at ALL. %s",
                    encoding_iana,
                    str(e),
                )
            tested_but_hard_failure.append(encoding_iana)
            continue

        # Skip code pages that are near-duplicates of one that already
        # soft-failed the chaos probe.
        similar_soft_failure_test: bool = False

        for encoding_soft_failed in tested_but_soft_failure:
            if is_cp_similar(encoding_iana, encoding_soft_failed):
                similar_soft_failure_test = True
                break

        if similar_soft_failure_test:
            logger.log(
                TRACE,
                "%s is deemed too similar to code page %s and was consider unsuited already. Continuing!",
                encoding_iana,
                encoding_soft_failed,
            )
            continue

        r_ = range(
            0 if not bom_or_sig_available else len(sig_payload),
            length,
            int(length / steps),
        )

        multi_byte_bonus: bool = (
            is_multi_byte_decoder
            and decoded_payload is not None
            and len(decoded_payload) < length
        )

        if multi_byte_bonus:
            logger.log(
                TRACE,
                "Code page %s is a multi byte encoding table and it appear that at least one character "
                "was encoded using n-bytes.",
                encoding_iana,
            )

        max_chunk_gave_up: int = int(len(r_) / 4)

        max_chunk_gave_up = max(max_chunk_gave_up, 2)
        early_stop_count: int = 0
        lazy_str_hard_failure = False

        md_chunks: List[str] = []
        md_ratios = []

        # Mess-detection pass: score each extracted chunk; bail out once a
        # quarter of the chunks exceed the threshold.
        try:
            for chunk in cut_sequence_chunks(
                sequences,
                encoding_iana,
                r_,
                chunk_size,
                bom_or_sig_available,
                strip_sig_or_bom,
                sig_payload,
                is_multi_byte_decoder,
                decoded_payload,
            ):
                md_chunks.append(chunk)

                md_ratios.append(
                    mess_ratio(
                        chunk,
                        threshold,
                        explain is True and 1 <= len(cp_isolation) <= 2,
                    )
                )

                if md_ratios[-1] >= threshold:
                    early_stop_count += 1

                if (early_stop_count >= max_chunk_gave_up) or (
                    bom_or_sig_available and strip_sig_or_bom is False
                ):
                    break
        except (
            UnicodeDecodeError
        ) as e:  # Lazy str loading may have missed something there
            logger.log(
                TRACE,
                "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s",
                encoding_iana,
                str(e),
            )
            early_stop_count = max_chunk_gave_up
            lazy_str_hard_failure = True

        # We might want to check the sequence again with the whole content
        # Only if initial MD tests passes
        if (
            not lazy_str_hard_failure
            and is_too_large_sequence
            and not is_multi_byte_decoder
        ):
            try:
                sequences[int(50e3) :].decode(encoding_iana, errors="strict")
            except UnicodeDecodeError as e:
                logger.log(
                    TRACE,
                    "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s",
                    encoding_iana,
                    str(e),
                )
                tested_but_hard_failure.append(encoding_iana)
                continue

        mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0
        if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up:
            tested_but_soft_failure.append(encoding_iana)
            logger.log(
                TRACE,
                "%s was excluded because of initial chaos probing. Gave up %i time(s). "
                "Computed mean chaos is %f %%.",
                encoding_iana,
                early_stop_count,
                round(mean_mess_ratio * 100, ndigits=3),
            )
            # Preparing those fallbacks in case we got nothing.
            if (
                enable_fallback
                and encoding_iana in ["ascii", "utf_8", specified_encoding]
                and not lazy_str_hard_failure
            ):
                fallback_entry = CharsetMatch(
                    sequences,
                    encoding_iana,
                    threshold,
                    False,
                    [],
                    decoded_payload,
                    preemptive_declaration=specified_encoding,
                )
                if encoding_iana == specified_encoding:
                    fallback_specified = fallback_entry
                elif encoding_iana == "ascii":
                    fallback_ascii = fallback_entry
                else:
                    fallback_u8 = fallback_entry
            continue

        logger.log(
            TRACE,
            "%s passed initial chaos probing. Mean measured chaos is %f %%",
            encoding_iana,
            round(mean_mess_ratio * 100, ndigits=3),
        )

        if not is_multi_byte_decoder:
            target_languages: List[str] = encoding_languages(encoding_iana)
        else:
            target_languages = mb_encoding_languages(encoding_iana)

        if target_languages:
            logger.log(
                TRACE,
                "{} should target any language(s) of {}".format(
                    encoding_iana, str(target_languages)
                ),
            )

        cd_ratios = []

        # We shall skip the CD when its about ASCII
        # Most of the time its not relevant to run "language-detection" on it.
        if encoding_iana != "ascii":
            for chunk in md_chunks:
                chunk_languages = coherence_ratio(
                    chunk,
                    language_threshold,
                    ",".join(target_languages) if target_languages else None,
                )

                cd_ratios.append(chunk_languages)

        cd_ratios_merged = merge_coherence_ratios(cd_ratios)

        if cd_ratios_merged:
            logger.log(
                TRACE,
                "We detected language {} using {}".format(
                    cd_ratios_merged, encoding_iana
                ),
            )

        current_match = CharsetMatch(
            sequences,
            encoding_iana,
            mean_mess_ratio,
            bom_or_sig_available,
            cd_ratios_merged,
            (
                decoded_payload
                if (
                    is_too_large_sequence is False
                    or encoding_iana in [specified_encoding, "ascii", "utf_8"]
                )
                else None
            ),
            preemptive_declaration=specified_encoding,
        )

        results.append(current_match)

        if (
            encoding_iana in [specified_encoding, "ascii", "utf_8"]
            and mean_mess_ratio < 0.1
        ):
            # If md says nothing to worry about, then... stop immediately!
            if mean_mess_ratio == 0.0:
                logger.debug(
                    "Encoding detection: %s is most likely the one.",
                    current_match.encoding,
                )
                if explain:
                    logger.removeHandler(explain_handler)
                    logger.setLevel(previous_logger_level)
                return CharsetMatches([current_match])

            early_stop_results.append(current_match)

        # Early exit once all prioritized candidates have been probed and at
        # least one produced a low-chaos match.
        if (
            len(early_stop_results)
            and (specified_encoding is None or specified_encoding in tested)
            and "ascii" in tested
            and "utf_8" in tested
        ):
            probable_result: CharsetMatch = early_stop_results.best()  # type: ignore[assignment]
            logger.debug(
                "Encoding detection: %s is most likely the one.",
                probable_result.encoding,
            )
            if explain:
                logger.removeHandler(explain_handler)
                logger.setLevel(previous_logger_level)

            return CharsetMatches([probable_result])

        if encoding_iana == sig_encoding:
            logger.debug(
                "Encoding detection: %s is most likely the one as we detected a BOM or SIG within "
                "the beginning of the sequence.",
                encoding_iana,
            )
            if explain:
                logger.removeHandler(explain_handler)
                logger.setLevel(previous_logger_level)
            return CharsetMatches([results[encoding_iana]])

    # Nothing passed the probe: fall back to the retained specified/utf_8/ascii
    # candidates, in that order of preference.
    if len(results) == 0:
        if fallback_u8 or fallback_ascii or fallback_specified:
            logger.log(
                TRACE,
                "Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback.",
            )

        if fallback_specified:
            logger.debug(
                "Encoding detection: %s will be used as a fallback match",
                fallback_specified.encoding,
            )
            results.append(fallback_specified)
        elif (
            (fallback_u8 and fallback_ascii is None)
            or (
                fallback_u8
                and fallback_ascii
                and fallback_u8.fingerprint != fallback_ascii.fingerprint
            )
            or (fallback_u8 is not None)
        ):
            logger.debug("Encoding detection: utf_8 will be used as a fallback match")
            results.append(fallback_u8)
        elif fallback_ascii:
            logger.debug("Encoding detection: ascii will be used as a fallback match")
            results.append(fallback_ascii)

    if results:
        logger.debug(
            "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.",
            results.best().encoding,  # type: ignore
            len(results) - 1,
        )
    else:
        logger.debug("Encoding detection: Unable to determine any suitable charset.")

    if explain:
        logger.removeHandler(explain_handler)
        logger.setLevel(previous_logger_level)

    return results
|
| 542 |
+
|
| 543 |
+
|
| 544 |
+
def from_fp(
    fp: BinaryIO,
    steps: int = 5,
    chunk_size: int = 512,
    threshold: float = 0.20,
    cp_isolation: Optional[List[str]] = None,
    cp_exclusion: Optional[List[str]] = None,
    preemptive_behaviour: bool = True,
    explain: bool = False,
    language_threshold: float = 0.1,
    enable_fallback: bool = True,
) -> CharsetMatches:
    """
    Same as from_bytes, but reads the payload from an already-opened binary
    file pointer. The file pointer is NOT closed by this function.
    """
    # Forward every tuning knob explicitly by keyword to from_bytes.
    return from_bytes(
        fp.read(),
        steps=steps,
        chunk_size=chunk_size,
        threshold=threshold,
        cp_isolation=cp_isolation,
        cp_exclusion=cp_exclusion,
        preemptive_behaviour=preemptive_behaviour,
        explain=explain,
        language_threshold=language_threshold,
        enable_fallback=enable_fallback,
    )
|
| 572 |
+
|
| 573 |
+
|
| 574 |
+
def from_path(
    path: Union[str, bytes, PathLike],  # type: ignore[type-arg]
    steps: int = 5,
    chunk_size: int = 512,
    threshold: float = 0.20,
    cp_isolation: Optional[List[str]] = None,
    cp_exclusion: Optional[List[str]] = None,
    preemptive_behaviour: bool = True,
    explain: bool = False,
    language_threshold: float = 0.1,
    enable_fallback: bool = True,
) -> CharsetMatches:
    """
    Same as from_bytes, with one extra step: the given file path is opened
    and read in binary mode. Can raise IOError.
    """
    with open(path, "rb") as fp:
        # Delegate to from_fp with every option passed by keyword.
        return from_fp(
            fp,
            steps=steps,
            chunk_size=chunk_size,
            threshold=threshold,
            cp_isolation=cp_isolation,
            cp_exclusion=cp_exclusion,
            preemptive_behaviour=preemptive_behaviour,
            explain=explain,
            language_threshold=language_threshold,
            enable_fallback=enable_fallback,
        )
|
| 603 |
+
|
| 604 |
+
|
| 605 |
+
def is_binary(
    fp_or_path_or_payload: Union[PathLike, str, BinaryIO, bytes],  # type: ignore[type-arg]
    steps: int = 5,
    chunk_size: int = 512,
    threshold: float = 0.20,
    cp_isolation: Optional[List[str]] = None,
    cp_exclusion: Optional[List[str]] = None,
    preemptive_behaviour: bool = True,
    explain: bool = False,
    language_threshold: float = 0.1,
    enable_fallback: bool = False,
) -> bool:
    """
    Detect if the given input (file, bytes, or path) points to a binary file,
    i.e. not a string. Uses the same main heuristic algorithms and default
    kwargs, except that fallback matches are disabled by default in order to
    be stricter about ASCII-compatible content unlikely to be text.
    """
    # All three entry points take the same tuning options; build them once.
    detection_kwargs = dict(
        steps=steps,
        chunk_size=chunk_size,
        threshold=threshold,
        cp_isolation=cp_isolation,
        cp_exclusion=cp_exclusion,
        preemptive_behaviour=preemptive_behaviour,
        explain=explain,
        language_threshold=language_threshold,
        enable_fallback=enable_fallback,
    )

    # Dispatch on the payload type: path-like, raw bytes, or file pointer.
    if isinstance(fp_or_path_or_payload, (str, PathLike)):
        guesses = from_path(fp_or_path_or_payload, **detection_kwargs)
    elif isinstance(fp_or_path_or_payload, (bytes, bytearray)):
        guesses = from_bytes(fp_or_path_or_payload, **detection_kwargs)
    else:
        guesses = from_fp(fp_or_path_or_payload, **detection_kwargs)

    # No plausible charset at all => treat the content as binary.
    return not guesses
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/cd.py
ADDED
|
@@ -0,0 +1,395 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import importlib
|
| 2 |
+
from codecs import IncrementalDecoder
|
| 3 |
+
from collections import Counter
|
| 4 |
+
from functools import lru_cache
|
| 5 |
+
from typing import Counter as TypeCounter, Dict, List, Optional, Tuple
|
| 6 |
+
|
| 7 |
+
from .constant import (
|
| 8 |
+
FREQUENCIES,
|
| 9 |
+
KO_NAMES,
|
| 10 |
+
LANGUAGE_SUPPORTED_COUNT,
|
| 11 |
+
TOO_SMALL_SEQUENCE,
|
| 12 |
+
ZH_NAMES,
|
| 13 |
+
)
|
| 14 |
+
from .md import is_suspiciously_successive_range
|
| 15 |
+
from .models import CoherenceMatches
|
| 16 |
+
from .utils import (
|
| 17 |
+
is_accentuated,
|
| 18 |
+
is_latin,
|
| 19 |
+
is_multi_byte_encoding,
|
| 20 |
+
is_unicode_range_secondary,
|
| 21 |
+
unicode_range,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def encoding_unicode_range(iana_name: str) -> List[str]:
    """
    Return associated unicode ranges in a single byte code page.
    """
    # Only meaningful for single-byte code pages: a multi-byte decoder cannot
    # be probed one byte at a time.
    if is_multi_byte_encoding(iana_name):
        raise IOError("Function not supported on multi-byte code page")

    decoder = importlib.import_module(
        "encodings.{}".format(iana_name)
    ).IncrementalDecoder

    p: IncrementalDecoder = decoder(errors="ignore")
    seen_ranges: Dict[str, int] = {}
    character_count: int = 0

    # Probe bytes 0x40..0xFE (range() excludes 0xFF); bytes below 0x40 are
    # ASCII-compatible in the probed code pages and carry no range signal.
    for i in range(0x40, 0xFF):
        chunk: str = p.decode(bytes([i]))

        if chunk:
            character_range: Optional[str] = unicode_range(chunk)

            if character_range is None:
                continue

            # Count only primary (non-secondary) unicode ranges.
            if is_unicode_range_secondary(character_range) is False:
                if character_range not in seen_ranges:
                    seen_ranges[character_range] = 0
                seen_ranges[character_range] += 1
                character_count += 1

    # Keep ranges that cover at least 15% of the decodable characters.
    return sorted(
        [
            character_range
            for character_range in seen_ranges
            if seen_ranges[character_range] / character_count >= 0.15
        ]
    )
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def unicode_range_languages(primary_range: str) -> List[str]:
    """
    Return inferred languages used with a unicode range.

    A language qualifies as soon as one of its frequent characters falls
    inside the given range.
    """
    return [
        language
        for language, characters in FREQUENCIES.items()
        if any(unicode_range(letter) == primary_range for letter in characters)
    ]
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
@lru_cache()
def encoding_languages(iana_name: str) -> List[str]:
    """
    Single-byte encoding language association. Some code page are heavily linked to particular language(s).
    This function does the correspondence.
    """
    detected_ranges: List[str] = encoding_unicode_range(iana_name)

    # The first non-Latin range, if any, drives the language inference.
    primary_range: Optional[str] = next(
        (candidate for candidate in detected_ranges if "Latin" not in candidate),
        None,
    )

    if primary_range is None:
        return ["Latin Based"]

    return unicode_range_languages(primary_range)
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
@lru_cache()
def mb_encoding_languages(iana_name: str) -> List[str]:
    """
    Multi-byte encoding language association. Some code page are heavily linked to particular language(s).
    This function does the correspondence.
    """
    # Japanese code pages: Shift-JIS family, ISO-2022-JP, EUC-JP and cp932.
    if iana_name == "cp932" or iana_name.startswith(
        ("shift_", "iso2022_jp", "euc_j")
    ):
        return ["Japanese"]
    # Chinese code pages: GB* family plus the aliases listed in ZH_NAMES.
    if iana_name.startswith("gb") or iana_name in ZH_NAMES:
        return ["Chinese"]
    # Korean code pages: ISO-2022-KR plus the aliases listed in KO_NAMES.
    if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES:
        return ["Korean"]

    return []
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT)
def get_target_features(language: str) -> Tuple[bool, bool]:
    """
    Determine main aspects from a supported language if it contains accents and if is pure Latin.

    Returns a (has_accents, pure_latin) pair derived from the language's
    frequent-character table.
    """
    has_accents: bool = False
    pure_latin: bool = True

    for letter in FREQUENCIES[language]:
        # Both flags short-circuit: once decided, the helper is not re-invoked.
        if not has_accents and is_accentuated(letter):
            has_accents = True
        if pure_latin and is_latin(letter) is False:
            pure_latin = False

    return has_accents, pure_latin
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
def alphabet_languages(
    characters: List[str], ignore_non_latin: bool = False
) -> List[str]:
    """
    Return associated languages associated to given characters.

    A language is retained when at least 20% of its frequent characters
    appear among the given ones; results are ordered by overlap, best first.
    """
    candidates: List[Tuple[str, float]] = []
    observed = set(characters)

    source_have_accents = any(is_accentuated(letter) for letter in characters)

    for language, language_characters in FREQUENCIES.items():
        target_have_accents, target_pure_latin = get_target_features(language)

        # Caller may restrict the search to purely Latin alphabets.
        if ignore_non_latin and target_pure_latin is False:
            continue

        # An accent-free language cannot explain accented input.
        if target_have_accents is False and source_have_accents:
            continue

        matched = sum(1 for letter in language_characters if letter in observed)
        overlap: float = matched / len(language_characters)

        if overlap >= 0.2:
            candidates.append((language, overlap))

    candidates.sort(key=lambda entry: entry[1], reverse=True)

    return [name for name, _ in candidates]
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
def characters_popularity_compare(
    language: str, ordered_characters: List[str]
) -> float:
    """
    Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language.
    The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit).
    Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.)
    """
    if language not in FREQUENCIES:
        raise ValueError("{} not available".format(language))

    character_approved_count: int = 0
    # Set copy for O(1) membership tests in the loop below.
    FREQUENCIES_language_set = set(FREQUENCIES[language])

    ordered_characters_count: int = len(ordered_characters)
    target_language_characters_count: int = len(FREQUENCIES[language])

    # Alphabets larger than 26 letters get a looser rank tolerance.
    large_alphabet: bool = target_language_characters_count > 26

    for character, character_rank in zip(
        ordered_characters, range(0, ordered_characters_count)
    ):
        if character not in FREQUENCIES_language_set:
            continue

        character_rank_in_language: int = FREQUENCIES[language].index(character)
        # Scale the observed rank onto the language's frequency-table length
        # so that positions in lists of different sizes are comparable.
        expected_projection_ratio: float = (
            target_language_characters_count / ordered_characters_count
        )
        character_rank_projection: int = int(character_rank * expected_projection_ratio)

        # Small alphabet: reject characters whose projected rank is off by
        # more than 4 positions.
        if (
            large_alphabet is False
            and abs(character_rank_projection - character_rank_in_language) > 4
        ):
            continue

        # Large alphabet: approve directly when the projected rank lands
        # within a third of the table length.
        if (
            large_alphabet is True
            and abs(character_rank_projection - character_rank_in_language)
            < target_language_characters_count / 3
        ):
            character_approved_count += 1
            continue

        # Fallback check: compare the neighborhoods (characters ranked before
        # and after this one) between the observation and the language table.
        characters_before_source: List[str] = FREQUENCIES[language][
            0:character_rank_in_language
        ]
        characters_after_source: List[str] = FREQUENCIES[language][
            character_rank_in_language:
        ]
        characters_before: List[str] = ordered_characters[0:character_rank]
        characters_after: List[str] = ordered_characters[character_rank:]

        before_match_count: int = len(
            set(characters_before) & set(characters_before_source)
        )

        after_match_count: int = len(
            set(characters_after) & set(characters_after_source)
        )

        # Edge of the table: approve when the (empty) source side trivially
        # agrees with a small observed neighborhood.
        if len(characters_before_source) == 0 and before_match_count <= 4:
            character_approved_count += 1
            continue

        if len(characters_after_source) == 0 and after_match_count <= 4:
            character_approved_count += 1
            continue

        # Approve when at least 40% of either neighborhood overlaps.
        if (
            before_match_count / len(characters_before_source) >= 0.4
            or after_match_count / len(characters_after_source) >= 0.4
        ):
            character_approved_count += 1
            continue

    return character_approved_count / len(ordered_characters)
|
| 250 |
+
|
| 251 |
+
|
| 252 |
+
def alpha_unicode_split(decoded_sequence: str) -> List[str]:
    """
    Given a decoded text sequence, return a list of str. Unicode range / alphabet separation.
    Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list;
    One containing the latin letters and the other hebrew.
    """
    # Maps a representative unicode range -> accumulated lowercase letters.
    layers: Dict[str, str] = {}

    for character in decoded_sequence:
        # Only alphabetic characters contribute to language layers.
        if character.isalpha() is False:
            continue

        character_range: Optional[str] = unicode_range(character)

        if character_range is None:
            continue

        layer_target_range: Optional[str] = None

        # Reuse an existing layer whose range plausibly co-occurs with this
        # character's range (e.g. Latin base + Latin extended).
        for discovered_range in layers:
            if (
                is_suspiciously_successive_range(discovered_range, character_range)
                is False
            ):
                layer_target_range = discovered_range
                break

        # No compatible layer found: this character starts a new one.
        if layer_target_range is None:
            layer_target_range = character_range

        if layer_target_range not in layers:
            layers[layer_target_range] = character.lower()
            continue

        # NOTE(review): per-character str concatenation; acceptable for the
        # short sequences analysed here, quadratic on very long inputs.
        layers[layer_target_range] += character.lower()

    return list(layers.values())
|
| 289 |
+
|
| 290 |
+
|
| 291 |
+
def merge_coherence_ratios(results: List[CoherenceMatches]) -> CoherenceMatches:
    """
    This function merge results previously given by the function coherence_ratio.
    The return type is the same as coherence_ratio.

    Each language's ratios are averaged (rounded to 4 decimals) and the
    merged list is ordered best ratio first.
    """
    ratios_by_language: Dict[str, List[float]] = {}

    for batch in results:
        for language, ratio in batch:
            ratios_by_language.setdefault(language, []).append(ratio)

    merged = [
        (language, round(sum(ratios) / len(ratios), 4))
        for language, ratios in ratios_by_language.items()
    ]

    merged.sort(key=lambda entry: entry[1], reverse=True)
    return merged
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches:
    """
    We shall NOT return "English—" in CoherenceMatches because it is an alternative
    of "English". This function only keeps the best match and remove the em-dash in it.
    """
    # Group ratios under the em-dash-stripped language name.
    grouped: Dict[str, List[float]] = dict()

    for language, ratio in results:
        grouped.setdefault(language.replace("—", ""), []).append(ratio)

    # Only rewrite the result list when at least one duplicate was collapsed;
    # otherwise return the input unchanged.
    if any(len(ratios) > 1 for ratios in grouped.values()):
        return [(language, max(ratios)) for language, ratios in grouped.items()]

    return results
|
| 344 |
+
|
| 345 |
+
|
| 346 |
+
@lru_cache(maxsize=2048)
def coherence_ratio(
    decoded_sequence: str, threshold: float = 0.1, lg_inclusion: Optional[str] = None
) -> CoherenceMatches:
    """
    Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers.
    A layer = Character extraction by alphabets/ranges.
    """

    results: List[Tuple[str, float]] = []
    ignore_non_latin: bool = False

    sufficient_match_count: int = 0

    # lg_inclusion is an optional comma-separated hint restricting which
    # languages are probed; "Latin Based" is a meta-entry, not a language.
    lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else []
    if "Latin Based" in lg_inclusion_list:
        ignore_non_latin = True
        lg_inclusion_list.remove("Latin Based")

    # Analyse each alphabet/range layer independently.
    for layer in alpha_unicode_split(decoded_sequence):
        sequence_frequencies: TypeCounter[str] = Counter(layer)
        most_common = sequence_frequencies.most_common()

        character_count: int = sum(o for c, o in most_common)

        # Too few characters produce unreliable frequency statistics.
        if character_count <= TOO_SMALL_SEQUENCE:
            continue

        popular_character_ordered: List[str] = [c for c, o in most_common]

        for language in lg_inclusion_list or alphabet_languages(
            popular_character_ordered, ignore_non_latin
        ):
            ratio: float = characters_popularity_compare(
                language, popular_character_ordered
            )

            # Below threshold: discard. At or above 0.8: count as a strong
            # match so the search can stop early after three of them.
            if ratio < threshold:
                continue
            elif ratio >= 0.8:
                sufficient_match_count += 1

            results.append((language, round(ratio, 4)))

            if sufficient_match_count >= 3:
                break

    return sorted(
        filter_alt_coherence_matches(results), key=lambda x: x[1], reverse=True
    )
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/cli/__init__.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Re-export the command-line entry points at the `charset_normalizer.cli`
# package level.
from .__main__ import cli_detect, query_yes_no

__all__ = (
    "cli_detect",
    "query_yes_no",
)
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/cli/__main__.py
ADDED
|
@@ -0,0 +1,320 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import argparse
|
| 2 |
+
import sys
|
| 3 |
+
from json import dumps
|
| 4 |
+
from os.path import abspath, basename, dirname, join, realpath
|
| 5 |
+
from platform import python_version
|
| 6 |
+
from typing import List, Optional
|
| 7 |
+
from unicodedata import unidata_version
|
| 8 |
+
|
| 9 |
+
import charset_normalizer.md as md_module
|
| 10 |
+
from charset_normalizer import from_fp
|
| 11 |
+
from charset_normalizer.models import CliDetectionResult
|
| 12 |
+
from charset_normalizer.version import __version__
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def query_yes_no(question: str, default: Optional[str] = "yes") -> bool:
    """Ask a yes/no question via input() and return their answer.

    "question" is a string that is presented to the user.
    "default" is the presumed answer if the user just hits <Enter>.
    It must be "yes" (the default), "no" or None (meaning
    an answer is required of the user).

    The "answer" return value is True for "yes" or False for "no".

    Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input
    """
    valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False}
    # The prompt advertises which answer <Enter> selects (capital letter).
    if default is None:
        prompt = " [y/n] "
    elif default == "yes":
        prompt = " [Y/n] "
    elif default == "no":
        prompt = " [y/N] "
    else:
        raise ValueError("invalid default answer: '%s'" % default)

    # Loop until a recognized answer (or an accepted empty default) is given.
    while True:
        sys.stdout.write(question + prompt)
        choice = input().lower()
        if default is not None and choice == "":
            return valid[default]
        elif choice in valid:
            return valid[choice]
        else:
            sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n")
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def cli_detect(argv: Optional[List[str]] = None) -> int:
    """
    CLI assistant using ARGV and ArgumentParser
    :param argv: Argument list to parse; None means sys.argv[1:].
    :return: 0 if everything is fine, anything else equal trouble
    """
    parser = argparse.ArgumentParser(
        description="The Real First Universal Charset Detector. "
        "Discover originating encoding used on text file. "
        "Normalize text to unicode."
    )

    parser.add_argument(
        "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed"
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        default=False,
        dest="verbose",
        help="Display complementary information about file if any. "
        "Stdout will contain logs about the detection process.",
    )
    parser.add_argument(
        "-a",
        "--with-alternative",
        action="store_true",
        default=False,
        dest="alternatives",
        help="Output complementary possibilities if any. Top-level JSON WILL be a list.",
    )
    parser.add_argument(
        "-n",
        "--normalize",
        action="store_true",
        default=False,
        dest="normalize",
        help="Permit to normalize input file. If not set, program does not write anything.",
    )
    parser.add_argument(
        "-m",
        "--minimal",
        action="store_true",
        default=False,
        dest="minimal",
        help="Only output the charset detected to STDOUT. Disabling JSON output.",
    )
    parser.add_argument(
        "-r",
        "--replace",
        action="store_true",
        default=False,
        dest="replace",
        help="Replace file when trying to normalize it instead of creating a new one.",
    )
    parser.add_argument(
        "-f",
        "--force",
        action="store_true",
        default=False,
        dest="force",
        help="Replace file without asking if you are sure, use this flag with caution.",
    )
    parser.add_argument(
        "-i",
        "--no-preemptive",
        action="store_true",
        default=False,
        dest="no_preemptive",
        help="Disable looking at a charset declaration to hint the detector.",
    )
    parser.add_argument(
        "-t",
        "--threshold",
        action="store",
        default=0.2,
        type=float,
        dest="threshold",
        help="Define a custom maximum amount of chaos allowed in decoded content. 0. <= chaos <= 1.",
    )
    parser.add_argument(
        "--version",
        action="version",
        version="Charset-Normalizer {} - Python {} - Unicode {} - SpeedUp {}".format(
            __version__,
            python_version(),
            unidata_version,
            # The mypyc-compiled md module ends in .so/.pyd, not .py.
            "OFF" if md_module.__file__.lower().endswith(".py") else "ON",
        ),
        help="Show version information and exit.",
    )

    args = parser.parse_args(argv)

    # Flag-combination validation; argparse already opened the files, so
    # close them before bailing out with an error.
    if args.replace is True and args.normalize is False:
        if args.files:
            for my_file in args.files:
                my_file.close()
        print("Use --replace in addition of --normalize only.", file=sys.stderr)
        return 1

    if args.force is True and args.replace is False:
        if args.files:
            for my_file in args.files:
                my_file.close()
        print("Use --force in addition of --replace only.", file=sys.stderr)
        return 1

    if args.threshold < 0.0 or args.threshold > 1.0:
        if args.files:
            for my_file in args.files:
                my_file.close()
        print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr)
        return 1

    # Accumulates one CliDetectionResult per file (plus alternatives).
    x_ = []

    for my_file in args.files:
        matches = from_fp(
            my_file,
            threshold=args.threshold,
            explain=args.verbose,
            preemptive_behaviour=args.no_preemptive is False,
        )

        best_guess = matches.best()

        if best_guess is None:
            print(
                'Unable to identify originating encoding for "{}". {}'.format(
                    my_file.name,
                    (
                        "Maybe try increasing maximum amount of chaos."
                        if args.threshold < 1.0
                        else ""
                    ),
                ),
                file=sys.stderr,
            )
            # Placeholder result so the JSON output still lists this file.
            x_.append(
                CliDetectionResult(
                    abspath(my_file.name),
                    None,
                    [],
                    [],
                    "Unknown",
                    [],
                    False,
                    1.0,
                    0.0,
                    None,
                    True,
                )
            )
        else:
            x_.append(
                CliDetectionResult(
                    abspath(my_file.name),
                    best_guess.encoding,
                    best_guess.encoding_aliases,
                    [
                        cp
                        for cp in best_guess.could_be_from_charset
                        if cp != best_guess.encoding
                    ],
                    best_guess.language,
                    best_guess.alphabets,
                    best_guess.bom,
                    best_guess.percent_chaos,
                    best_guess.percent_coherence,
                    None,
                    True,
                )
            )

            # Optionally append the non-best candidates as alternatives.
            if len(matches) > 1 and args.alternatives:
                for el in matches:
                    if el != best_guess:
                        x_.append(
                            CliDetectionResult(
                                abspath(my_file.name),
                                el.encoding,
                                el.encoding_aliases,
                                [
                                    cp
                                    for cp in el.could_be_from_charset
                                    if cp != el.encoding
                                ],
                                el.language,
                                el.alphabets,
                                el.bom,
                                el.percent_chaos,
                                el.percent_coherence,
                                None,
                                False,
                            )
                        )

            if args.normalize is True:
                # Already-unicode content needs no re-encoding.
                if best_guess.encoding.startswith("utf") is True:
                    print(
                        '"{}" file does not need to be normalized, as it already came from unicode.'.format(
                            my_file.name
                        ),
                        file=sys.stderr,
                    )
                    if my_file.closed is False:
                        my_file.close()
                    continue

                dir_path = dirname(realpath(my_file.name))
                file_name = basename(realpath(my_file.name))

                o_: List[str] = file_name.split(".")

                if args.replace is False:
                    # Not replacing: inject the encoding into the output
                    # file name (e.g. foo.cp1252.txt).
                    o_.insert(-1, best_guess.encoding)
                    if my_file.closed is False:
                        my_file.close()
                elif (
                    args.force is False
                    and query_yes_no(
                        'Are you sure to normalize "{}" by replacing it ?'.format(
                            my_file.name
                        ),
                        "no",
                    )
                    is False
                ):
                    if my_file.closed is False:
                        my_file.close()
                    continue

                try:
                    # NOTE(review): always writes unicode_path onto x_[0],
                    # even when several files are processed — looks like it
                    # should target this file's own result entry; confirm
                    # against upstream before changing.
                    x_[0].unicode_path = join(dir_path, ".".join(o_))

                    with open(x_[0].unicode_path, "wb") as fp:
                        fp.write(best_guess.output())
                except IOError as e:
                    print(str(e), file=sys.stderr)
                    if my_file.closed is False:
                        my_file.close()
                    return 2

        if my_file.closed is False:
            my_file.close()

    if args.minimal is False:
        # Full JSON report: single object for one file, list for several.
        print(
            dumps(
                [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__,
                ensure_ascii=True,
                indent=4,
            )
        )
    else:
        # Minimal mode: only the detected charset name(s) per file.
        for my_file in args.files:
            print(
                ", ".join(
                    [
                        el.encoding or "undefined"
                        for el in x_
                        if el.path == abspath(my_file.name)
                    ]
                )
            )

    return 0
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
# Allow direct execution: `python -m charset_normalizer.cli`.
if __name__ == "__main__":
    cli_detect()
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (257 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-310.pyc
ADDED
|
Binary file (6.73 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/constant.py
ADDED
|
@@ -0,0 +1,1997 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE
|
| 3 |
+
from encodings.aliases import aliases
|
| 4 |
+
from re import IGNORECASE, compile as re_compile
|
| 5 |
+
from typing import Dict, List, Set, Union
|
| 6 |
+
|
| 7 |
+
# Contain for each eligible encoding a list of/item bytes SIG/BOM
|
| 8 |
+
ENCODING_MARKS: Dict[str, Union[bytes, List[bytes]]] = {
|
| 9 |
+
"utf_8": BOM_UTF8,
|
| 10 |
+
"utf_7": [
|
| 11 |
+
b"\x2b\x2f\x76\x38",
|
| 12 |
+
b"\x2b\x2f\x76\x39",
|
| 13 |
+
b"\x2b\x2f\x76\x2b",
|
| 14 |
+
b"\x2b\x2f\x76\x2f",
|
| 15 |
+
b"\x2b\x2f\x76\x38\x2d",
|
| 16 |
+
],
|
| 17 |
+
"gb18030": b"\x84\x31\x95\x33",
|
| 18 |
+
"utf_32": [BOM_UTF32_BE, BOM_UTF32_LE],
|
| 19 |
+
"utf_16": [BOM_UTF16_BE, BOM_UTF16_LE],
|
| 20 |
+
}
|
| 21 |
+
|
| 22 |
+
TOO_SMALL_SEQUENCE: int = 32
|
| 23 |
+
TOO_BIG_SEQUENCE: int = int(10e6)
|
| 24 |
+
|
| 25 |
+
UTF8_MAXIMAL_ALLOCATION: int = 1_112_064
|
| 26 |
+
|
| 27 |
+
# Up-to-date Unicode ucd/15.0.0
|
| 28 |
+
UNICODE_RANGES_COMBINED: Dict[str, range] = {
|
| 29 |
+
"Control character": range(32),
|
| 30 |
+
"Basic Latin": range(32, 128),
|
| 31 |
+
"Latin-1 Supplement": range(128, 256),
|
| 32 |
+
"Latin Extended-A": range(256, 384),
|
| 33 |
+
"Latin Extended-B": range(384, 592),
|
| 34 |
+
"IPA Extensions": range(592, 688),
|
| 35 |
+
"Spacing Modifier Letters": range(688, 768),
|
| 36 |
+
"Combining Diacritical Marks": range(768, 880),
|
| 37 |
+
"Greek and Coptic": range(880, 1024),
|
| 38 |
+
"Cyrillic": range(1024, 1280),
|
| 39 |
+
"Cyrillic Supplement": range(1280, 1328),
|
| 40 |
+
"Armenian": range(1328, 1424),
|
| 41 |
+
"Hebrew": range(1424, 1536),
|
| 42 |
+
"Arabic": range(1536, 1792),
|
| 43 |
+
"Syriac": range(1792, 1872),
|
| 44 |
+
"Arabic Supplement": range(1872, 1920),
|
| 45 |
+
"Thaana": range(1920, 1984),
|
| 46 |
+
"NKo": range(1984, 2048),
|
| 47 |
+
"Samaritan": range(2048, 2112),
|
| 48 |
+
"Mandaic": range(2112, 2144),
|
| 49 |
+
"Syriac Supplement": range(2144, 2160),
|
| 50 |
+
"Arabic Extended-B": range(2160, 2208),
|
| 51 |
+
"Arabic Extended-A": range(2208, 2304),
|
| 52 |
+
"Devanagari": range(2304, 2432),
|
| 53 |
+
"Bengali": range(2432, 2560),
|
| 54 |
+
"Gurmukhi": range(2560, 2688),
|
| 55 |
+
"Gujarati": range(2688, 2816),
|
| 56 |
+
"Oriya": range(2816, 2944),
|
| 57 |
+
"Tamil": range(2944, 3072),
|
| 58 |
+
"Telugu": range(3072, 3200),
|
| 59 |
+
"Kannada": range(3200, 3328),
|
| 60 |
+
"Malayalam": range(3328, 3456),
|
| 61 |
+
"Sinhala": range(3456, 3584),
|
| 62 |
+
"Thai": range(3584, 3712),
|
| 63 |
+
"Lao": range(3712, 3840),
|
| 64 |
+
"Tibetan": range(3840, 4096),
|
| 65 |
+
"Myanmar": range(4096, 4256),
|
| 66 |
+
"Georgian": range(4256, 4352),
|
| 67 |
+
"Hangul Jamo": range(4352, 4608),
|
| 68 |
+
"Ethiopic": range(4608, 4992),
|
| 69 |
+
"Ethiopic Supplement": range(4992, 5024),
|
| 70 |
+
"Cherokee": range(5024, 5120),
|
| 71 |
+
"Unified Canadian Aboriginal Syllabics": range(5120, 5760),
|
| 72 |
+
"Ogham": range(5760, 5792),
|
| 73 |
+
"Runic": range(5792, 5888),
|
| 74 |
+
"Tagalog": range(5888, 5920),
|
| 75 |
+
"Hanunoo": range(5920, 5952),
|
| 76 |
+
"Buhid": range(5952, 5984),
|
| 77 |
+
"Tagbanwa": range(5984, 6016),
|
| 78 |
+
"Khmer": range(6016, 6144),
|
| 79 |
+
"Mongolian": range(6144, 6320),
|
| 80 |
+
"Unified Canadian Aboriginal Syllabics Extended": range(6320, 6400),
|
| 81 |
+
"Limbu": range(6400, 6480),
|
| 82 |
+
"Tai Le": range(6480, 6528),
|
| 83 |
+
"New Tai Lue": range(6528, 6624),
|
| 84 |
+
"Khmer Symbols": range(6624, 6656),
|
| 85 |
+
"Buginese": range(6656, 6688),
|
| 86 |
+
"Tai Tham": range(6688, 6832),
|
| 87 |
+
"Combining Diacritical Marks Extended": range(6832, 6912),
|
| 88 |
+
"Balinese": range(6912, 7040),
|
| 89 |
+
"Sundanese": range(7040, 7104),
|
| 90 |
+
"Batak": range(7104, 7168),
|
| 91 |
+
"Lepcha": range(7168, 7248),
|
| 92 |
+
"Ol Chiki": range(7248, 7296),
|
| 93 |
+
"Cyrillic Extended-C": range(7296, 7312),
|
| 94 |
+
"Georgian Extended": range(7312, 7360),
|
| 95 |
+
"Sundanese Supplement": range(7360, 7376),
|
| 96 |
+
"Vedic Extensions": range(7376, 7424),
|
| 97 |
+
"Phonetic Extensions": range(7424, 7552),
|
| 98 |
+
"Phonetic Extensions Supplement": range(7552, 7616),
|
| 99 |
+
"Combining Diacritical Marks Supplement": range(7616, 7680),
|
| 100 |
+
"Latin Extended Additional": range(7680, 7936),
|
| 101 |
+
"Greek Extended": range(7936, 8192),
|
| 102 |
+
"General Punctuation": range(8192, 8304),
|
| 103 |
+
"Superscripts and Subscripts": range(8304, 8352),
|
| 104 |
+
"Currency Symbols": range(8352, 8400),
|
| 105 |
+
"Combining Diacritical Marks for Symbols": range(8400, 8448),
|
| 106 |
+
"Letterlike Symbols": range(8448, 8528),
|
| 107 |
+
"Number Forms": range(8528, 8592),
|
| 108 |
+
"Arrows": range(8592, 8704),
|
| 109 |
+
"Mathematical Operators": range(8704, 8960),
|
| 110 |
+
"Miscellaneous Technical": range(8960, 9216),
|
| 111 |
+
"Control Pictures": range(9216, 9280),
|
| 112 |
+
"Optical Character Recognition": range(9280, 9312),
|
| 113 |
+
"Enclosed Alphanumerics": range(9312, 9472),
|
| 114 |
+
"Box Drawing": range(9472, 9600),
|
| 115 |
+
"Block Elements": range(9600, 9632),
|
| 116 |
+
"Geometric Shapes": range(9632, 9728),
|
| 117 |
+
"Miscellaneous Symbols": range(9728, 9984),
|
| 118 |
+
"Dingbats": range(9984, 10176),
|
| 119 |
+
"Miscellaneous Mathematical Symbols-A": range(10176, 10224),
|
| 120 |
+
"Supplemental Arrows-A": range(10224, 10240),
|
| 121 |
+
"Braille Patterns": range(10240, 10496),
|
| 122 |
+
"Supplemental Arrows-B": range(10496, 10624),
|
| 123 |
+
"Miscellaneous Mathematical Symbols-B": range(10624, 10752),
|
| 124 |
+
"Supplemental Mathematical Operators": range(10752, 11008),
|
| 125 |
+
"Miscellaneous Symbols and Arrows": range(11008, 11264),
|
| 126 |
+
"Glagolitic": range(11264, 11360),
|
| 127 |
+
"Latin Extended-C": range(11360, 11392),
|
| 128 |
+
"Coptic": range(11392, 11520),
|
| 129 |
+
"Georgian Supplement": range(11520, 11568),
|
| 130 |
+
"Tifinagh": range(11568, 11648),
|
| 131 |
+
"Ethiopic Extended": range(11648, 11744),
|
| 132 |
+
"Cyrillic Extended-A": range(11744, 11776),
|
| 133 |
+
"Supplemental Punctuation": range(11776, 11904),
|
| 134 |
+
"CJK Radicals Supplement": range(11904, 12032),
|
| 135 |
+
"Kangxi Radicals": range(12032, 12256),
|
| 136 |
+
"Ideographic Description Characters": range(12272, 12288),
|
| 137 |
+
"CJK Symbols and Punctuation": range(12288, 12352),
|
| 138 |
+
"Hiragana": range(12352, 12448),
|
| 139 |
+
"Katakana": range(12448, 12544),
|
| 140 |
+
"Bopomofo": range(12544, 12592),
|
| 141 |
+
"Hangul Compatibility Jamo": range(12592, 12688),
|
| 142 |
+
"Kanbun": range(12688, 12704),
|
| 143 |
+
"Bopomofo Extended": range(12704, 12736),
|
| 144 |
+
"CJK Strokes": range(12736, 12784),
|
| 145 |
+
"Katakana Phonetic Extensions": range(12784, 12800),
|
| 146 |
+
"Enclosed CJK Letters and Months": range(12800, 13056),
|
| 147 |
+
"CJK Compatibility": range(13056, 13312),
|
| 148 |
+
"CJK Unified Ideographs Extension A": range(13312, 19904),
|
| 149 |
+
"Yijing Hexagram Symbols": range(19904, 19968),
|
| 150 |
+
"CJK Unified Ideographs": range(19968, 40960),
|
| 151 |
+
"Yi Syllables": range(40960, 42128),
|
| 152 |
+
"Yi Radicals": range(42128, 42192),
|
| 153 |
+
"Lisu": range(42192, 42240),
|
| 154 |
+
"Vai": range(42240, 42560),
|
| 155 |
+
"Cyrillic Extended-B": range(42560, 42656),
|
| 156 |
+
"Bamum": range(42656, 42752),
|
| 157 |
+
"Modifier Tone Letters": range(42752, 42784),
|
| 158 |
+
"Latin Extended-D": range(42784, 43008),
|
| 159 |
+
"Syloti Nagri": range(43008, 43056),
|
| 160 |
+
"Common Indic Number Forms": range(43056, 43072),
|
| 161 |
+
"Phags-pa": range(43072, 43136),
|
| 162 |
+
"Saurashtra": range(43136, 43232),
|
| 163 |
+
"Devanagari Extended": range(43232, 43264),
|
| 164 |
+
"Kayah Li": range(43264, 43312),
|
| 165 |
+
"Rejang": range(43312, 43360),
|
| 166 |
+
"Hangul Jamo Extended-A": range(43360, 43392),
|
| 167 |
+
"Javanese": range(43392, 43488),
|
| 168 |
+
"Myanmar Extended-B": range(43488, 43520),
|
| 169 |
+
"Cham": range(43520, 43616),
|
| 170 |
+
"Myanmar Extended-A": range(43616, 43648),
|
| 171 |
+
"Tai Viet": range(43648, 43744),
|
| 172 |
+
"Meetei Mayek Extensions": range(43744, 43776),
|
| 173 |
+
"Ethiopic Extended-A": range(43776, 43824),
|
| 174 |
+
"Latin Extended-E": range(43824, 43888),
|
| 175 |
+
"Cherokee Supplement": range(43888, 43968),
|
| 176 |
+
"Meetei Mayek": range(43968, 44032),
|
| 177 |
+
"Hangul Syllables": range(44032, 55216),
|
| 178 |
+
"Hangul Jamo Extended-B": range(55216, 55296),
|
| 179 |
+
"High Surrogates": range(55296, 56192),
|
| 180 |
+
"High Private Use Surrogates": range(56192, 56320),
|
| 181 |
+
"Low Surrogates": range(56320, 57344),
|
| 182 |
+
"Private Use Area": range(57344, 63744),
|
| 183 |
+
"CJK Compatibility Ideographs": range(63744, 64256),
|
| 184 |
+
"Alphabetic Presentation Forms": range(64256, 64336),
|
| 185 |
+
"Arabic Presentation Forms-A": range(64336, 65024),
|
| 186 |
+
"Variation Selectors": range(65024, 65040),
|
| 187 |
+
"Vertical Forms": range(65040, 65056),
|
| 188 |
+
"Combining Half Marks": range(65056, 65072),
|
| 189 |
+
"CJK Compatibility Forms": range(65072, 65104),
|
| 190 |
+
"Small Form Variants": range(65104, 65136),
|
| 191 |
+
"Arabic Presentation Forms-B": range(65136, 65280),
|
| 192 |
+
"Halfwidth and Fullwidth Forms": range(65280, 65520),
|
| 193 |
+
"Specials": range(65520, 65536),
|
| 194 |
+
"Linear B Syllabary": range(65536, 65664),
|
| 195 |
+
"Linear B Ideograms": range(65664, 65792),
|
| 196 |
+
"Aegean Numbers": range(65792, 65856),
|
| 197 |
+
"Ancient Greek Numbers": range(65856, 65936),
|
| 198 |
+
"Ancient Symbols": range(65936, 66000),
|
| 199 |
+
"Phaistos Disc": range(66000, 66048),
|
| 200 |
+
"Lycian": range(66176, 66208),
|
| 201 |
+
"Carian": range(66208, 66272),
|
| 202 |
+
"Coptic Epact Numbers": range(66272, 66304),
|
| 203 |
+
"Old Italic": range(66304, 66352),
|
| 204 |
+
"Gothic": range(66352, 66384),
|
| 205 |
+
"Old Permic": range(66384, 66432),
|
| 206 |
+
"Ugaritic": range(66432, 66464),
|
| 207 |
+
"Old Persian": range(66464, 66528),
|
| 208 |
+
"Deseret": range(66560, 66640),
|
| 209 |
+
"Shavian": range(66640, 66688),
|
| 210 |
+
"Osmanya": range(66688, 66736),
|
| 211 |
+
"Osage": range(66736, 66816),
|
| 212 |
+
"Elbasan": range(66816, 66864),
|
| 213 |
+
"Caucasian Albanian": range(66864, 66928),
|
| 214 |
+
"Vithkuqi": range(66928, 67008),
|
| 215 |
+
"Linear A": range(67072, 67456),
|
| 216 |
+
"Latin Extended-F": range(67456, 67520),
|
| 217 |
+
"Cypriot Syllabary": range(67584, 67648),
|
| 218 |
+
"Imperial Aramaic": range(67648, 67680),
|
| 219 |
+
"Palmyrene": range(67680, 67712),
|
| 220 |
+
"Nabataean": range(67712, 67760),
|
| 221 |
+
"Hatran": range(67808, 67840),
|
| 222 |
+
"Phoenician": range(67840, 67872),
|
| 223 |
+
"Lydian": range(67872, 67904),
|
| 224 |
+
"Meroitic Hieroglyphs": range(67968, 68000),
|
| 225 |
+
"Meroitic Cursive": range(68000, 68096),
|
| 226 |
+
"Kharoshthi": range(68096, 68192),
|
| 227 |
+
"Old South Arabian": range(68192, 68224),
|
| 228 |
+
"Old North Arabian": range(68224, 68256),
|
| 229 |
+
"Manichaean": range(68288, 68352),
|
| 230 |
+
"Avestan": range(68352, 68416),
|
| 231 |
+
"Inscriptional Parthian": range(68416, 68448),
|
| 232 |
+
"Inscriptional Pahlavi": range(68448, 68480),
|
| 233 |
+
"Psalter Pahlavi": range(68480, 68528),
|
| 234 |
+
"Old Turkic": range(68608, 68688),
|
| 235 |
+
"Old Hungarian": range(68736, 68864),
|
| 236 |
+
"Hanifi Rohingya": range(68864, 68928),
|
| 237 |
+
"Rumi Numeral Symbols": range(69216, 69248),
|
| 238 |
+
"Yezidi": range(69248, 69312),
|
| 239 |
+
"Arabic Extended-C": range(69312, 69376),
|
| 240 |
+
"Old Sogdian": range(69376, 69424),
|
| 241 |
+
"Sogdian": range(69424, 69488),
|
| 242 |
+
"Old Uyghur": range(69488, 69552),
|
| 243 |
+
"Chorasmian": range(69552, 69600),
|
| 244 |
+
"Elymaic": range(69600, 69632),
|
| 245 |
+
"Brahmi": range(69632, 69760),
|
| 246 |
+
"Kaithi": range(69760, 69840),
|
| 247 |
+
"Sora Sompeng": range(69840, 69888),
|
| 248 |
+
"Chakma": range(69888, 69968),
|
| 249 |
+
"Mahajani": range(69968, 70016),
|
| 250 |
+
"Sharada": range(70016, 70112),
|
| 251 |
+
"Sinhala Archaic Numbers": range(70112, 70144),
|
| 252 |
+
"Khojki": range(70144, 70224),
|
| 253 |
+
"Multani": range(70272, 70320),
|
| 254 |
+
"Khudawadi": range(70320, 70400),
|
| 255 |
+
"Grantha": range(70400, 70528),
|
| 256 |
+
"Newa": range(70656, 70784),
|
| 257 |
+
"Tirhuta": range(70784, 70880),
|
| 258 |
+
"Siddham": range(71040, 71168),
|
| 259 |
+
"Modi": range(71168, 71264),
|
| 260 |
+
"Mongolian Supplement": range(71264, 71296),
|
| 261 |
+
"Takri": range(71296, 71376),
|
| 262 |
+
"Ahom": range(71424, 71504),
|
| 263 |
+
"Dogra": range(71680, 71760),
|
| 264 |
+
"Warang Citi": range(71840, 71936),
|
| 265 |
+
"Dives Akuru": range(71936, 72032),
|
| 266 |
+
"Nandinagari": range(72096, 72192),
|
| 267 |
+
"Zanabazar Square": range(72192, 72272),
|
| 268 |
+
"Soyombo": range(72272, 72368),
|
| 269 |
+
"Unified Canadian Aboriginal Syllabics Extended-A": range(72368, 72384),
|
| 270 |
+
"Pau Cin Hau": range(72384, 72448),
|
| 271 |
+
"Devanagari Extended-A": range(72448, 72544),
|
| 272 |
+
"Bhaiksuki": range(72704, 72816),
|
| 273 |
+
"Marchen": range(72816, 72896),
|
| 274 |
+
"Masaram Gondi": range(72960, 73056),
|
| 275 |
+
"Gunjala Gondi": range(73056, 73136),
|
| 276 |
+
"Makasar": range(73440, 73472),
|
| 277 |
+
"Kawi": range(73472, 73568),
|
| 278 |
+
"Lisu Supplement": range(73648, 73664),
|
| 279 |
+
"Tamil Supplement": range(73664, 73728),
|
| 280 |
+
"Cuneiform": range(73728, 74752),
|
| 281 |
+
"Cuneiform Numbers and Punctuation": range(74752, 74880),
|
| 282 |
+
"Early Dynastic Cuneiform": range(74880, 75088),
|
| 283 |
+
"Cypro-Minoan": range(77712, 77824),
|
| 284 |
+
"Egyptian Hieroglyphs": range(77824, 78896),
|
| 285 |
+
"Egyptian Hieroglyph Format Controls": range(78896, 78944),
|
| 286 |
+
"Anatolian Hieroglyphs": range(82944, 83584),
|
| 287 |
+
"Bamum Supplement": range(92160, 92736),
|
| 288 |
+
"Mro": range(92736, 92784),
|
| 289 |
+
"Tangsa": range(92784, 92880),
|
| 290 |
+
"Bassa Vah": range(92880, 92928),
|
| 291 |
+
"Pahawh Hmong": range(92928, 93072),
|
| 292 |
+
"Medefaidrin": range(93760, 93856),
|
| 293 |
+
"Miao": range(93952, 94112),
|
| 294 |
+
"Ideographic Symbols and Punctuation": range(94176, 94208),
|
| 295 |
+
"Tangut": range(94208, 100352),
|
| 296 |
+
"Tangut Components": range(100352, 101120),
|
| 297 |
+
"Khitan Small Script": range(101120, 101632),
|
| 298 |
+
"Tangut Supplement": range(101632, 101760),
|
| 299 |
+
"Kana Extended-B": range(110576, 110592),
|
| 300 |
+
"Kana Supplement": range(110592, 110848),
|
| 301 |
+
"Kana Extended-A": range(110848, 110896),
|
| 302 |
+
"Small Kana Extension": range(110896, 110960),
|
| 303 |
+
"Nushu": range(110960, 111360),
|
| 304 |
+
"Duployan": range(113664, 113824),
|
| 305 |
+
"Shorthand Format Controls": range(113824, 113840),
|
| 306 |
+
"Znamenny Musical Notation": range(118528, 118736),
|
| 307 |
+
"Byzantine Musical Symbols": range(118784, 119040),
|
| 308 |
+
"Musical Symbols": range(119040, 119296),
|
| 309 |
+
"Ancient Greek Musical Notation": range(119296, 119376),
|
| 310 |
+
"Kaktovik Numerals": range(119488, 119520),
|
| 311 |
+
"Mayan Numerals": range(119520, 119552),
|
| 312 |
+
"Tai Xuan Jing Symbols": range(119552, 119648),
|
| 313 |
+
"Counting Rod Numerals": range(119648, 119680),
|
| 314 |
+
"Mathematical Alphanumeric Symbols": range(119808, 120832),
|
| 315 |
+
"Sutton SignWriting": range(120832, 121520),
|
| 316 |
+
"Latin Extended-G": range(122624, 122880),
|
| 317 |
+
"Glagolitic Supplement": range(122880, 122928),
|
| 318 |
+
"Cyrillic Extended-D": range(122928, 123024),
|
| 319 |
+
"Nyiakeng Puachue Hmong": range(123136, 123216),
|
| 320 |
+
"Toto": range(123536, 123584),
|
| 321 |
+
"Wancho": range(123584, 123648),
|
| 322 |
+
"Nag Mundari": range(124112, 124160),
|
| 323 |
+
"Ethiopic Extended-B": range(124896, 124928),
|
| 324 |
+
"Mende Kikakui": range(124928, 125152),
|
| 325 |
+
"Adlam": range(125184, 125280),
|
| 326 |
+
"Indic Siyaq Numbers": range(126064, 126144),
|
| 327 |
+
"Ottoman Siyaq Numbers": range(126208, 126288),
|
| 328 |
+
"Arabic Mathematical Alphabetic Symbols": range(126464, 126720),
|
| 329 |
+
"Mahjong Tiles": range(126976, 127024),
|
| 330 |
+
"Domino Tiles": range(127024, 127136),
|
| 331 |
+
"Playing Cards": range(127136, 127232),
|
| 332 |
+
"Enclosed Alphanumeric Supplement": range(127232, 127488),
|
| 333 |
+
"Enclosed Ideographic Supplement": range(127488, 127744),
|
| 334 |
+
"Miscellaneous Symbols and Pictographs": range(127744, 128512),
|
| 335 |
+
"Emoticons range(Emoji)": range(128512, 128592),
|
| 336 |
+
"Ornamental Dingbats": range(128592, 128640),
|
| 337 |
+
"Transport and Map Symbols": range(128640, 128768),
|
| 338 |
+
"Alchemical Symbols": range(128768, 128896),
|
| 339 |
+
"Geometric Shapes Extended": range(128896, 129024),
|
| 340 |
+
"Supplemental Arrows-C": range(129024, 129280),
|
| 341 |
+
"Supplemental Symbols and Pictographs": range(129280, 129536),
|
| 342 |
+
"Chess Symbols": range(129536, 129648),
|
| 343 |
+
"Symbols and Pictographs Extended-A": range(129648, 129792),
|
| 344 |
+
"Symbols for Legacy Computing": range(129792, 130048),
|
| 345 |
+
"CJK Unified Ideographs Extension B": range(131072, 173792),
|
| 346 |
+
"CJK Unified Ideographs Extension C": range(173824, 177984),
|
| 347 |
+
"CJK Unified Ideographs Extension D": range(177984, 178208),
|
| 348 |
+
"CJK Unified Ideographs Extension E": range(178208, 183984),
|
| 349 |
+
"CJK Unified Ideographs Extension F": range(183984, 191472),
|
| 350 |
+
"CJK Compatibility Ideographs Supplement": range(194560, 195104),
|
| 351 |
+
"CJK Unified Ideographs Extension G": range(196608, 201552),
|
| 352 |
+
"CJK Unified Ideographs Extension H": range(201552, 205744),
|
| 353 |
+
"Tags": range(917504, 917632),
|
| 354 |
+
"Variation Selectors Supplement": range(917760, 918000),
|
| 355 |
+
"Supplementary Private Use Area-A": range(983040, 1048576),
|
| 356 |
+
"Supplementary Private Use Area-B": range(1048576, 1114112),
|
| 357 |
+
}
|
| 358 |
+
|
| 359 |
+
|
| 360 |
+
UNICODE_SECONDARY_RANGE_KEYWORD: List[str] = [
|
| 361 |
+
"Supplement",
|
| 362 |
+
"Extended",
|
| 363 |
+
"Extensions",
|
| 364 |
+
"Modifier",
|
| 365 |
+
"Marks",
|
| 366 |
+
"Punctuation",
|
| 367 |
+
"Symbols",
|
| 368 |
+
"Forms",
|
| 369 |
+
"Operators",
|
| 370 |
+
"Miscellaneous",
|
| 371 |
+
"Drawing",
|
| 372 |
+
"Block",
|
| 373 |
+
"Shapes",
|
| 374 |
+
"Supplemental",
|
| 375 |
+
"Tags",
|
| 376 |
+
]
|
| 377 |
+
|
| 378 |
+
RE_POSSIBLE_ENCODING_INDICATION = re_compile(
|
| 379 |
+
r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)",
|
| 380 |
+
IGNORECASE,
|
| 381 |
+
)
|
| 382 |
+
|
| 383 |
+
IANA_NO_ALIASES = [
|
| 384 |
+
"cp720",
|
| 385 |
+
"cp737",
|
| 386 |
+
"cp856",
|
| 387 |
+
"cp874",
|
| 388 |
+
"cp875",
|
| 389 |
+
"cp1006",
|
| 390 |
+
"koi8_r",
|
| 391 |
+
"koi8_t",
|
| 392 |
+
"koi8_u",
|
| 393 |
+
]
|
| 394 |
+
|
| 395 |
+
IANA_SUPPORTED: List[str] = sorted(
|
| 396 |
+
filter(
|
| 397 |
+
lambda x: x.endswith("_codec") is False
|
| 398 |
+
and x not in {"rot_13", "tactis", "mbcs"},
|
| 399 |
+
list(set(aliases.values())) + IANA_NO_ALIASES,
|
| 400 |
+
)
|
| 401 |
+
)
|
| 402 |
+
|
| 403 |
+
IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED)
|
| 404 |
+
|
| 405 |
+
# pre-computed code page that are similar using the function cp_similarity.
|
| 406 |
+
IANA_SUPPORTED_SIMILAR: Dict[str, List[str]] = {
|
| 407 |
+
"cp037": ["cp1026", "cp1140", "cp273", "cp500"],
|
| 408 |
+
"cp1026": ["cp037", "cp1140", "cp273", "cp500"],
|
| 409 |
+
"cp1125": ["cp866"],
|
| 410 |
+
"cp1140": ["cp037", "cp1026", "cp273", "cp500"],
|
| 411 |
+
"cp1250": ["iso8859_2"],
|
| 412 |
+
"cp1251": ["kz1048", "ptcp154"],
|
| 413 |
+
"cp1252": ["iso8859_15", "iso8859_9", "latin_1"],
|
| 414 |
+
"cp1253": ["iso8859_7"],
|
| 415 |
+
"cp1254": ["iso8859_15", "iso8859_9", "latin_1"],
|
| 416 |
+
"cp1257": ["iso8859_13"],
|
| 417 |
+
"cp273": ["cp037", "cp1026", "cp1140", "cp500"],
|
| 418 |
+
"cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"],
|
| 419 |
+
"cp500": ["cp037", "cp1026", "cp1140", "cp273"],
|
| 420 |
+
"cp850": ["cp437", "cp857", "cp858", "cp865"],
|
| 421 |
+
"cp857": ["cp850", "cp858", "cp865"],
|
| 422 |
+
"cp858": ["cp437", "cp850", "cp857", "cp865"],
|
| 423 |
+
"cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"],
|
| 424 |
+
"cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"],
|
| 425 |
+
"cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"],
|
| 426 |
+
"cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"],
|
| 427 |
+
"cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"],
|
| 428 |
+
"cp866": ["cp1125"],
|
| 429 |
+
"iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"],
|
| 430 |
+
"iso8859_11": ["tis_620"],
|
| 431 |
+
"iso8859_13": ["cp1257"],
|
| 432 |
+
"iso8859_14": [
|
| 433 |
+
"iso8859_10",
|
| 434 |
+
"iso8859_15",
|
| 435 |
+
"iso8859_16",
|
| 436 |
+
"iso8859_3",
|
| 437 |
+
"iso8859_9",
|
| 438 |
+
"latin_1",
|
| 439 |
+
],
|
| 440 |
+
"iso8859_15": [
|
| 441 |
+
"cp1252",
|
| 442 |
+
"cp1254",
|
| 443 |
+
"iso8859_10",
|
| 444 |
+
"iso8859_14",
|
| 445 |
+
"iso8859_16",
|
| 446 |
+
"iso8859_3",
|
| 447 |
+
"iso8859_9",
|
| 448 |
+
"latin_1",
|
| 449 |
+
],
|
| 450 |
+
"iso8859_16": [
|
| 451 |
+
"iso8859_14",
|
| 452 |
+
"iso8859_15",
|
| 453 |
+
"iso8859_2",
|
| 454 |
+
"iso8859_3",
|
| 455 |
+
"iso8859_9",
|
| 456 |
+
"latin_1",
|
| 457 |
+
],
|
| 458 |
+
"iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"],
|
| 459 |
+
"iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"],
|
| 460 |
+
"iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"],
|
| 461 |
+
"iso8859_7": ["cp1253"],
|
| 462 |
+
"iso8859_9": [
|
| 463 |
+
"cp1252",
|
| 464 |
+
"cp1254",
|
| 465 |
+
"cp1258",
|
| 466 |
+
"iso8859_10",
|
| 467 |
+
"iso8859_14",
|
| 468 |
+
"iso8859_15",
|
| 469 |
+
"iso8859_16",
|
| 470 |
+
"iso8859_3",
|
| 471 |
+
"iso8859_4",
|
| 472 |
+
"latin_1",
|
| 473 |
+
],
|
| 474 |
+
"kz1048": ["cp1251", "ptcp154"],
|
| 475 |
+
"latin_1": [
|
| 476 |
+
"cp1252",
|
| 477 |
+
"cp1254",
|
| 478 |
+
"cp1258",
|
| 479 |
+
"iso8859_10",
|
| 480 |
+
"iso8859_14",
|
| 481 |
+
"iso8859_15",
|
| 482 |
+
"iso8859_16",
|
| 483 |
+
"iso8859_3",
|
| 484 |
+
"iso8859_4",
|
| 485 |
+
"iso8859_9",
|
| 486 |
+
],
|
| 487 |
+
"mac_iceland": ["mac_roman", "mac_turkish"],
|
| 488 |
+
"mac_roman": ["mac_iceland", "mac_turkish"],
|
| 489 |
+
"mac_turkish": ["mac_iceland", "mac_roman"],
|
| 490 |
+
"ptcp154": ["cp1251", "kz1048"],
|
| 491 |
+
"tis_620": ["iso8859_11"],
|
| 492 |
+
}
|
| 493 |
+
|
| 494 |
+
|
| 495 |
+
CHARDET_CORRESPONDENCE: Dict[str, str] = {
|
| 496 |
+
"iso2022_kr": "ISO-2022-KR",
|
| 497 |
+
"iso2022_jp": "ISO-2022-JP",
|
| 498 |
+
"euc_kr": "EUC-KR",
|
| 499 |
+
"tis_620": "TIS-620",
|
| 500 |
+
"utf_32": "UTF-32",
|
| 501 |
+
"euc_jp": "EUC-JP",
|
| 502 |
+
"koi8_r": "KOI8-R",
|
| 503 |
+
"iso8859_1": "ISO-8859-1",
|
| 504 |
+
"iso8859_2": "ISO-8859-2",
|
| 505 |
+
"iso8859_5": "ISO-8859-5",
|
| 506 |
+
"iso8859_6": "ISO-8859-6",
|
| 507 |
+
"iso8859_7": "ISO-8859-7",
|
| 508 |
+
"iso8859_8": "ISO-8859-8",
|
| 509 |
+
"utf_16": "UTF-16",
|
| 510 |
+
"cp855": "IBM855",
|
| 511 |
+
"mac_cyrillic": "MacCyrillic",
|
| 512 |
+
"gb2312": "GB2312",
|
| 513 |
+
"gb18030": "GB18030",
|
| 514 |
+
"cp932": "CP932",
|
| 515 |
+
"cp866": "IBM866",
|
| 516 |
+
"utf_8": "utf-8",
|
| 517 |
+
"utf_8_sig": "UTF-8-SIG",
|
| 518 |
+
"shift_jis": "SHIFT_JIS",
|
| 519 |
+
"big5": "Big5",
|
| 520 |
+
"cp1250": "windows-1250",
|
| 521 |
+
"cp1251": "windows-1251",
|
| 522 |
+
"cp1252": "Windows-1252",
|
| 523 |
+
"cp1253": "windows-1253",
|
| 524 |
+
"cp1255": "windows-1255",
|
| 525 |
+
"cp1256": "windows-1256",
|
| 526 |
+
"cp1254": "Windows-1254",
|
| 527 |
+
"cp949": "CP949",
|
| 528 |
+
}
|
| 529 |
+
|
| 530 |
+
|
| 531 |
+
COMMON_SAFE_ASCII_CHARACTERS: Set[str] = {
|
| 532 |
+
"<",
|
| 533 |
+
">",
|
| 534 |
+
"=",
|
| 535 |
+
":",
|
| 536 |
+
"/",
|
| 537 |
+
"&",
|
| 538 |
+
";",
|
| 539 |
+
"{",
|
| 540 |
+
"}",
|
| 541 |
+
"[",
|
| 542 |
+
"]",
|
| 543 |
+
",",
|
| 544 |
+
"|",
|
| 545 |
+
'"',
|
| 546 |
+
"-",
|
| 547 |
+
"(",
|
| 548 |
+
")",
|
| 549 |
+
}
|
| 550 |
+
|
| 551 |
+
|
| 552 |
+
KO_NAMES: Set[str] = {"johab", "cp949", "euc_kr"}
|
| 553 |
+
ZH_NAMES: Set[str] = {"big5", "cp950", "big5hkscs", "hz"}
|
| 554 |
+
|
| 555 |
+
# Logging LEVEL below DEBUG
|
| 556 |
+
TRACE: int = 5
|
| 557 |
+
|
| 558 |
+
|
| 559 |
+
# Language label that contain the em dash "—"
|
| 560 |
+
# character are to be considered alternative seq to origin
|
| 561 |
+
FREQUENCIES: Dict[str, List[str]] = {
|
| 562 |
+
"English": [
|
| 563 |
+
"e",
|
| 564 |
+
"a",
|
| 565 |
+
"t",
|
| 566 |
+
"i",
|
| 567 |
+
"o",
|
| 568 |
+
"n",
|
| 569 |
+
"s",
|
| 570 |
+
"r",
|
| 571 |
+
"h",
|
| 572 |
+
"l",
|
| 573 |
+
"d",
|
| 574 |
+
"c",
|
| 575 |
+
"u",
|
| 576 |
+
"m",
|
| 577 |
+
"f",
|
| 578 |
+
"p",
|
| 579 |
+
"g",
|
| 580 |
+
"w",
|
| 581 |
+
"y",
|
| 582 |
+
"b",
|
| 583 |
+
"v",
|
| 584 |
+
"k",
|
| 585 |
+
"x",
|
| 586 |
+
"j",
|
| 587 |
+
"z",
|
| 588 |
+
"q",
|
| 589 |
+
],
|
| 590 |
+
"English—": [
|
| 591 |
+
"e",
|
| 592 |
+
"a",
|
| 593 |
+
"t",
|
| 594 |
+
"i",
|
| 595 |
+
"o",
|
| 596 |
+
"n",
|
| 597 |
+
"s",
|
| 598 |
+
"r",
|
| 599 |
+
"h",
|
| 600 |
+
"l",
|
| 601 |
+
"d",
|
| 602 |
+
"c",
|
| 603 |
+
"m",
|
| 604 |
+
"u",
|
| 605 |
+
"f",
|
| 606 |
+
"p",
|
| 607 |
+
"g",
|
| 608 |
+
"w",
|
| 609 |
+
"b",
|
| 610 |
+
"y",
|
| 611 |
+
"v",
|
| 612 |
+
"k",
|
| 613 |
+
"j",
|
| 614 |
+
"x",
|
| 615 |
+
"z",
|
| 616 |
+
"q",
|
| 617 |
+
],
|
| 618 |
+
"German": [
|
| 619 |
+
"e",
|
| 620 |
+
"n",
|
| 621 |
+
"i",
|
| 622 |
+
"r",
|
| 623 |
+
"s",
|
| 624 |
+
"t",
|
| 625 |
+
"a",
|
| 626 |
+
"d",
|
| 627 |
+
"h",
|
| 628 |
+
"u",
|
| 629 |
+
"l",
|
| 630 |
+
"g",
|
| 631 |
+
"o",
|
| 632 |
+
"c",
|
| 633 |
+
"m",
|
| 634 |
+
"b",
|
| 635 |
+
"f",
|
| 636 |
+
"k",
|
| 637 |
+
"w",
|
| 638 |
+
"z",
|
| 639 |
+
"p",
|
| 640 |
+
"v",
|
| 641 |
+
"ü",
|
| 642 |
+
"ä",
|
| 643 |
+
"ö",
|
| 644 |
+
"j",
|
| 645 |
+
],
|
| 646 |
+
"French": [
|
| 647 |
+
"e",
|
| 648 |
+
"a",
|
| 649 |
+
"s",
|
| 650 |
+
"n",
|
| 651 |
+
"i",
|
| 652 |
+
"t",
|
| 653 |
+
"r",
|
| 654 |
+
"l",
|
| 655 |
+
"u",
|
| 656 |
+
"o",
|
| 657 |
+
"d",
|
| 658 |
+
"c",
|
| 659 |
+
"p",
|
| 660 |
+
"m",
|
| 661 |
+
"é",
|
| 662 |
+
"v",
|
| 663 |
+
"g",
|
| 664 |
+
"f",
|
| 665 |
+
"b",
|
| 666 |
+
"h",
|
| 667 |
+
"q",
|
| 668 |
+
"à",
|
| 669 |
+
"x",
|
| 670 |
+
"è",
|
| 671 |
+
"y",
|
| 672 |
+
"j",
|
| 673 |
+
],
|
| 674 |
+
"Dutch": [
|
| 675 |
+
"e",
|
| 676 |
+
"n",
|
| 677 |
+
"a",
|
| 678 |
+
"i",
|
| 679 |
+
"r",
|
| 680 |
+
"t",
|
| 681 |
+
"o",
|
| 682 |
+
"d",
|
| 683 |
+
"s",
|
| 684 |
+
"l",
|
| 685 |
+
"g",
|
| 686 |
+
"h",
|
| 687 |
+
"v",
|
| 688 |
+
"m",
|
| 689 |
+
"u",
|
| 690 |
+
"k",
|
| 691 |
+
"c",
|
| 692 |
+
"p",
|
| 693 |
+
"b",
|
| 694 |
+
"w",
|
| 695 |
+
"j",
|
| 696 |
+
"z",
|
| 697 |
+
"f",
|
| 698 |
+
"y",
|
| 699 |
+
"x",
|
| 700 |
+
"ë",
|
| 701 |
+
],
|
| 702 |
+
"Italian": [
|
| 703 |
+
"e",
|
| 704 |
+
"i",
|
| 705 |
+
"a",
|
| 706 |
+
"o",
|
| 707 |
+
"n",
|
| 708 |
+
"l",
|
| 709 |
+
"t",
|
| 710 |
+
"r",
|
| 711 |
+
"s",
|
| 712 |
+
"c",
|
| 713 |
+
"d",
|
| 714 |
+
"u",
|
| 715 |
+
"p",
|
| 716 |
+
"m",
|
| 717 |
+
"g",
|
| 718 |
+
"v",
|
| 719 |
+
"f",
|
| 720 |
+
"b",
|
| 721 |
+
"z",
|
| 722 |
+
"h",
|
| 723 |
+
"q",
|
| 724 |
+
"è",
|
| 725 |
+
"à",
|
| 726 |
+
"k",
|
| 727 |
+
"y",
|
| 728 |
+
"ò",
|
| 729 |
+
],
|
| 730 |
+
"Polish": [
|
| 731 |
+
"a",
|
| 732 |
+
"i",
|
| 733 |
+
"o",
|
| 734 |
+
"e",
|
| 735 |
+
"n",
|
| 736 |
+
"r",
|
| 737 |
+
"z",
|
| 738 |
+
"w",
|
| 739 |
+
"s",
|
| 740 |
+
"c",
|
| 741 |
+
"t",
|
| 742 |
+
"k",
|
| 743 |
+
"y",
|
| 744 |
+
"d",
|
| 745 |
+
"p",
|
| 746 |
+
"m",
|
| 747 |
+
"u",
|
| 748 |
+
"l",
|
| 749 |
+
"j",
|
| 750 |
+
"ł",
|
| 751 |
+
"g",
|
| 752 |
+
"b",
|
| 753 |
+
"h",
|
| 754 |
+
"ą",
|
| 755 |
+
"ę",
|
| 756 |
+
"ó",
|
| 757 |
+
],
|
| 758 |
+
"Spanish": [
|
| 759 |
+
"e",
|
| 760 |
+
"a",
|
| 761 |
+
"o",
|
| 762 |
+
"n",
|
| 763 |
+
"s",
|
| 764 |
+
"r",
|
| 765 |
+
"i",
|
| 766 |
+
"l",
|
| 767 |
+
"d",
|
| 768 |
+
"t",
|
| 769 |
+
"c",
|
| 770 |
+
"u",
|
| 771 |
+
"m",
|
| 772 |
+
"p",
|
| 773 |
+
"b",
|
| 774 |
+
"g",
|
| 775 |
+
"v",
|
| 776 |
+
"f",
|
| 777 |
+
"y",
|
| 778 |
+
"ó",
|
| 779 |
+
"h",
|
| 780 |
+
"q",
|
| 781 |
+
"í",
|
| 782 |
+
"j",
|
| 783 |
+
"z",
|
| 784 |
+
"á",
|
| 785 |
+
],
|
| 786 |
+
"Russian": [
|
| 787 |
+
"о",
|
| 788 |
+
"а",
|
| 789 |
+
"е",
|
| 790 |
+
"и",
|
| 791 |
+
"н",
|
| 792 |
+
"с",
|
| 793 |
+
"т",
|
| 794 |
+
"р",
|
| 795 |
+
"в",
|
| 796 |
+
"л",
|
| 797 |
+
"к",
|
| 798 |
+
"м",
|
| 799 |
+
"д",
|
| 800 |
+
"п",
|
| 801 |
+
"у",
|
| 802 |
+
"г",
|
| 803 |
+
"я",
|
| 804 |
+
"ы",
|
| 805 |
+
"з",
|
| 806 |
+
"б",
|
| 807 |
+
"й",
|
| 808 |
+
"ь",
|
| 809 |
+
"ч",
|
| 810 |
+
"х",
|
| 811 |
+
"ж",
|
| 812 |
+
"ц",
|
| 813 |
+
],
|
| 814 |
+
# Jap-Kanji
|
| 815 |
+
"Japanese": [
|
| 816 |
+
"人",
|
| 817 |
+
"一",
|
| 818 |
+
"大",
|
| 819 |
+
"亅",
|
| 820 |
+
"丁",
|
| 821 |
+
"丨",
|
| 822 |
+
"竹",
|
| 823 |
+
"笑",
|
| 824 |
+
"口",
|
| 825 |
+
"日",
|
| 826 |
+
"今",
|
| 827 |
+
"二",
|
| 828 |
+
"彳",
|
| 829 |
+
"行",
|
| 830 |
+
"十",
|
| 831 |
+
"土",
|
| 832 |
+
"丶",
|
| 833 |
+
"寸",
|
| 834 |
+
"寺",
|
| 835 |
+
"時",
|
| 836 |
+
"乙",
|
| 837 |
+
"丿",
|
| 838 |
+
"乂",
|
| 839 |
+
"气",
|
| 840 |
+
"気",
|
| 841 |
+
"冂",
|
| 842 |
+
"巾",
|
| 843 |
+
"亠",
|
| 844 |
+
"市",
|
| 845 |
+
"目",
|
| 846 |
+
"儿",
|
| 847 |
+
"見",
|
| 848 |
+
"八",
|
| 849 |
+
"小",
|
| 850 |
+
"凵",
|
| 851 |
+
"県",
|
| 852 |
+
"月",
|
| 853 |
+
"彐",
|
| 854 |
+
"門",
|
| 855 |
+
"間",
|
| 856 |
+
"木",
|
| 857 |
+
"東",
|
| 858 |
+
"山",
|
| 859 |
+
"出",
|
| 860 |
+
"本",
|
| 861 |
+
"中",
|
| 862 |
+
"刀",
|
| 863 |
+
"分",
|
| 864 |
+
"耳",
|
| 865 |
+
"又",
|
| 866 |
+
"取",
|
| 867 |
+
"最",
|
| 868 |
+
"言",
|
| 869 |
+
"田",
|
| 870 |
+
"心",
|
| 871 |
+
"思",
|
| 872 |
+
"刂",
|
| 873 |
+
"前",
|
| 874 |
+
"京",
|
| 875 |
+
"尹",
|
| 876 |
+
"事",
|
| 877 |
+
"生",
|
| 878 |
+
"厶",
|
| 879 |
+
"云",
|
| 880 |
+
"会",
|
| 881 |
+
"未",
|
| 882 |
+
"来",
|
| 883 |
+
"白",
|
| 884 |
+
"冫",
|
| 885 |
+
"楽",
|
| 886 |
+
"灬",
|
| 887 |
+
"馬",
|
| 888 |
+
"尸",
|
| 889 |
+
"尺",
|
| 890 |
+
"駅",
|
| 891 |
+
"明",
|
| 892 |
+
"耂",
|
| 893 |
+
"者",
|
| 894 |
+
"了",
|
| 895 |
+
"阝",
|
| 896 |
+
"都",
|
| 897 |
+
"高",
|
| 898 |
+
"卜",
|
| 899 |
+
"占",
|
| 900 |
+
"厂",
|
| 901 |
+
"广",
|
| 902 |
+
"店",
|
| 903 |
+
"子",
|
| 904 |
+
"申",
|
| 905 |
+
"奄",
|
| 906 |
+
"亻",
|
| 907 |
+
"俺",
|
| 908 |
+
"上",
|
| 909 |
+
"方",
|
| 910 |
+
"冖",
|
| 911 |
+
"学",
|
| 912 |
+
"衣",
|
| 913 |
+
"艮",
|
| 914 |
+
"食",
|
| 915 |
+
"自",
|
| 916 |
+
],
|
| 917 |
+
# Jap-Katakana
|
| 918 |
+
"Japanese—": [
|
| 919 |
+
"ー",
|
| 920 |
+
"ン",
|
| 921 |
+
"ス",
|
| 922 |
+
"・",
|
| 923 |
+
"ル",
|
| 924 |
+
"ト",
|
| 925 |
+
"リ",
|
| 926 |
+
"イ",
|
| 927 |
+
"ア",
|
| 928 |
+
"ラ",
|
| 929 |
+
"ッ",
|
| 930 |
+
"ク",
|
| 931 |
+
"ド",
|
| 932 |
+
"シ",
|
| 933 |
+
"レ",
|
| 934 |
+
"ジ",
|
| 935 |
+
"タ",
|
| 936 |
+
"フ",
|
| 937 |
+
"ロ",
|
| 938 |
+
"カ",
|
| 939 |
+
"テ",
|
| 940 |
+
"マ",
|
| 941 |
+
"ィ",
|
| 942 |
+
"グ",
|
| 943 |
+
"バ",
|
| 944 |
+
"ム",
|
| 945 |
+
"プ",
|
| 946 |
+
"オ",
|
| 947 |
+
"コ",
|
| 948 |
+
"デ",
|
| 949 |
+
"ニ",
|
| 950 |
+
"ウ",
|
| 951 |
+
"メ",
|
| 952 |
+
"サ",
|
| 953 |
+
"ビ",
|
| 954 |
+
"ナ",
|
| 955 |
+
"ブ",
|
| 956 |
+
"ャ",
|
| 957 |
+
"エ",
|
| 958 |
+
"ュ",
|
| 959 |
+
"チ",
|
| 960 |
+
"キ",
|
| 961 |
+
"ズ",
|
| 962 |
+
"ダ",
|
| 963 |
+
"パ",
|
| 964 |
+
"ミ",
|
| 965 |
+
"ェ",
|
| 966 |
+
"ョ",
|
| 967 |
+
"ハ",
|
| 968 |
+
"セ",
|
| 969 |
+
"ベ",
|
| 970 |
+
"ガ",
|
| 971 |
+
"モ",
|
| 972 |
+
"ツ",
|
| 973 |
+
"ネ",
|
| 974 |
+
"ボ",
|
| 975 |
+
"ソ",
|
| 976 |
+
"ノ",
|
| 977 |
+
"ァ",
|
| 978 |
+
"ヴ",
|
| 979 |
+
"ワ",
|
| 980 |
+
"ポ",
|
| 981 |
+
"ペ",
|
| 982 |
+
"ピ",
|
| 983 |
+
"ケ",
|
| 984 |
+
"ゴ",
|
| 985 |
+
"ギ",
|
| 986 |
+
"ザ",
|
| 987 |
+
"ホ",
|
| 988 |
+
"ゲ",
|
| 989 |
+
"ォ",
|
| 990 |
+
"ヤ",
|
| 991 |
+
"ヒ",
|
| 992 |
+
"ユ",
|
| 993 |
+
"ヨ",
|
| 994 |
+
"ヘ",
|
| 995 |
+
"ゼ",
|
| 996 |
+
"ヌ",
|
| 997 |
+
"ゥ",
|
| 998 |
+
"ゾ",
|
| 999 |
+
"ヶ",
|
| 1000 |
+
"ヂ",
|
| 1001 |
+
"ヲ",
|
| 1002 |
+
"ヅ",
|
| 1003 |
+
"ヵ",
|
| 1004 |
+
"ヱ",
|
| 1005 |
+
"ヰ",
|
| 1006 |
+
"ヮ",
|
| 1007 |
+
"ヽ",
|
| 1008 |
+
"゠",
|
| 1009 |
+
"ヾ",
|
| 1010 |
+
"ヷ",
|
| 1011 |
+
"ヿ",
|
| 1012 |
+
"ヸ",
|
| 1013 |
+
"ヹ",
|
| 1014 |
+
"ヺ",
|
| 1015 |
+
],
|
| 1016 |
+
# Jap-Hiragana
|
| 1017 |
+
"Japanese——": [
|
| 1018 |
+
"の",
|
| 1019 |
+
"に",
|
| 1020 |
+
"る",
|
| 1021 |
+
"た",
|
| 1022 |
+
"と",
|
| 1023 |
+
"は",
|
| 1024 |
+
"し",
|
| 1025 |
+
"い",
|
| 1026 |
+
"を",
|
| 1027 |
+
"で",
|
| 1028 |
+
"て",
|
| 1029 |
+
"が",
|
| 1030 |
+
"な",
|
| 1031 |
+
"れ",
|
| 1032 |
+
"か",
|
| 1033 |
+
"ら",
|
| 1034 |
+
"さ",
|
| 1035 |
+
"っ",
|
| 1036 |
+
"り",
|
| 1037 |
+
"す",
|
| 1038 |
+
"あ",
|
| 1039 |
+
"も",
|
| 1040 |
+
"こ",
|
| 1041 |
+
"ま",
|
| 1042 |
+
"う",
|
| 1043 |
+
"く",
|
| 1044 |
+
"よ",
|
| 1045 |
+
"き",
|
| 1046 |
+
"ん",
|
| 1047 |
+
"め",
|
| 1048 |
+
"お",
|
| 1049 |
+
"け",
|
| 1050 |
+
"そ",
|
| 1051 |
+
"つ",
|
| 1052 |
+
"だ",
|
| 1053 |
+
"や",
|
| 1054 |
+
"え",
|
| 1055 |
+
"ど",
|
| 1056 |
+
"わ",
|
| 1057 |
+
"ち",
|
| 1058 |
+
"み",
|
| 1059 |
+
"せ",
|
| 1060 |
+
"じ",
|
| 1061 |
+
"ば",
|
| 1062 |
+
"へ",
|
| 1063 |
+
"び",
|
| 1064 |
+
"ず",
|
| 1065 |
+
"ろ",
|
| 1066 |
+
"ほ",
|
| 1067 |
+
"げ",
|
| 1068 |
+
"む",
|
| 1069 |
+
"べ",
|
| 1070 |
+
"ひ",
|
| 1071 |
+
"ょ",
|
| 1072 |
+
"ゆ",
|
| 1073 |
+
"ぶ",
|
| 1074 |
+
"ご",
|
| 1075 |
+
"ゃ",
|
| 1076 |
+
"ね",
|
| 1077 |
+
"ふ",
|
| 1078 |
+
"ぐ",
|
| 1079 |
+
"ぎ",
|
| 1080 |
+
"ぼ",
|
| 1081 |
+
"ゅ",
|
| 1082 |
+
"づ",
|
| 1083 |
+
"ざ",
|
| 1084 |
+
"ぞ",
|
| 1085 |
+
"ぬ",
|
| 1086 |
+
"ぜ",
|
| 1087 |
+
"ぱ",
|
| 1088 |
+
"ぽ",
|
| 1089 |
+
"ぷ",
|
| 1090 |
+
"ぴ",
|
| 1091 |
+
"ぃ",
|
| 1092 |
+
"ぁ",
|
| 1093 |
+
"ぇ",
|
| 1094 |
+
"ぺ",
|
| 1095 |
+
"ゞ",
|
| 1096 |
+
"ぢ",
|
| 1097 |
+
"ぉ",
|
| 1098 |
+
"ぅ",
|
| 1099 |
+
"ゐ",
|
| 1100 |
+
"ゝ",
|
| 1101 |
+
"ゑ",
|
| 1102 |
+
"゛",
|
| 1103 |
+
"゜",
|
| 1104 |
+
"ゎ",
|
| 1105 |
+
"ゔ",
|
| 1106 |
+
"゚",
|
| 1107 |
+
"ゟ",
|
| 1108 |
+
"゙",
|
| 1109 |
+
"ゕ",
|
| 1110 |
+
"ゖ",
|
| 1111 |
+
],
|
| 1112 |
+
"Portuguese": [
|
| 1113 |
+
"a",
|
| 1114 |
+
"e",
|
| 1115 |
+
"o",
|
| 1116 |
+
"s",
|
| 1117 |
+
"i",
|
| 1118 |
+
"r",
|
| 1119 |
+
"d",
|
| 1120 |
+
"n",
|
| 1121 |
+
"t",
|
| 1122 |
+
"m",
|
| 1123 |
+
"u",
|
| 1124 |
+
"c",
|
| 1125 |
+
"l",
|
| 1126 |
+
"p",
|
| 1127 |
+
"g",
|
| 1128 |
+
"v",
|
| 1129 |
+
"b",
|
| 1130 |
+
"f",
|
| 1131 |
+
"h",
|
| 1132 |
+
"ã",
|
| 1133 |
+
"q",
|
| 1134 |
+
"é",
|
| 1135 |
+
"ç",
|
| 1136 |
+
"á",
|
| 1137 |
+
"z",
|
| 1138 |
+
"í",
|
| 1139 |
+
],
|
| 1140 |
+
"Swedish": [
|
| 1141 |
+
"e",
|
| 1142 |
+
"a",
|
| 1143 |
+
"n",
|
| 1144 |
+
"r",
|
| 1145 |
+
"t",
|
| 1146 |
+
"s",
|
| 1147 |
+
"i",
|
| 1148 |
+
"l",
|
| 1149 |
+
"d",
|
| 1150 |
+
"o",
|
| 1151 |
+
"m",
|
| 1152 |
+
"k",
|
| 1153 |
+
"g",
|
| 1154 |
+
"v",
|
| 1155 |
+
"h",
|
| 1156 |
+
"f",
|
| 1157 |
+
"u",
|
| 1158 |
+
"p",
|
| 1159 |
+
"ä",
|
| 1160 |
+
"c",
|
| 1161 |
+
"b",
|
| 1162 |
+
"ö",
|
| 1163 |
+
"å",
|
| 1164 |
+
"y",
|
| 1165 |
+
"j",
|
| 1166 |
+
"x",
|
| 1167 |
+
],
|
| 1168 |
+
"Chinese": [
|
| 1169 |
+
"的",
|
| 1170 |
+
"一",
|
| 1171 |
+
"是",
|
| 1172 |
+
"不",
|
| 1173 |
+
"了",
|
| 1174 |
+
"在",
|
| 1175 |
+
"人",
|
| 1176 |
+
"有",
|
| 1177 |
+
"我",
|
| 1178 |
+
"他",
|
| 1179 |
+
"这",
|
| 1180 |
+
"个",
|
| 1181 |
+
"们",
|
| 1182 |
+
"中",
|
| 1183 |
+
"来",
|
| 1184 |
+
"上",
|
| 1185 |
+
"大",
|
| 1186 |
+
"为",
|
| 1187 |
+
"和",
|
| 1188 |
+
"国",
|
| 1189 |
+
"地",
|
| 1190 |
+
"到",
|
| 1191 |
+
"以",
|
| 1192 |
+
"说",
|
| 1193 |
+
"时",
|
| 1194 |
+
"要",
|
| 1195 |
+
"就",
|
| 1196 |
+
"出",
|
| 1197 |
+
"会",
|
| 1198 |
+
"可",
|
| 1199 |
+
"也",
|
| 1200 |
+
"你",
|
| 1201 |
+
"对",
|
| 1202 |
+
"生",
|
| 1203 |
+
"能",
|
| 1204 |
+
"而",
|
| 1205 |
+
"子",
|
| 1206 |
+
"那",
|
| 1207 |
+
"得",
|
| 1208 |
+
"于",
|
| 1209 |
+
"着",
|
| 1210 |
+
"下",
|
| 1211 |
+
"自",
|
| 1212 |
+
"之",
|
| 1213 |
+
"年",
|
| 1214 |
+
"过",
|
| 1215 |
+
"发",
|
| 1216 |
+
"后",
|
| 1217 |
+
"作",
|
| 1218 |
+
"里",
|
| 1219 |
+
"用",
|
| 1220 |
+
"道",
|
| 1221 |
+
"行",
|
| 1222 |
+
"所",
|
| 1223 |
+
"然",
|
| 1224 |
+
"家",
|
| 1225 |
+
"种",
|
| 1226 |
+
"事",
|
| 1227 |
+
"成",
|
| 1228 |
+
"方",
|
| 1229 |
+
"多",
|
| 1230 |
+
"经",
|
| 1231 |
+
"么",
|
| 1232 |
+
"去",
|
| 1233 |
+
"法",
|
| 1234 |
+
"学",
|
| 1235 |
+
"如",
|
| 1236 |
+
"都",
|
| 1237 |
+
"同",
|
| 1238 |
+
"现",
|
| 1239 |
+
"当",
|
| 1240 |
+
"没",
|
| 1241 |
+
"动",
|
| 1242 |
+
"面",
|
| 1243 |
+
"起",
|
| 1244 |
+
"看",
|
| 1245 |
+
"定",
|
| 1246 |
+
"天",
|
| 1247 |
+
"分",
|
| 1248 |
+
"还",
|
| 1249 |
+
"进",
|
| 1250 |
+
"好",
|
| 1251 |
+
"小",
|
| 1252 |
+
"部",
|
| 1253 |
+
"其",
|
| 1254 |
+
"些",
|
| 1255 |
+
"主",
|
| 1256 |
+
"样",
|
| 1257 |
+
"理",
|
| 1258 |
+
"心",
|
| 1259 |
+
"她",
|
| 1260 |
+
"本",
|
| 1261 |
+
"前",
|
| 1262 |
+
"开",
|
| 1263 |
+
"但",
|
| 1264 |
+
"因",
|
| 1265 |
+
"只",
|
| 1266 |
+
"从",
|
| 1267 |
+
"想",
|
| 1268 |
+
"实",
|
| 1269 |
+
],
|
| 1270 |
+
"Ukrainian": [
|
| 1271 |
+
"о",
|
| 1272 |
+
"а",
|
| 1273 |
+
"н",
|
| 1274 |
+
"і",
|
| 1275 |
+
"и",
|
| 1276 |
+
"р",
|
| 1277 |
+
"в",
|
| 1278 |
+
"т",
|
| 1279 |
+
"е",
|
| 1280 |
+
"с",
|
| 1281 |
+
"к",
|
| 1282 |
+
"л",
|
| 1283 |
+
"у",
|
| 1284 |
+
"д",
|
| 1285 |
+
"м",
|
| 1286 |
+
"п",
|
| 1287 |
+
"з",
|
| 1288 |
+
"я",
|
| 1289 |
+
"ь",
|
| 1290 |
+
"б",
|
| 1291 |
+
"г",
|
| 1292 |
+
"й",
|
| 1293 |
+
"ч",
|
| 1294 |
+
"х",
|
| 1295 |
+
"ц",
|
| 1296 |
+
"ї",
|
| 1297 |
+
],
|
| 1298 |
+
"Norwegian": [
|
| 1299 |
+
"e",
|
| 1300 |
+
"r",
|
| 1301 |
+
"n",
|
| 1302 |
+
"t",
|
| 1303 |
+
"a",
|
| 1304 |
+
"s",
|
| 1305 |
+
"i",
|
| 1306 |
+
"o",
|
| 1307 |
+
"l",
|
| 1308 |
+
"d",
|
| 1309 |
+
"g",
|
| 1310 |
+
"k",
|
| 1311 |
+
"m",
|
| 1312 |
+
"v",
|
| 1313 |
+
"f",
|
| 1314 |
+
"p",
|
| 1315 |
+
"u",
|
| 1316 |
+
"b",
|
| 1317 |
+
"h",
|
| 1318 |
+
"å",
|
| 1319 |
+
"y",
|
| 1320 |
+
"j",
|
| 1321 |
+
"ø",
|
| 1322 |
+
"c",
|
| 1323 |
+
"æ",
|
| 1324 |
+
"w",
|
| 1325 |
+
],
|
| 1326 |
+
"Finnish": [
|
| 1327 |
+
"a",
|
| 1328 |
+
"i",
|
| 1329 |
+
"n",
|
| 1330 |
+
"t",
|
| 1331 |
+
"e",
|
| 1332 |
+
"s",
|
| 1333 |
+
"l",
|
| 1334 |
+
"o",
|
| 1335 |
+
"u",
|
| 1336 |
+
"k",
|
| 1337 |
+
"ä",
|
| 1338 |
+
"m",
|
| 1339 |
+
"r",
|
| 1340 |
+
"v",
|
| 1341 |
+
"j",
|
| 1342 |
+
"h",
|
| 1343 |
+
"p",
|
| 1344 |
+
"y",
|
| 1345 |
+
"d",
|
| 1346 |
+
"ö",
|
| 1347 |
+
"g",
|
| 1348 |
+
"c",
|
| 1349 |
+
"b",
|
| 1350 |
+
"f",
|
| 1351 |
+
"w",
|
| 1352 |
+
"z",
|
| 1353 |
+
],
|
| 1354 |
+
"Vietnamese": [
|
| 1355 |
+
"n",
|
| 1356 |
+
"h",
|
| 1357 |
+
"t",
|
| 1358 |
+
"i",
|
| 1359 |
+
"c",
|
| 1360 |
+
"g",
|
| 1361 |
+
"a",
|
| 1362 |
+
"o",
|
| 1363 |
+
"u",
|
| 1364 |
+
"m",
|
| 1365 |
+
"l",
|
| 1366 |
+
"r",
|
| 1367 |
+
"à",
|
| 1368 |
+
"đ",
|
| 1369 |
+
"s",
|
| 1370 |
+
"e",
|
| 1371 |
+
"v",
|
| 1372 |
+
"p",
|
| 1373 |
+
"b",
|
| 1374 |
+
"y",
|
| 1375 |
+
"ư",
|
| 1376 |
+
"d",
|
| 1377 |
+
"á",
|
| 1378 |
+
"k",
|
| 1379 |
+
"ộ",
|
| 1380 |
+
"ế",
|
| 1381 |
+
],
|
| 1382 |
+
"Czech": [
|
| 1383 |
+
"o",
|
| 1384 |
+
"e",
|
| 1385 |
+
"a",
|
| 1386 |
+
"n",
|
| 1387 |
+
"t",
|
| 1388 |
+
"s",
|
| 1389 |
+
"i",
|
| 1390 |
+
"l",
|
| 1391 |
+
"v",
|
| 1392 |
+
"r",
|
| 1393 |
+
"k",
|
| 1394 |
+
"d",
|
| 1395 |
+
"u",
|
| 1396 |
+
"m",
|
| 1397 |
+
"p",
|
| 1398 |
+
"í",
|
| 1399 |
+
"c",
|
| 1400 |
+
"h",
|
| 1401 |
+
"z",
|
| 1402 |
+
"á",
|
| 1403 |
+
"y",
|
| 1404 |
+
"j",
|
| 1405 |
+
"b",
|
| 1406 |
+
"ě",
|
| 1407 |
+
"é",
|
| 1408 |
+
"ř",
|
| 1409 |
+
],
|
| 1410 |
+
"Hungarian": [
|
| 1411 |
+
"e",
|
| 1412 |
+
"a",
|
| 1413 |
+
"t",
|
| 1414 |
+
"l",
|
| 1415 |
+
"s",
|
| 1416 |
+
"n",
|
| 1417 |
+
"k",
|
| 1418 |
+
"r",
|
| 1419 |
+
"i",
|
| 1420 |
+
"o",
|
| 1421 |
+
"z",
|
| 1422 |
+
"á",
|
| 1423 |
+
"é",
|
| 1424 |
+
"g",
|
| 1425 |
+
"m",
|
| 1426 |
+
"b",
|
| 1427 |
+
"y",
|
| 1428 |
+
"v",
|
| 1429 |
+
"d",
|
| 1430 |
+
"h",
|
| 1431 |
+
"u",
|
| 1432 |
+
"p",
|
| 1433 |
+
"j",
|
| 1434 |
+
"ö",
|
| 1435 |
+
"f",
|
| 1436 |
+
"c",
|
| 1437 |
+
],
|
| 1438 |
+
"Korean": [
|
| 1439 |
+
"이",
|
| 1440 |
+
"다",
|
| 1441 |
+
"에",
|
| 1442 |
+
"의",
|
| 1443 |
+
"는",
|
| 1444 |
+
"로",
|
| 1445 |
+
"하",
|
| 1446 |
+
"을",
|
| 1447 |
+
"가",
|
| 1448 |
+
"고",
|
| 1449 |
+
"지",
|
| 1450 |
+
"서",
|
| 1451 |
+
"한",
|
| 1452 |
+
"은",
|
| 1453 |
+
"기",
|
| 1454 |
+
"으",
|
| 1455 |
+
"년",
|
| 1456 |
+
"대",
|
| 1457 |
+
"사",
|
| 1458 |
+
"시",
|
| 1459 |
+
"를",
|
| 1460 |
+
"리",
|
| 1461 |
+
"도",
|
| 1462 |
+
"인",
|
| 1463 |
+
"스",
|
| 1464 |
+
"일",
|
| 1465 |
+
],
|
| 1466 |
+
"Indonesian": [
|
| 1467 |
+
"a",
|
| 1468 |
+
"n",
|
| 1469 |
+
"e",
|
| 1470 |
+
"i",
|
| 1471 |
+
"r",
|
| 1472 |
+
"t",
|
| 1473 |
+
"u",
|
| 1474 |
+
"s",
|
| 1475 |
+
"d",
|
| 1476 |
+
"k",
|
| 1477 |
+
"m",
|
| 1478 |
+
"l",
|
| 1479 |
+
"g",
|
| 1480 |
+
"p",
|
| 1481 |
+
"b",
|
| 1482 |
+
"o",
|
| 1483 |
+
"h",
|
| 1484 |
+
"y",
|
| 1485 |
+
"j",
|
| 1486 |
+
"c",
|
| 1487 |
+
"w",
|
| 1488 |
+
"f",
|
| 1489 |
+
"v",
|
| 1490 |
+
"z",
|
| 1491 |
+
"x",
|
| 1492 |
+
"q",
|
| 1493 |
+
],
|
| 1494 |
+
"Turkish": [
|
| 1495 |
+
"a",
|
| 1496 |
+
"e",
|
| 1497 |
+
"i",
|
| 1498 |
+
"n",
|
| 1499 |
+
"r",
|
| 1500 |
+
"l",
|
| 1501 |
+
"ı",
|
| 1502 |
+
"k",
|
| 1503 |
+
"d",
|
| 1504 |
+
"t",
|
| 1505 |
+
"s",
|
| 1506 |
+
"m",
|
| 1507 |
+
"y",
|
| 1508 |
+
"u",
|
| 1509 |
+
"o",
|
| 1510 |
+
"b",
|
| 1511 |
+
"ü",
|
| 1512 |
+
"ş",
|
| 1513 |
+
"v",
|
| 1514 |
+
"g",
|
| 1515 |
+
"z",
|
| 1516 |
+
"h",
|
| 1517 |
+
"c",
|
| 1518 |
+
"p",
|
| 1519 |
+
"ç",
|
| 1520 |
+
"ğ",
|
| 1521 |
+
],
|
| 1522 |
+
"Romanian": [
|
| 1523 |
+
"e",
|
| 1524 |
+
"i",
|
| 1525 |
+
"a",
|
| 1526 |
+
"r",
|
| 1527 |
+
"n",
|
| 1528 |
+
"t",
|
| 1529 |
+
"u",
|
| 1530 |
+
"l",
|
| 1531 |
+
"o",
|
| 1532 |
+
"c",
|
| 1533 |
+
"s",
|
| 1534 |
+
"d",
|
| 1535 |
+
"p",
|
| 1536 |
+
"m",
|
| 1537 |
+
"ă",
|
| 1538 |
+
"f",
|
| 1539 |
+
"v",
|
| 1540 |
+
"î",
|
| 1541 |
+
"g",
|
| 1542 |
+
"b",
|
| 1543 |
+
"ș",
|
| 1544 |
+
"ț",
|
| 1545 |
+
"z",
|
| 1546 |
+
"h",
|
| 1547 |
+
"â",
|
| 1548 |
+
"j",
|
| 1549 |
+
],
|
| 1550 |
+
"Farsi": [
|
| 1551 |
+
"ا",
|
| 1552 |
+
"ی",
|
| 1553 |
+
"ر",
|
| 1554 |
+
"د",
|
| 1555 |
+
"ن",
|
| 1556 |
+
"ه",
|
| 1557 |
+
"و",
|
| 1558 |
+
"م",
|
| 1559 |
+
"ت",
|
| 1560 |
+
"ب",
|
| 1561 |
+
"س",
|
| 1562 |
+
"ل",
|
| 1563 |
+
"ک",
|
| 1564 |
+
"ش",
|
| 1565 |
+
"ز",
|
| 1566 |
+
"ف",
|
| 1567 |
+
"گ",
|
| 1568 |
+
"ع",
|
| 1569 |
+
"خ",
|
| 1570 |
+
"ق",
|
| 1571 |
+
"ج",
|
| 1572 |
+
"آ",
|
| 1573 |
+
"پ",
|
| 1574 |
+
"ح",
|
| 1575 |
+
"ط",
|
| 1576 |
+
"ص",
|
| 1577 |
+
],
|
| 1578 |
+
"Arabic": [
|
| 1579 |
+
"ا",
|
| 1580 |
+
"ل",
|
| 1581 |
+
"ي",
|
| 1582 |
+
"م",
|
| 1583 |
+
"و",
|
| 1584 |
+
"ن",
|
| 1585 |
+
"ر",
|
| 1586 |
+
"ت",
|
| 1587 |
+
"ب",
|
| 1588 |
+
"ة",
|
| 1589 |
+
"ع",
|
| 1590 |
+
"د",
|
| 1591 |
+
"س",
|
| 1592 |
+
"ف",
|
| 1593 |
+
"ه",
|
| 1594 |
+
"ك",
|
| 1595 |
+
"ق",
|
| 1596 |
+
"أ",
|
| 1597 |
+
"ح",
|
| 1598 |
+
"ج",
|
| 1599 |
+
"ش",
|
| 1600 |
+
"ط",
|
| 1601 |
+
"ص",
|
| 1602 |
+
"ى",
|
| 1603 |
+
"خ",
|
| 1604 |
+
"إ",
|
| 1605 |
+
],
|
| 1606 |
+
"Danish": [
|
| 1607 |
+
"e",
|
| 1608 |
+
"r",
|
| 1609 |
+
"n",
|
| 1610 |
+
"t",
|
| 1611 |
+
"a",
|
| 1612 |
+
"i",
|
| 1613 |
+
"s",
|
| 1614 |
+
"d",
|
| 1615 |
+
"l",
|
| 1616 |
+
"o",
|
| 1617 |
+
"g",
|
| 1618 |
+
"m",
|
| 1619 |
+
"k",
|
| 1620 |
+
"f",
|
| 1621 |
+
"v",
|
| 1622 |
+
"u",
|
| 1623 |
+
"b",
|
| 1624 |
+
"h",
|
| 1625 |
+
"p",
|
| 1626 |
+
"å",
|
| 1627 |
+
"y",
|
| 1628 |
+
"ø",
|
| 1629 |
+
"æ",
|
| 1630 |
+
"c",
|
| 1631 |
+
"j",
|
| 1632 |
+
"w",
|
| 1633 |
+
],
|
| 1634 |
+
"Serbian": [
|
| 1635 |
+
"а",
|
| 1636 |
+
"и",
|
| 1637 |
+
"о",
|
| 1638 |
+
"е",
|
| 1639 |
+
"н",
|
| 1640 |
+
"р",
|
| 1641 |
+
"с",
|
| 1642 |
+
"у",
|
| 1643 |
+
"т",
|
| 1644 |
+
"к",
|
| 1645 |
+
"ј",
|
| 1646 |
+
"в",
|
| 1647 |
+
"д",
|
| 1648 |
+
"м",
|
| 1649 |
+
"п",
|
| 1650 |
+
"л",
|
| 1651 |
+
"г",
|
| 1652 |
+
"з",
|
| 1653 |
+
"б",
|
| 1654 |
+
"a",
|
| 1655 |
+
"i",
|
| 1656 |
+
"e",
|
| 1657 |
+
"o",
|
| 1658 |
+
"n",
|
| 1659 |
+
"ц",
|
| 1660 |
+
"ш",
|
| 1661 |
+
],
|
| 1662 |
+
"Lithuanian": [
|
| 1663 |
+
"i",
|
| 1664 |
+
"a",
|
| 1665 |
+
"s",
|
| 1666 |
+
"o",
|
| 1667 |
+
"r",
|
| 1668 |
+
"e",
|
| 1669 |
+
"t",
|
| 1670 |
+
"n",
|
| 1671 |
+
"u",
|
| 1672 |
+
"k",
|
| 1673 |
+
"m",
|
| 1674 |
+
"l",
|
| 1675 |
+
"p",
|
| 1676 |
+
"v",
|
| 1677 |
+
"d",
|
| 1678 |
+
"j",
|
| 1679 |
+
"g",
|
| 1680 |
+
"ė",
|
| 1681 |
+
"b",
|
| 1682 |
+
"y",
|
| 1683 |
+
"ų",
|
| 1684 |
+
"š",
|
| 1685 |
+
"ž",
|
| 1686 |
+
"c",
|
| 1687 |
+
"ą",
|
| 1688 |
+
"į",
|
| 1689 |
+
],
|
| 1690 |
+
"Slovene": [
|
| 1691 |
+
"e",
|
| 1692 |
+
"a",
|
| 1693 |
+
"i",
|
| 1694 |
+
"o",
|
| 1695 |
+
"n",
|
| 1696 |
+
"r",
|
| 1697 |
+
"s",
|
| 1698 |
+
"l",
|
| 1699 |
+
"t",
|
| 1700 |
+
"j",
|
| 1701 |
+
"v",
|
| 1702 |
+
"k",
|
| 1703 |
+
"d",
|
| 1704 |
+
"p",
|
| 1705 |
+
"m",
|
| 1706 |
+
"u",
|
| 1707 |
+
"z",
|
| 1708 |
+
"b",
|
| 1709 |
+
"g",
|
| 1710 |
+
"h",
|
| 1711 |
+
"č",
|
| 1712 |
+
"c",
|
| 1713 |
+
"š",
|
| 1714 |
+
"ž",
|
| 1715 |
+
"f",
|
| 1716 |
+
"y",
|
| 1717 |
+
],
|
| 1718 |
+
"Slovak": [
|
| 1719 |
+
"o",
|
| 1720 |
+
"a",
|
| 1721 |
+
"e",
|
| 1722 |
+
"n",
|
| 1723 |
+
"i",
|
| 1724 |
+
"r",
|
| 1725 |
+
"v",
|
| 1726 |
+
"t",
|
| 1727 |
+
"s",
|
| 1728 |
+
"l",
|
| 1729 |
+
"k",
|
| 1730 |
+
"d",
|
| 1731 |
+
"m",
|
| 1732 |
+
"p",
|
| 1733 |
+
"u",
|
| 1734 |
+
"c",
|
| 1735 |
+
"h",
|
| 1736 |
+
"j",
|
| 1737 |
+
"b",
|
| 1738 |
+
"z",
|
| 1739 |
+
"á",
|
| 1740 |
+
"y",
|
| 1741 |
+
"ý",
|
| 1742 |
+
"í",
|
| 1743 |
+
"č",
|
| 1744 |
+
"é",
|
| 1745 |
+
],
|
| 1746 |
+
"Hebrew": [
|
| 1747 |
+
"י",
|
| 1748 |
+
"ו",
|
| 1749 |
+
"ה",
|
| 1750 |
+
"ל",
|
| 1751 |
+
"ר",
|
| 1752 |
+
"ב",
|
| 1753 |
+
"ת",
|
| 1754 |
+
"מ",
|
| 1755 |
+
"א",
|
| 1756 |
+
"ש",
|
| 1757 |
+
"נ",
|
| 1758 |
+
"ע",
|
| 1759 |
+
"ם",
|
| 1760 |
+
"ד",
|
| 1761 |
+
"ק",
|
| 1762 |
+
"ח",
|
| 1763 |
+
"פ",
|
| 1764 |
+
"ס",
|
| 1765 |
+
"כ",
|
| 1766 |
+
"ג",
|
| 1767 |
+
"ט",
|
| 1768 |
+
"צ",
|
| 1769 |
+
"ן",
|
| 1770 |
+
"ז",
|
| 1771 |
+
"ך",
|
| 1772 |
+
],
|
| 1773 |
+
"Bulgarian": [
|
| 1774 |
+
"а",
|
| 1775 |
+
"и",
|
| 1776 |
+
"о",
|
| 1777 |
+
"е",
|
| 1778 |
+
"н",
|
| 1779 |
+
"т",
|
| 1780 |
+
"р",
|
| 1781 |
+
"с",
|
| 1782 |
+
"в",
|
| 1783 |
+
"л",
|
| 1784 |
+
"к",
|
| 1785 |
+
"д",
|
| 1786 |
+
"п",
|
| 1787 |
+
"м",
|
| 1788 |
+
"з",
|
| 1789 |
+
"г",
|
| 1790 |
+
"я",
|
| 1791 |
+
"ъ",
|
| 1792 |
+
"у",
|
| 1793 |
+
"б",
|
| 1794 |
+
"ч",
|
| 1795 |
+
"ц",
|
| 1796 |
+
"й",
|
| 1797 |
+
"ж",
|
| 1798 |
+
"щ",
|
| 1799 |
+
"х",
|
| 1800 |
+
],
|
| 1801 |
+
"Croatian": [
|
| 1802 |
+
"a",
|
| 1803 |
+
"i",
|
| 1804 |
+
"o",
|
| 1805 |
+
"e",
|
| 1806 |
+
"n",
|
| 1807 |
+
"r",
|
| 1808 |
+
"j",
|
| 1809 |
+
"s",
|
| 1810 |
+
"t",
|
| 1811 |
+
"u",
|
| 1812 |
+
"k",
|
| 1813 |
+
"l",
|
| 1814 |
+
"v",
|
| 1815 |
+
"d",
|
| 1816 |
+
"m",
|
| 1817 |
+
"p",
|
| 1818 |
+
"g",
|
| 1819 |
+
"z",
|
| 1820 |
+
"b",
|
| 1821 |
+
"c",
|
| 1822 |
+
"č",
|
| 1823 |
+
"h",
|
| 1824 |
+
"š",
|
| 1825 |
+
"ž",
|
| 1826 |
+
"ć",
|
| 1827 |
+
"f",
|
| 1828 |
+
],
|
| 1829 |
+
"Hindi": [
|
| 1830 |
+
"क",
|
| 1831 |
+
"र",
|
| 1832 |
+
"स",
|
| 1833 |
+
"न",
|
| 1834 |
+
"त",
|
| 1835 |
+
"म",
|
| 1836 |
+
"ह",
|
| 1837 |
+
"प",
|
| 1838 |
+
"य",
|
| 1839 |
+
"ल",
|
| 1840 |
+
"व",
|
| 1841 |
+
"ज",
|
| 1842 |
+
"द",
|
| 1843 |
+
"ग",
|
| 1844 |
+
"ब",
|
| 1845 |
+
"श",
|
| 1846 |
+
"ट",
|
| 1847 |
+
"अ",
|
| 1848 |
+
"ए",
|
| 1849 |
+
"थ",
|
| 1850 |
+
"भ",
|
| 1851 |
+
"ड",
|
| 1852 |
+
"च",
|
| 1853 |
+
"ध",
|
| 1854 |
+
"ष",
|
| 1855 |
+
"इ",
|
| 1856 |
+
],
|
| 1857 |
+
"Estonian": [
|
| 1858 |
+
"a",
|
| 1859 |
+
"i",
|
| 1860 |
+
"e",
|
| 1861 |
+
"s",
|
| 1862 |
+
"t",
|
| 1863 |
+
"l",
|
| 1864 |
+
"u",
|
| 1865 |
+
"n",
|
| 1866 |
+
"o",
|
| 1867 |
+
"k",
|
| 1868 |
+
"r",
|
| 1869 |
+
"d",
|
| 1870 |
+
"m",
|
| 1871 |
+
"v",
|
| 1872 |
+
"g",
|
| 1873 |
+
"p",
|
| 1874 |
+
"j",
|
| 1875 |
+
"h",
|
| 1876 |
+
"ä",
|
| 1877 |
+
"b",
|
| 1878 |
+
"õ",
|
| 1879 |
+
"ü",
|
| 1880 |
+
"f",
|
| 1881 |
+
"c",
|
| 1882 |
+
"ö",
|
| 1883 |
+
"y",
|
| 1884 |
+
],
|
| 1885 |
+
"Thai": [
|
| 1886 |
+
"า",
|
| 1887 |
+
"น",
|
| 1888 |
+
"ร",
|
| 1889 |
+
"อ",
|
| 1890 |
+
"ก",
|
| 1891 |
+
"เ",
|
| 1892 |
+
"ง",
|
| 1893 |
+
"ม",
|
| 1894 |
+
"ย",
|
| 1895 |
+
"ล",
|
| 1896 |
+
"ว",
|
| 1897 |
+
"ด",
|
| 1898 |
+
"ท",
|
| 1899 |
+
"ส",
|
| 1900 |
+
"ต",
|
| 1901 |
+
"ะ",
|
| 1902 |
+
"ป",
|
| 1903 |
+
"บ",
|
| 1904 |
+
"ค",
|
| 1905 |
+
"ห",
|
| 1906 |
+
"แ",
|
| 1907 |
+
"จ",
|
| 1908 |
+
"พ",
|
| 1909 |
+
"ช",
|
| 1910 |
+
"ข",
|
| 1911 |
+
"ใ",
|
| 1912 |
+
],
|
| 1913 |
+
"Greek": [
|
| 1914 |
+
"α",
|
| 1915 |
+
"τ",
|
| 1916 |
+
"ο",
|
| 1917 |
+
"ι",
|
| 1918 |
+
"ε",
|
| 1919 |
+
"ν",
|
| 1920 |
+
"ρ",
|
| 1921 |
+
"σ",
|
| 1922 |
+
"κ",
|
| 1923 |
+
"η",
|
| 1924 |
+
"π",
|
| 1925 |
+
"ς",
|
| 1926 |
+
"υ",
|
| 1927 |
+
"μ",
|
| 1928 |
+
"λ",
|
| 1929 |
+
"ί",
|
| 1930 |
+
"ό",
|
| 1931 |
+
"ά",
|
| 1932 |
+
"γ",
|
| 1933 |
+
"έ",
|
| 1934 |
+
"δ",
|
| 1935 |
+
"ή",
|
| 1936 |
+
"ω",
|
| 1937 |
+
"χ",
|
| 1938 |
+
"θ",
|
| 1939 |
+
"ύ",
|
| 1940 |
+
],
|
| 1941 |
+
"Tamil": [
|
| 1942 |
+
"க",
|
| 1943 |
+
"த",
|
| 1944 |
+
"ப",
|
| 1945 |
+
"ட",
|
| 1946 |
+
"ர",
|
| 1947 |
+
"ம",
|
| 1948 |
+
"ல",
|
| 1949 |
+
"ன",
|
| 1950 |
+
"வ",
|
| 1951 |
+
"ற",
|
| 1952 |
+
"ய",
|
| 1953 |
+
"ள",
|
| 1954 |
+
"ச",
|
| 1955 |
+
"ந",
|
| 1956 |
+
"இ",
|
| 1957 |
+
"ண",
|
| 1958 |
+
"அ",
|
| 1959 |
+
"ஆ",
|
| 1960 |
+
"ழ",
|
| 1961 |
+
"ங",
|
| 1962 |
+
"எ",
|
| 1963 |
+
"உ",
|
| 1964 |
+
"ஒ",
|
| 1965 |
+
"ஸ",
|
| 1966 |
+
],
|
| 1967 |
+
"Kazakh": [
|
| 1968 |
+
"а",
|
| 1969 |
+
"ы",
|
| 1970 |
+
"е",
|
| 1971 |
+
"н",
|
| 1972 |
+
"т",
|
| 1973 |
+
"р",
|
| 1974 |
+
"л",
|
| 1975 |
+
"і",
|
| 1976 |
+
"д",
|
| 1977 |
+
"с",
|
| 1978 |
+
"м",
|
| 1979 |
+
"қ",
|
| 1980 |
+
"к",
|
| 1981 |
+
"о",
|
| 1982 |
+
"б",
|
| 1983 |
+
"и",
|
| 1984 |
+
"у",
|
| 1985 |
+
"ғ",
|
| 1986 |
+
"ж",
|
| 1987 |
+
"ң",
|
| 1988 |
+
"з",
|
| 1989 |
+
"ш",
|
| 1990 |
+
"й",
|
| 1991 |
+
"п",
|
| 1992 |
+
"г",
|
| 1993 |
+
"ө",
|
| 1994 |
+
],
|
| 1995 |
+
}
|
| 1996 |
+
|
| 1997 |
+
LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES)
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/legacy.py
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import TYPE_CHECKING, Any, Optional
|
| 4 |
+
from warnings import warn
|
| 5 |
+
|
| 6 |
+
from .api import from_bytes
|
| 7 |
+
from .constant import CHARDET_CORRESPONDENCE
|
| 8 |
+
|
| 9 |
+
# TODO: remove this check when dropping Python 3.7 support
|
| 10 |
+
if TYPE_CHECKING:
|
| 11 |
+
from typing_extensions import TypedDict
|
| 12 |
+
|
| 13 |
+
class ResultDict(TypedDict):
|
| 14 |
+
encoding: Optional[str]
|
| 15 |
+
language: str
|
| 16 |
+
confidence: Optional[float]
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def detect(
|
| 20 |
+
byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any
|
| 21 |
+
) -> ResultDict:
|
| 22 |
+
"""
|
| 23 |
+
chardet legacy method
|
| 24 |
+
Detect the encoding of the given byte string. It should be mostly backward-compatible.
|
| 25 |
+
Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it)
|
| 26 |
+
This function is deprecated and should be used to migrate your project easily, consult the documentation for
|
| 27 |
+
further information. Not planned for removal.
|
| 28 |
+
|
| 29 |
+
:param byte_str: The byte sequence to examine.
|
| 30 |
+
:param should_rename_legacy: Should we rename legacy encodings
|
| 31 |
+
to their more modern equivalents?
|
| 32 |
+
"""
|
| 33 |
+
if len(kwargs):
|
| 34 |
+
warn(
|
| 35 |
+
f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()"
|
| 36 |
+
)
|
| 37 |
+
|
| 38 |
+
if not isinstance(byte_str, (bytearray, bytes)):
|
| 39 |
+
raise TypeError( # pragma: nocover
|
| 40 |
+
"Expected object of type bytes or bytearray, got: "
|
| 41 |
+
"{0}".format(type(byte_str))
|
| 42 |
+
)
|
| 43 |
+
|
| 44 |
+
if isinstance(byte_str, bytearray):
|
| 45 |
+
byte_str = bytes(byte_str)
|
| 46 |
+
|
| 47 |
+
r = from_bytes(byte_str).best()
|
| 48 |
+
|
| 49 |
+
encoding = r.encoding if r is not None else None
|
| 50 |
+
language = r.language if r is not None and r.language != "Unknown" else ""
|
| 51 |
+
confidence = 1.0 - r.chaos if r is not None else None
|
| 52 |
+
|
| 53 |
+
# Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process
|
| 54 |
+
# but chardet does return 'utf-8-sig' and it is a valid codec name.
|
| 55 |
+
if r is not None and encoding == "utf_8" and r.bom:
|
| 56 |
+
encoding += "_sig"
|
| 57 |
+
|
| 58 |
+
if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE:
|
| 59 |
+
encoding = CHARDET_CORRESPONDENCE[encoding]
|
| 60 |
+
|
| 61 |
+
return {
|
| 62 |
+
"encoding": encoding,
|
| 63 |
+
"language": language,
|
| 64 |
+
"confidence": confidence,
|
| 65 |
+
}
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/md.cpython-310-x86_64-linux-gnu.so
ADDED
|
Binary file (16.1 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/md.py
ADDED
|
@@ -0,0 +1,628 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from functools import lru_cache
|
| 2 |
+
from logging import getLogger
|
| 3 |
+
from typing import List, Optional
|
| 4 |
+
|
| 5 |
+
from .constant import (
|
| 6 |
+
COMMON_SAFE_ASCII_CHARACTERS,
|
| 7 |
+
TRACE,
|
| 8 |
+
UNICODE_SECONDARY_RANGE_KEYWORD,
|
| 9 |
+
)
|
| 10 |
+
from .utils import (
|
| 11 |
+
is_accentuated,
|
| 12 |
+
is_arabic,
|
| 13 |
+
is_arabic_isolated_form,
|
| 14 |
+
is_case_variable,
|
| 15 |
+
is_cjk,
|
| 16 |
+
is_emoticon,
|
| 17 |
+
is_hangul,
|
| 18 |
+
is_hiragana,
|
| 19 |
+
is_katakana,
|
| 20 |
+
is_latin,
|
| 21 |
+
is_punctuation,
|
| 22 |
+
is_separator,
|
| 23 |
+
is_symbol,
|
| 24 |
+
is_thai,
|
| 25 |
+
is_unprintable,
|
| 26 |
+
remove_accent,
|
| 27 |
+
unicode_range,
|
| 28 |
+
)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class MessDetectorPlugin:
|
| 32 |
+
"""
|
| 33 |
+
Base abstract class used for mess detection plugins.
|
| 34 |
+
All detectors MUST extend and implement given methods.
|
| 35 |
+
"""
|
| 36 |
+
|
| 37 |
+
def eligible(self, character: str) -> bool:
|
| 38 |
+
"""
|
| 39 |
+
Determine if given character should be fed in.
|
| 40 |
+
"""
|
| 41 |
+
raise NotImplementedError # pragma: nocover
|
| 42 |
+
|
| 43 |
+
def feed(self, character: str) -> None:
|
| 44 |
+
"""
|
| 45 |
+
The main routine to be executed upon character.
|
| 46 |
+
Insert the logic in witch the text would be considered chaotic.
|
| 47 |
+
"""
|
| 48 |
+
raise NotImplementedError # pragma: nocover
|
| 49 |
+
|
| 50 |
+
def reset(self) -> None: # pragma: no cover
|
| 51 |
+
"""
|
| 52 |
+
Permit to reset the plugin to the initial state.
|
| 53 |
+
"""
|
| 54 |
+
raise NotImplementedError
|
| 55 |
+
|
| 56 |
+
@property
|
| 57 |
+
def ratio(self) -> float:
|
| 58 |
+
"""
|
| 59 |
+
Compute the chaos ratio based on what your feed() has seen.
|
| 60 |
+
Must NOT be lower than 0.; No restriction gt 0.
|
| 61 |
+
"""
|
| 62 |
+
raise NotImplementedError # pragma: nocover
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin):
|
| 66 |
+
def __init__(self) -> None:
|
| 67 |
+
self._punctuation_count: int = 0
|
| 68 |
+
self._symbol_count: int = 0
|
| 69 |
+
self._character_count: int = 0
|
| 70 |
+
|
| 71 |
+
self._last_printable_char: Optional[str] = None
|
| 72 |
+
self._frenzy_symbol_in_word: bool = False
|
| 73 |
+
|
| 74 |
+
def eligible(self, character: str) -> bool:
|
| 75 |
+
return character.isprintable()
|
| 76 |
+
|
| 77 |
+
def feed(self, character: str) -> None:
|
| 78 |
+
self._character_count += 1
|
| 79 |
+
|
| 80 |
+
if (
|
| 81 |
+
character != self._last_printable_char
|
| 82 |
+
and character not in COMMON_SAFE_ASCII_CHARACTERS
|
| 83 |
+
):
|
| 84 |
+
if is_punctuation(character):
|
| 85 |
+
self._punctuation_count += 1
|
| 86 |
+
elif (
|
| 87 |
+
character.isdigit() is False
|
| 88 |
+
and is_symbol(character)
|
| 89 |
+
and is_emoticon(character) is False
|
| 90 |
+
):
|
| 91 |
+
self._symbol_count += 2
|
| 92 |
+
|
| 93 |
+
self._last_printable_char = character
|
| 94 |
+
|
| 95 |
+
def reset(self) -> None: # pragma: no cover
|
| 96 |
+
self._punctuation_count = 0
|
| 97 |
+
self._character_count = 0
|
| 98 |
+
self._symbol_count = 0
|
| 99 |
+
|
| 100 |
+
@property
|
| 101 |
+
def ratio(self) -> float:
|
| 102 |
+
if self._character_count == 0:
|
| 103 |
+
return 0.0
|
| 104 |
+
|
| 105 |
+
ratio_of_punctuation: float = (
|
| 106 |
+
self._punctuation_count + self._symbol_count
|
| 107 |
+
) / self._character_count
|
| 108 |
+
|
| 109 |
+
return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
class TooManyAccentuatedPlugin(MessDetectorPlugin):
|
| 113 |
+
def __init__(self) -> None:
|
| 114 |
+
self._character_count: int = 0
|
| 115 |
+
self._accentuated_count: int = 0
|
| 116 |
+
|
| 117 |
+
def eligible(self, character: str) -> bool:
|
| 118 |
+
return character.isalpha()
|
| 119 |
+
|
| 120 |
+
def feed(self, character: str) -> None:
|
| 121 |
+
self._character_count += 1
|
| 122 |
+
|
| 123 |
+
if is_accentuated(character):
|
| 124 |
+
self._accentuated_count += 1
|
| 125 |
+
|
| 126 |
+
def reset(self) -> None: # pragma: no cover
|
| 127 |
+
self._character_count = 0
|
| 128 |
+
self._accentuated_count = 0
|
| 129 |
+
|
| 130 |
+
@property
|
| 131 |
+
def ratio(self) -> float:
|
| 132 |
+
if self._character_count < 8:
|
| 133 |
+
return 0.0
|
| 134 |
+
|
| 135 |
+
ratio_of_accentuation: float = self._accentuated_count / self._character_count
|
| 136 |
+
return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
class UnprintablePlugin(MessDetectorPlugin):
|
| 140 |
+
def __init__(self) -> None:
|
| 141 |
+
self._unprintable_count: int = 0
|
| 142 |
+
self._character_count: int = 0
|
| 143 |
+
|
| 144 |
+
def eligible(self, character: str) -> bool:
|
| 145 |
+
return True
|
| 146 |
+
|
| 147 |
+
def feed(self, character: str) -> None:
|
| 148 |
+
if is_unprintable(character):
|
| 149 |
+
self._unprintable_count += 1
|
| 150 |
+
self._character_count += 1
|
| 151 |
+
|
| 152 |
+
def reset(self) -> None: # pragma: no cover
|
| 153 |
+
self._unprintable_count = 0
|
| 154 |
+
|
| 155 |
+
@property
|
| 156 |
+
def ratio(self) -> float:
|
| 157 |
+
if self._character_count == 0:
|
| 158 |
+
return 0.0
|
| 159 |
+
|
| 160 |
+
return (self._unprintable_count * 8) / self._character_count
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin):
|
| 164 |
+
def __init__(self) -> None:
|
| 165 |
+
self._successive_count: int = 0
|
| 166 |
+
self._character_count: int = 0
|
| 167 |
+
|
| 168 |
+
self._last_latin_character: Optional[str] = None
|
| 169 |
+
|
| 170 |
+
def eligible(self, character: str) -> bool:
|
| 171 |
+
return character.isalpha() and is_latin(character)
|
| 172 |
+
|
| 173 |
+
def feed(self, character: str) -> None:
|
| 174 |
+
self._character_count += 1
|
| 175 |
+
if (
|
| 176 |
+
self._last_latin_character is not None
|
| 177 |
+
and is_accentuated(character)
|
| 178 |
+
and is_accentuated(self._last_latin_character)
|
| 179 |
+
):
|
| 180 |
+
if character.isupper() and self._last_latin_character.isupper():
|
| 181 |
+
self._successive_count += 1
|
| 182 |
+
# Worse if its the same char duplicated with different accent.
|
| 183 |
+
if remove_accent(character) == remove_accent(self._last_latin_character):
|
| 184 |
+
self._successive_count += 1
|
| 185 |
+
self._last_latin_character = character
|
| 186 |
+
|
| 187 |
+
def reset(self) -> None: # pragma: no cover
|
| 188 |
+
self._successive_count = 0
|
| 189 |
+
self._character_count = 0
|
| 190 |
+
self._last_latin_character = None
|
| 191 |
+
|
| 192 |
+
@property
|
| 193 |
+
def ratio(self) -> float:
|
| 194 |
+
if self._character_count == 0:
|
| 195 |
+
return 0.0
|
| 196 |
+
|
| 197 |
+
return (self._successive_count * 2) / self._character_count
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
class SuspiciousRange(MessDetectorPlugin):
|
| 201 |
+
def __init__(self) -> None:
|
| 202 |
+
self._suspicious_successive_range_count: int = 0
|
| 203 |
+
self._character_count: int = 0
|
| 204 |
+
self._last_printable_seen: Optional[str] = None
|
| 205 |
+
|
| 206 |
+
def eligible(self, character: str) -> bool:
|
| 207 |
+
return character.isprintable()
|
| 208 |
+
|
| 209 |
+
def feed(self, character: str) -> None:
|
| 210 |
+
self._character_count += 1
|
| 211 |
+
|
| 212 |
+
if (
|
| 213 |
+
character.isspace()
|
| 214 |
+
or is_punctuation(character)
|
| 215 |
+
or character in COMMON_SAFE_ASCII_CHARACTERS
|
| 216 |
+
):
|
| 217 |
+
self._last_printable_seen = None
|
| 218 |
+
return
|
| 219 |
+
|
| 220 |
+
if self._last_printable_seen is None:
|
| 221 |
+
self._last_printable_seen = character
|
| 222 |
+
return
|
| 223 |
+
|
| 224 |
+
unicode_range_a: Optional[str] = unicode_range(self._last_printable_seen)
|
| 225 |
+
unicode_range_b: Optional[str] = unicode_range(character)
|
| 226 |
+
|
| 227 |
+
if is_suspiciously_successive_range(unicode_range_a, unicode_range_b):
|
| 228 |
+
self._suspicious_successive_range_count += 1
|
| 229 |
+
|
| 230 |
+
self._last_printable_seen = character
|
| 231 |
+
|
| 232 |
+
def reset(self) -> None: # pragma: no cover
|
| 233 |
+
self._character_count = 0
|
| 234 |
+
self._suspicious_successive_range_count = 0
|
| 235 |
+
self._last_printable_seen = None
|
| 236 |
+
|
| 237 |
+
@property
|
| 238 |
+
def ratio(self) -> float:
|
| 239 |
+
if self._character_count <= 13:
|
| 240 |
+
return 0.0
|
| 241 |
+
|
| 242 |
+
ratio_of_suspicious_range_usage: float = (
|
| 243 |
+
self._suspicious_successive_range_count * 2
|
| 244 |
+
) / self._character_count
|
| 245 |
+
|
| 246 |
+
return ratio_of_suspicious_range_usage
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
class SuperWeirdWordPlugin(MessDetectorPlugin):
|
| 250 |
+
def __init__(self) -> None:
|
| 251 |
+
self._word_count: int = 0
|
| 252 |
+
self._bad_word_count: int = 0
|
| 253 |
+
self._foreign_long_count: int = 0
|
| 254 |
+
|
| 255 |
+
self._is_current_word_bad: bool = False
|
| 256 |
+
self._foreign_long_watch: bool = False
|
| 257 |
+
|
| 258 |
+
self._character_count: int = 0
|
| 259 |
+
self._bad_character_count: int = 0
|
| 260 |
+
|
| 261 |
+
self._buffer: str = ""
|
| 262 |
+
self._buffer_accent_count: int = 0
|
| 263 |
+
self._buffer_glyph_count: int = 0
|
| 264 |
+
|
| 265 |
+
def eligible(self, character: str) -> bool:
|
| 266 |
+
return True
|
| 267 |
+
|
| 268 |
+
def feed(self, character: str) -> None:
|
| 269 |
+
if character.isalpha():
|
| 270 |
+
self._buffer += character
|
| 271 |
+
if is_accentuated(character):
|
| 272 |
+
self._buffer_accent_count += 1
|
| 273 |
+
if (
|
| 274 |
+
self._foreign_long_watch is False
|
| 275 |
+
and (is_latin(character) is False or is_accentuated(character))
|
| 276 |
+
and is_cjk(character) is False
|
| 277 |
+
and is_hangul(character) is False
|
| 278 |
+
and is_katakana(character) is False
|
| 279 |
+
and is_hiragana(character) is False
|
| 280 |
+
and is_thai(character) is False
|
| 281 |
+
):
|
| 282 |
+
self._foreign_long_watch = True
|
| 283 |
+
if (
|
| 284 |
+
is_cjk(character)
|
| 285 |
+
or is_hangul(character)
|
| 286 |
+
or is_katakana(character)
|
| 287 |
+
or is_hiragana(character)
|
| 288 |
+
or is_thai(character)
|
| 289 |
+
):
|
| 290 |
+
self._buffer_glyph_count += 1
|
| 291 |
+
return
|
| 292 |
+
if not self._buffer:
|
| 293 |
+
return
|
| 294 |
+
if (
|
| 295 |
+
character.isspace() or is_punctuation(character) or is_separator(character)
|
| 296 |
+
) and self._buffer:
|
| 297 |
+
self._word_count += 1
|
| 298 |
+
buffer_length: int = len(self._buffer)
|
| 299 |
+
|
| 300 |
+
self._character_count += buffer_length
|
| 301 |
+
|
| 302 |
+
if buffer_length >= 4:
|
| 303 |
+
if self._buffer_accent_count / buffer_length >= 0.5:
|
| 304 |
+
self._is_current_word_bad = True
|
| 305 |
+
# Word/Buffer ending with an upper case accentuated letter are so rare,
|
| 306 |
+
# that we will consider them all as suspicious. Same weight as foreign_long suspicious.
|
| 307 |
+
elif (
|
| 308 |
+
is_accentuated(self._buffer[-1])
|
| 309 |
+
and self._buffer[-1].isupper()
|
| 310 |
+
and all(_.isupper() for _ in self._buffer) is False
|
| 311 |
+
):
|
| 312 |
+
self._foreign_long_count += 1
|
| 313 |
+
self._is_current_word_bad = True
|
| 314 |
+
elif self._buffer_glyph_count == 1:
|
| 315 |
+
self._is_current_word_bad = True
|
| 316 |
+
self._foreign_long_count += 1
|
| 317 |
+
if buffer_length >= 24 and self._foreign_long_watch:
|
| 318 |
+
camel_case_dst = [
|
| 319 |
+
i
|
| 320 |
+
for c, i in zip(self._buffer, range(0, buffer_length))
|
| 321 |
+
if c.isupper()
|
| 322 |
+
]
|
| 323 |
+
probable_camel_cased: bool = False
|
| 324 |
+
|
| 325 |
+
if camel_case_dst and (len(camel_case_dst) / buffer_length <= 0.3):
|
| 326 |
+
probable_camel_cased = True
|
| 327 |
+
|
| 328 |
+
if not probable_camel_cased:
|
| 329 |
+
self._foreign_long_count += 1
|
| 330 |
+
self._is_current_word_bad = True
|
| 331 |
+
|
| 332 |
+
if self._is_current_word_bad:
|
| 333 |
+
self._bad_word_count += 1
|
| 334 |
+
self._bad_character_count += len(self._buffer)
|
| 335 |
+
self._is_current_word_bad = False
|
| 336 |
+
|
| 337 |
+
self._foreign_long_watch = False
|
| 338 |
+
self._buffer = ""
|
| 339 |
+
self._buffer_accent_count = 0
|
| 340 |
+
self._buffer_glyph_count = 0
|
| 341 |
+
elif (
|
| 342 |
+
character not in {"<", ">", "-", "=", "~", "|", "_"}
|
| 343 |
+
and character.isdigit() is False
|
| 344 |
+
and is_symbol(character)
|
| 345 |
+
):
|
| 346 |
+
self._is_current_word_bad = True
|
| 347 |
+
self._buffer += character
|
| 348 |
+
|
| 349 |
+
def reset(self) -> None: # pragma: no cover
|
| 350 |
+
self._buffer = ""
|
| 351 |
+
self._is_current_word_bad = False
|
| 352 |
+
self._foreign_long_watch = False
|
| 353 |
+
self._bad_word_count = 0
|
| 354 |
+
self._word_count = 0
|
| 355 |
+
self._character_count = 0
|
| 356 |
+
self._bad_character_count = 0
|
| 357 |
+
self._foreign_long_count = 0
|
| 358 |
+
|
| 359 |
+
@property
|
| 360 |
+
def ratio(self) -> float:
|
| 361 |
+
if self._word_count <= 10 and self._foreign_long_count == 0:
|
| 362 |
+
return 0.0
|
| 363 |
+
|
| 364 |
+
return self._bad_character_count / self._character_count
|
| 365 |
+
|
| 366 |
+
|
| 367 |
+
class CjkInvalidStopPlugin(MessDetectorPlugin):
|
| 368 |
+
"""
|
| 369 |
+
GB(Chinese) based encoding often render the stop incorrectly when the content does not fit and
|
| 370 |
+
can be easily detected. Searching for the overuse of '丅' and '丄'.
|
| 371 |
+
"""
|
| 372 |
+
|
| 373 |
+
def __init__(self) -> None:
|
| 374 |
+
self._wrong_stop_count: int = 0
|
| 375 |
+
self._cjk_character_count: int = 0
|
| 376 |
+
|
| 377 |
+
def eligible(self, character: str) -> bool:
|
| 378 |
+
return True
|
| 379 |
+
|
| 380 |
+
def feed(self, character: str) -> None:
|
| 381 |
+
if character in {"丅", "丄"}:
|
| 382 |
+
self._wrong_stop_count += 1
|
| 383 |
+
return
|
| 384 |
+
if is_cjk(character):
|
| 385 |
+
self._cjk_character_count += 1
|
| 386 |
+
|
| 387 |
+
def reset(self) -> None: # pragma: no cover
|
| 388 |
+
self._wrong_stop_count = 0
|
| 389 |
+
self._cjk_character_count = 0
|
| 390 |
+
|
| 391 |
+
@property
|
| 392 |
+
def ratio(self) -> float:
|
| 393 |
+
if self._cjk_character_count < 16:
|
| 394 |
+
return 0.0
|
| 395 |
+
return self._wrong_stop_count / self._cjk_character_count
|
| 396 |
+
|
| 397 |
+
|
| 398 |
+
class ArchaicUpperLowerPlugin(MessDetectorPlugin):
|
| 399 |
+
def __init__(self) -> None:
|
| 400 |
+
self._buf: bool = False
|
| 401 |
+
|
| 402 |
+
self._character_count_since_last_sep: int = 0
|
| 403 |
+
|
| 404 |
+
self._successive_upper_lower_count: int = 0
|
| 405 |
+
self._successive_upper_lower_count_final: int = 0
|
| 406 |
+
|
| 407 |
+
self._character_count: int = 0
|
| 408 |
+
|
| 409 |
+
self._last_alpha_seen: Optional[str] = None
|
| 410 |
+
self._current_ascii_only: bool = True
|
| 411 |
+
|
| 412 |
+
def eligible(self, character: str) -> bool:
|
| 413 |
+
return True
|
| 414 |
+
|
| 415 |
+
def feed(self, character: str) -> None:
|
| 416 |
+
is_concerned = character.isalpha() and is_case_variable(character)
|
| 417 |
+
chunk_sep = is_concerned is False
|
| 418 |
+
|
| 419 |
+
if chunk_sep and self._character_count_since_last_sep > 0:
|
| 420 |
+
if (
|
| 421 |
+
self._character_count_since_last_sep <= 64
|
| 422 |
+
and character.isdigit() is False
|
| 423 |
+
and self._current_ascii_only is False
|
| 424 |
+
):
|
| 425 |
+
self._successive_upper_lower_count_final += (
|
| 426 |
+
self._successive_upper_lower_count
|
| 427 |
+
)
|
| 428 |
+
|
| 429 |
+
self._successive_upper_lower_count = 0
|
| 430 |
+
self._character_count_since_last_sep = 0
|
| 431 |
+
self._last_alpha_seen = None
|
| 432 |
+
self._buf = False
|
| 433 |
+
self._character_count += 1
|
| 434 |
+
self._current_ascii_only = True
|
| 435 |
+
|
| 436 |
+
return
|
| 437 |
+
|
| 438 |
+
if self._current_ascii_only is True and character.isascii() is False:
|
| 439 |
+
self._current_ascii_only = False
|
| 440 |
+
|
| 441 |
+
if self._last_alpha_seen is not None:
|
| 442 |
+
if (character.isupper() and self._last_alpha_seen.islower()) or (
|
| 443 |
+
character.islower() and self._last_alpha_seen.isupper()
|
| 444 |
+
):
|
| 445 |
+
if self._buf is True:
|
| 446 |
+
self._successive_upper_lower_count += 2
|
| 447 |
+
self._buf = False
|
| 448 |
+
else:
|
| 449 |
+
self._buf = True
|
| 450 |
+
else:
|
| 451 |
+
self._buf = False
|
| 452 |
+
|
| 453 |
+
self._character_count += 1
|
| 454 |
+
self._character_count_since_last_sep += 1
|
| 455 |
+
self._last_alpha_seen = character
|
| 456 |
+
|
| 457 |
+
def reset(self) -> None: # pragma: no cover
|
| 458 |
+
self._character_count = 0
|
| 459 |
+
self._character_count_since_last_sep = 0
|
| 460 |
+
self._successive_upper_lower_count = 0
|
| 461 |
+
self._successive_upper_lower_count_final = 0
|
| 462 |
+
self._last_alpha_seen = None
|
| 463 |
+
self._buf = False
|
| 464 |
+
self._current_ascii_only = True
|
| 465 |
+
|
| 466 |
+
@property
|
| 467 |
+
def ratio(self) -> float:
|
| 468 |
+
if self._character_count == 0:
|
| 469 |
+
return 0.0
|
| 470 |
+
|
| 471 |
+
return self._successive_upper_lower_count_final / self._character_count
|
| 472 |
+
|
| 473 |
+
|
| 474 |
+
class ArabicIsolatedFormPlugin(MessDetectorPlugin):
|
| 475 |
+
def __init__(self) -> None:
|
| 476 |
+
self._character_count: int = 0
|
| 477 |
+
self._isolated_form_count: int = 0
|
| 478 |
+
|
| 479 |
+
def reset(self) -> None: # pragma: no cover
|
| 480 |
+
self._character_count = 0
|
| 481 |
+
self._isolated_form_count = 0
|
| 482 |
+
|
| 483 |
+
def eligible(self, character: str) -> bool:
|
| 484 |
+
return is_arabic(character)
|
| 485 |
+
|
| 486 |
+
def feed(self, character: str) -> None:
|
| 487 |
+
self._character_count += 1
|
| 488 |
+
|
| 489 |
+
if is_arabic_isolated_form(character):
|
| 490 |
+
self._isolated_form_count += 1
|
| 491 |
+
|
| 492 |
+
@property
|
| 493 |
+
def ratio(self) -> float:
|
| 494 |
+
if self._character_count < 8:
|
| 495 |
+
return 0.0
|
| 496 |
+
|
| 497 |
+
isolated_form_usage: float = self._isolated_form_count / self._character_count
|
| 498 |
+
|
| 499 |
+
return isolated_form_usage
|
| 500 |
+
|
| 501 |
+
|
| 502 |
+
@lru_cache(maxsize=1024)
|
| 503 |
+
def is_suspiciously_successive_range(
|
| 504 |
+
unicode_range_a: Optional[str], unicode_range_b: Optional[str]
|
| 505 |
+
) -> bool:
|
| 506 |
+
"""
|
| 507 |
+
Determine if two Unicode range seen next to each other can be considered as suspicious.
|
| 508 |
+
"""
|
| 509 |
+
if unicode_range_a is None or unicode_range_b is None:
|
| 510 |
+
return True
|
| 511 |
+
|
| 512 |
+
if unicode_range_a == unicode_range_b:
|
| 513 |
+
return False
|
| 514 |
+
|
| 515 |
+
if "Latin" in unicode_range_a and "Latin" in unicode_range_b:
|
| 516 |
+
return False
|
| 517 |
+
|
| 518 |
+
if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b:
|
| 519 |
+
return False
|
| 520 |
+
|
| 521 |
+
# Latin characters can be accompanied with a combining diacritical mark
|
| 522 |
+
# eg. Vietnamese.
|
| 523 |
+
if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and (
|
| 524 |
+
"Combining" in unicode_range_a or "Combining" in unicode_range_b
|
| 525 |
+
):
|
| 526 |
+
return False
|
| 527 |
+
|
| 528 |
+
keywords_range_a, keywords_range_b = unicode_range_a.split(
|
| 529 |
+
" "
|
| 530 |
+
), unicode_range_b.split(" ")
|
| 531 |
+
|
| 532 |
+
for el in keywords_range_a:
|
| 533 |
+
if el in UNICODE_SECONDARY_RANGE_KEYWORD:
|
| 534 |
+
continue
|
| 535 |
+
if el in keywords_range_b:
|
| 536 |
+
return False
|
| 537 |
+
|
| 538 |
+
# Japanese Exception
|
| 539 |
+
range_a_jp_chars, range_b_jp_chars = (
|
| 540 |
+
unicode_range_a
|
| 541 |
+
in (
|
| 542 |
+
"Hiragana",
|
| 543 |
+
"Katakana",
|
| 544 |
+
),
|
| 545 |
+
unicode_range_b in ("Hiragana", "Katakana"),
|
| 546 |
+
)
|
| 547 |
+
if (range_a_jp_chars or range_b_jp_chars) and (
|
| 548 |
+
"CJK" in unicode_range_a or "CJK" in unicode_range_b
|
| 549 |
+
):
|
| 550 |
+
return False
|
| 551 |
+
if range_a_jp_chars and range_b_jp_chars:
|
| 552 |
+
return False
|
| 553 |
+
|
| 554 |
+
if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b:
|
| 555 |
+
if "CJK" in unicode_range_a or "CJK" in unicode_range_b:
|
| 556 |
+
return False
|
| 557 |
+
if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin":
|
| 558 |
+
return False
|
| 559 |
+
|
| 560 |
+
# Chinese/Japanese use dedicated range for punctuation and/or separators.
|
| 561 |
+
if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or (
|
| 562 |
+
unicode_range_a in ["Katakana", "Hiragana"]
|
| 563 |
+
and unicode_range_b in ["Katakana", "Hiragana"]
|
| 564 |
+
):
|
| 565 |
+
if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b:
|
| 566 |
+
return False
|
| 567 |
+
if "Forms" in unicode_range_a or "Forms" in unicode_range_b:
|
| 568 |
+
return False
|
| 569 |
+
if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin":
|
| 570 |
+
return False
|
| 571 |
+
|
| 572 |
+
return True
|
| 573 |
+
|
| 574 |
+
|
| 575 |
+
@lru_cache(maxsize=2048)
|
| 576 |
+
def mess_ratio(
|
| 577 |
+
decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False
|
| 578 |
+
) -> float:
|
| 579 |
+
"""
|
| 580 |
+
Compute a mess ratio given a decoded bytes sequence. The maximum threshold does stop the computation earlier.
|
| 581 |
+
"""
|
| 582 |
+
|
| 583 |
+
detectors: List[MessDetectorPlugin] = [
|
| 584 |
+
md_class() for md_class in MessDetectorPlugin.__subclasses__()
|
| 585 |
+
]
|
| 586 |
+
|
| 587 |
+
length: int = len(decoded_sequence) + 1
|
| 588 |
+
|
| 589 |
+
mean_mess_ratio: float = 0.0
|
| 590 |
+
|
| 591 |
+
if length < 512:
|
| 592 |
+
intermediary_mean_mess_ratio_calc: int = 32
|
| 593 |
+
elif length <= 1024:
|
| 594 |
+
intermediary_mean_mess_ratio_calc = 64
|
| 595 |
+
else:
|
| 596 |
+
intermediary_mean_mess_ratio_calc = 128
|
| 597 |
+
|
| 598 |
+
for character, index in zip(decoded_sequence + "\n", range(length)):
|
| 599 |
+
for detector in detectors:
|
| 600 |
+
if detector.eligible(character):
|
| 601 |
+
detector.feed(character)
|
| 602 |
+
|
| 603 |
+
if (
|
| 604 |
+
index > 0 and index % intermediary_mean_mess_ratio_calc == 0
|
| 605 |
+
) or index == length - 1:
|
| 606 |
+
mean_mess_ratio = sum(dt.ratio for dt in detectors)
|
| 607 |
+
|
| 608 |
+
if mean_mess_ratio >= maximum_threshold:
|
| 609 |
+
break
|
| 610 |
+
|
| 611 |
+
if debug:
|
| 612 |
+
logger = getLogger("charset_normalizer")
|
| 613 |
+
|
| 614 |
+
logger.log(
|
| 615 |
+
TRACE,
|
| 616 |
+
"Mess-detector extended-analysis start. "
|
| 617 |
+
f"intermediary_mean_mess_ratio_calc={intermediary_mean_mess_ratio_calc} mean_mess_ratio={mean_mess_ratio} "
|
| 618 |
+
f"maximum_threshold={maximum_threshold}",
|
| 619 |
+
)
|
| 620 |
+
|
| 621 |
+
if len(decoded_sequence) > 16:
|
| 622 |
+
logger.log(TRACE, f"Starting with: {decoded_sequence[:16]}")
|
| 623 |
+
logger.log(TRACE, f"Ending with: {decoded_sequence[-16::]}")
|
| 624 |
+
|
| 625 |
+
for dt in detectors: # pragma: nocover
|
| 626 |
+
logger.log(TRACE, f"{dt.__class__}: {dt.ratio}")
|
| 627 |
+
|
| 628 |
+
return round(mean_mess_ratio, 3)
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/md__mypyc.cpython-310-x86_64-linux-gnu.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:f24deef17dd5a807c4db1d6ea407a25bc377193ef60aca8956d1d422a1705d91
|
| 3 |
+
size 276808
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/models.py
ADDED
|
@@ -0,0 +1,359 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from encodings.aliases import aliases
|
| 2 |
+
from hashlib import sha256
|
| 3 |
+
from json import dumps
|
| 4 |
+
from re import sub
|
| 5 |
+
from typing import Any, Dict, Iterator, List, Optional, Tuple, Union
|
| 6 |
+
|
| 7 |
+
from .constant import RE_POSSIBLE_ENCODING_INDICATION, TOO_BIG_SEQUENCE
|
| 8 |
+
from .utils import iana_name, is_multi_byte_encoding, unicode_range
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class CharsetMatch:
    """A single charset-detection candidate.

    Wraps the original byte payload together with the guessed encoding and
    the metrics computed for that decoding (mess/"chaos" ratio, language
    coherence, BOM presence). Instances sort from most to least probable
    via __lt__.
    """

    def __init__(
        self,
        payload: bytes,
        guessed_encoding: str,
        mean_mess_ratio: float,
        has_sig_or_bom: bool,
        languages: "CoherenceMatches",
        decoded_payload: Optional[str] = None,
        preemptive_declaration: Optional[str] = None,
    ):
        self._payload: bytes = payload

        self._encoding: str = guessed_encoding
        self._mean_mess_ratio: float = mean_mess_ratio
        self._languages: CoherenceMatches = languages
        self._has_sig_or_bom: bool = has_sig_or_bom
        # Lazily computed by the `alphabets` property.
        self._unicode_ranges: Optional[List[str]] = None

        # Alternative matches that decode to the exact same text (submatches).
        self._leaves: List[CharsetMatch] = []
        self._mean_coherence_ratio: float = 0.0

        # Cache for output(): last re-encoded payload and its target encoding.
        self._output_payload: Optional[bytes] = None
        self._output_encoding: Optional[str] = None

        # Lazily decoded text; populated on first __str__ call if not given.
        self._string: Optional[str] = decoded_payload

        # Encoding declared inside the document itself (e.g. XML prolog), if any.
        self._preemptive_declaration: Optional[str] = preemptive_declaration

    def __eq__(self, other: object) -> bool:
        """Equal when encoding and fingerprint match; a plain str compares
        against the (IANA-normalized) encoding name."""
        if not isinstance(other, CharsetMatch):
            if isinstance(other, str):
                return iana_name(other) == self.encoding
            return False
        return self.encoding == other.encoding and self.fingerprint == other.fingerprint

    def __lt__(self, other: object) -> bool:
        """
        Implemented to make sorted available upon CharsetMatches items.
        """
        if not isinstance(other, CharsetMatch):
            raise ValueError

        chaos_difference: float = abs(self.chaos - other.chaos)
        coherence_difference: float = abs(self.coherence - other.coherence)

        # Below 1% difference --> Use Coherence
        if chaos_difference < 0.01 and coherence_difference > 0.02:
            return self.coherence > other.coherence
        elif chaos_difference < 0.01 and coherence_difference <= 0.02:
            # When having a difficult decision, use the result that decoded as many multi-byte as possible.
            # preserve RAM usage!
            if len(self._payload) >= TOO_BIG_SEQUENCE:
                return self.chaos < other.chaos
            return self.multi_byte_usage > other.multi_byte_usage

        return self.chaos < other.chaos

    @property
    def multi_byte_usage(self) -> float:
        """Fraction of the raw bytes consumed by multi-byte sequences
        (0.0 for a pure single-byte decoding)."""
        if not self._payload:
            # Guard: an empty payload would otherwise divide by zero.
            return 0.0
        return 1.0 - (len(str(self)) / len(self.raw))

    def __str__(self) -> str:
        # Lazy Str Loading: decode strictly on first access, then cache.
        if self._string is None:
            self._string = str(self._payload, self._encoding, "strict")
        return self._string

    def __repr__(self) -> str:
        return "<CharsetMatch '{}' bytes({})>".format(self.encoding, self.fingerprint)

    def add_submatch(self, other: "CharsetMatch") -> None:
        """Register another match that decodes to the same text.

        Raises ValueError for non-CharsetMatch input or self-registration.
        """
        if not isinstance(other, CharsetMatch) or other == self:
            raise ValueError(
                "Unable to add instance <{}> as a submatch of a CharsetMatch".format(
                    other.__class__
                )
            )

        other._string = None  # Unload RAM usage; dirty trick.
        self._leaves.append(other)

    @property
    def encoding(self) -> str:
        return self._encoding

    @property
    def encoding_aliases(self) -> List[str]:
        """
        Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855.
        """
        also_known_as: List[str] = []
        for u, p in aliases.items():
            if self.encoding == u:
                also_known_as.append(p)
            elif self.encoding == p:
                also_known_as.append(u)
        return also_known_as

    @property
    def bom(self) -> bool:
        return self._has_sig_or_bom

    @property
    def byte_order_mark(self) -> bool:
        return self._has_sig_or_bom

    @property
    def languages(self) -> List[str]:
        """
        Return the complete list of possible languages found in decoded sequence.
        Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'.
        """
        return [e[0] for e in self._languages]

    @property
    def language(self) -> str:
        """
        Most probable language found in decoded sequence. If none were detected or inferred, the property will return
        "Unknown".
        """
        if not self._languages:
            # Trying to infer the language based on the given encoding
            # Its either English or we should not pronounce ourselves in certain cases.
            if "ascii" in self.could_be_from_charset:
                return "English"

            # doing it there to avoid circular import
            from charset_normalizer.cd import encoding_languages, mb_encoding_languages

            languages = (
                mb_encoding_languages(self.encoding)
                if is_multi_byte_encoding(self.encoding)
                else encoding_languages(self.encoding)
            )

            if len(languages) == 0 or "Latin Based" in languages:
                return "Unknown"

            return languages[0]

        return self._languages[0][0]

    @property
    def chaos(self) -> float:
        """Mean mess ratio: lower is better."""
        return self._mean_mess_ratio

    @property
    def coherence(self) -> float:
        """Best language coherence ratio, or 0.0 when no language detected."""
        if not self._languages:
            return 0.0
        return self._languages[0][1]

    @property
    def percent_chaos(self) -> float:
        return round(self.chaos * 100, ndigits=3)

    @property
    def percent_coherence(self) -> float:
        return round(self.coherence * 100, ndigits=3)

    @property
    def raw(self) -> bytes:
        """
        Original untouched bytes.
        """
        return self._payload

    @property
    def submatch(self) -> List["CharsetMatch"]:
        return self._leaves

    @property
    def has_submatch(self) -> bool:
        return len(self._leaves) > 0

    @property
    def alphabets(self) -> List[str]:
        """Sorted, de-duplicated Unicode range names covering the decoded text."""
        if self._unicode_ranges is not None:
            return self._unicode_ranges
        # list detected ranges
        detected_ranges: List[Optional[str]] = [
            unicode_range(char) for char in str(self)
        ]
        # filter and sort
        self._unicode_ranges = sorted(list({r for r in detected_ranges if r}))
        return self._unicode_ranges

    @property
    def could_be_from_charset(self) -> List[str]:
        """
        The complete list of encoding that output the exact SAME str result and therefore could be the originating
        encoding.
        This list does include the encoding available in property 'encoding'.
        """
        return [self._encoding] + [m.encoding for m in self._leaves]

    def output(self, encoding: str = "utf_8") -> bytes:
        """
        Method to get re-encoded bytes payload using given target encoding. Default to UTF-8.
        Any errors will be simply ignored by the encoder NOT replaced.
        """
        if self._output_encoding != encoding:
            self._output_encoding = encoding
            decoded_string = str(self)
            if (
                self._preemptive_declaration is not None
                and self._preemptive_declaration.lower()
                not in ["utf-8", "utf8", "utf_8"]
            ):
                # Rewrite the in-document encoding declaration (first 8 KiB only)
                # so the emitted payload stays self-consistent.
                patched_header = sub(
                    RE_POSSIBLE_ENCODING_INDICATION,
                    lambda m: m.string[m.span()[0] : m.span()[1]].replace(
                        m.groups()[0], iana_name(self._output_encoding)  # type: ignore[arg-type]
                    ),
                    decoded_string[:8192],
                    1,
                )

                decoded_string = patched_header + decoded_string[8192:]

            self._output_payload = decoded_string.encode(encoding, "replace")

        return self._output_payload  # type: ignore

    @property
    def fingerprint(self) -> str:
        """
        Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one.
        """
        return sha256(self.output()).hexdigest()
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
class CharsetMatches:
    """
    Container with every CharsetMatch items ordered by default from most probable to the less one.
    Act like a list(iterable) but does not implements all related methods.
    """

    def __init__(self, results: Optional[List[CharsetMatch]] = None):
        # Results are kept permanently sorted, best candidate first.
        self._results: List[CharsetMatch] = sorted(results) if results else []

    def __iter__(self) -> Iterator[CharsetMatch]:
        return iter(self._results)

    def __getitem__(self, item: Union[int, str]) -> CharsetMatch:
        """
        Retrieve a single item either by its position or encoding name (alias may be used here).
        Raise KeyError upon invalid index or encoding not present in results.
        """
        if isinstance(item, int):
            return self._results[item]
        if isinstance(item, str):
            wanted = iana_name(item, False)
            for candidate in self._results:
                if wanted in candidate.could_be_from_charset:
                    return candidate
        raise KeyError

    def __len__(self) -> int:
        return len(self._results)

    def __bool__(self) -> bool:
        return len(self._results) > 0

    def append(self, item: CharsetMatch) -> None:
        """
        Insert a single match. Will be inserted accordingly to preserve sort.
        Can be inserted as a submatch.
        """
        if not isinstance(item, CharsetMatch):
            raise ValueError(
                "Cannot append instance '{}' to CharsetMatches".format(
                    str(item.__class__)
                )
            )
        # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage)
        if len(item.raw) < TOO_BIG_SEQUENCE:
            for existing in self._results:
                if existing.fingerprint == item.fingerprint and existing.chaos == item.chaos:
                    existing.add_submatch(item)
                    return
        self._results.append(item)
        self._results.sort()

    def best(self) -> Optional["CharsetMatch"]:
        """
        Simply return the first match. Strict equivalent to matches[0].
        """
        return self._results[0] if self._results else None

    def first(self) -> Optional["CharsetMatch"]:
        """
        Redundant method, call the method best(). Kept for BC reasons.
        """
        return self.best()
|
| 309 |
+
|
| 310 |
+
|
| 311 |
+
# Type aliases for the coherence detector's output: one (language, ratio)
# pair, and the ordered list of such pairs (most coherent first).
CoherenceMatch = Tuple[str, float]
CoherenceMatches = List[CoherenceMatch]
|
| 313 |
+
|
| 314 |
+
|
| 315 |
+
class CliDetectionResult:
    """JSON-serializable record describing one file's detection outcome,
    as emitted by the command-line interface."""

    def __init__(
        self,
        path: str,
        encoding: Optional[str],
        encoding_aliases: List[str],
        alternative_encodings: List[str],
        language: str,
        alphabets: List[str],
        has_sig_or_bom: bool,
        chaos: float,
        coherence: float,
        unicode_path: Optional[str],
        is_preferred: bool,
    ):
        # File identity.
        self.path: str = path
        self.unicode_path: Optional[str] = unicode_path
        # Detection verdict.
        self.encoding: Optional[str] = encoding
        self.encoding_aliases: List[str] = encoding_aliases
        self.alternative_encodings: List[str] = alternative_encodings
        self.language: str = language
        self.alphabets: List[str] = alphabets
        self.has_sig_or_bom: bool = has_sig_or_bom
        # Scores.
        self.chaos: float = chaos
        self.coherence: float = coherence
        self.is_preferred: bool = is_preferred

    @property
    def __dict__(self) -> Dict[str, Any]:  # type: ignore
        # Explicit key order kept stable for downstream JSON consumers.
        field_names = (
            "path",
            "encoding",
            "encoding_aliases",
            "alternative_encodings",
            "language",
            "alphabets",
            "has_sig_or_bom",
            "chaos",
            "coherence",
            "unicode_path",
            "is_preferred",
        )
        return {field: getattr(self, field) for field in field_names}

    def to_json(self) -> str:
        """Serialize this result as a pretty-printed, ASCII-safe JSON object."""
        return dumps(self.__dict__, ensure_ascii=True, indent=4)
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/py.typed
ADDED
|
File without changes
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/utils.py
ADDED
|
@@ -0,0 +1,421 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import importlib
|
| 2 |
+
import logging
|
| 3 |
+
import unicodedata
|
| 4 |
+
from codecs import IncrementalDecoder
|
| 5 |
+
from encodings.aliases import aliases
|
| 6 |
+
from functools import lru_cache
|
| 7 |
+
from re import findall
|
| 8 |
+
from typing import Generator, List, Optional, Set, Tuple, Union
|
| 9 |
+
|
| 10 |
+
from _multibytecodec import MultibyteIncrementalDecoder
|
| 11 |
+
|
| 12 |
+
from .constant import (
|
| 13 |
+
ENCODING_MARKS,
|
| 14 |
+
IANA_SUPPORTED_SIMILAR,
|
| 15 |
+
RE_POSSIBLE_ENCODING_INDICATION,
|
| 16 |
+
UNICODE_RANGES_COMBINED,
|
| 17 |
+
UNICODE_SECONDARY_RANGE_KEYWORD,
|
| 18 |
+
UTF8_MAXIMAL_ALLOCATION,
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_accentuated(character: str) -> bool:
    """True when the character's Unicode name carries a common diacritic marker."""
    try:
        name: str = unicodedata.name(character)
    except ValueError:  # code point has no name entry
        return False
    return any(
        marker in name
        for marker in (
            "WITH GRAVE",
            "WITH ACUTE",
            "WITH CEDILLA",
            "WITH DIAERESIS",
            "WITH CIRCUMFLEX",
            "WITH TILDE",
            "WITH MACRON",
            "WITH RING ABOVE",
        )
    )
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def remove_accent(character: str) -> str:
    """Return the base character from the canonical decomposition, or the
    character itself when it does not decompose."""
    decomposition: str = unicodedata.decomposition(character)
    if not decomposition:
        return character

    # First field of the decomposition is the base code point in hex.
    base_hex = decomposition.split(" ")[0]
    return chr(int(base_hex, 16))
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def unicode_range(character: str) -> Optional[str]:
    """
    Retrieve the Unicode range official name from a single character.
    """
    code_point: int = ord(character)

    for range_name, span in UNICODE_RANGES_COMBINED.items():
        if code_point in span:
            return range_name

    return None
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_latin(character: str) -> bool:
    """True when the character's Unicode name contains 'LATIN'."""
    try:
        return "LATIN" in unicodedata.name(character)
    except ValueError:  # unnamed code point
        return False
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_punctuation(character: str) -> bool:
    """True for punctuation categories (P*) or characters living in a
    'Punctuation' Unicode range."""
    if "P" in unicodedata.category(character):
        return True

    block: Optional[str] = unicode_range(character)
    return block is not None and "Punctuation" in block
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_symbol(character: str) -> bool:
    """True for symbol/number categories (S*, N*) or characters in a
    '...Forms' range that are not letters (category 'Lo')."""
    category: str = unicodedata.category(character)
    if "S" in category or "N" in category:
        return True

    block: Optional[str] = unicode_range(character)
    return block is not None and "Forms" in block and category != "Lo"
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_emoticon(character: str) -> bool:
    """True when the character sits in an 'Emoticons' or 'Pictographs' range."""
    block: Optional[str] = unicode_range(character)
    return block is not None and ("Emoticons" in block or "Pictographs" in block)
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_separator(character: str) -> bool:
    """True for whitespace, a small set of ASCII delimiters, separator
    categories (Z*), and selected punctuation categories (Po, Pd, Pc)."""
    if character.isspace():
        return True
    if character in {"|", "+", "<", ">"}:
        return True

    category: str = unicodedata.category(character)
    return "Z" in category or category in {"Po", "Pd", "Pc"}
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_case_variable(character: str) -> bool:
    """True when the character is cased (exactly one of islower/isupper holds)."""
    lowercased, uppercased = character.islower(), character.isupper()
    return lowercased != uppercased
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_cjk(character: str) -> bool:
    """True when the character's Unicode name tags it as CJK."""
    try:
        return "CJK" in unicodedata.name(character)
    except ValueError:  # unnamed code point
        return False
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_hiragana(character: str) -> bool:
    """True when the character's Unicode name tags it as HIRAGANA."""
    try:
        return "HIRAGANA" in unicodedata.name(character)
    except ValueError:  # unnamed code point
        return False
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_katakana(character: str) -> bool:
    """True when the character's Unicode name tags it as KATAKANA."""
    try:
        return "KATAKANA" in unicodedata.name(character)
    except ValueError:  # unnamed code point
        return False
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_hangul(character: str) -> bool:
    """True when the character's Unicode name tags it as HANGUL."""
    try:
        return "HANGUL" in unicodedata.name(character)
    except ValueError:  # unnamed code point
        return False
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_thai(character: str) -> bool:
    """True when the character's Unicode name tags it as THAI."""
    try:
        return "THAI" in unicodedata.name(character)
    except ValueError:  # unnamed code point
        return False
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_arabic(character: str) -> bool:
    """True when the character's Unicode name tags it as ARABIC."""
    try:
        return "ARABIC" in unicodedata.name(character)
    except ValueError:  # unnamed code point
        return False
|
| 187 |
+
|
| 188 |
+
|
| 189 |
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_arabic_isolated_form(character: str) -> bool:
    """True for Arabic presentation-form characters whose name carries
    both 'ARABIC' and 'ISOLATED FORM'."""
    try:
        name = unicodedata.name(character)
    except ValueError:  # unnamed code point
        return False
    return "ARABIC" in name and "ISOLATED FORM" in name
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED))
def is_unicode_range_secondary(range_name: str) -> bool:
    """True when the range name contains any secondary-range keyword."""
    for keyword in UNICODE_SECONDARY_RANGE_KEYWORD:
        if keyword in range_name:
            return True
    return False
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_unprintable(character: str) -> bool:
    """True for characters that are neither printable nor whitespace,
    excluding two tolerated exceptions."""
    if character.isprintable() or character.isspace():  # isspace covers \n \t \r \v
        return False
    # \x1A is the ASCII substitute character; \ufeff (Zero Width No-Break
    # Space, Arabic Presentation Forms-B, Unicode 1.1) is not acknowledged
    # as space by Python — both are deliberately tolerated.
    return character not in ("\x1A", "\ufeff")
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> Optional[str]:
    """
    Extract using ASCII-only decoder any specified encoding in the first n-bytes.
    """
    if not isinstance(sequence, bytes):
        raise TypeError

    # Only scan the head of the payload, decoded leniently as ASCII.
    zone = sequence[: min(len(sequence), search_zone)].decode("ascii", errors="ignore")

    for candidate in findall(RE_POSSIBLE_ENCODING_INDICATION, zone):
        candidate = candidate.lower().replace("-", "_")

        # Normalize the declared name to its IANA codec module name.
        for encoding_alias, encoding_iana in aliases.items():
            if candidate in (encoding_alias, encoding_iana):
                return encoding_iana

    return None
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
@lru_cache(maxsize=128)
def is_multi_byte_encoding(name: str) -> bool:
    """
    Verify is a specific encoding is a multi byte one based on it IANA name
    """
    known_multi_byte = {
        "utf_8",
        "utf_8_sig",
        "utf_16",
        "utf_16_be",
        "utf_16_le",
        "utf_32",
        "utf_32_le",
        "utf_32_be",
        "utf_7",
    }
    if name in known_multi_byte:
        return True
    # Fall back to inspecting the codec module's decoder class.
    decoder = importlib.import_module("encodings.{}".format(name)).IncrementalDecoder
    return issubclass(decoder, MultibyteIncrementalDecoder)
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
def identify_sig_or_bom(sequence: bytes) -> Tuple[Optional[str], bytes]:
    """
    Identify and extract SIG/BOM in given sequence.
    """
    for iana_encoding, marks in ENCODING_MARKS.items():
        # A table entry may be a single mark or a list of marks.
        candidates: List[bytes] = [marks] if isinstance(marks, bytes) else marks

        for mark in candidates:
            if sequence.startswith(mark):
                return iana_encoding, mark

    return None, b""
|
| 284 |
+
|
| 285 |
+
|
| 286 |
+
def should_strip_sig_or_bom(iana_encoding: str) -> bool:
    """True unless the encoding needs its mark kept for the codec to work
    (utf_16/utf_32 rely on the BOM for endianness)."""
    mark_required = {"utf_16", "utf_32"}
    return iana_encoding not in mark_required
|
| 288 |
+
|
| 289 |
+
|
| 290 |
+
def iana_name(cp_name: str, strict: bool = True) -> str:
    """Normalize a codec name (any known alias) to its canonical codec
    module name; raise ValueError when strict and unknown."""
    cp_name = cp_name.lower().replace("-", "_")

    for encoding_alias, encoding_iana in aliases.items():
        if cp_name in (encoding_alias, encoding_iana):
            return encoding_iana

    if strict:
        raise ValueError("Unable to retrieve IANA for '{}'".format(cp_name))

    # Non-strict mode: hand back the normalized input unchanged.
    return cp_name
|
| 304 |
+
|
| 305 |
+
|
| 306 |
+
def range_scan(decoded_sequence: str) -> List[str]:
    """List every Unicode range name touched by the given text (unordered,
    de-duplicated)."""
    seen: Set[str] = set()

    for char in decoded_sequence:
        block: Optional[str] = unicode_range(char)
        if block is not None:
            seen.add(block)

    return list(seen)
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
def cp_similarity(iana_name_a: str, iana_name_b: str) -> float:
    """Ratio of single-byte values the two (single-byte) codecs decode
    identically; 0.0 when either codec is multi-byte."""
    if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b):
        return 0.0

    def _lenient_decoder(codec_name: str) -> IncrementalDecoder:
        module = importlib.import_module("encodings.{}".format(codec_name))
        return module.IncrementalDecoder(errors="ignore")

    decoder_a = _lenient_decoder(iana_name_a)
    decoder_b = _lenient_decoder(iana_name_b)

    matches: int = 0
    for byte_value in range(255):
        probe: bytes = bytes([byte_value])
        if decoder_a.decode(probe) == decoder_b.decode(probe):
            matches += 1

    # NOTE(review): iterates 255 byte values (0-254) yet divides by 254 —
    # preserved as-is since IANA_SUPPORTED_SIMILAR was generated with it.
    return matches / 254
|
| 342 |
+
|
| 343 |
+
|
| 344 |
+
def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool:
    """
    Determine if two code page are at least 80% similar. IANA_SUPPORTED_SIMILAR dict was generated using
    the function cp_similarity.
    """
    if iana_name_a not in IANA_SUPPORTED_SIMILAR:
        return False
    return iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a]
|
| 353 |
+
|
| 354 |
+
|
| 355 |
+
def set_logging_handler(
    name: str = "charset_normalizer",
    level: int = logging.INFO,
    format_string: str = "%(asctime)s | %(levelname)s | %(message)s",
) -> None:
    """Attach a freshly-configured StreamHandler to the named logger and
    set its level. Each call adds a new handler."""
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(logging.Formatter(format_string))

    target_logger = logging.getLogger(name)
    target_logger.setLevel(level)
    target_logger.addHandler(stream_handler)
|
| 366 |
+
|
| 367 |
+
|
| 368 |
+
def cut_sequence_chunks(
    sequences: bytes,
    encoding_iana: str,
    offsets: range,
    chunk_size: int,
    bom_or_sig_available: bool,
    strip_sig_or_bom: bool,
    sig_payload: bytes,
    is_multi_byte_decoder: bool,
    decoded_payload: Optional[str] = None,
) -> Generator[str, None, None]:
    """Yield decoded text chunks at the given offsets, re-prepending the
    BOM/SIG when it was not stripped and nudging multi-byte cut points
    backwards when a chunk boundary falls mid-sequence."""
    if decoded_payload and not is_multi_byte_decoder:
        # Fast path: the payload is already decoded; slice the text directly.
        for offset in offsets:
            piece = decoded_payload[offset : offset + chunk_size]
            if not piece:
                break
            yield piece
        return

    for offset in offsets:
        chunk_end = offset + chunk_size
        if chunk_end > len(sequences) + 8:
            continue

        raw_chunk = sequences[offset : offset + chunk_size]

        if bom_or_sig_available and not strip_sig_or_bom:
            raw_chunk = sig_payload + raw_chunk

        chunk = raw_chunk.decode(
            encoding_iana,
            errors="ignore" if is_multi_byte_decoder else "strict",
        )

        # multi-byte bad cutting detector and adjustment
        # not the cleanest way to perform that fix but clever enough for now.
        if is_multi_byte_decoder and offset > 0:
            probe_size: int = min(chunk_size, 16)

            if decoded_payload and chunk[:probe_size] not in decoded_payload:
                # Walk the start back up to 3 bytes until the decoded head
                # matches the known-good decoded text again.
                for backtrack in range(offset, offset - 4, -1):
                    raw_chunk = sequences[backtrack:chunk_end]

                    if bom_or_sig_available and not strip_sig_or_bom:
                        raw_chunk = sig_payload + raw_chunk

                    chunk = raw_chunk.decode(encoding_iana, errors="ignore")

                    if chunk[:probe_size] in decoded_payload:
                        break

        yield chunk
|
minigpt2/lib/python3.10/site-packages/charset_normalizer/version.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Expose version
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
__version__ = "3.4.0"
|
| 6 |
+
VERSION = __version__.split(".")
|
minigpt2/lib/python3.10/site-packages/idna/__pycache__/compat.cpython-310.pyc
ADDED
|
Binary file (725 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.4.127.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
minigpt2/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.4.127.dist-info/License.txt
ADDED
|
@@ -0,0 +1,1568 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
End User License Agreement
|
| 2 |
+
--------------------------
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
Preface
|
| 6 |
+
-------
|
| 7 |
+
|
| 8 |
+
The Software License Agreement in Chapter 1 and the Supplement
|
| 9 |
+
in Chapter 2 contain license terms and conditions that govern
|
| 10 |
+
the use of NVIDIA software. By accepting this agreement, you
|
| 11 |
+
agree to comply with all the terms and conditions applicable
|
| 12 |
+
to the product(s) included herein.
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
NVIDIA Driver
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
Description
|
| 19 |
+
|
| 20 |
+
This package contains the operating system driver and
|
| 21 |
+
fundamental system software components for NVIDIA GPUs.
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
NVIDIA CUDA Toolkit
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
Description
|
| 28 |
+
|
| 29 |
+
The NVIDIA CUDA Toolkit provides command-line and graphical
|
| 30 |
+
tools for building, debugging and optimizing the performance
|
| 31 |
+
of applications accelerated by NVIDIA GPUs, runtime and math
|
| 32 |
+
libraries, and documentation including programming guides,
|
| 33 |
+
user manuals, and API references.
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
Default Install Location of CUDA Toolkit
|
| 37 |
+
|
| 38 |
+
Windows platform:
|
| 39 |
+
|
| 40 |
+
%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v#.#
|
| 41 |
+
|
| 42 |
+
Linux platform:
|
| 43 |
+
|
| 44 |
+
/usr/local/cuda-#.#
|
| 45 |
+
|
| 46 |
+
Mac platform:
|
| 47 |
+
|
| 48 |
+
/Developer/NVIDIA/CUDA-#.#
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
NVIDIA CUDA Samples
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
Description
|
| 55 |
+
|
| 56 |
+
This package includes over 100+ CUDA examples that demonstrate
|
| 57 |
+
various CUDA programming principles, and efficient CUDA
|
| 58 |
+
implementation of algorithms in specific application domains.
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
Default Install Location of CUDA Samples
|
| 62 |
+
|
| 63 |
+
Windows platform:
|
| 64 |
+
|
| 65 |
+
%ProgramData%\NVIDIA Corporation\CUDA Samples\v#.#
|
| 66 |
+
|
| 67 |
+
Linux platform:
|
| 68 |
+
|
| 69 |
+
/usr/local/cuda-#.#/samples
|
| 70 |
+
|
| 71 |
+
and
|
| 72 |
+
|
| 73 |
+
$HOME/NVIDIA_CUDA-#.#_Samples
|
| 74 |
+
|
| 75 |
+
Mac platform:
|
| 76 |
+
|
| 77 |
+
/Developer/NVIDIA/CUDA-#.#/samples
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
NVIDIA Nsight Visual Studio Edition (Windows only)
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
Description
|
| 84 |
+
|
| 85 |
+
NVIDIA Nsight Development Platform, Visual Studio Edition is a
|
| 86 |
+
development environment integrated into Microsoft Visual
|
| 87 |
+
Studio that provides tools for debugging, profiling, analyzing
|
| 88 |
+
and optimizing your GPU computing and graphics applications.
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
Default Install Location of Nsight Visual Studio Edition
|
| 92 |
+
|
| 93 |
+
Windows platform:
|
| 94 |
+
|
| 95 |
+
%ProgramFiles(x86)%\NVIDIA Corporation\Nsight Visual Studio Edition #.#
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
1. License Agreement for NVIDIA Software Development Kits
|
| 99 |
+
---------------------------------------------------------
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
Release Date: July 26, 2018
|
| 103 |
+
---------------------------
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
Important NoticeRead before downloading, installing,
|
| 107 |
+
copying or using the licensed software:
|
| 108 |
+
-------------------------------------------------------
|
| 109 |
+
|
| 110 |
+
This license agreement, including exhibits attached
|
| 111 |
+
("Agreement”) is a legal agreement between you and NVIDIA
|
| 112 |
+
Corporation ("NVIDIA") and governs your use of a NVIDIA
|
| 113 |
+
software development kit (“SDK”).
|
| 114 |
+
|
| 115 |
+
Each SDK has its own set of software and materials, but here
|
| 116 |
+
is a description of the types of items that may be included in
|
| 117 |
+
a SDK: source code, header files, APIs, data sets and assets
|
| 118 |
+
(examples include images, textures, models, scenes, videos,
|
| 119 |
+
native API input/output files), binary software, sample code,
|
| 120 |
+
libraries, utility programs, programming code and
|
| 121 |
+
documentation.
|
| 122 |
+
|
| 123 |
+
This Agreement can be accepted only by an adult of legal age
|
| 124 |
+
of majority in the country in which the SDK is used.
|
| 125 |
+
|
| 126 |
+
If you are entering into this Agreement on behalf of a company
|
| 127 |
+
or other legal entity, you represent that you have the legal
|
| 128 |
+
authority to bind the entity to this Agreement, in which case
|
| 129 |
+
“you” will mean the entity you represent.
|
| 130 |
+
|
| 131 |
+
If you don’t have the required age or authority to accept
|
| 132 |
+
this Agreement, or if you don’t accept all the terms and
|
| 133 |
+
conditions of this Agreement, do not download, install or use
|
| 134 |
+
the SDK.
|
| 135 |
+
|
| 136 |
+
You agree to use the SDK only for purposes that are permitted
|
| 137 |
+
by (a) this Agreement, and (b) any applicable law, regulation
|
| 138 |
+
or generally accepted practices or guidelines in the relevant
|
| 139 |
+
jurisdictions.
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
1.1. License
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
1.1.1. License Grant
|
| 146 |
+
|
| 147 |
+
Subject to the terms of this Agreement, NVIDIA hereby grants
|
| 148 |
+
you a non-exclusive, non-transferable license, without the
|
| 149 |
+
right to sublicense (except as expressly provided in this
|
| 150 |
+
Agreement) to:
|
| 151 |
+
|
| 152 |
+
1. Install and use the SDK,
|
| 153 |
+
|
| 154 |
+
2. Modify and create derivative works of sample source code
|
| 155 |
+
delivered in the SDK, and
|
| 156 |
+
|
| 157 |
+
3. Distribute those portions of the SDK that are identified
|
| 158 |
+
in this Agreement as distributable, as incorporated in
|
| 159 |
+
object code format into a software application that meets
|
| 160 |
+
the distribution requirements indicated in this Agreement.
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
1.1.2. Distribution Requirements
|
| 164 |
+
|
| 165 |
+
These are the distribution requirements for you to exercise
|
| 166 |
+
the distribution grant:
|
| 167 |
+
|
| 168 |
+
1. Your application must have material additional
|
| 169 |
+
functionality, beyond the included portions of the SDK.
|
| 170 |
+
|
| 171 |
+
2. The distributable portions of the SDK shall only be
|
| 172 |
+
accessed by your application.
|
| 173 |
+
|
| 174 |
+
3. The following notice shall be included in modifications
|
| 175 |
+
and derivative works of sample source code distributed:
|
| 176 |
+
“This software contains source code provided by NVIDIA
|
| 177 |
+
Corporation.”
|
| 178 |
+
|
| 179 |
+
4. Unless a developer tool is identified in this Agreement
|
| 180 |
+
as distributable, it is delivered for your internal use
|
| 181 |
+
only.
|
| 182 |
+
|
| 183 |
+
5. The terms under which you distribute your application
|
| 184 |
+
must be consistent with the terms of this Agreement,
|
| 185 |
+
including (without limitation) terms relating to the
|
| 186 |
+
license grant and license restrictions and protection of
|
| 187 |
+
NVIDIA’s intellectual property rights. Additionally, you
|
| 188 |
+
agree that you will protect the privacy, security and
|
| 189 |
+
legal rights of your application users.
|
| 190 |
+
|
| 191 |
+
6. You agree to notify NVIDIA in writing of any known or
|
| 192 |
+
suspected distribution or use of the SDK not in compliance
|
| 193 |
+
with the requirements of this Agreement, and to enforce
|
| 194 |
+
the terms of your agreements with respect to distributed
|
| 195 |
+
SDK.
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
1.1.3. Authorized Users
|
| 199 |
+
|
| 200 |
+
You may allow employees and contractors of your entity or of
|
| 201 |
+
your subsidiary(ies) to access and use the SDK from your
|
| 202 |
+
secure network to perform work on your behalf.
|
| 203 |
+
|
| 204 |
+
If you are an academic institution you may allow users
|
| 205 |
+
enrolled or employed by the academic institution to access and
|
| 206 |
+
use the SDK from your secure network.
|
| 207 |
+
|
| 208 |
+
You are responsible for the compliance with the terms of this
|
| 209 |
+
Agreement by your authorized users. If you become aware that
|
| 210 |
+
your authorized users didn’t follow the terms of this
|
| 211 |
+
Agreement, you agree to take reasonable steps to resolve the
|
| 212 |
+
non-compliance and prevent new occurrences.
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
1.1.4. Pre-Release SDK
|
| 216 |
+
|
| 217 |
+
The SDK versions identified as alpha, beta, preview or
|
| 218 |
+
otherwise as pre-release, may not be fully functional, may
|
| 219 |
+
contain errors or design flaws, and may have reduced or
|
| 220 |
+
different security, privacy, accessibility, availability, and
|
| 221 |
+
reliability standards relative to commercial versions of
|
| 222 |
+
NVIDIA software and materials. Use of a pre-release SDK may
|
| 223 |
+
result in unexpected results, loss of data, project delays or
|
| 224 |
+
other unpredictable damage or loss.
|
| 225 |
+
|
| 226 |
+
You may use a pre-release SDK at your own risk, understanding
|
| 227 |
+
that pre-release SDKs are not intended for use in production
|
| 228 |
+
or business-critical systems.
|
| 229 |
+
|
| 230 |
+
NVIDIA may choose not to make available a commercial version
|
| 231 |
+
of any pre-release SDK. NVIDIA may also choose to abandon
|
| 232 |
+
development and terminate the availability of a pre-release
|
| 233 |
+
SDK at any time without liability.
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
1.1.5. Updates
|
| 237 |
+
|
| 238 |
+
NVIDIA may, at its option, make available patches, workarounds
|
| 239 |
+
or other updates to this SDK. Unless the updates are provided
|
| 240 |
+
with their separate governing terms, they are deemed part of
|
| 241 |
+
the SDK licensed to you as provided in this Agreement. You
|
| 242 |
+
agree that the form and content of the SDK that NVIDIA
|
| 243 |
+
provides may change without prior notice to you. While NVIDIA
|
| 244 |
+
generally maintains compatibility between versions, NVIDIA may
|
| 245 |
+
in some cases make changes that introduce incompatibilities in
|
| 246 |
+
future versions of the SDK.
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
1.1.6. Third Party Licenses
|
| 250 |
+
|
| 251 |
+
The SDK may come bundled with, or otherwise include or be
|
| 252 |
+
distributed with, third party software licensed by a NVIDIA
|
| 253 |
+
supplier and/or open source software provided under an open
|
| 254 |
+
source license. Use of third party software is subject to the
|
| 255 |
+
third-party license terms, or in the absence of third party
|
| 256 |
+
terms, the terms of this Agreement. Copyright to third party
|
| 257 |
+
software is held by the copyright holders indicated in the
|
| 258 |
+
third-party software or license.
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
1.1.7. Reservation of Rights
|
| 262 |
+
|
| 263 |
+
NVIDIA reserves all rights, title, and interest in and to the
|
| 264 |
+
SDK, not expressly granted to you under this Agreement.
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
1.2. Limitations
|
| 268 |
+
|
| 269 |
+
The following license limitations apply to your use of the
|
| 270 |
+
SDK:
|
| 271 |
+
|
| 272 |
+
1. You may not reverse engineer, decompile or disassemble,
|
| 273 |
+
or remove copyright or other proprietary notices from any
|
| 274 |
+
portion of the SDK or copies of the SDK.
|
| 275 |
+
|
| 276 |
+
2. Except as expressly provided in this Agreement, you may
|
| 277 |
+
not copy, sell, rent, sublicense, transfer, distribute,
|
| 278 |
+
modify, or create derivative works of any portion of the
|
| 279 |
+
SDK. For clarity, you may not distribute or sublicense the
|
| 280 |
+
SDK as a stand-alone product.
|
| 281 |
+
|
| 282 |
+
3. Unless you have an agreement with NVIDIA for this
|
| 283 |
+
purpose, you may not indicate that an application created
|
| 284 |
+
with the SDK is sponsored or endorsed by NVIDIA.
|
| 285 |
+
|
| 286 |
+
4. You may not bypass, disable, or circumvent any
|
| 287 |
+
encryption, security, digital rights management or
|
| 288 |
+
authentication mechanism in the SDK.
|
| 289 |
+
|
| 290 |
+
5. You may not use the SDK in any manner that would cause it
|
| 291 |
+
to become subject to an open source software license. As
|
| 292 |
+
examples, licenses that require as a condition of use,
|
| 293 |
+
modification, and/or distribution that the SDK be:
|
| 294 |
+
|
| 295 |
+
a. Disclosed or distributed in source code form;
|
| 296 |
+
|
| 297 |
+
b. Licensed for the purpose of making derivative works;
|
| 298 |
+
or
|
| 299 |
+
|
| 300 |
+
c. Redistributable at no charge.
|
| 301 |
+
|
| 302 |
+
6. Unless you have an agreement with NVIDIA for this
|
| 303 |
+
purpose, you may not use the SDK with any system or
|
| 304 |
+
application where the use or failure of the system or
|
| 305 |
+
application can reasonably be expected to threaten or
|
| 306 |
+
result in personal injury, death, or catastrophic loss.
|
| 307 |
+
Examples include use in avionics, navigation, military,
|
| 308 |
+
medical, life support or other life critical applications.
|
| 309 |
+
NVIDIA does not design, test or manufacture the SDK for
|
| 310 |
+
these critical uses and NVIDIA shall not be liable to you
|
| 311 |
+
or any third party, in whole or in part, for any claims or
|
| 312 |
+
damages arising from such uses.
|
| 313 |
+
|
| 314 |
+
7. You agree to defend, indemnify and hold harmless NVIDIA
|
| 315 |
+
and its affiliates, and their respective employees,
|
| 316 |
+
contractors, agents, officers and directors, from and
|
| 317 |
+
against any and all claims, damages, obligations, losses,
|
| 318 |
+
liabilities, costs or debt, fines, restitutions and
|
| 319 |
+
expenses (including but not limited to attorney’s fees
|
| 320 |
+
and costs incident to establishing the right of
|
| 321 |
+
indemnification) arising out of or related to your use of
|
| 322 |
+
the SDK outside of the scope of this Agreement, or not in
|
| 323 |
+
compliance with its terms.
|
| 324 |
+
|
| 325 |
+
|
| 326 |
+
1.3. Ownership
|
| 327 |
+
|
| 328 |
+
1. NVIDIA or its licensors hold all rights, title and
|
| 329 |
+
interest in and to the SDK and its modifications and
|
| 330 |
+
derivative works, including their respective intellectual
|
| 331 |
+
property rights, subject to your rights described in this
|
| 332 |
+
section. This SDK may include software and materials from
|
| 333 |
+
NVIDIA’s licensors, and these licensors are intended
|
| 334 |
+
third party beneficiaries that may enforce this Agreement
|
| 335 |
+
with respect to their intellectual property rights.
|
| 336 |
+
|
| 337 |
+
2. You hold all rights, title and interest in and to your
|
| 338 |
+
applications and your derivative works of the sample
|
| 339 |
+
source code delivered in the SDK, including their
|
| 340 |
+
respective intellectual property rights, subject to
|
| 341 |
+
NVIDIA’s rights described in this section.
|
| 342 |
+
|
| 343 |
+
3. You may, but don’t have to, provide to NVIDIA
|
| 344 |
+
suggestions, feature requests or other feedback regarding
|
| 345 |
+
the SDK, including possible enhancements or modifications
|
| 346 |
+
to the SDK. For any feedback that you voluntarily provide,
|
| 347 |
+
you hereby grant NVIDIA and its affiliates a perpetual,
|
| 348 |
+
non-exclusive, worldwide, irrevocable license to use,
|
| 349 |
+
reproduce, modify, license, sublicense (through multiple
|
| 350 |
+
tiers of sublicensees), and distribute (through multiple
|
| 351 |
+
tiers of distributors) it without the payment of any
|
| 352 |
+
royalties or fees to you. NVIDIA will use feedback at its
|
| 353 |
+
choice. NVIDIA is constantly looking for ways to improve
|
| 354 |
+
its products, so you may send feedback to NVIDIA through
|
| 355 |
+
the developer portal at https://developer.nvidia.com.
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
1.4. No Warranties
|
| 359 |
+
|
| 360 |
+
THE SDK IS PROVIDED BY NVIDIA “AS IS” AND “WITH ALL
|
| 361 |
+
FAULTS.” TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND
|
| 362 |
+
ITS AFFILIATES EXPRESSLY DISCLAIM ALL WARRANTIES OF ANY KIND
|
| 363 |
+
OR NATURE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING,
|
| 364 |
+
BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS
|
| 365 |
+
FOR A PARTICULAR PURPOSE, TITLE, NON-INFRINGEMENT, OR THE
|
| 366 |
+
ABSENCE OF ANY DEFECTS THEREIN, WHETHER LATENT OR PATENT. NO
|
| 367 |
+
WARRANTY IS MADE ON THE BASIS OF TRADE USAGE, COURSE OF
|
| 368 |
+
DEALING OR COURSE OF TRADE.
|
| 369 |
+
|
| 370 |
+
|
| 371 |
+
1.5. Limitation of Liability
|
| 372 |
+
|
| 373 |
+
TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS
|
| 374 |
+
AFFILIATES SHALL NOT BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
|
| 375 |
+
PUNITIVE OR CONSEQUENTIAL DAMAGES, OR ANY LOST PROFITS, LOSS
|
| 376 |
+
OF USE, LOSS OF DATA OR LOSS OF GOODWILL, OR THE COSTS OF
|
| 377 |
+
PROCURING SUBSTITUTE PRODUCTS, ARISING OUT OF OR IN CONNECTION
|
| 378 |
+
WITH THIS AGREEMENT OR THE USE OR PERFORMANCE OF THE SDK,
|
| 379 |
+
WHETHER SUCH LIABILITY ARISES FROM ANY CLAIM BASED UPON BREACH
|
| 380 |
+
OF CONTRACT, BREACH OF WARRANTY, TORT (INCLUDING NEGLIGENCE),
|
| 381 |
+
PRODUCT LIABILITY OR ANY OTHER CAUSE OF ACTION OR THEORY OF
|
| 382 |
+
LIABILITY. IN NO EVENT WILL NVIDIA’S AND ITS AFFILIATES
|
| 383 |
+
TOTAL CUMULATIVE LIABILITY UNDER OR ARISING OUT OF THIS
|
| 384 |
+
AGREEMENT EXCEED US$10.00. THE NATURE OF THE LIABILITY OR THE
|
| 385 |
+
NUMBER OF CLAIMS OR SUITS SHALL NOT ENLARGE OR EXTEND THIS
|
| 386 |
+
LIMIT.
|
| 387 |
+
|
| 388 |
+
These exclusions and limitations of liability shall apply
|
| 389 |
+
regardless if NVIDIA or its affiliates have been advised of
|
| 390 |
+
the possibility of such damages, and regardless of whether a
|
| 391 |
+
remedy fails its essential purpose. These exclusions and
|
| 392 |
+
limitations of liability form an essential basis of the
|
| 393 |
+
bargain between the parties, and, absent any of these
|
| 394 |
+
exclusions or limitations of liability, the provisions of this
|
| 395 |
+
Agreement, including, without limitation, the economic terms,
|
| 396 |
+
would be substantially different.
|
| 397 |
+
|
| 398 |
+
|
| 399 |
+
1.6. Termination
|
| 400 |
+
|
| 401 |
+
1. This Agreement will continue to apply until terminated by
|
| 402 |
+
either you or NVIDIA as described below.
|
| 403 |
+
|
| 404 |
+
2. If you want to terminate this Agreement, you may do so by
|
| 405 |
+
stopping to use the SDK.
|
| 406 |
+
|
| 407 |
+
3. NVIDIA may, at any time, terminate this Agreement if:
|
| 408 |
+
|
| 409 |
+
a. (i) you fail to comply with any term of this
|
| 410 |
+
Agreement and the non-compliance is not fixed within
|
| 411 |
+
thirty (30) days following notice from NVIDIA (or
|
| 412 |
+
immediately if you violate NVIDIA’s intellectual
|
| 413 |
+
property rights);
|
| 414 |
+
|
| 415 |
+
b. (ii) you commence or participate in any legal
|
| 416 |
+
proceeding against NVIDIA with respect to the SDK; or
|
| 417 |
+
|
| 418 |
+
c. (iii) NVIDIA decides to no longer provide the SDK in
|
| 419 |
+
a country or, in NVIDIA’s sole discretion, the
|
| 420 |
+
continued use of it is no longer commercially viable.
|
| 421 |
+
|
| 422 |
+
4. Upon any termination of this Agreement, you agree to
|
| 423 |
+
promptly discontinue use of the SDK and destroy all copies
|
| 424 |
+
in your possession or control. Your prior distributions in
|
| 425 |
+
accordance with this Agreement are not affected by the
|
| 426 |
+
termination of this Agreement. Upon written request, you
|
| 427 |
+
will certify in writing that you have complied with your
|
| 428 |
+
commitments under this section. Upon any termination of
|
| 429 |
+
this Agreement all provisions survive except for the
|
| 430 |
+
license grant provisions.
|
| 431 |
+
|
| 432 |
+
|
| 433 |
+
1.7. General
|
| 434 |
+
|
| 435 |
+
If you wish to assign this Agreement or your rights and
|
| 436 |
+
obligations, including by merger, consolidation, dissolution
|
| 437 |
+
or operation of law, contact NVIDIA to ask for permission. Any
|
| 438 |
+
attempted assignment not approved by NVIDIA in writing shall
|
| 439 |
+
be void and of no effect. NVIDIA may assign, delegate or
|
| 440 |
+
transfer this Agreement and its rights and obligations, and if
|
| 441 |
+
to a non-affiliate you will be notified.
|
| 442 |
+
|
| 443 |
+
You agree to cooperate with NVIDIA and provide reasonably
|
| 444 |
+
requested information to verify your compliance with this
|
| 445 |
+
Agreement.
|
| 446 |
+
|
| 447 |
+
This Agreement will be governed in all respects by the laws of
|
| 448 |
+
the United States and of the State of Delaware as those laws
|
| 449 |
+
are applied to contracts entered into and performed entirely
|
| 450 |
+
within Delaware by Delaware residents, without regard to the
|
| 451 |
+
conflicts of laws principles. The United Nations Convention on
|
| 452 |
+
Contracts for the International Sale of Goods is specifically
|
| 453 |
+
disclaimed. You agree to all terms of this Agreement in the
|
| 454 |
+
English language.
|
| 455 |
+
|
| 456 |
+
The state or federal courts residing in Santa Clara County,
|
| 457 |
+
California shall have exclusive jurisdiction over any dispute
|
| 458 |
+
or claim arising out of this Agreement. Notwithstanding this,
|
| 459 |
+
you agree that NVIDIA shall still be allowed to apply for
|
| 460 |
+
injunctive remedies or an equivalent type of urgent legal
|
| 461 |
+
relief in any jurisdiction.
|
| 462 |
+
|
| 463 |
+
If any court of competent jurisdiction determines that any
|
| 464 |
+
provision of this Agreement is illegal, invalid or
|
| 465 |
+
unenforceable, such provision will be construed as limited to
|
| 466 |
+
the extent necessary to be consistent with and fully
|
| 467 |
+
enforceable under the law and the remaining provisions will
|
| 468 |
+
remain in full force and effect. Unless otherwise specified,
|
| 469 |
+
remedies are cumulative.
|
| 470 |
+
|
| 471 |
+
Each party acknowledges and agrees that the other is an
|
| 472 |
+
independent contractor in the performance of this Agreement.
|
| 473 |
+
|
| 474 |
+
The SDK has been developed entirely at private expense and is
|
| 475 |
+
“commercial items” consisting of “commercial computer
|
| 476 |
+
software” and “commercial computer software
|
| 477 |
+
documentation” provided with RESTRICTED RIGHTS. Use,
|
| 478 |
+
duplication or disclosure by the U.S. Government or a U.S.
|
| 479 |
+
Government subcontractor is subject to the restrictions in
|
| 480 |
+
this Agreement pursuant to DFARS 227.7202-3(a) or as set forth
|
| 481 |
+
in subparagraphs (c)(1) and (2) of the Commercial Computer
|
| 482 |
+
Software - Restricted Rights clause at FAR 52.227-19, as
|
| 483 |
+
applicable. Contractor/manufacturer is NVIDIA, 2788 San Tomas
|
| 484 |
+
Expressway, Santa Clara, CA 95051.
|
| 485 |
+
|
| 486 |
+
The SDK is subject to United States export laws and
|
| 487 |
+
regulations. You agree that you will not ship, transfer or
|
| 488 |
+
export the SDK into any country, or use the SDK in any manner,
|
| 489 |
+
prohibited by the United States Bureau of Industry and
|
| 490 |
+
Security or economic sanctions regulations administered by the
|
| 491 |
+
U.S. Department of Treasury’s Office of Foreign Assets
|
| 492 |
+
Control (OFAC), or any applicable export laws, restrictions or
|
| 493 |
+
regulations. These laws include restrictions on destinations,
|
| 494 |
+
end users and end use. By accepting this Agreement, you
|
| 495 |
+
confirm that you are not a resident or citizen of any country
|
| 496 |
+
currently embargoed by the U.S. and that you are not otherwise
|
| 497 |
+
prohibited from receiving the SDK.
|
| 498 |
+
|
| 499 |
+
Any notice delivered by NVIDIA to you under this Agreement
|
| 500 |
+
will be delivered via mail, email or fax. You agree that any
|
| 501 |
+
notices that NVIDIA sends you electronically will satisfy any
|
| 502 |
+
legal communication requirements. Please direct your legal
|
| 503 |
+
notices or other correspondence to NVIDIA Corporation, 2788
|
| 504 |
+
San Tomas Expressway, Santa Clara, California 95051, United
|
| 505 |
+
States of America, Attention: Legal Department.
|
| 506 |
+
|
| 507 |
+
This Agreement and any exhibits incorporated into this
|
| 508 |
+
Agreement constitute the entire agreement of the parties with
|
| 509 |
+
respect to the subject matter of this Agreement and supersede
|
| 510 |
+
all prior negotiations or documentation exchanged between the
|
| 511 |
+
parties relating to this SDK license. Any additional and/or
|
| 512 |
+
conflicting terms on documents issued by you are null, void,
|
| 513 |
+
and invalid. Any amendment or waiver under this Agreement
|
| 514 |
+
shall be in writing and signed by representatives of both
|
| 515 |
+
parties.
|
| 516 |
+
|
| 517 |
+
|
| 518 |
+
2. CUDA Toolkit Supplement to Software License Agreement for
|
| 519 |
+
NVIDIA Software Development Kits
|
| 520 |
+
------------------------------------------------------------
|
| 521 |
+
|
| 522 |
+
|
| 523 |
+
Release date: August 16, 2018
|
| 524 |
+
-----------------------------
|
| 525 |
+
|
| 526 |
+
The terms in this supplement govern your use of the NVIDIA
|
| 527 |
+
CUDA Toolkit SDK under the terms of your license agreement
|
| 528 |
+
(“Agreement”) as modified by this supplement. Capitalized
|
| 529 |
+
terms used but not defined below have the meaning assigned to
|
| 530 |
+
them in the Agreement.
|
| 531 |
+
|
| 532 |
+
This supplement is an exhibit to the Agreement and is
|
| 533 |
+
incorporated as an integral part of the Agreement. In the
|
| 534 |
+
event of conflict between the terms in this supplement and the
|
| 535 |
+
terms in the Agreement, the terms in this supplement govern.
|
| 536 |
+
|
| 537 |
+
|
| 538 |
+
2.1. License Scope
|
| 539 |
+
|
| 540 |
+
The SDK is licensed for you to develop applications only for
|
| 541 |
+
use in systems with NVIDIA GPUs.
|
| 542 |
+
|
| 543 |
+
|
| 544 |
+
2.2. Distribution
|
| 545 |
+
|
| 546 |
+
The portions of the SDK that are distributable under the
|
| 547 |
+
Agreement are listed in Attachment A.
|
| 548 |
+
|
| 549 |
+
|
| 550 |
+
2.3. Operating Systems
|
| 551 |
+
|
| 552 |
+
Those portions of the SDK designed exclusively for use on the
|
| 553 |
+
Linux or FreeBSD operating systems, or other operating systems
|
| 554 |
+
derived from the source code to these operating systems, may
|
| 555 |
+
be copied and redistributed for use in accordance with this
|
| 556 |
+
Agreement, provided that the object code files are not
|
| 557 |
+
modified in any way (except for unzipping of compressed
|
| 558 |
+
files).
|
| 559 |
+
|
| 560 |
+
|
| 561 |
+
2.4. Audio and Video Encoders and Decoders
|
| 562 |
+
|
| 563 |
+
You acknowledge and agree that it is your sole responsibility
|
| 564 |
+
to obtain any additional third-party licenses required to
|
| 565 |
+
make, have made, use, have used, sell, import, and offer for
|
| 566 |
+
sale your products or services that include or incorporate any
|
| 567 |
+
third-party software and content relating to audio and/or
|
| 568 |
+
video encoders and decoders from, including but not limited
|
| 569 |
+
to, Microsoft, Thomson, Fraunhofer IIS, Sisvel S.p.A.,
|
| 570 |
+
MPEG-LA, and Coding Technologies. NVIDIA does not grant to you
|
| 571 |
+
under this Agreement any necessary patent or other rights with
|
| 572 |
+
respect to any audio and/or video encoders and decoders.
|
| 573 |
+
|
| 574 |
+
|
| 575 |
+
2.5. Licensing
|
| 576 |
+
|
| 577 |
+
If the distribution terms in this Agreement are not suitable
|
| 578 |
+
for your organization, or for any questions regarding this
|
| 579 |
+
Agreement, please contact NVIDIA at
|
| 580 |
+
nvidia-compute-license-questions@nvidia.com.
|
| 581 |
+
|
| 582 |
+
|
| 583 |
+
2.6. Attachment A
|
| 584 |
+
|
| 585 |
+
The following portions of the SDK are distributable under the
|
| 586 |
+
Agreement:
|
| 587 |
+
|
| 588 |
+
Component
|
| 589 |
+
|
| 590 |
+
CUDA Runtime
|
| 591 |
+
|
| 592 |
+
Windows
|
| 593 |
+
|
| 594 |
+
cudart.dll, cudart_static.lib, cudadevrt.lib
|
| 595 |
+
|
| 596 |
+
Mac OSX
|
| 597 |
+
|
| 598 |
+
libcudart.dylib, libcudart_static.a, libcudadevrt.a
|
| 599 |
+
|
| 600 |
+
Linux
|
| 601 |
+
|
| 602 |
+
libcudart.so, libcudart_static.a, libcudadevrt.a
|
| 603 |
+
|
| 604 |
+
Android
|
| 605 |
+
|
| 606 |
+
libcudart.so, libcudart_static.a, libcudadevrt.a
|
| 607 |
+
|
| 608 |
+
Component
|
| 609 |
+
|
| 610 |
+
CUDA FFT Library
|
| 611 |
+
|
| 612 |
+
Windows
|
| 613 |
+
|
| 614 |
+
cufft.dll, cufftw.dll, cufft.lib, cufftw.lib
|
| 615 |
+
|
| 616 |
+
Mac OSX
|
| 617 |
+
|
| 618 |
+
libcufft.dylib, libcufft_static.a, libcufftw.dylib,
|
| 619 |
+
libcufftw_static.a
|
| 620 |
+
|
| 621 |
+
Linux
|
| 622 |
+
|
| 623 |
+
libcufft.so, libcufft_static.a, libcufftw.so,
|
| 624 |
+
libcufftw_static.a
|
| 625 |
+
|
| 626 |
+
Android
|
| 627 |
+
|
| 628 |
+
libcufft.so, libcufft_static.a, libcufftw.so,
|
| 629 |
+
libcufftw_static.a
|
| 630 |
+
|
| 631 |
+
Component
|
| 632 |
+
|
| 633 |
+
CUDA BLAS Library
|
| 634 |
+
|
| 635 |
+
Windows
|
| 636 |
+
|
| 637 |
+
cublas.dll, cublasLt.dll
|
| 638 |
+
|
| 639 |
+
Mac OSX
|
| 640 |
+
|
| 641 |
+
libcublas.dylib, libcublasLt.dylib, libcublas_static.a,
|
| 642 |
+
libcublasLt_static.a
|
| 643 |
+
|
| 644 |
+
Linux
|
| 645 |
+
|
| 646 |
+
libcublas.so, libcublasLt.so, libcublas_static.a,
|
| 647 |
+
libcublasLt_static.a
|
| 648 |
+
|
| 649 |
+
Android
|
| 650 |
+
|
| 651 |
+
libcublas.so, libcublasLt.so, libcublas_static.a,
|
| 652 |
+
libcublasLt_static.a
|
| 653 |
+
|
| 654 |
+
Component
|
| 655 |
+
|
| 656 |
+
NVIDIA "Drop-in" BLAS Library
|
| 657 |
+
|
| 658 |
+
Windows
|
| 659 |
+
|
| 660 |
+
nvblas.dll
|
| 661 |
+
|
| 662 |
+
Mac OSX
|
| 663 |
+
|
| 664 |
+
libnvblas.dylib
|
| 665 |
+
|
| 666 |
+
Linux
|
| 667 |
+
|
| 668 |
+
libnvblas.so
|
| 669 |
+
|
| 670 |
+
Component
|
| 671 |
+
|
| 672 |
+
CUDA Sparse Matrix Library
|
| 673 |
+
|
| 674 |
+
Windows
|
| 675 |
+
|
| 676 |
+
cusparse.dll, cusparse.lib
|
| 677 |
+
|
| 678 |
+
Mac OSX
|
| 679 |
+
|
| 680 |
+
libcusparse.dylib, libcusparse_static.a
|
| 681 |
+
|
| 682 |
+
Linux
|
| 683 |
+
|
| 684 |
+
libcusparse.so, libcusparse_static.a
|
| 685 |
+
|
| 686 |
+
Android
|
| 687 |
+
|
| 688 |
+
libcusparse.so, libcusparse_static.a
|
| 689 |
+
|
| 690 |
+
Component
|
| 691 |
+
|
| 692 |
+
CUDA Linear Solver Library
|
| 693 |
+
|
| 694 |
+
Windows
|
| 695 |
+
|
| 696 |
+
cusolver.dll, cusolver.lib
|
| 697 |
+
|
| 698 |
+
Mac OSX
|
| 699 |
+
|
| 700 |
+
libcusolver.dylib, libcusolver_static.a
|
| 701 |
+
|
| 702 |
+
Linux
|
| 703 |
+
|
| 704 |
+
libcusolver.so, libcusolver_static.a
|
| 705 |
+
|
| 706 |
+
Android
|
| 707 |
+
|
| 708 |
+
libcusolver.so, libcusolver_static.a
|
| 709 |
+
|
| 710 |
+
Component
|
| 711 |
+
|
| 712 |
+
CUDA Random Number Generation Library
|
| 713 |
+
|
| 714 |
+
Windows
|
| 715 |
+
|
| 716 |
+
curand.dll, curand.lib
|
| 717 |
+
|
| 718 |
+
Mac OSX
|
| 719 |
+
|
| 720 |
+
libcurand.dylib, libcurand_static.a
|
| 721 |
+
|
| 722 |
+
Linux
|
| 723 |
+
|
| 724 |
+
libcurand.so, libcurand_static.a
|
| 725 |
+
|
| 726 |
+
Android
|
| 727 |
+
|
| 728 |
+
libcurand.so, libcurand_static.a
|
| 729 |
+
|
| 730 |
+
Component
|
| 731 |
+
|
| 732 |
+
CUDA Accelerated Graph Library
|
| 733 |
+
|
| 734 |
+
Component
|
| 735 |
+
|
| 736 |
+
NVIDIA Performance Primitives Library
|
| 737 |
+
|
| 738 |
+
Windows
|
| 739 |
+
|
| 740 |
+
nppc.dll, nppc.lib, nppial.dll, nppial.lib, nppicc.dll,
|
| 741 |
+
nppicc.lib, nppicom.dll, nppicom.lib, nppidei.dll,
|
| 742 |
+
nppidei.lib, nppif.dll, nppif.lib, nppig.dll, nppig.lib,
|
| 743 |
+
nppim.dll, nppim.lib, nppist.dll, nppist.lib, nppisu.dll,
|
| 744 |
+
nppisu.lib, nppitc.dll, nppitc.lib, npps.dll, npps.lib
|
| 745 |
+
|
| 746 |
+
Mac OSX
|
| 747 |
+
|
| 748 |
+
libnppc.dylib, libnppc_static.a, libnppial.dylib,
|
| 749 |
+
libnppial_static.a, libnppicc.dylib, libnppicc_static.a,
|
| 750 |
+
libnppicom.dylib, libnppicom_static.a, libnppidei.dylib,
|
| 751 |
+
libnppidei_static.a, libnppif.dylib, libnppif_static.a,
|
| 752 |
+
libnppig.dylib, libnppig_static.a, libnppim.dylib,
|
| 753 |
+
libnppisu_static.a, libnppitc.dylib, libnppitc_static.a,
|
| 754 |
+
libnpps.dylib, libnpps_static.a
|
| 755 |
+
|
| 756 |
+
Linux
|
| 757 |
+
|
| 758 |
+
libnppc.so, libnppc_static.a, libnppial.so,
|
| 759 |
+
libnppial_static.a, libnppicc.so, libnppicc_static.a,
|
| 760 |
+
libnppicom.so, libnppicom_static.a, libnppidei.so,
|
| 761 |
+
libnppidei_static.a, libnppif.so, libnppif_static.a
|
| 762 |
+
libnppig.so, libnppig_static.a, libnppim.so,
|
| 763 |
+
libnppim_static.a, libnppist.so, libnppist_static.a,
|
| 764 |
+
libnppisu.so, libnppisu_static.a, libnppitc.so
|
| 765 |
+
libnppitc_static.a, libnpps.so, libnpps_static.a
|
| 766 |
+
|
| 767 |
+
Android
|
| 768 |
+
|
| 769 |
+
libnppc.so, libnppc_static.a, libnppial.so,
|
| 770 |
+
libnppial_static.a, libnppicc.so, libnppicc_static.a,
|
| 771 |
+
libnppicom.so, libnppicom_static.a, libnppidei.so,
|
| 772 |
+
libnppidei_static.a, libnppif.so, libnppif_static.a
|
| 773 |
+
libnppig.so, libnppig_static.a, libnppim.so,
|
| 774 |
+
libnppim_static.a, libnppist.so, libnppist_static.a,
|
| 775 |
+
libnppisu.so, libnppisu_static.a, libnppitc.so
|
| 776 |
+
libnppitc_static.a, libnpps.so, libnpps_static.a
|
| 777 |
+
|
| 778 |
+
Component
|
| 779 |
+
|
| 780 |
+
NVIDIA JPEG Library
|
| 781 |
+
|
| 782 |
+
Linux
|
| 783 |
+
|
| 784 |
+
libnvjpeg.so, libnvjpeg_static.a
|
| 785 |
+
|
| 786 |
+
Component
|
| 787 |
+
|
| 788 |
+
Internal common library required for statically linking to
|
| 789 |
+
cuBLAS, cuSPARSE, cuFFT, cuRAND, nvJPEG and NPP
|
| 790 |
+
|
| 791 |
+
Mac OSX
|
| 792 |
+
|
| 793 |
+
libculibos.a
|
| 794 |
+
|
| 795 |
+
Linux
|
| 796 |
+
|
| 797 |
+
libculibos.a
|
| 798 |
+
|
| 799 |
+
Component
|
| 800 |
+
|
| 801 |
+
NVIDIA Runtime Compilation Library and Header
|
| 802 |
+
|
| 803 |
+
All
|
| 804 |
+
|
| 805 |
+
nvrtc.h
|
| 806 |
+
|
| 807 |
+
Windows
|
| 808 |
+
|
| 809 |
+
nvrtc.dll, nvrtc-builtins.dll
|
| 810 |
+
|
| 811 |
+
Mac OSX
|
| 812 |
+
|
| 813 |
+
libnvrtc.dylib, libnvrtc-builtins.dylib
|
| 814 |
+
|
| 815 |
+
Linux
|
| 816 |
+
|
| 817 |
+
libnvrtc.so, libnvrtc-builtins.so
|
| 818 |
+
|
| 819 |
+
Component
|
| 820 |
+
|
| 821 |
+
NVIDIA Optimizing Compiler Library
|
| 822 |
+
|
| 823 |
+
Windows
|
| 824 |
+
|
| 825 |
+
nvvm.dll
|
| 826 |
+
|
| 827 |
+
Mac OSX
|
| 828 |
+
|
| 829 |
+
libnvvm.dylib
|
| 830 |
+
|
| 831 |
+
Linux
|
| 832 |
+
|
| 833 |
+
libnvvm.so
|
| 834 |
+
|
| 835 |
+
Component
|
| 836 |
+
|
| 837 |
+
NVIDIA Common Device Math Functions Library
|
| 838 |
+
|
| 839 |
+
Windows
|
| 840 |
+
|
| 841 |
+
libdevice.10.bc
|
| 842 |
+
|
| 843 |
+
Mac OSX
|
| 844 |
+
|
| 845 |
+
libdevice.10.bc
|
| 846 |
+
|
| 847 |
+
Linux
|
| 848 |
+
|
| 849 |
+
libdevice.10.bc
|
| 850 |
+
|
| 851 |
+
Component
|
| 852 |
+
|
| 853 |
+
CUDA Occupancy Calculation Header Library
|
| 854 |
+
|
| 855 |
+
All
|
| 856 |
+
|
| 857 |
+
cuda_occupancy.h
|
| 858 |
+
|
| 859 |
+
Component
|
| 860 |
+
|
| 861 |
+
CUDA Half Precision Headers
|
| 862 |
+
|
| 863 |
+
All
|
| 864 |
+
|
| 865 |
+
cuda_fp16.h, cuda_fp16.hpp
|
| 866 |
+
|
| 867 |
+
Component
|
| 868 |
+
|
| 869 |
+
CUDA Profiling Tools Interface (CUPTI) Library
|
| 870 |
+
|
| 871 |
+
Windows
|
| 872 |
+
|
| 873 |
+
cupti.dll
|
| 874 |
+
|
| 875 |
+
Mac OSX
|
| 876 |
+
|
| 877 |
+
libcupti.dylib
|
| 878 |
+
|
| 879 |
+
Linux
|
| 880 |
+
|
| 881 |
+
libcupti.so
|
| 882 |
+
|
| 883 |
+
Component
|
| 884 |
+
|
| 885 |
+
NVIDIA Tools Extension Library
|
| 886 |
+
|
| 887 |
+
Windows
|
| 888 |
+
|
| 889 |
+
nvToolsExt.dll, nvToolsExt.lib
|
| 890 |
+
|
| 891 |
+
Mac OSX
|
| 892 |
+
|
| 893 |
+
libnvToolsExt.dylib
|
| 894 |
+
|
| 895 |
+
Linux
|
| 896 |
+
|
| 897 |
+
libnvToolsExt.so
|
| 898 |
+
|
| 899 |
+
Component
|
| 900 |
+
|
| 901 |
+
NVIDIA CUDA Driver Libraries
|
| 902 |
+
|
| 903 |
+
Linux
|
| 904 |
+
|
| 905 |
+
libcuda.so, libnvidia-fatbinaryloader.so,
|
| 906 |
+
libnvidia-ptxjitcompiler.so
|
| 907 |
+
|
| 908 |
+
The NVIDIA CUDA Driver Libraries are only distributable in
|
| 909 |
+
applications that meet this criteria:
|
| 910 |
+
|
| 911 |
+
1. The application was developed starting from a NVIDIA CUDA
|
| 912 |
+
container obtained from Docker Hub or the NVIDIA GPU
|
| 913 |
+
Cloud, and
|
| 914 |
+
|
| 915 |
+
2. The resulting application is packaged as a Docker
|
| 916 |
+
container and distributed to users on Docker Hub or the
|
| 917 |
+
NVIDIA GPU Cloud only.
|
| 918 |
+
|
| 919 |
+
|
| 920 |
+
2.7. Attachment B
|
| 921 |
+
|
| 922 |
+
|
| 923 |
+
Additional Licensing Obligations
|
| 924 |
+
|
| 925 |
+
The following third party components included in the SOFTWARE
|
| 926 |
+
are licensed to Licensee pursuant to the following terms and
|
| 927 |
+
conditions:
|
| 928 |
+
|
| 929 |
+
1. Licensee's use of the GDB third party component is
|
| 930 |
+
subject to the terms and conditions of GNU GPL v3:
|
| 931 |
+
|
| 932 |
+
This product includes copyrighted third-party software licensed
|
| 933 |
+
under the terms of the GNU General Public License v3 ("GPL v3").
|
| 934 |
+
All third-party software packages are copyright by their respective
|
| 935 |
+
authors. GPL v3 terms and conditions are hereby incorporated into
|
| 936 |
+
the Agreement by this reference: http://www.gnu.org/licenses/gpl.txt
|
| 937 |
+
|
| 938 |
+
Consistent with these licensing requirements, the software
|
| 939 |
+
listed below is provided under the terms of the specified
|
| 940 |
+
open source software licenses. To obtain source code for
|
| 941 |
+
software provided under licenses that require
|
| 942 |
+
redistribution of source code, including the GNU General
|
| 943 |
+
Public License (GPL) and GNU Lesser General Public License
|
| 944 |
+
(LGPL), contact oss-requests@nvidia.com. This offer is
|
| 945 |
+
valid for a period of three (3) years from the date of the
|
| 946 |
+
distribution of this product by NVIDIA CORPORATION.
|
| 947 |
+
|
| 948 |
+
Component License
|
| 949 |
+
CUDA-GDB GPL v3
|
| 950 |
+
|
| 951 |
+
2. Licensee represents and warrants that any and all third
|
| 952 |
+
party licensing and/or royalty payment obligations in
|
| 953 |
+
connection with Licensee's use of the H.264 video codecs
|
| 954 |
+
are solely the responsibility of Licensee.
|
| 955 |
+
|
| 956 |
+
3. Licensee's use of the Thrust library is subject to the
|
| 957 |
+
terms and conditions of the Apache License Version 2.0.
|
| 958 |
+
All third-party software packages are copyright by their
|
| 959 |
+
respective authors. Apache License Version 2.0 terms and
|
| 960 |
+
conditions are hereby incorporated into the Agreement by
|
| 961 |
+
this reference.
|
| 962 |
+
http://www.apache.org/licenses/LICENSE-2.0.html
|
| 963 |
+
|
| 964 |
+
In addition, Licensee acknowledges the following notice:
|
| 965 |
+
Thrust includes source code from the Boost Iterator,
|
| 966 |
+
Tuple, System, and Random Number libraries.
|
| 967 |
+
|
| 968 |
+
Boost Software License - Version 1.0 - August 17th, 2003
|
| 969 |
+
. . . .
|
| 970 |
+
|
| 971 |
+
Permission is hereby granted, free of charge, to any person or
|
| 972 |
+
organization obtaining a copy of the software and accompanying
|
| 973 |
+
documentation covered by this license (the "Software") to use,
|
| 974 |
+
reproduce, display, distribute, execute, and transmit the Software,
|
| 975 |
+
and to prepare derivative works of the Software, and to permit
|
| 976 |
+
third-parties to whom the Software is furnished to do so, all
|
| 977 |
+
subject to the following:
|
| 978 |
+
|
| 979 |
+
The copyright notices in the Software and this entire statement,
|
| 980 |
+
including the above license grant, this restriction and the following
|
| 981 |
+
disclaimer, must be included in all copies of the Software, in whole
|
| 982 |
+
or in part, and all derivative works of the Software, unless such
|
| 983 |
+
copies or derivative works are solely in the form of machine-executable
|
| 984 |
+
object code generated by a source language processor.
|
| 985 |
+
|
| 986 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 987 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 988 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
|
| 989 |
+
NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
|
| 990 |
+
ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR
|
| 991 |
+
OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING
|
| 992 |
+
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
| 993 |
+
OTHER DEALINGS IN THE SOFTWARE.
|
| 994 |
+
|
| 995 |
+
4. Licensee's use of the LLVM third party component is
|
| 996 |
+
subject to the following terms and conditions:
|
| 997 |
+
|
| 998 |
+
======================================================
|
| 999 |
+
LLVM Release License
|
| 1000 |
+
======================================================
|
| 1001 |
+
University of Illinois/NCSA
|
| 1002 |
+
Open Source License
|
| 1003 |
+
|
| 1004 |
+
Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign.
|
| 1005 |
+
All rights reserved.
|
| 1006 |
+
|
| 1007 |
+
Developed by:
|
| 1008 |
+
|
| 1009 |
+
LLVM Team
|
| 1010 |
+
|
| 1011 |
+
University of Illinois at Urbana-Champaign
|
| 1012 |
+
|
| 1013 |
+
http://llvm.org
|
| 1014 |
+
|
| 1015 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 1016 |
+
of this software and associated documentation files (the "Software"), to
|
| 1017 |
+
deal with the Software without restriction, including without limitation the
|
| 1018 |
+
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
| 1019 |
+
sell copies of the Software, and to permit persons to whom the Software is
|
| 1020 |
+
furnished to do so, subject to the following conditions:
|
| 1021 |
+
|
| 1022 |
+
* Redistributions of source code must retain the above copyright notice,
|
| 1023 |
+
this list of conditions and the following disclaimers.
|
| 1024 |
+
|
| 1025 |
+
* Redistributions in binary form must reproduce the above copyright
|
| 1026 |
+
notice, this list of conditions and the following disclaimers in the
|
| 1027 |
+
documentation and/or other materials provided with the distribution.
|
| 1028 |
+
|
| 1029 |
+
* Neither the names of the LLVM Team, University of Illinois at Urbana-
|
| 1030 |
+
Champaign, nor the names of its contributors may be used to endorse or
|
| 1031 |
+
promote products derived from this Software without specific prior
|
| 1032 |
+
written permission.
|
| 1033 |
+
|
| 1034 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 1035 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 1036 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
| 1037 |
+
THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
|
| 1038 |
+
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
| 1039 |
+
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
| 1040 |
+
DEALINGS WITH THE SOFTWARE.
|
| 1041 |
+
|
| 1042 |
+
5. Licensee's use (e.g. nvprof) of the PCRE third party
|
| 1043 |
+
component is subject to the following terms and
|
| 1044 |
+
conditions:
|
| 1045 |
+
|
| 1046 |
+
------------
|
| 1047 |
+
PCRE LICENCE
|
| 1048 |
+
------------
|
| 1049 |
+
PCRE is a library of functions to support regular expressions whose syntax
|
| 1050 |
+
and semantics are as close as possible to those of the Perl 5 language.
|
| 1051 |
+
Release 8 of PCRE is distributed under the terms of the "BSD" licence, as
|
| 1052 |
+
specified below. The documentation for PCRE, supplied in the "doc"
|
| 1053 |
+
directory, is distributed under the same terms as the software itself. The
|
| 1054 |
+
basic library functions are written in C and are freestanding. Also
|
| 1055 |
+
included in the distribution is a set of C++ wrapper functions, and a just-
|
| 1056 |
+
in-time compiler that can be used to optimize pattern matching. These are
|
| 1057 |
+
both optional features that can be omitted when the library is built.
|
| 1058 |
+
|
| 1059 |
+
THE BASIC LIBRARY FUNCTIONS
|
| 1060 |
+
---------------------------
|
| 1061 |
+
Written by: Philip Hazel
|
| 1062 |
+
Email local part: ph10
|
| 1063 |
+
Email domain: cam.ac.uk
|
| 1064 |
+
University of Cambridge Computing Service,
|
| 1065 |
+
Cambridge, England.
|
| 1066 |
+
Copyright (c) 1997-2012 University of Cambridge
|
| 1067 |
+
All rights reserved.
|
| 1068 |
+
|
| 1069 |
+
PCRE JUST-IN-TIME COMPILATION SUPPORT
|
| 1070 |
+
-------------------------------------
|
| 1071 |
+
Written by: Zoltan Herczeg
|
| 1072 |
+
Email local part: hzmester
|
| 1073 |
+
Email domain: freemail.hu
|
| 1074 |
+
Copyright(c) 2010-2012 Zoltan Herczeg
|
| 1075 |
+
All rights reserved.
|
| 1076 |
+
|
| 1077 |
+
STACK-LESS JUST-IN-TIME COMPILER
|
| 1078 |
+
--------------------------------
|
| 1079 |
+
Written by: Zoltan Herczeg
|
| 1080 |
+
Email local part: hzmester
|
| 1081 |
+
Email domain: freemail.hu
|
| 1082 |
+
Copyright(c) 2009-2012 Zoltan Herczeg
|
| 1083 |
+
All rights reserved.
|
| 1084 |
+
|
| 1085 |
+
THE C++ WRAPPER FUNCTIONS
|
| 1086 |
+
-------------------------
|
| 1087 |
+
Contributed by: Google Inc.
|
| 1088 |
+
Copyright (c) 2007-2012, Google Inc.
|
| 1089 |
+
All rights reserved.
|
| 1090 |
+
|
| 1091 |
+
THE "BSD" LICENCE
|
| 1092 |
+
-----------------
|
| 1093 |
+
Redistribution and use in source and binary forms, with or without
|
| 1094 |
+
modification, are permitted provided that the following conditions are met:
|
| 1095 |
+
|
| 1096 |
+
* Redistributions of source code must retain the above copyright notice,
|
| 1097 |
+
this list of conditions and the following disclaimer.
|
| 1098 |
+
|
| 1099 |
+
* Redistributions in binary form must reproduce the above copyright
|
| 1100 |
+
notice, this list of conditions and the following disclaimer in the
|
| 1101 |
+
documentation and/or other materials provided with the distribution.
|
| 1102 |
+
|
| 1103 |
+
* Neither the name of the University of Cambridge nor the name of Google
|
| 1104 |
+
Inc. nor the names of their contributors may be used to endorse or
|
| 1105 |
+
promote products derived from this software without specific prior
|
| 1106 |
+
written permission.
|
| 1107 |
+
|
| 1108 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
| 1109 |
+
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
| 1110 |
+
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
| 1111 |
+
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
|
| 1112 |
+
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
| 1113 |
+
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
| 1114 |
+
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
| 1115 |
+
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
| 1116 |
+
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
| 1117 |
+
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 1118 |
+
POSSIBILITY OF SUCH DAMAGE.
|
| 1119 |
+
|
| 1120 |
+
6. Some of the cuBLAS library routines were written by or
|
| 1121 |
+
derived from code written by Vasily Volkov and are subject
|
| 1122 |
+
to the Modified Berkeley Software Distribution License as
|
| 1123 |
+
follows:
|
| 1124 |
+
|
| 1125 |
+
Copyright (c) 2007-2009, Regents of the University of California
|
| 1126 |
+
|
| 1127 |
+
All rights reserved.
|
| 1128 |
+
|
| 1129 |
+
Redistribution and use in source and binary forms, with or without
|
| 1130 |
+
modification, are permitted provided that the following conditions are
|
| 1131 |
+
met:
|
| 1132 |
+
* Redistributions of source code must retain the above copyright
|
| 1133 |
+
notice, this list of conditions and the following disclaimer.
|
| 1134 |
+
* Redistributions in binary form must reproduce the above
|
| 1135 |
+
copyright notice, this list of conditions and the following
|
| 1136 |
+
disclaimer in the documentation and/or other materials provided
|
| 1137 |
+
with the distribution.
|
| 1138 |
+
* Neither the name of the University of California, Berkeley nor
|
| 1139 |
+
the names of its contributors may be used to endorse or promote
|
| 1140 |
+
products derived from this software without specific prior
|
| 1141 |
+
written permission.
|
| 1142 |
+
|
| 1143 |
+
THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
|
| 1144 |
+
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
| 1145 |
+
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
| 1146 |
+
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
|
| 1147 |
+
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
| 1148 |
+
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
| 1149 |
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
|
| 1150 |
+
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
|
| 1151 |
+
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
|
| 1152 |
+
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 1153 |
+
POSSIBILITY OF SUCH DAMAGE.
|
| 1154 |
+
|
| 1155 |
+
7. Some of the cuBLAS library routines were written by or
|
| 1156 |
+
derived from code written by Davide Barbieri and are
|
| 1157 |
+
subject to the Modified Berkeley Software Distribution
|
| 1158 |
+
License as follows:
|
| 1159 |
+
|
| 1160 |
+
Copyright (c) 2008-2009 Davide Barbieri @ University of Rome Tor Vergata.
|
| 1161 |
+
|
| 1162 |
+
All rights reserved.
|
| 1163 |
+
|
| 1164 |
+
Redistribution and use in source and binary forms, with or without
|
| 1165 |
+
modification, are permitted provided that the following conditions are
|
| 1166 |
+
met:
|
| 1167 |
+
* Redistributions of source code must retain the above copyright
|
| 1168 |
+
notice, this list of conditions and the following disclaimer.
|
| 1169 |
+
* Redistributions in binary form must reproduce the above
|
| 1170 |
+
copyright notice, this list of conditions and the following
|
| 1171 |
+
disclaimer in the documentation and/or other materials provided
|
| 1172 |
+
with the distribution.
|
| 1173 |
+
* The name of the author may not be used to endorse or promote
|
| 1174 |
+
products derived from this software without specific prior
|
| 1175 |
+
written permission.
|
| 1176 |
+
|
| 1177 |
+
THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
|
| 1178 |
+
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
| 1179 |
+
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
| 1180 |
+
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
|
| 1181 |
+
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
| 1182 |
+
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
| 1183 |
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
|
| 1184 |
+
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
|
| 1185 |
+
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
|
| 1186 |
+
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 1187 |
+
POSSIBILITY OF SUCH DAMAGE.
|
| 1188 |
+
|
| 1189 |
+
8. Some of the cuBLAS library routines were derived from
|
| 1190 |
+
code developed by the University of Tennessee and are
|
| 1191 |
+
subject to the Modified Berkeley Software Distribution
|
| 1192 |
+
License as follows:
|
| 1193 |
+
|
| 1194 |
+
Copyright (c) 2010 The University of Tennessee.
|
| 1195 |
+
|
| 1196 |
+
All rights reserved.
|
| 1197 |
+
|
| 1198 |
+
Redistribution and use in source and binary forms, with or without
|
| 1199 |
+
modification, are permitted provided that the following conditions are
|
| 1200 |
+
met:
|
| 1201 |
+
* Redistributions of source code must retain the above copyright
|
| 1202 |
+
notice, this list of conditions and the following disclaimer.
|
| 1203 |
+
* Redistributions in binary form must reproduce the above
|
| 1204 |
+
copyright notice, this list of conditions and the following
|
| 1205 |
+
disclaimer listed in this license in the documentation and/or
|
| 1206 |
+
other materials provided with the distribution.
|
| 1207 |
+
* Neither the name of the copyright holders nor the names of its
|
| 1208 |
+
contributors may be used to endorse or promote products derived
|
| 1209 |
+
from this software without specific prior written permission.
|
| 1210 |
+
|
| 1211 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1212 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1213 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1214 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1215 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1216 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1217 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1218 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1219 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1220 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1221 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1222 |
+
|
| 1223 |
+
9. Some of the cuBLAS library routines were written by or
|
| 1224 |
+
derived from code written by Jonathan Hogg and are subject
|
| 1225 |
+
to the Modified Berkeley Software Distribution License as
|
| 1226 |
+
follows:
|
| 1227 |
+
|
| 1228 |
+
Copyright (c) 2012, The Science and Technology Facilities Council (STFC).
|
| 1229 |
+
|
| 1230 |
+
All rights reserved.
|
| 1231 |
+
|
| 1232 |
+
Redistribution and use in source and binary forms, with or without
|
| 1233 |
+
modification, are permitted provided that the following conditions are
|
| 1234 |
+
met:
|
| 1235 |
+
* Redistributions of source code must retain the above copyright
|
| 1236 |
+
notice, this list of conditions and the following disclaimer.
|
| 1237 |
+
* Redistributions in binary form must reproduce the above
|
| 1238 |
+
copyright notice, this list of conditions and the following
|
| 1239 |
+
disclaimer in the documentation and/or other materials provided
|
| 1240 |
+
with the distribution.
|
| 1241 |
+
* Neither the name of the STFC nor the names of its contributors
|
| 1242 |
+
may be used to endorse or promote products derived from this
|
| 1243 |
+
software without specific prior written permission.
|
| 1244 |
+
|
| 1245 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1246 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1247 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1248 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE STFC BE
|
| 1249 |
+
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
| 1250 |
+
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
| 1251 |
+
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
|
| 1252 |
+
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
| 1253 |
+
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
|
| 1254 |
+
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
|
| 1255 |
+
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1256 |
+
|
| 1257 |
+
10. Some of the cuBLAS library routines were written by or
|
| 1258 |
+
derived from code written by Ahmad M. Abdelfattah, David
|
| 1259 |
+
Keyes, and Hatem Ltaief, and are subject to the Apache
|
| 1260 |
+
License, Version 2.0, as follows:
|
| 1261 |
+
|
| 1262 |
+
-- (C) Copyright 2013 King Abdullah University of Science and Technology
|
| 1263 |
+
Authors:
|
| 1264 |
+
Ahmad Abdelfattah (ahmad.ahmad@kaust.edu.sa)
|
| 1265 |
+
David Keyes (david.keyes@kaust.edu.sa)
|
| 1266 |
+
Hatem Ltaief (hatem.ltaief@kaust.edu.sa)
|
| 1267 |
+
|
| 1268 |
+
Redistribution and use in source and binary forms, with or without
|
| 1269 |
+
modification, are permitted provided that the following conditions
|
| 1270 |
+
are met:
|
| 1271 |
+
|
| 1272 |
+
* Redistributions of source code must retain the above copyright
|
| 1273 |
+
notice, this list of conditions and the following disclaimer.
|
| 1274 |
+
* Redistributions in binary form must reproduce the above copyright
|
| 1275 |
+
notice, this list of conditions and the following disclaimer in the
|
| 1276 |
+
documentation and/or other materials provided with the distribution.
|
| 1277 |
+
* Neither the name of the King Abdullah University of Science and
|
| 1278 |
+
Technology nor the names of its contributors may be used to endorse
|
| 1279 |
+
or promote products derived from this software without specific prior
|
| 1280 |
+
written permission.
|
| 1281 |
+
|
| 1282 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1283 |
+
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1284 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1285 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1286 |
+
HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1287 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1288 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1289 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1290 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1291 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1292 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
|
| 1293 |
+
|
| 1294 |
+
11. Some of the cuSPARSE library routines were written by or
|
| 1295 |
+
derived from code written by Li-Wen Chang and are subject
|
| 1296 |
+
to the NCSA Open Source License as follows:
|
| 1297 |
+
|
| 1298 |
+
Copyright (c) 2012, University of Illinois.
|
| 1299 |
+
|
| 1300 |
+
All rights reserved.
|
| 1301 |
+
|
| 1302 |
+
Developed by: IMPACT Group, University of Illinois, http://impact.crhc.illinois.edu
|
| 1303 |
+
|
| 1304 |
+
Permission is hereby granted, free of charge, to any person obtaining
|
| 1305 |
+
a copy of this software and associated documentation files (the
|
| 1306 |
+
"Software"), to deal with the Software without restriction, including
|
| 1307 |
+
without limitation the rights to use, copy, modify, merge, publish,
|
| 1308 |
+
distribute, sublicense, and/or sell copies of the Software, and to
|
| 1309 |
+
permit persons to whom the Software is furnished to do so, subject to
|
| 1310 |
+
the following conditions:
|
| 1311 |
+
* Redistributions of source code must retain the above copyright
|
| 1312 |
+
notice, this list of conditions and the following disclaimer.
|
| 1313 |
+
* Redistributions in binary form must reproduce the above
|
| 1314 |
+
copyright notice, this list of conditions and the following
|
| 1315 |
+
disclaimers in the documentation and/or other materials provided
|
| 1316 |
+
with the distribution.
|
| 1317 |
+
* Neither the names of IMPACT Group, University of Illinois, nor
|
| 1318 |
+
the names of its contributors may be used to endorse or promote
|
| 1319 |
+
products derived from this Software without specific prior
|
| 1320 |
+
written permission.
|
| 1321 |
+
|
| 1322 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 1323 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 1324 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
| 1325 |
+
NONINFRINGEMENT. IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT
|
| 1326 |
+
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
| 1327 |
+
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
| 1328 |
+
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE
|
| 1329 |
+
SOFTWARE.
|
| 1330 |
+
|
| 1331 |
+
12. Some of the cuRAND library routines were written by or
|
| 1332 |
+
derived from code written by Mutsuo Saito and Makoto
|
| 1333 |
+
Matsumoto and are subject to the following license:
|
| 1334 |
+
|
| 1335 |
+
Copyright (c) 2009, 2010 Mutsuo Saito, Makoto Matsumoto and Hiroshima
|
| 1336 |
+
University. All rights reserved.
|
| 1337 |
+
|
| 1338 |
+
Copyright (c) 2011 Mutsuo Saito, Makoto Matsumoto, Hiroshima
|
| 1339 |
+
University and University of Tokyo. All rights reserved.
|
| 1340 |
+
|
| 1341 |
+
Redistribution and use in source and binary forms, with or without
|
| 1342 |
+
modification, are permitted provided that the following conditions are
|
| 1343 |
+
met:
|
| 1344 |
+
* Redistributions of source code must retain the above copyright
|
| 1345 |
+
notice, this list of conditions and the following disclaimer.
|
| 1346 |
+
* Redistributions in binary form must reproduce the above
|
| 1347 |
+
copyright notice, this list of conditions and the following
|
| 1348 |
+
disclaimer in the documentation and/or other materials provided
|
| 1349 |
+
with the distribution.
|
| 1350 |
+
* Neither the name of the Hiroshima University nor the names of
|
| 1351 |
+
its contributors may be used to endorse or promote products
|
| 1352 |
+
derived from this software without specific prior written
|
| 1353 |
+
permission.
|
| 1354 |
+
|
| 1355 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1356 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1357 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1358 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1359 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1360 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1361 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1362 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1363 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1364 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1365 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1366 |
+
|
| 1367 |
+
13. Some of the cuRAND library routines were derived from
|
| 1368 |
+
code developed by D. E. Shaw Research and are subject to
|
| 1369 |
+
the following license:
|
| 1370 |
+
|
| 1371 |
+
Copyright 2010-2011, D. E. Shaw Research.
|
| 1372 |
+
|
| 1373 |
+
All rights reserved.
|
| 1374 |
+
|
| 1375 |
+
Redistribution and use in source and binary forms, with or without
|
| 1376 |
+
modification, are permitted provided that the following conditions are
|
| 1377 |
+
met:
|
| 1378 |
+
* Redistributions of source code must retain the above copyright
|
| 1379 |
+
notice, this list of conditions, and the following disclaimer.
|
| 1380 |
+
* Redistributions in binary form must reproduce the above
|
| 1381 |
+
copyright notice, this list of conditions, and the following
|
| 1382 |
+
disclaimer in the documentation and/or other materials provided
|
| 1383 |
+
with the distribution.
|
| 1384 |
+
* Neither the name of D. E. Shaw Research nor the names of its
|
| 1385 |
+
contributors may be used to endorse or promote products derived
|
| 1386 |
+
from this software without specific prior written permission.
|
| 1387 |
+
|
| 1388 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1389 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1390 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1391 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1392 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1393 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1394 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1395 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1396 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1397 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1398 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1399 |
+
|
| 1400 |
+
14. Some of the Math library routines were written by or
|
| 1401 |
+
derived from code developed by Norbert Juffa and are
|
| 1402 |
+
subject to the following license:
|
| 1403 |
+
|
| 1404 |
+
Copyright (c) 2015-2017, Norbert Juffa
|
| 1405 |
+
All rights reserved.
|
| 1406 |
+
|
| 1407 |
+
Redistribution and use in source and binary forms, with or without
|
| 1408 |
+
modification, are permitted provided that the following conditions
|
| 1409 |
+
are met:
|
| 1410 |
+
|
| 1411 |
+
1. Redistributions of source code must retain the above copyright
|
| 1412 |
+
notice, this list of conditions and the following disclaimer.
|
| 1413 |
+
|
| 1414 |
+
2. Redistributions in binary form must reproduce the above copyright
|
| 1415 |
+
notice, this list of conditions and the following disclaimer in the
|
| 1416 |
+
documentation and/or other materials provided with the distribution.
|
| 1417 |
+
|
| 1418 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1419 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1420 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1421 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1422 |
+
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1423 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1424 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1425 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1426 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1427 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1428 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1429 |
+
|
| 1430 |
+
15. Licensee's use of the lz4 third party component is
|
| 1431 |
+
subject to the following terms and conditions:
|
| 1432 |
+
|
| 1433 |
+
Copyright (C) 2011-2013, Yann Collet.
|
| 1434 |
+
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
|
| 1435 |
+
|
| 1436 |
+
Redistribution and use in source and binary forms, with or without
|
| 1437 |
+
modification, are permitted provided that the following conditions are
|
| 1438 |
+
met:
|
| 1439 |
+
|
| 1440 |
+
* Redistributions of source code must retain the above copyright
|
| 1441 |
+
notice, this list of conditions and the following disclaimer.
|
| 1442 |
+
* Redistributions in binary form must reproduce the above
|
| 1443 |
+
copyright notice, this list of conditions and the following disclaimer
|
| 1444 |
+
in the documentation and/or other materials provided with the
|
| 1445 |
+
distribution.
|
| 1446 |
+
|
| 1447 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1448 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1449 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1450 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1451 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1452 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1453 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1454 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1455 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1456 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1457 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1458 |
+
|
| 1459 |
+
16. The NPP library uses code from the Boost Math Toolkit,
|
| 1460 |
+
and is subject to the following license:
|
| 1461 |
+
|
| 1462 |
+
Boost Software License - Version 1.0 - August 17th, 2003
|
| 1463 |
+
. . . .
|
| 1464 |
+
|
| 1465 |
+
Permission is hereby granted, free of charge, to any person or
|
| 1466 |
+
organization obtaining a copy of the software and accompanying
|
| 1467 |
+
documentation covered by this license (the "Software") to use,
|
| 1468 |
+
reproduce, display, distribute, execute, and transmit the Software,
|
| 1469 |
+
and to prepare derivative works of the Software, and to permit
|
| 1470 |
+
third-parties to whom the Software is furnished to do so, all
|
| 1471 |
+
subject to the following:
|
| 1472 |
+
|
| 1473 |
+
The copyright notices in the Software and this entire statement,
|
| 1474 |
+
including the above license grant, this restriction and the following
|
| 1475 |
+
disclaimer, must be included in all copies of the Software, in whole
|
| 1476 |
+
or in part, and all derivative works of the Software, unless such
|
| 1477 |
+
copies or derivative works are solely in the form of machine-executable
|
| 1478 |
+
object code generated by a source language processor.
|
| 1479 |
+
|
| 1480 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 1481 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 1482 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
|
| 1483 |
+
NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
|
| 1484 |
+
ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR
|
| 1485 |
+
OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING
|
| 1486 |
+
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
| 1487 |
+
OTHER DEALINGS IN THE SOFTWARE.
|
| 1488 |
+
|
| 1489 |
+
17. Portions of the Nsight Eclipse Edition is subject to the
|
| 1490 |
+
following license:
|
| 1491 |
+
|
| 1492 |
+
The Eclipse Foundation makes available all content in this plug-in
|
| 1493 |
+
("Content"). Unless otherwise indicated below, the Content is provided
|
| 1494 |
+
to you under the terms and conditions of the Eclipse Public License
|
| 1495 |
+
Version 1.0 ("EPL"). A copy of the EPL is available at http://
|
| 1496 |
+
www.eclipse.org/legal/epl-v10.html. For purposes of the EPL, "Program"
|
| 1497 |
+
will mean the Content.
|
| 1498 |
+
|
| 1499 |
+
If you did not receive this Content directly from the Eclipse
|
| 1500 |
+
Foundation, the Content is being redistributed by another party
|
| 1501 |
+
("Redistributor") and different terms and conditions may apply to your
|
| 1502 |
+
use of any object code in the Content. Check the Redistributor's
|
| 1503 |
+
license that was provided with the Content. If no such license exists,
|
| 1504 |
+
contact the Redistributor. Unless otherwise indicated below, the terms
|
| 1505 |
+
and conditions of the EPL still apply to any source code in the
|
| 1506 |
+
Content and such source code may be obtained at http://www.eclipse.org.
|
| 1507 |
+
|
| 1508 |
+
18. Some of the cuBLAS library routines uses code from
|
| 1509 |
+
OpenAI, which is subject to the following license:
|
| 1510 |
+
|
| 1511 |
+
License URL
|
| 1512 |
+
https://github.com/openai/openai-gemm/blob/master/LICENSE
|
| 1513 |
+
|
| 1514 |
+
License Text
|
| 1515 |
+
The MIT License
|
| 1516 |
+
|
| 1517 |
+
Copyright (c) 2016 OpenAI (http://openai.com), 2016 Google Inc.
|
| 1518 |
+
|
| 1519 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 1520 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 1521 |
+
in the Software without restriction, including without limitation the rights
|
| 1522 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 1523 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 1524 |
+
furnished to do so, subject to the following conditions:
|
| 1525 |
+
|
| 1526 |
+
The above copyright notice and this permission notice shall be included in
|
| 1527 |
+
all copies or substantial portions of the Software.
|
| 1528 |
+
|
| 1529 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 1530 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 1531 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 1532 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 1533 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 1534 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
| 1535 |
+
THE SOFTWARE.
|
| 1536 |
+
|
| 1537 |
+
19. Licensee's use of the Visual Studio Setup Configuration
|
| 1538 |
+
Samples is subject to the following license:
|
| 1539 |
+
|
| 1540 |
+
The MIT License (MIT)
|
| 1541 |
+
Copyright (C) Microsoft Corporation. All rights reserved.
|
| 1542 |
+
|
| 1543 |
+
Permission is hereby granted, free of charge, to any person
|
| 1544 |
+
obtaining a copy of this software and associated documentation
|
| 1545 |
+
files (the "Software"), to deal in the Software without restriction,
|
| 1546 |
+
including without limitation the rights to use, copy, modify, merge,
|
| 1547 |
+
publish, distribute, sublicense, and/or sell copies of the Software,
|
| 1548 |
+
and to permit persons to whom the Software is furnished to do so,
|
| 1549 |
+
subject to the following conditions:
|
| 1550 |
+
|
| 1551 |
+
The above copyright notice and this permission notice shall be included
|
| 1552 |
+
in all copies or substantial portions of the Software.
|
| 1553 |
+
|
| 1554 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
| 1555 |
+
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 1556 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 1557 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 1558 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 1559 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
| 1560 |
+
|
| 1561 |
+
20. Licensee's use of linmath.h header for CPU functions for
|
| 1562 |
+
GL vector/matrix operations from lunarG is subject to the
|
| 1563 |
+
Apache License Version 2.0.
|
| 1564 |
+
|
| 1565 |
+
21. The DX12-CUDA sample uses the d3dx12.h header, which is
|
| 1566 |
+
subject to the MIT license .
|
| 1567 |
+
|
| 1568 |
+
-----------------
|
minigpt2/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.4.127.dist-info/METADATA
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: nvidia-cuda-cupti-cu12
|
| 3 |
+
Version: 12.4.127
|
| 4 |
+
Summary: CUDA profiling tools runtime libs.
|
| 5 |
+
Home-page: https://developer.nvidia.com/cuda-zone
|
| 6 |
+
Author: Nvidia CUDA Installer Team
|
| 7 |
+
Author-email: cuda_installer@nvidia.com
|
| 8 |
+
License: NVIDIA Proprietary Software
|
| 9 |
+
Keywords: cuda,nvidia,runtime,machine learning,deep learning
|
| 10 |
+
Classifier: Development Status :: 4 - Beta
|
| 11 |
+
Classifier: Intended Audience :: Developers
|
| 12 |
+
Classifier: Intended Audience :: Education
|
| 13 |
+
Classifier: Intended Audience :: Science/Research
|
| 14 |
+
Classifier: License :: Other/Proprietary License
|
| 15 |
+
Classifier: Natural Language :: English
|
| 16 |
+
Classifier: Programming Language :: Python :: 3
|
| 17 |
+
Classifier: Programming Language :: Python :: 3.5
|
| 18 |
+
Classifier: Programming Language :: Python :: 3.6
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.7
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 22 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 23 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 24 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 25 |
+
Classifier: Topic :: Scientific/Engineering
|
| 26 |
+
Classifier: Topic :: Scientific/Engineering :: Mathematics
|
| 27 |
+
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
| 28 |
+
Classifier: Topic :: Software Development
|
| 29 |
+
Classifier: Topic :: Software Development :: Libraries
|
| 30 |
+
Classifier: Operating System :: Microsoft :: Windows
|
| 31 |
+
Classifier: Operating System :: POSIX :: Linux
|
| 32 |
+
Requires-Python: >=3
|
| 33 |
+
License-File: License.txt
|
| 34 |
+
|
| 35 |
+
Provides libraries to enable third party tools using GPU profiling APIs.
|
minigpt2/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.4.127.dist-info/RECORD
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
nvidia/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 2 |
+
nvidia/__pycache__/__init__.cpython-310.pyc,,
|
| 3 |
+
nvidia/cuda_cupti/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 4 |
+
nvidia/cuda_cupti/__pycache__/__init__.cpython-310.pyc,,
|
| 5 |
+
nvidia/cuda_cupti/include/Openacc/cupti_openacc.h,sha256=Z0OM5e_hbd3cxdXyn3SCHqBBQawLg4QORnlm57Cr2-M,3513
|
| 6 |
+
nvidia/cuda_cupti/include/Openmp/cupti_openmp.h,sha256=E1WNmeb_7HaUSmBegtUNe4IV1i7pXeNxgzIlyKn1zrM,3491
|
| 7 |
+
nvidia/cuda_cupti/include/Openmp/omp-tools.h,sha256=AmuC_xPC7VPu3B-W4PmXuCNufFawhY8PjNXePaQFAOg,37403
|
| 8 |
+
nvidia/cuda_cupti/include/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 9 |
+
nvidia/cuda_cupti/include/__pycache__/__init__.cpython-310.pyc,,
|
| 10 |
+
nvidia/cuda_cupti/include/cuda_stdint.h,sha256=XbFOk9CtJjKqk7PpYNqbSVsDxAsVM8avA4rWpPi0BjQ,4093
|
| 11 |
+
nvidia/cuda_cupti/include/cupti.h,sha256=JkVyAGTIMYzwm62dfVqas3nMcILhgP_Wdz6fh4_NED0,4697
|
| 12 |
+
nvidia/cuda_cupti/include/cupti_activity.h,sha256=RB7VYrdiOBpdH_LbXb2o-CcGthnk3-NE2Cqq-jQbN7Q,210611
|
| 13 |
+
nvidia/cuda_cupti/include/cupti_activity_deprecated.h,sha256=rYJsoAJxA2BTT50-olN8EYcSzdlXBpRbR1ATLG3rVIM,121526
|
| 14 |
+
nvidia/cuda_cupti/include/cupti_callbacks.h,sha256=zrEVRb0hubSfD69QUmHsJiL8oAfvqyuKGcTVRihQrnc,29729
|
| 15 |
+
nvidia/cuda_cupti/include/cupti_checkpoint.h,sha256=rTz8JoWxqESBXyZWUhZJGm4xeYcx4OJOtJ7Ld13T_b0,5264
|
| 16 |
+
nvidia/cuda_cupti/include/cupti_common.h,sha256=85m74bxUgXp3tEaPQpezeazmpsNMw41PsjNSYmQdT20,3514
|
| 17 |
+
nvidia/cuda_cupti/include/cupti_driver_cbid.h,sha256=dHKyQYZbBbdlxixzFkIoNHg5IfGXdgriyjN1Bu1i6g4,74462
|
| 18 |
+
nvidia/cuda_cupti/include/cupti_events.h,sha256=f7lLGmD2e8FzvMhRgnn0-v7U0vTpUkiQHIpQxgARGb0,51896
|
| 19 |
+
nvidia/cuda_cupti/include/cupti_metrics.h,sha256=iLAOlDrcbHEsIIUmgq0Tp1ZOY9O3Ot3wj2-bI8iYbSs,32148
|
| 20 |
+
nvidia/cuda_cupti/include/cupti_nvtx_cbid.h,sha256=_azPtR1g4qivvX7qbvHRUg0RHCWF7iEOJyHMN9qZe9E,5912
|
| 21 |
+
nvidia/cuda_cupti/include/cupti_pcsampling.h,sha256=ycJHT36DmPIaVzHsB3xxjXkhFyEfMCJOl3LbCsHFgyA,32144
|
| 22 |
+
nvidia/cuda_cupti/include/cupti_pcsampling_util.h,sha256=lx8CaNXowJe5Zvc06LE-u_Zry_jODs1mM6j9Q5WIX9E,12430
|
| 23 |
+
nvidia/cuda_cupti/include/cupti_profiler_target.h,sha256=JsceoDuhllWNEzaO0xxT81dJ55NrbF0UtRJJgit0P_E,32131
|
| 24 |
+
nvidia/cuda_cupti/include/cupti_result.h,sha256=a-C4Y7LAYCiCT1ngOfoDuTi2stEG1YTafwwn6UfL-LU,12603
|
| 25 |
+
nvidia/cuda_cupti/include/cupti_runtime_cbid.h,sha256=11pXl0MdmTtxUngel-ru4JdqWvF_gEIG14aQExRyfzI,46436
|
| 26 |
+
nvidia/cuda_cupti/include/cupti_sass_metrics.h,sha256=3RW9snJuFQdOhrEn3wDJOru05q0V_zssWrqD7tvVJKw,19674
|
| 27 |
+
nvidia/cuda_cupti/include/cupti_target.h,sha256=x4Vz1Upb6m9ixmVpmGaKQldDWYQI3OZ-ocEXGzNK0EE,1263
|
| 28 |
+
nvidia/cuda_cupti/include/cupti_version.h,sha256=sjd-aUoTGkEWyvA2VUWIpZwXyXAaclqC8gbwNnuK5D0,4425
|
| 29 |
+
nvidia/cuda_cupti/include/generated_cudaGL_meta.h,sha256=dfd2QuaRdEjbStOKvaQLi1Md_qrpRQh8PfyZznJ8bWY,3115
|
| 30 |
+
nvidia/cuda_cupti/include/generated_cudaVDPAU_meta.h,sha256=fAedsoQxaU3hIAApAWDOKsa9kgcuQw4tdyf8klLm-3k,1453
|
| 31 |
+
nvidia/cuda_cupti/include/generated_cuda_gl_interop_meta.h,sha256=LXOqvQCej0sCgAT1LUKKYZ466EFxN4hIwf9oIhXOLF0,2250
|
| 32 |
+
nvidia/cuda_cupti/include/generated_cuda_meta.h,sha256=hawYpDe0xpaDFDnClXI91JjwCRxWb-AS0FS8ydUMgxc,94639
|
| 33 |
+
nvidia/cuda_cupti/include/generated_cuda_runtime_api_meta.h,sha256=D8CbAN3-jLuF2KGfsBHXEELSgL92KrUAiDvugWE8B8M,69706
|
| 34 |
+
nvidia/cuda_cupti/include/generated_cuda_vdpau_interop_meta.h,sha256=8OLqWN26aEYpTWUXtbHJvA5GYhVv3ybYVOTW7yK37z8,1367
|
| 35 |
+
nvidia/cuda_cupti/include/generated_cudart_removed_meta.h,sha256=X3I5WXmhtsJNNlgY7coJ5vg4t11G5FRR6Xo7MboIeck,5172
|
| 36 |
+
nvidia/cuda_cupti/include/generated_nvtx_meta.h,sha256=YHb_RD8g3s4m8PJn7Z0wnxvUHarl7BOAX5ADr-BL3HI,7513
|
| 37 |
+
nvidia/cuda_cupti/include/nvperf_common.h,sha256=BqPml9AxyN10-ptWT3hQzh2JUWqQX57Q5BjQ3ZuaKNs,17255
|
| 38 |
+
nvidia/cuda_cupti/include/nvperf_cuda_host.h,sha256=aBnyIr_hexPDGBkP6WSujN1mI_DYP25sEIXWYY1O7VI,8298
|
| 39 |
+
nvidia/cuda_cupti/include/nvperf_host.h,sha256=afdHG6eraeo4ltlF9ihskqhU7IccxcRCaZDZ6_ikjkg,68506
|
| 40 |
+
nvidia/cuda_cupti/include/nvperf_target.h,sha256=ZDA-JI459tLBW4iLLCQjYYRAMeHwfqDIgXbVqVLDYZ4,22539
|
| 41 |
+
nvidia/cuda_cupti/lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 42 |
+
nvidia/cuda_cupti/lib/__pycache__/__init__.cpython-310.pyc,,
|
| 43 |
+
nvidia/cuda_cupti/lib/libcheckpoint.so,sha256=cCTAB7_UNqvoU7zKMCHkklcmM1GGr3atZmZoZksdAKM,1501336
|
| 44 |
+
nvidia/cuda_cupti/lib/libcupti.so.12,sha256=-yp8WxXITflQXdR-VT_kbzEhpX0wOR_KJBedIC9z8_c,7748112
|
| 45 |
+
nvidia/cuda_cupti/lib/libnvperf_host.so,sha256=tZsmsdNdAik8jdiVaro3V8FGa3FzLGaHq6QSxQ2VC2k,28132984
|
| 46 |
+
nvidia/cuda_cupti/lib/libnvperf_target.so,sha256=ztN3NKnf_9XyEogyuHjyOAcTvqYBn6lE0psxejPTeYw,5592368
|
| 47 |
+
nvidia/cuda_cupti/lib/libpcsamplingutil.so,sha256=ZDY0bEGLzy-pA3yfFtc6jfvo-Cu8vWwUCQYatGJrb0Q,912728
|
| 48 |
+
nvidia_cuda_cupti_cu12-12.4.127.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 49 |
+
nvidia_cuda_cupti_cu12-12.4.127.dist-info/License.txt,sha256=rW9YU_ugyg0VnQ9Y1JrkmDDC-Mk_epJki5zpCttMbM0,59262
|
| 50 |
+
nvidia_cuda_cupti_cu12-12.4.127.dist-info/METADATA,sha256=UiXYPD5hc55tQSSNiYNq5AqkD68jq1KHNCtG-PJvPds,1553
|
| 51 |
+
nvidia_cuda_cupti_cu12-12.4.127.dist-info/RECORD,,
|
| 52 |
+
nvidia_cuda_cupti_cu12-12.4.127.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 53 |
+
nvidia_cuda_cupti_cu12-12.4.127.dist-info/WHEEL,sha256=XDTs3wIbcE-BcRO08VJlZpA6z9OaC1mOKPCGGGwuM2g,109
|
| 54 |
+
nvidia_cuda_cupti_cu12-12.4.127.dist-info/top_level.txt,sha256=fTkAtiFuL16nUrB9ytDDtpytz2t0B4NvYTnRzwAhO14,7
|
minigpt2/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.4.127.dist-info/REQUESTED
ADDED
|
File without changes
|
minigpt2/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.4.127.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: bdist_wheel (0.42.0)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-manylinux2014_x86_64
|
| 5 |
+
|
minigpt2/lib/python3.10/site-packages/nvidia_cuda_cupti_cu12-12.4.127.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
nvidia
|
minigpt2/lib/python3.10/site-packages/shellingham/__pycache__/_core.cpython-310.pyc
ADDED
|
Binary file (530 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/shellingham/__pycache__/nt.cpython-310.pyc
ADDED
|
Binary file (3.72 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/shellingham/_core.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
SHELL_NAMES = (
|
| 2 |
+
{"sh", "bash", "dash", "ash"} # Bourne.
|
| 3 |
+
| {"csh", "tcsh"} # C.
|
| 4 |
+
| {"ksh", "zsh", "fish"} # Common alternatives.
|
| 5 |
+
| {"cmd", "powershell", "pwsh"} # Microsoft.
|
| 6 |
+
| {"elvish", "xonsh", "nu"} # More exotic.
|
| 7 |
+
)
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class ShellDetectionFailure(EnvironmentError):
|
| 11 |
+
pass
|
minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/_optical_flow.cpython-310.pyc
ADDED
|
Binary file (17.8 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/cifar.cpython-310.pyc
ADDED
|
Binary file (5.94 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/cityscapes.cpython-310.pyc
ADDED
|
Binary file (8.62 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/coco.cpython-310.pyc
ADDED
|
Binary file (5.24 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/eurosat.cpython-310.pyc
ADDED
|
Binary file (2.74 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/fakedata.cpython-310.pyc
ADDED
|
Binary file (2.67 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/food101.cpython-310.pyc
ADDED
|
Binary file (4.43 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/kinetics.cpython-310.pyc
ADDED
|
Binary file (9.88 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/lsun.cpython-310.pyc
ADDED
|
Binary file (5.94 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/mnist.cpython-310.pyc
ADDED
|
Binary file (21.4 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/torchvision/datasets/__pycache__/moving_mnist.cpython-310.pyc
ADDED
|
Binary file (4 kB). View file
|
|
|