Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes. See the raw diff.
- .gitattributes +1 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/__main__.py +6 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/__pycache__/interpolatablePlot.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/__pycache__/interpolatableTestContourOrder.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/cff.py +631 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/instancer/__pycache__/featureVars.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/instancer/featureVars.py +190 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/instancer/names.py +388 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/mutator.py +516 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/mvar.py +40 -0
- evalkit_tf437/lib/python3.10/site-packages/gradio/templates/node/build/client/_app/immutable/chunks/hls.CFPBCiRi.js.br +3 -0
- evalkit_tf437/lib/python3.10/site-packages/httpx/__pycache__/__version__.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/httpx/__pycache__/_decoders.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/httpx/__pycache__/_models.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/httpx/__pycache__/_status_codes.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/httpx/__pycache__/_utils.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/httpx/_transports/__pycache__/base.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/httpx/_urls.py +648 -0
- evalkit_tf437/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/LICENSE +202 -0
- evalkit_tf437/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/METADATA +133 -0
- evalkit_tf437/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/RECORD +32 -0
- evalkit_tf437/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/WHEEL +5 -0
- evalkit_tf437/lib/python3.10/site-packages/markdown2-2.5.1.dist-info/WHEEL +6 -0
- evalkit_tf437/lib/python3.10/site-packages/markdown2-2.5.1.dist-info/top_level.txt +1 -0
- evalkit_tf437/lib/python3.10/site-packages/nvidia_nvjitlink_cu12-12.6.77.dist-info/INSTALLER +1 -0
- evalkit_tf437/lib/python3.10/site-packages/nvidia_nvjitlink_cu12-12.6.77.dist-info/License.txt +1568 -0
- evalkit_tf437/lib/python3.10/site-packages/nvidia_nvjitlink_cu12-12.6.77.dist-info/WHEEL +5 -0
- evalkit_tf437/lib/python3.10/site-packages/nvidia_nvjitlink_cu12-12.6.77.dist-info/top_level.txt +1 -0
- evalkit_tf437/lib/python3.10/site-packages/oauthlib-3.2.2.dist-info/METADATA +179 -0
- evalkit_tf437/lib/python3.10/site-packages/oauthlib-3.2.2.dist-info/RECORD +143 -0
- evalkit_tf437/lib/python3.10/site-packages/oauthlib-3.2.2.dist-info/WHEEL +5 -0
- evalkit_tf437/lib/python3.10/site-packages/opencv_python-4.10.0.84.dist-info/INSTALLER +1 -0
- evalkit_tf437/lib/python3.10/site-packages/opencv_python-4.10.0.84.dist-info/LICENSE-3RD-PARTY.txt +0 -0
- evalkit_tf437/lib/python3.10/site-packages/opencv_python-4.10.0.84.dist-info/LICENSE.txt +21 -0
- evalkit_tf437/lib/python3.10/site-packages/opencv_python-4.10.0.84.dist-info/METADATA +305 -0
- evalkit_tf437/lib/python3.10/site-packages/opencv_python-4.10.0.84.dist-info/RECORD +144 -0
- evalkit_tf437/lib/python3.10/site-packages/opencv_python-4.10.0.84.dist-info/REQUESTED +0 -0
- evalkit_tf437/lib/python3.10/site-packages/opencv_python-4.10.0.84.dist-info/WHEEL +6 -0
- evalkit_tf437/lib/python3.10/site-packages/packaging-24.2.dist-info/LICENSE.BSD +23 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/__init__.py +82 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/filter.py +70 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/formatters/__pycache__/other.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/formatters/img.py +685 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/formatters/latex.py +518 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/formatters/svg.py +185 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/formatters/terminal.py +127 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexer.py +961 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/_tsql_builtins.py +1003 -0
- evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/ada.py +144 -0
.gitattributes
CHANGED
|
@@ -281,3 +281,4 @@ evalkit_tf437/lib/python3.10/site-packages/gradio/templates/node/build/client/_a
|
|
| 281 |
evalkit_tf437/lib/python3.10/site-packages/PIL/_imagingmath.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 282 |
evalkit_tf437/lib/python3.10/site-packages/sklearn/metrics/_dist_metrics.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 283 |
evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/lexer.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 281 |
evalkit_tf437/lib/python3.10/site-packages/PIL/_imagingmath.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 282 |
evalkit_tf437/lib/python3.10/site-packages/sklearn/metrics/_dist_metrics.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 283 |
evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/lexer.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 284 |
+
evalkit_tf437/lib/python3.10/site-packages/gradio/templates/node/build/client/_app/immutable/chunks/hls.CFPBCiRi.js.br filter=lfs diff=lfs merge=lfs -text
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/__main__.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Command-line entry point for ``python -m fontTools.varLib``."""
import sys
from fontTools.varLib import main


if __name__ == "__main__":
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (39.5 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/__pycache__/interpolatablePlot.cpython-310.pyc
ADDED
|
Binary file (26.9 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/__pycache__/interpolatableTestContourOrder.cpython-310.pyc
ADDED
|
Binary file (1.54 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/cff.py
ADDED
|
@@ -0,0 +1,631 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from collections import namedtuple
|
| 2 |
+
from fontTools.cffLib import (
|
| 3 |
+
maxStackLimit,
|
| 4 |
+
TopDictIndex,
|
| 5 |
+
buildOrder,
|
| 6 |
+
topDictOperators,
|
| 7 |
+
topDictOperators2,
|
| 8 |
+
privateDictOperators,
|
| 9 |
+
privateDictOperators2,
|
| 10 |
+
FDArrayIndex,
|
| 11 |
+
FontDict,
|
| 12 |
+
VarStoreData,
|
| 13 |
+
)
|
| 14 |
+
from io import BytesIO
|
| 15 |
+
from fontTools.cffLib.specializer import specializeCommands, commandsToProgram
|
| 16 |
+
from fontTools.ttLib import newTable
|
| 17 |
+
from fontTools import varLib
|
| 18 |
+
from fontTools.varLib.models import allEqual
|
| 19 |
+
from fontTools.misc.loggingTools import deprecateFunction
|
| 20 |
+
from fontTools.misc.roundTools import roundFunc
|
| 21 |
+
from fontTools.misc.psCharStrings import T2CharString, T2OutlineExtractor
|
| 22 |
+
from fontTools.pens.t2CharStringPen import T2CharStringPen
|
| 23 |
+
from functools import partial
|
| 24 |
+
|
| 25 |
+
from .errors import (
|
| 26 |
+
VarLibCFFDictMergeError,
|
| 27 |
+
VarLibCFFPointTypeMergeError,
|
| 28 |
+
VarLibCFFHintTypeMergeError,
|
| 29 |
+
VarLibMergeError,
|
| 30 |
+
)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
# Backwards compatibility
|
| 34 |
+
MergeDictError = VarLibCFFDictMergeError
|
| 35 |
+
MergeTypeError = VarLibCFFPointTypeMergeError
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def addCFFVarStore(varFont, varModel, varDataList, masterSupports):
    """Build an ItemVariationStore from *varDataList* and *masterSupports*
    and attach it to the CFF2 top dict of *varFont*.

    The region list axis order is taken from the font's ``fvar`` table.
    If the FDArray FontDicts do not yet reference a variation store, each
    FontDict's Private dict is pointed at the new store as well.
    """
    fvarTable = varFont["fvar"]
    axisKeys = [axis.axisTag for axis in fvarTable.axes]
    varTupleList = varLib.builder.buildVarRegionList(masterSupports, axisKeys)
    varStoreCFFV = varLib.builder.buildVarStore(varTupleList, varDataList)

    topDict = varFont["CFF2"].cff.topDictIndex[0]
    topDict.VarStore = VarStoreData(otVarStore=varStoreCFFV)
    # Only propagate the store to the FontDicts when none is set yet.
    if topDict.FDArray[0].vstore is None:
        fdArray = topDict.FDArray
        for fontDict in fdArray:
            if hasattr(fontDict, "Private"):
                fontDict.Private.vstore = topDict.VarStore
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
@deprecateFunction("Use fontTools.cffLib.CFFToCFF2.convertCFFToCFF2 instead.")
def convertCFFtoCFF2(varFont):
    """Deprecated compatibility shim.

    Delegates to :func:`fontTools.cffLib.CFFToCFF2.convertCFFToCFF2`;
    kept only so existing callers of the old name continue to work.
    """
    from fontTools.cffLib.CFFToCFF2 import convertCFFToCFF2

    return convertCFFToCFF2(varFont)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def conv_to_int(num):
    """Return *num* as an ``int`` when it is a float with no fractional
    part; any other value is returned unchanged."""
    is_whole_float = isinstance(num, float) and num.is_integer()
    return int(num) if is_whole_float else num
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
# PrivateDict keys whose values are blended across masters when building
# the CFF2 variable font (alignment-zone and stem-width hinting data).
pd_blend_fields = (
    "BlueValues",
    "OtherBlues",
    "FamilyBlues",
    "FamilyOtherBlues",
    "BlueScale",
    "BlueShift",
    "BlueFuzz",
    "StdHW",
    "StdVW",
    "StemSnapH",
    "StemSnapV",
)
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def get_private(regionFDArrays, fd_index, ri, fd_map):
    """Return the Private dict of region *ri* that corresponds to the
    default font's FontDict *fd_index*, or ``None`` when that region has
    no matching FontDict (sparse master)."""
    mapping = fd_map[fd_index]
    if ri not in mapping:
        return None
    fd_array = regionFDArrays[ri]
    return fd_array[mapping[ri]].Private
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
def merge_PrivateDicts(top_dicts, vsindex_dict, var_model, fd_map):
    """
    I step through the FontDicts in the FDArray of the varfont TopDict.
    For each varfont FontDict:

    * step through each key in FontDict.Private.
    * For each key, step through each relevant source font Private dict, and
      build a list of values to blend.

    The 'relevant' source fonts are selected by first getting the right
    submodel using ``vsindex_dict[vsindex]``. The indices of the
    ``subModel.locations`` are mapped to source font list indices by
    assuming the latter order is the same as the order of the
    ``var_model.locations``. I can then get the index of each subModel
    location in the list of ``var_model.locations``.
    """

    topDict = top_dicts[0]
    region_top_dicts = top_dicts[1:]
    # Region sources may be full CFF2 fonts (with an FDArray) or plain
    # single-FontDict sources; normalize both shapes to a list of FDArrays.
    if hasattr(region_top_dicts[0], "FDArray"):
        regionFDArrays = [fdTopDict.FDArray for fdTopDict in region_top_dicts]
    else:
        regionFDArrays = [[fdTopDict] for fdTopDict in region_top_dicts]
    for fd_index, font_dict in enumerate(topDict.FDArray):
        private_dict = font_dict.Private
        vsindex = getattr(private_dict, "vsindex", 0)
        # At the moment, no PrivateDict has a vsindex key, but let's support
        # how it should work. See comment at end of
        # merge_charstrings() - still need to optimize use of vsindex.
        sub_model, _ = vsindex_dict[vsindex]
        master_indices = []
        for loc in sub_model.locations[1:]:
            i = var_model.locations.index(loc) - 1
            master_indices.append(i)
        pds = [private_dict]
        last_pd = private_dict
        for ri in master_indices:
            pd = get_private(regionFDArrays, fd_index, ri, fd_map)
            # If the region font doesn't have this FontDict, just reference
            # the last one used.
            if pd is None:
                pd = last_pd
            else:
                last_pd = pd
            pds.append(pd)
        num_masters = len(pds)
        for key, value in private_dict.rawDict.items():
            dataList = []
            if key not in pd_blend_fields:
                continue
            if isinstance(value, list):
                try:
                    values = [pd.rawDict[key] for pd in pds]
                except KeyError:
                    print(
                        "Warning: {key} in default font Private dict is "
                        "missing from another font, and was "
                        "discarded.".format(key=key)
                    )
                    continue
                try:
                    values = zip(*values)
                except IndexError:
                    raise VarLibCFFDictMergeError(key, value, values)
                """
                Row 0 contains the first value from each master.
                Convert each row from absolute values to relative
                values from the previous row.
                e.g for three masters, a list of values was:
                master 0 OtherBlues = [-217,-205]
                master 1 OtherBlues = [-234,-222]
                master 2 OtherBlues = [-188,-176]
                The call to zip() converts this to:
                [(-217, -234, -188), (-205, -222, -176)]
                and is converted finally to:
                OtherBlues = [[-217, 17.0, 46.0], [-205, 0.0, 0.0]]
                """
                prev_val_list = [0] * num_masters
                any_points_differ = False
                for val_list in values:
                    rel_list = [
                        (val - prev_val_list[i]) for (i, val) in enumerate(val_list)
                    ]
                    if (not any_points_differ) and not allEqual(rel_list):
                        any_points_differ = True
                    prev_val_list = val_list
                    deltas = sub_model.getDeltas(rel_list)
                    # For PrivateDict BlueValues, the default font
                    # values are absolute, not relative to the prior value.
                    deltas[0] = val_list[0]
                    dataList.append(deltas)
                # If there are no blend values, then
                # we can collapse the blend lists.
                if not any_points_differ:
                    dataList = [data[0] for data in dataList]
            else:
                # Scalar Private value: blend directly, or keep the default
                # value when all masters agree.
                values = [pd.rawDict[key] for pd in pds]
                if not allEqual(values):
                    dataList = sub_model.getDeltas(values)
                else:
                    dataList = values[0]

            # Convert numbers with no decimal part to an int
            if isinstance(dataList, list):
                for i, item in enumerate(dataList):
                    if isinstance(item, list):
                        for j, jtem in enumerate(item):
                            dataList[i][j] = conv_to_int(jtem)
                    else:
                        dataList[i] = conv_to_int(item)
            else:
                dataList = conv_to_int(dataList)

            private_dict.rawDict[key] = dataList
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
def _cff_or_cff2(font):
|
| 209 |
+
if "CFF " in font:
|
| 210 |
+
return font["CFF "]
|
| 211 |
+
return font["CFF2"]
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
def getfd_map(varFont, fonts_list):
    """Since a subset source font may have fewer FontDicts in their
    FDArray than the default font, we have to match up the FontDicts in
    the different fonts. We do this with the FDSelect array, and by
    assuming that the same glyph will reference matching FontDicts in
    each source font. We return a mapping from fdIndex in the default
    font to a dictionary which maps each master list index of each
    region font to the equivalent fdIndex in the region font."""
    fd_map = {}
    default_font = fonts_list[0]
    region_fonts = fonts_list[1:]
    num_regions = len(region_fonts)
    topDict = _cff_or_cff2(default_font).cff.topDictIndex[0]
    if not hasattr(topDict, "FDSelect"):
        # All glyphs reference only one FontDict.
        # Map the FD index for regions to index 0.
        fd_map[0] = {ri: 0 for ri in range(num_regions)}
        return fd_map

    # Record which FontDict each glyph of the default font uses, keyed by
    # glyph name so region fonts can be matched via their own glyph orders.
    gname_mapping = {}
    default_fdSelect = topDict.FDSelect
    glyphOrder = default_font.getGlyphOrder()
    for gid, fdIndex in enumerate(default_fdSelect):
        gname_mapping[glyphOrder[gid]] = fdIndex
        if fdIndex not in fd_map:
            fd_map[fdIndex] = {}
    for ri, region_font in enumerate(region_fonts):
        region_glyphOrder = region_font.getGlyphOrder()
        region_topDict = _cff_or_cff2(region_font).cff.topDictIndex[0]
        if not hasattr(region_topDict, "FDSelect"):
            # All the glyphs share the same FontDict. Pick any glyph.
            default_fdIndex = gname_mapping[region_glyphOrder[0]]
            fd_map[default_fdIndex][ri] = 0
        else:
            region_fdSelect = region_topDict.FDSelect
            for gid, fdIndex in enumerate(region_fdSelect):
                default_fdIndex = gname_mapping[region_glyphOrder[gid]]
                region_map = fd_map[default_fdIndex]
                # First glyph seen for a default FontDict wins; later
                # glyphs are assumed to agree.
                if ri not in region_map:
                    region_map[ri] = fdIndex
    return fd_map
|
| 255 |
+
|
| 256 |
+
|
| 257 |
+
# Result bundle from merge_charstrings(): the built VarData tables, the
# region supports they reference, and the vsindex -> (model, keys) mapping.
CVarData = namedtuple("CVarData", "varDataList masterSupports vsindex_dict")
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
def merge_region_fonts(varFont, model, ordered_fonts_list, glyphOrder):
    """Merge the region (master) fonts into *varFont*'s CFF2 table.

    Merges charstrings, blends the Private dicts, and attaches the
    resulting ItemVariationStore. *ordered_fonts_list* is ordered like
    ``model.locations``, with the default font first.
    """
    topDict = varFont["CFF2"].cff.topDictIndex[0]
    top_dicts = [topDict] + [
        _cff_or_cff2(ttFont).cff.topDictIndex[0] for ttFont in ordered_fonts_list[1:]
    ]
    num_masters = len(model.mapping)
    cvData = merge_charstrings(glyphOrder, num_masters, top_dicts, model)
    fd_map = getfd_map(varFont, ordered_fonts_list)
    merge_PrivateDicts(top_dicts, cvData.vsindex_dict, model, fd_map)
    addCFFVarStore(varFont, model, cvData.varDataList, cvData.masterSupports)
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
def _get_cs(charstrings, glyphName, filterEmpty=False):
|
| 273 |
+
if glyphName not in charstrings:
|
| 274 |
+
return None
|
| 275 |
+
cs = charstrings[glyphName]
|
| 276 |
+
|
| 277 |
+
if filterEmpty:
|
| 278 |
+
cs.decompile()
|
| 279 |
+
if cs.program == []: # CFF2 empty charstring
|
| 280 |
+
return None
|
| 281 |
+
elif (
|
| 282 |
+
len(cs.program) <= 2
|
| 283 |
+
and cs.program[-1] == "endchar"
|
| 284 |
+
and (len(cs.program) == 1 or type(cs.program[0]) in (int, float))
|
| 285 |
+
): # CFF1 empty charstring
|
| 286 |
+
return None
|
| 287 |
+
|
| 288 |
+
return cs
|
| 289 |
+
|
| 290 |
+
|
| 291 |
+
def _add_new_vsindex(
    model, key, masterSupports, vsindex_dict, vsindex_by_key, varDataList
):
    """Allocate a new vsindex for *key* (a tuple marking which masters are
    present): register any new region supports of *model*, build the
    matching VarData table, and record the bookkeeping entries.

    Returns the newly assigned vsindex.
    """
    tuple_indexes = []
    for support in model.supports[1:]:
        if support not in masterSupports:
            masterSupports.append(support)
        tuple_indexes.append(masterSupports.index(support))
    new_index = len(vsindex_dict)
    vsindex_by_key[key] = new_index
    vsindex_dict[new_index] = (model, [key])
    varDataList.append(varLib.builder.buildVarData(tuple_indexes, None, False))
    return new_index
|
| 305 |
+
|
| 306 |
+
|
| 307 |
+
def merge_charstrings(glyphOrder, num_masters, top_dicts, masterModel):
    """Merge the region charstrings into the default font's charstrings as
    CFF2 blend data, and build the VarData tables / vsindex mapping the
    glyphs require. Returns a :class:`CVarData` bundle."""
    vsindex_dict = {}
    vsindex_by_key = {}
    varDataList = []
    masterSupports = []
    default_charstrings = top_dicts[0].CharStrings
    for gid, gname in enumerate(glyphOrder):
        # interpret empty non-default masters as missing glyphs from a sparse master
        all_cs = [
            _get_cs(td.CharStrings, gname, i != 0) for i, td in enumerate(top_dicts)
        ]
        model, model_cs = masterModel.getSubModel(all_cs)
        # create the first pass CFF2 charstring, from
        # the default charstring.
        default_charstring = model_cs[0]
        var_pen = CFF2CharStringMergePen([], gname, num_masters, 0)
        # We need to override outlineExtractor because these
        # charstrings do have widths in the 'program'; we need to drop these
        # values rather than post assertion error for them.
        default_charstring.outlineExtractor = MergeOutlineExtractor
        default_charstring.draw(var_pen)

        # Add the coordinates from all the other regions to the
        # blend lists in the CFF2 charstring.
        region_cs = model_cs[1:]
        for region_idx, region_charstring in enumerate(region_cs, start=1):
            var_pen.restart(region_idx)
            region_charstring.outlineExtractor = MergeOutlineExtractor
            region_charstring.draw(var_pen)

        # Collapse each coordinate list to a blend operator and its args.
        new_cs = var_pen.getCharString(
            private=default_charstring.private,
            globalSubrs=default_charstring.globalSubrs,
            var_model=model,
            optimize=True,
        )
        default_charstrings[gname] = new_cs

        if not region_cs:
            continue

        if (not var_pen.seen_moveto) or ("blend" not in new_cs.program):
            # If this is not a marking glyph, or if there are no blend
            # arguments, then we can use vsindex 0. No need to
            # check if we need a new vsindex.
            continue

        # If the charstring required a new model, create
        # a VarData table to go with, and set vsindex.
        key = tuple(v is not None for v in all_cs)
        try:
            vsindex = vsindex_by_key[key]
        except KeyError:
            vsindex = _add_new_vsindex(
                model, key, masterSupports, vsindex_dict, vsindex_by_key, varDataList
            )
        # We do not need to check for an existing new_cs.private.vsindex,
        # as we know it doesn't exist yet.
        if vsindex != 0:
            new_cs.program[:0] = [vsindex, "vsindex"]

    # If there is no variation in any of the charstrings, then vsindex_dict
    # never gets built. This could still be needed if there is variation
    # in the PrivateDict, so we will build the default data for vsindex = 0.
    if not vsindex_dict:
        key = (True,) * num_masters
        _add_new_vsindex(
            masterModel, key, masterSupports, vsindex_dict, vsindex_by_key, varDataList
        )
    cvData = CVarData(
        varDataList=varDataList,
        masterSupports=masterSupports,
        vsindex_dict=vsindex_dict,
    )
    # XXX To do: optimize use of vsindex between the PrivateDicts and
    # charstrings
    return cvData
|
| 385 |
+
|
| 386 |
+
|
| 387 |
+
class CFFToCFF2OutlineExtractor(T2OutlineExtractor):
    """This class is used to remove the initial width from the CFF
    charstring without trying to add the width to self.nominalWidthX,
    which is None."""

    def popallWidth(self, evenOdd=0):
        """Pop the full argument stack; on the first call, strip the
        leading width operand (if present) instead of interpreting it."""
        args = self.popall()
        if self.gotWidth:
            return args
        # First stack pop for this charstring: an odd/even mismatch means
        # the first operand is the glyph width, which we discard.
        if evenOdd ^ (len(args) % 2):
            args = args[1:]
        self.width = self.defaultWidthX
        self.gotWidth = 1
        return args
|
| 400 |
+
|
| 401 |
+
|
| 402 |
+
class MergeOutlineExtractor(CFFToCFF2OutlineExtractor):
    """Used to extract the charstring commands - including hints - from a
    CFF charstring in order to merge it as another set of region data
    into a CFF2 variable font charstring."""

    def __init__(
        self,
        pen,
        localSubrs,
        globalSubrs,
        nominalWidthX,
        defaultWidthX,
        private=None,
        blender=None,
    ):
        super().__init__(
            pen, localSubrs, globalSubrs, nominalWidthX, defaultWidthX, private, blender
        )

    def countHints(self):
        # Each stem hint consumes a pair of operands; track the running
        # count so hintmask byte width can be computed later.
        args = self.popallWidth()
        self.hintCount = self.hintCount + len(args) // 2
        return args

    def _hint_op(self, type, args):
        # Forward the hint operator and its operands to the merge pen.
        self.pen.add_hint(type, args)

    def op_hstem(self, index):
        args = self.countHints()
        self._hint_op("hstem", args)

    def op_vstem(self, index):
        args = self.countHints()
        self._hint_op("vstem", args)

    def op_hstemhm(self, index):
        args = self.countHints()
        self._hint_op("hstemhm", args)

    def op_vstemhm(self, index):
        args = self.countHints()
        self._hint_op("vstemhm", args)

    def _get_hintmask(self, index):
        """Read the hintmask bytes that follow the operator in the
        charstring, computing the mask width from the hint count on the
        first mask encountered (any pending stems count as vstemhm)."""
        if not self.hintMaskBytes:
            args = self.countHints()
            if args:
                self._hint_op("vstemhm", args)
            self.hintMaskBytes = (self.hintCount + 7) // 8
        hintMaskBytes, index = self.callingStack[-1].getBytes(index, self.hintMaskBytes)
        return index, hintMaskBytes

    def op_hintmask(self, index):
        index, hintMaskBytes = self._get_hintmask(index)
        self.pen.add_hintmask("hintmask", [hintMaskBytes])
        return hintMaskBytes, index

    def op_cntrmask(self, index):
        index, hintMaskBytes = self._get_hintmask(index)
        self.pen.add_hintmask("cntrmask", [hintMaskBytes])
        return hintMaskBytes, index
|
| 463 |
+
|
| 464 |
+
|
| 465 |
+
class CFF2CharStringMergePen(T2CharStringPen):
|
| 466 |
+
"""Pen to merge Type 2 CharStrings."""
|
| 467 |
+
|
| 468 |
+
def __init__(
|
| 469 |
+
self, default_commands, glyphName, num_masters, master_idx, roundTolerance=0.01
|
| 470 |
+
):
|
| 471 |
+
# For roundTolerance see https://github.com/fonttools/fonttools/issues/2838
|
| 472 |
+
super().__init__(
|
| 473 |
+
width=None, glyphSet=None, CFF2=True, roundTolerance=roundTolerance
|
| 474 |
+
)
|
| 475 |
+
self.pt_index = 0
|
| 476 |
+
self._commands = default_commands
|
| 477 |
+
self.m_index = master_idx
|
| 478 |
+
self.num_masters = num_masters
|
| 479 |
+
self.prev_move_idx = 0
|
| 480 |
+
self.seen_moveto = False
|
| 481 |
+
self.glyphName = glyphName
|
| 482 |
+
self.round = roundFunc(roundTolerance, round=round)
|
| 483 |
+
|
| 484 |
+
def add_point(self, point_type, pt_coords):
|
| 485 |
+
if self.m_index == 0:
|
| 486 |
+
self._commands.append([point_type, [pt_coords]])
|
| 487 |
+
else:
|
| 488 |
+
cmd = self._commands[self.pt_index]
|
| 489 |
+
if cmd[0] != point_type:
|
| 490 |
+
raise VarLibCFFPointTypeMergeError(
|
| 491 |
+
point_type, self.pt_index, len(cmd[1]), cmd[0], self.glyphName
|
| 492 |
+
)
|
| 493 |
+
cmd[1].append(pt_coords)
|
| 494 |
+
self.pt_index += 1
|
| 495 |
+
|
| 496 |
+
def add_hint(self, hint_type, args):
|
| 497 |
+
if self.m_index == 0:
|
| 498 |
+
self._commands.append([hint_type, [args]])
|
| 499 |
+
else:
|
| 500 |
+
cmd = self._commands[self.pt_index]
|
| 501 |
+
if cmd[0] != hint_type:
|
| 502 |
+
raise VarLibCFFHintTypeMergeError(
|
| 503 |
+
hint_type, self.pt_index, len(cmd[1]), cmd[0], self.glyphName
|
| 504 |
+
)
|
| 505 |
+
cmd[1].append(args)
|
| 506 |
+
self.pt_index += 1
|
| 507 |
+
|
| 508 |
+
def add_hintmask(self, hint_type, abs_args):
|
| 509 |
+
# For hintmask, fonttools.cffLib.specializer.py expects
|
| 510 |
+
# each of these to be represented by two sequential commands:
|
| 511 |
+
# first holding only the operator name, with an empty arg list,
|
| 512 |
+
# second with an empty string as the op name, and the mask arg list.
|
| 513 |
+
if self.m_index == 0:
|
| 514 |
+
self._commands.append([hint_type, []])
|
| 515 |
+
self._commands.append(["", [abs_args]])
|
| 516 |
+
else:
|
| 517 |
+
cmd = self._commands[self.pt_index]
|
| 518 |
+
if cmd[0] != hint_type:
|
| 519 |
+
raise VarLibCFFHintTypeMergeError(
|
| 520 |
+
hint_type, self.pt_index, len(cmd[1]), cmd[0], self.glyphName
|
| 521 |
+
)
|
| 522 |
+
self.pt_index += 1
|
| 523 |
+
cmd = self._commands[self.pt_index]
|
| 524 |
+
cmd[1].append(abs_args)
|
| 525 |
+
self.pt_index += 1
|
| 526 |
+
|
| 527 |
+
def _moveTo(self, pt):
|
| 528 |
+
if not self.seen_moveto:
|
| 529 |
+
self.seen_moveto = True
|
| 530 |
+
pt_coords = self._p(pt)
|
| 531 |
+
self.add_point("rmoveto", pt_coords)
|
| 532 |
+
# I set prev_move_idx here because add_point()
|
| 533 |
+
# can change self.pt_index.
|
| 534 |
+
self.prev_move_idx = self.pt_index - 1
|
| 535 |
+
|
| 536 |
+
def _lineTo(self, pt):
|
| 537 |
+
pt_coords = self._p(pt)
|
| 538 |
+
self.add_point("rlineto", pt_coords)
|
| 539 |
+
|
| 540 |
+
def _curveToOne(self, pt1, pt2, pt3):
|
| 541 |
+
_p = self._p
|
| 542 |
+
pt_coords = _p(pt1) + _p(pt2) + _p(pt3)
|
| 543 |
+
self.add_point("rrcurveto", pt_coords)
|
| 544 |
+
|
| 545 |
+
def _closePath(self):
|
| 546 |
+
pass
|
| 547 |
+
|
| 548 |
+
def _endPath(self):
|
| 549 |
+
pass
|
| 550 |
+
|
| 551 |
+
def restart(self, region_idx):
|
| 552 |
+
self.pt_index = 0
|
| 553 |
+
self.m_index = region_idx
|
| 554 |
+
self._p0 = (0, 0)
|
| 555 |
+
|
| 556 |
+
def getCommands(self):
|
| 557 |
+
return self._commands
|
| 558 |
+
|
| 559 |
+
def reorder_blend_args(self, commands, get_delta_func):
|
| 560 |
+
"""
|
| 561 |
+
We first re-order the master coordinate values.
|
| 562 |
+
For a moveto to lineto, the args are now arranged as::
|
| 563 |
+
|
| 564 |
+
[ [master_0 x,y], [master_1 x,y], [master_2 x,y] ]
|
| 565 |
+
|
| 566 |
+
We re-arrange this to::
|
| 567 |
+
|
| 568 |
+
[ [master_0 x, master_1 x, master_2 x],
|
| 569 |
+
[master_0 y, master_1 y, master_2 y]
|
| 570 |
+
]
|
| 571 |
+
|
| 572 |
+
If the master values are all the same, we collapse the list to
|
| 573 |
+
as single value instead of a list.
|
| 574 |
+
|
| 575 |
+
We then convert this to::
|
| 576 |
+
|
| 577 |
+
[ [master_0 x] + [x delta tuple] + [numBlends=1]
|
| 578 |
+
[master_0 y] + [y delta tuple] + [numBlends=1]
|
| 579 |
+
]
|
| 580 |
+
"""
|
| 581 |
+
for cmd in commands:
|
| 582 |
+
# arg[i] is the set of arguments for this operator from master i.
|
| 583 |
+
args = cmd[1]
|
| 584 |
+
m_args = zip(*args)
|
| 585 |
+
# m_args[n] is now all num_master args for the i'th argument
|
| 586 |
+
# for this operation.
|
| 587 |
+
cmd[1] = list(m_args)
|
| 588 |
+
lastOp = None
|
| 589 |
+
for cmd in commands:
|
| 590 |
+
op = cmd[0]
|
| 591 |
+
# masks are represented by two cmd's: first has only op names,
|
| 592 |
+
# second has only args.
|
| 593 |
+
if lastOp in ["hintmask", "cntrmask"]:
|
| 594 |
+
coord = list(cmd[1])
|
| 595 |
+
if not allEqual(coord):
|
| 596 |
+
raise VarLibMergeError(
|
| 597 |
+
"Hintmask values cannot differ between source fonts."
|
| 598 |
+
)
|
| 599 |
+
cmd[1] = [coord[0][0]]
|
| 600 |
+
else:
|
| 601 |
+
coords = cmd[1]
|
| 602 |
+
new_coords = []
|
| 603 |
+
for coord in coords:
|
| 604 |
+
if allEqual(coord):
|
| 605 |
+
new_coords.append(coord[0])
|
| 606 |
+
else:
|
| 607 |
+
# convert to deltas
|
| 608 |
+
deltas = get_delta_func(coord)[1:]
|
| 609 |
+
coord = [coord[0]] + deltas
|
| 610 |
+
coord.append(1)
|
| 611 |
+
new_coords.append(coord)
|
| 612 |
+
cmd[1] = new_coords
|
| 613 |
+
lastOp = op
|
| 614 |
+
return commands
|
| 615 |
+
|
| 616 |
+
def getCharString(
|
| 617 |
+
self, private=None, globalSubrs=None, var_model=None, optimize=True
|
| 618 |
+
):
|
| 619 |
+
commands = self._commands
|
| 620 |
+
commands = self.reorder_blend_args(
|
| 621 |
+
commands, partial(var_model.getDeltas, round=self.round)
|
| 622 |
+
)
|
| 623 |
+
if optimize:
|
| 624 |
+
commands = specializeCommands(
|
| 625 |
+
commands, generalizeFirst=False, maxstack=maxStackLimit
|
| 626 |
+
)
|
| 627 |
+
program = commandsToProgram(commands)
|
| 628 |
+
charString = T2CharString(
|
| 629 |
+
program=program, private=private, globalSubrs=globalSubrs
|
| 630 |
+
)
|
| 631 |
+
return charString
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/instancer/__pycache__/featureVars.cpython-310.pyc
ADDED
|
Binary file (4.07 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/instancer/featureVars.py
ADDED
|
@@ -0,0 +1,190 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fontTools.ttLib.tables import otTables as ot
|
| 2 |
+
from copy import deepcopy
|
| 3 |
+
import logging
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
log = logging.getLogger("fontTools.varLib.instancer")
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def _featureVariationRecordIsUnique(rec, seen):
|
| 10 |
+
conditionSet = []
|
| 11 |
+
conditionSets = (
|
| 12 |
+
rec.ConditionSet.ConditionTable if rec.ConditionSet is not None else []
|
| 13 |
+
)
|
| 14 |
+
for cond in conditionSets:
|
| 15 |
+
if cond.Format != 1:
|
| 16 |
+
# can't tell whether this is duplicate, assume is unique
|
| 17 |
+
return True
|
| 18 |
+
conditionSet.append(
|
| 19 |
+
(cond.AxisIndex, cond.FilterRangeMinValue, cond.FilterRangeMaxValue)
|
| 20 |
+
)
|
| 21 |
+
# besides the set of conditions, we also include the FeatureTableSubstitution
|
| 22 |
+
# version to identify unique FeatureVariationRecords, even though only one
|
| 23 |
+
# version is currently defined. It's theoretically possible that multiple
|
| 24 |
+
# records with same conditions but different substitution table version be
|
| 25 |
+
# present in the same font for backward compatibility.
|
| 26 |
+
recordKey = frozenset([rec.FeatureTableSubstitution.Version] + conditionSet)
|
| 27 |
+
if recordKey in seen:
|
| 28 |
+
return False
|
| 29 |
+
else:
|
| 30 |
+
seen.add(recordKey) # side effect
|
| 31 |
+
return True
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def _limitFeatureVariationConditionRange(condition, axisLimit):
|
| 35 |
+
minValue = condition.FilterRangeMinValue
|
| 36 |
+
maxValue = condition.FilterRangeMaxValue
|
| 37 |
+
|
| 38 |
+
if (
|
| 39 |
+
minValue > maxValue
|
| 40 |
+
or minValue > axisLimit.maximum
|
| 41 |
+
or maxValue < axisLimit.minimum
|
| 42 |
+
):
|
| 43 |
+
# condition invalid or out of range
|
| 44 |
+
return
|
| 45 |
+
|
| 46 |
+
return tuple(
|
| 47 |
+
axisLimit.renormalizeValue(v, extrapolate=False) for v in (minValue, maxValue)
|
| 48 |
+
)
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def _instantiateFeatureVariationRecord(
    record, recIdx, axisLimits, fvarAxes, axisIndexMap
):
    """Limit one FeatureVariationRecord to the given axis limits, in place.

    Returns a triple (applies, shouldKeep, universal): whether the record
    matches the new default location, whether it should be retained, and
    whether the surviving record now applies unconditionally.
    """
    from fontTools.varLib.instancer import NormalizedAxisTripleAndDistances

    applies = True
    shouldKeep = False
    newConditions = []
    default_triple = NormalizedAxisTripleAndDistances(-1, 0, +1)
    if record.ConditionSet is None:
        # Normalize a missing condition set into an empty one so the loop
        # below can treat both cases uniformly.
        record.ConditionSet = ot.ConditionSet()
        record.ConditionSet.ConditionTable = []
        record.ConditionSet.ConditionCount = 0
    for i, condition in enumerate(record.ConditionSet.ConditionTable):
        if condition.Format == 1:
            axisIdx = condition.AxisIndex
            axisTag = fvarAxes[axisIdx].axisTag

            minValue = condition.FilterRangeMinValue
            maxValue = condition.FilterRangeMaxValue
            triple = axisLimits.get(axisTag, default_triple)

            if not (minValue <= triple.default <= maxValue):
                applies = False

            # If the condition cannot be met anywhere within the limits,
            # the entire record is removed.
            if triple.minimum > maxValue or triple.maximum < minValue:
                newConditions = None
                break

            if axisTag in axisIndexMap:
                # Remap the axis index to the post-instancing axis order.
                condition.AxisIndex = axisIndexMap[axisTag]

                # Remap the condition limits to the new normalized space.
                newRange = _limitFeatureVariationConditionRange(condition, triple)
                if newRange:
                    # Keep the condition with updated limits.
                    minimum, maximum = newRange
                    condition.FilterRangeMinValue = minimum
                    condition.FilterRangeMaxValue = maximum
                    shouldKeep = True
                    # A full-range condition is always true, so drop it.
                    if minimum != -1 or maximum != +1:
                        newConditions.append(condition)
                else:
                    # Condition out of range: remove the entire record.
                    newConditions = None
                    break

        else:
            log.warning(
                "Condition table {0} of FeatureVariationRecord {1} has "
                "unsupported format ({2}); ignored".format(i, recIdx, condition.Format)
            )
            applies = False
            newConditions.append(condition)

    if newConditions is not None and shouldKeep:
        record.ConditionSet.ConditionTable = newConditions
        if not newConditions:
            record.ConditionSet = None
        shouldKeep = True
    else:
        shouldKeep = False

    # Does this *always* apply?
    universal = shouldKeep and not newConditions

    return applies, shouldKeep, universal
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def _instantiateFeatureVariations(table, fvarAxes, axisLimits):
    """Rewrite a GSUB/GPOS table's FeatureVariations for the new axis limits.

    Records that can no longer apply are dropped, surviving condition sets
    are renormalized, and if a record applied at the new default location
    its substitutions are folded into the default FeatureList (with a
    catch-all record appended to restore the originals elsewhere).
    """
    pinnedAxes = set(axisLimits.pinnedLocation())
    axisOrder = [axis.axisTag for axis in fvarAxes if axis.axisTag not in pinnedAxes]
    axisIndexMap = {axisTag: axisOrder.index(axisTag) for axisTag in axisOrder}

    featureVariationApplied = False
    uniqueRecords = set()
    newRecords = []
    defaultsSubsts = None

    for i, record in enumerate(table.FeatureVariations.FeatureVariationRecord):
        applies, shouldKeep, universal = _instantiateFeatureVariationRecord(
            record, i, axisLimits, fvarAxes, axisIndexMap
        )

        if shouldKeep and _featureVariationRecordIsUnique(record, uniqueRecords):
            newRecords.append(record)

        if applies and not featureVariationApplied:
            assert record.FeatureTableSubstitution.Version == 0x00010000
            # Remember the current default features so they can be restored
            # by a catch-all record, then swap the record's substitutions
            # into the default FeatureList.
            defaultsSubsts = deepcopy(record.FeatureTableSubstitution)
            for default, rec in zip(
                defaultsSubsts.SubstitutionRecord,
                record.FeatureTableSubstitution.SubstitutionRecord,
            ):
                default.Feature = deepcopy(
                    table.FeatureList.FeatureRecord[rec.FeatureIndex].Feature
                )
                table.FeatureList.FeatureRecord[rec.FeatureIndex].Feature = deepcopy(
                    rec.Feature
                )
            # Set variations only once.
            featureVariationApplied = True

        # Later records never get a chance to apply after a universal one.
        if universal:
            break

    # Insert a catch-all record to reinstate the old features if necessary.
    if featureVariationApplied and newRecords and not universal:
        defaultRecord = ot.FeatureVariationRecord()
        defaultRecord.ConditionSet = ot.ConditionSet()
        defaultRecord.ConditionSet.ConditionTable = []
        defaultRecord.ConditionSet.ConditionCount = 0
        defaultRecord.FeatureTableSubstitution = defaultsSubsts

        newRecords.append(defaultRecord)

    if newRecords:
        table.FeatureVariations.FeatureVariationRecord = newRecords
        table.FeatureVariations.FeatureVariationCount = len(newRecords)
    else:
        del table.FeatureVariations
        # Downgrade the table version; no FeatureVariations remain.
        table.Version = 0x00010000
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
def instantiateFeatureVariations(varfont, axisLimits):
    """Instantiate FeatureVariations of the GSUB and GPOS tables in place."""
    for tableTag in ("GPOS", "GSUB"):
        # Skip tables that are absent or have no FeatureVariations.
        if tableTag not in varfont or not getattr(
            varfont[tableTag].table, "FeatureVariations", None
        ):
            continue
        log.info("Instantiating FeatureVariations of %s table", tableTag)
        _instantiateFeatureVariations(
            varfont[tableTag].table, varfont["fvar"].axes, axisLimits
        )
        # Remove lookups that are no longer referenced by any feature.
        varfont[tableTag].prune_lookups()
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/instancer/names.py
ADDED
|
@@ -0,0 +1,388 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Helpers for instantiating name table records."""
|
| 2 |
+
|
| 3 |
+
from contextlib import contextmanager
|
| 4 |
+
from copy import deepcopy
|
| 5 |
+
from enum import IntEnum
|
| 6 |
+
import re
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class NameID(IntEnum):
    """OpenType 'name' table name IDs relevant to instancing."""

    FAMILY_NAME = 1
    SUBFAMILY_NAME = 2
    UNIQUE_FONT_IDENTIFIER = 3
    FULL_FONT_NAME = 4
    VERSION_STRING = 5
    POSTSCRIPT_NAME = 6
    TYPOGRAPHIC_FAMILY_NAME = 16
    TYPOGRAPHIC_SUBFAMILY_NAME = 17
    VARIATIONS_POSTSCRIPT_NAME_PREFIX = 25


# STAT AxisValue flag bit: the value's name may be elided from style names.
ELIDABLE_AXIS_VALUE_NAME = 2
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def getVariationNameIDs(varfont):
    """Collect all nameIDs referenced from the fvar and STAT tables.

    Returns only IDs above 255: the lower range is reserved by the OT spec
    and is never pruned.
    """
    used = []
    if "fvar" in varfont:
        fvar = varfont["fvar"]
        used.extend(axis.axisNameID for axis in fvar.axes)
        for instance in fvar.instances:
            used.append(instance.subfamilyNameID)
            # 0xFFFF means "no PostScript name record".
            if instance.postscriptNameID != 0xFFFF:
                used.append(instance.postscriptNameID)
    if "STAT" in varfont:
        stat = varfont["STAT"].table
        designAxes = stat.DesignAxisRecord.Axis if stat.DesignAxisRecord else ()
        used.extend(axis.AxisNameID for axis in designAxes)
        axisValues = stat.AxisValueArray.AxisValue if stat.AxisValueArray else ()
        used.extend(value.ValueNameID for value in axisValues)
        elidedFallbackNameID = getattr(stat, "ElidedFallbackNameID", None)
        if elidedFallbackNameID is not None:
            used.append(elidedFallbackNameID)
    # nameIDs <= 255 are reserved by the OT spec, so we don't touch them.
    return {nameID for nameID in used if nameID > 255}
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
@contextmanager
def pruningUnusedNames(varfont):
    """Context manager that prunes name records orphaned by the body.

    Records the variation nameIDs before the body runs; afterwards, removes
    any of those IDs that are no longer referenced by fvar/STAT.
    """
    from . import log

    origNameIDs = getVariationNameIDs(varfont)

    yield

    log.info("Pruning name table")
    exclude = origNameIDs - getVariationNameIDs(varfont)
    varfont["name"].names[:] = [
        rec for rec in varfont["name"].names if rec.nameID not in exclude
    ]
    if "ltag" in varfont:
        # Drop the whole 'ltag' table if all the language-dependent Unicode
        # name records that reference it have been dropped.
        # TODO: Only prune unused ltag tags, renumerating langIDs accordingly.
        # Note ltag can also be used by feat or morx tables, so check those too.
        if not any(
            rec
            for rec in varfont["name"].names
            if rec.platformID == 0 and rec.langID != 0xFFFF
        ):
            del varfont["ltag"]
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def updateNameTable(varfont, axisLimits):
    """Update an instantiated variable font's name table from STAT AxisValues.

    Raises ValueError if the STAT table is missing or an Axis Value table is
    missing for requested axis locations.

    All STAT AxisValues matching the new default axis locations are collected
    (excluding "elided" ones); their strings are concatenated in design axis
    order, giving priority to "synthetic" Format 4 values, to form the
    typographic subfamily name of the new default instance. All related name
    records are then updated, keeping legacy family/subfamily names within
    the R/I/B/BI (Regular, Italic, Bold, Bold Italic) model.

    Example: Updating a partial variable font:
    | >>> ttFont = TTFont("OpenSans[wdth,wght].ttf")
    | >>> updateNameTable(ttFont, {"wght": (400, 900), "wdth": 75})

    The name table records will be updated in the following manner:
    NameID 1 familyName: "Open Sans" --> "Open Sans Condensed"
    NameID 2 subFamilyName: "Regular" --> "Regular"
    NameID 3 Unique font identifier: "3.000;GOOG;OpenSans-Regular" --> \
        "3.000;GOOG;OpenSans-Condensed"
    NameID 4 Full font name: "Open Sans Regular" --> "Open Sans Condensed"
    NameID 6 PostScript name: "OpenSans-Regular" --> "OpenSans-Condensed"
    NameID 16 Typographic Family name: None --> "Open Sans"
    NameID 17 Typographic Subfamily name: None --> "Condensed"

    References:
    https://docs.microsoft.com/en-us/typography/opentype/spec/stat
    https://docs.microsoft.com/en-us/typography/opentype/spec/name#name-ids
    """
    from . import AxisLimits, axisValuesFromAxisLimits

    if "STAT" not in varfont:
        raise ValueError("Cannot update name table since there is no STAT table.")
    stat = varfont["STAT"].table
    if not stat.AxisValueArray:
        raise ValueError("Cannot update name table since there are no STAT Axis Values")
    fvar = varfont["fvar"]

    # The updated name table reflects the font's new 'zero origin': for a
    # partial instancing, unpinned axes are populated with their fvar
    # default values.
    axisLimits = AxisLimits(axisLimits).limitAxesAndPopulateDefaults(varfont)
    partialDefaults = axisLimits.defaultLocation()
    fvarDefaults = {a.axisTag: a.defaultValue for a in fvar.axes}
    defaultAxisCoords = AxisLimits({**fvarDefaults, **partialDefaults})
    assert all(v.minimum == v.maximum for v in defaultAxisCoords.values())

    axisValueTables = axisValuesFromAxisLimits(stat, defaultAxisCoords)
    checkAxisValuesExist(stat, axisValueTables, defaultAxisCoords.pinnedLocation())

    # Drop "elidable" axis values; applications omit them from font menus.
    axisValueTables = [
        v for v in axisValueTables if not v.Flags & ELIDABLE_AXIS_VALUE_NAME
    ]
    axisValueTables = _sortAxisValues(axisValueTables)
    _updateNameRecords(varfont, axisValueTables)
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
def checkAxisValuesExist(stat, axisValues, axisCoords):
    """Raise ValueError when a pinned axis that has STAT Axis Values lacks
    one matching its pinned coordinate among the selected *axisValues*."""
    designAxes = stat.DesignAxisRecord.Axis

    # Which axes have any STAT value defined at all.
    hasValues = set()
    for value in stat.AxisValueArray.AxisValue:
        if value.Format in (1, 2, 3):
            hasValues.add(designAxes[value.AxisIndex].AxisTag)
        elif value.Format == 4:
            for rec in value.AxisValueRecord:
                hasValues.add(designAxes[rec.AxisIndex].AxisTag)

    # Which pinned axes are covered by the selected values.
    seen = set()
    for axisValueTable in axisValues:
        fmt = axisValueTable.Format
        if fmt in (1, 2, 3):
            axisTag = designAxes[axisValueTable.AxisIndex].AxisTag
            # Format 2 stores the match point in NominalValue.
            axisValue = (
                axisValueTable.NominalValue if fmt == 2 else axisValueTable.Value
            )
            if axisTag in axisCoords and axisValue == axisCoords[axisTag]:
                seen.add(axisTag)
        elif fmt == 4:
            for rec in axisValueTable.AxisValueRecord:
                axisTag = designAxes[rec.AxisIndex].AxisTag
                if axisTag in axisCoords and rec.Value == axisCoords[axisTag]:
                    seen.add(axisTag)

    missingAxes = (set(axisCoords) - seen) & hasValues
    if missingAxes:
        missing = ", ".join(f"'{i}': {axisCoords[i]}" for i in missingAxes)
        raise ValueError(f"Cannot find Axis Values {{{missing}}}")
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
def _sortAxisValues(axisValues):
|
| 168 |
+
# Sort by axis index, remove duplicates and ensure that format 4 AxisValues
|
| 169 |
+
# are dominant.
|
| 170 |
+
# The MS Spec states: "if a format 1, format 2 or format 3 table has a
|
| 171 |
+
# (nominal) value used in a format 4 table that also has values for
|
| 172 |
+
# other axes, the format 4 table, being the more specific match, is used",
|
| 173 |
+
# https://docs.microsoft.com/en-us/typography/opentype/spec/stat#axis-value-table-format-4
|
| 174 |
+
results = []
|
| 175 |
+
seenAxes = set()
|
| 176 |
+
# Sort format 4 axes so the tables with the most AxisValueRecords are first
|
| 177 |
+
format4 = sorted(
|
| 178 |
+
[v for v in axisValues if v.Format == 4],
|
| 179 |
+
key=lambda v: len(v.AxisValueRecord),
|
| 180 |
+
reverse=True,
|
| 181 |
+
)
|
| 182 |
+
|
| 183 |
+
for val in format4:
|
| 184 |
+
axisIndexes = set(r.AxisIndex for r in val.AxisValueRecord)
|
| 185 |
+
minIndex = min(axisIndexes)
|
| 186 |
+
if not seenAxes & axisIndexes:
|
| 187 |
+
seenAxes |= axisIndexes
|
| 188 |
+
results.append((minIndex, val))
|
| 189 |
+
|
| 190 |
+
for val in axisValues:
|
| 191 |
+
if val in format4:
|
| 192 |
+
continue
|
| 193 |
+
axisIndex = val.AxisIndex
|
| 194 |
+
if axisIndex not in seenAxes:
|
| 195 |
+
seenAxes.add(axisIndex)
|
| 196 |
+
results.append((axisIndex, val))
|
| 197 |
+
|
| 198 |
+
return [axisValue for _, axisValue in sorted(results)]
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
def _updateNameRecords(varfont, axisValues):
    """Update the name table from *axisValues* using the R/I/B/BI model."""
    nametable = varfont["name"]
    stat = varfont["STAT"].table

    axisValueNameIDs = [a.ValueNameID for a in axisValues]
    ribbiNameIDs = [n for n in axisValueNameIDs if _isRibbi(nametable, n)]
    nonRibbiNameIDs = [n for n in axisValueNameIDs if n not in ribbiNameIDs]
    elidedNameID = stat.ElidedFallbackNameID
    elidedNameIsRibbi = _isRibbi(nametable, elidedNameID)

    getName = nametable.getName
    platforms = {(r.platformID, r.platEncID, r.langID) for r in nametable.names}
    for platform in platforms:
        if not all(getName(i, *platform) for i in (1, 2, elidedNameID)):
            # No family/subfamily records were found for this platform,
            # so its set of name records cannot be updated.
            continue

        subFamilyName = " ".join(
            getName(n, *platform).toUnicode() for n in ribbiNameIDs
        )
        if nonRibbiNameIDs:
            typoSubFamilyName = " ".join(
                getName(n, *platform).toUnicode() for n in axisValueNameIDs
            )
        else:
            typoSubFamilyName = None

        # When neither a subfamily nor a typographic subfamily name exists,
        # fall back to STAT's elidedFallbackName.
        if not typoSubFamilyName and not subFamilyName:
            if elidedNameIsRibbi:
                subFamilyName = getName(elidedNameID, *platform).toUnicode()
            else:
                typoSubFamilyName = getName(elidedNameID, *platform).toUnicode()

        familyNameSuffix = " ".join(
            getName(n, *platform).toUnicode() for n in nonRibbiNameIDs
        )

        _updateNameTableStyleRecords(
            varfont,
            familyNameSuffix,
            subFamilyName,
            typoSubFamilyName,
            *platform,
        )
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
def _isRibbi(nametable, nameID):
|
| 252 |
+
englishRecord = nametable.getName(nameID, 3, 1, 0x409)
|
| 253 |
+
return (
|
| 254 |
+
True
|
| 255 |
+
if englishRecord is not None
|
| 256 |
+
and englishRecord.toUnicode() in ("Regular", "Italic", "Bold", "Bold Italic")
|
| 257 |
+
else False
|
| 258 |
+
)
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
def _updateNameTableStyleRecords(
    varfont,
    familyNameSuffix,
    subFamilyName,
    typoSubFamilyName,
    platformID=3,
    platEncID=1,
    langID=0x409,
):
    """Rewrite family/subfamily/full/PS/unique-ID records for one platform.

    TODO (Marc F): it may be nice to make this part a standalone font
    renamer in the future.
    """
    nametable = varfont["name"]
    platform = (platformID, platEncID, langID)

    currentFamilyName = nametable.getName(
        NameID.TYPOGRAPHIC_FAMILY_NAME, *platform
    ) or nametable.getName(NameID.FAMILY_NAME, *platform)

    currentStyleName = nametable.getName(
        NameID.TYPOGRAPHIC_SUBFAMILY_NAME, *platform
    ) or nametable.getName(NameID.SUBFAMILY_NAME, *platform)

    if not all([currentFamilyName, currentStyleName]):
        raise ValueError(f"Missing required NameIDs 1 and 2 for platform {platform}")

    currentFamilyName = currentFamilyName.toUnicode()
    currentStyleName = currentStyleName.toUnicode()

    nameIDs = {
        NameID.FAMILY_NAME: currentFamilyName,
        NameID.SUBFAMILY_NAME: subFamilyName or "Regular",
    }
    if typoSubFamilyName:
        # Non-RIBBI styles move into the legacy family name; the
        # typographic pair keeps the clean split.
        nameIDs[NameID.FAMILY_NAME] = f"{currentFamilyName} {familyNameSuffix}".strip()
        nameIDs[NameID.TYPOGRAPHIC_FAMILY_NAME] = currentFamilyName
        nameIDs[NameID.TYPOGRAPHIC_SUBFAMILY_NAME] = typoSubFamilyName
    else:
        # Previous Typographic Family and SubFamily names are no longer
        # required, so remove them.
        for nameID in (
            NameID.TYPOGRAPHIC_FAMILY_NAME,
            NameID.TYPOGRAPHIC_SUBFAMILY_NAME,
        ):
            nametable.removeNames(nameID=nameID)

    newFamilyName = (
        nameIDs.get(NameID.TYPOGRAPHIC_FAMILY_NAME) or nameIDs[NameID.FAMILY_NAME]
    )
    newStyleName = (
        nameIDs.get(NameID.TYPOGRAPHIC_SUBFAMILY_NAME) or nameIDs[NameID.SUBFAMILY_NAME]
    )

    nameIDs[NameID.FULL_FONT_NAME] = f"{newFamilyName} {newStyleName}"
    nameIDs[NameID.POSTSCRIPT_NAME] = _updatePSNameRecord(
        varfont, newFamilyName, newStyleName, platform
    )

    uniqueID = _updateUniqueIdNameRecord(varfont, nameIDs, platform)
    if uniqueID:
        nameIDs[NameID.UNIQUE_FONT_IDENTIFIER] = uniqueID

    for nameID, string in nameIDs.items():
        assert string, nameID
        nametable.setName(string, nameID, *platform)

    # A static font has no use for the variations PS name prefix.
    if "fvar" not in varfont:
        nametable.removeNames(NameID.VARIATIONS_POSTSCRIPT_NAME_PREFIX)
|
| 328 |
+
|
| 329 |
+
|
| 330 |
+
def _updatePSNameRecord(varfont, familyName, styleName, platform):
    """Build the new PostScript name string.

    Implementation based on Adobe Technical Note #5902:
    https://wwwimages2.adobe.com/content/dam/acom/en/devnet/font/pdfs/5902.AdobePSNameGeneration.pdf
    """
    nametable = varfont["name"]

    prefixRecord = nametable.getName(
        NameID.VARIATIONS_POSTSCRIPT_NAME_PREFIX, *platform
    )
    family_prefix = prefixRecord.toUnicode() if prefixRecord else familyName

    psName = f"{family_prefix}-{styleName}"
    # Keep only uppercase/lowercase Latin letters, digits and hyphens.
    psName = re.sub(r"[^A-Za-z0-9-]", r"", psName)

    if len(psName) > 127:
        # Abbreviating the style name so it fits within 127 characters while
        # conforming to every vendor's specification is too complex; simply
        # truncate the psName and append the required "...".
        return f"{psName[:124]}..."
    return psName
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
def _updateUniqueIdNameRecord(varfont, nameIDs, platform):
    """Return an updated unique font identifier string, or None if absent."""
    nametable = varfont["name"]
    currentRecord = nametable.getName(NameID.UNIQUE_FONT_IDENTIFIER, *platform)
    if not currentRecord:
        return None

    # If the full name or PostScript name is embedded in the current ID,
    # substitute it with the updated value.
    for nameID in (NameID.FULL_FONT_NAME, NameID.POSTSCRIPT_NAME):
        nameRecord = nametable.getName(nameID, *platform)
        if not nameRecord:
            continue
        if nameRecord.toUnicode() in currentRecord.toUnicode():
            return currentRecord.toUnicode().replace(
                nameRecord.toUnicode(), nameIDs[nameRecord.nameID]
            )

    # No substring matched; build a fresh "version;vendor;psname" string.
    fontVersion = _fontVersion(varfont, platform)
    achVendID = varfont["OS/2"].achVendID
    # Remove non-ASCII characters and trailing spaces.
    vendor = re.sub(r"[^\x00-\x7F]", "", achVendID).strip()
    psName = nameIDs[NameID.POSTSCRIPT_NAME]
    return f"{fontVersion};{vendor};{psName}"
|
| 379 |
+
|
| 380 |
+
|
| 381 |
+
def _fontVersion(font, platform=(3, 1, 0x409)):
    """Return the font's version as a bare number string, e.g. "1.101".

    Prefers the name table's VERSION_STRING record for the given
    platform; falls back to head.fontRevision formatted to three
    decimals when the record is absent.
    """
    nameRecord = font["name"].getName(NameID.VERSION_STRING, *platform)
    if nameRecord is None:
        return f'{font["head"].fontRevision:.3f}'
    # "Version 1.101; ttfautohint (v1.8.1.43-b0c9)" --> "1.101"
    # Also works fine with inputs "Version 1.101" or "1.101" etc
    versionNumber = nameRecord.toUnicode().split(";")[0]
    # NOTE: the previous implementation used lstrip("Version "), which
    # strips any of those *characters* (V, e, r, s, i, o, n, space), not
    # the literal prefix, and could eat leading characters of unusual
    # version strings. Remove exactly the "Version" prefix instead.
    if versionNumber.startswith("Version"):
        versionNumber = versionNumber[len("Version") :]
    return versionNumber.strip()
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/mutator.py
ADDED
|
@@ -0,0 +1,516 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Instantiate a variation font. Run, eg:
|
| 3 |
+
|
| 4 |
+
.. code-block:: sh
|
| 5 |
+
|
| 6 |
+
$ fonttools varLib.mutator ./NotoSansArabic-VF.ttf wght=140 wdth=85
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
from fontTools.misc.fixedTools import floatToFixedToFloat, floatToFixed
|
| 10 |
+
from fontTools.misc.roundTools import otRound
|
| 11 |
+
from fontTools.pens.boundsPen import BoundsPen
|
| 12 |
+
from fontTools.ttLib import TTFont, newTable
|
| 13 |
+
from fontTools.ttLib.tables import ttProgram
|
| 14 |
+
from fontTools.ttLib.tables._g_l_y_f import (
|
| 15 |
+
GlyphCoordinates,
|
| 16 |
+
flagOverlapSimple,
|
| 17 |
+
OVERLAP_COMPOUND,
|
| 18 |
+
)
|
| 19 |
+
from fontTools.varLib.models import (
|
| 20 |
+
supportScalar,
|
| 21 |
+
normalizeLocation,
|
| 22 |
+
piecewiseLinearMap,
|
| 23 |
+
)
|
| 24 |
+
from fontTools.varLib.merger import MutatorMerger
|
| 25 |
+
from fontTools.varLib.varStore import VarStoreInstancer
|
| 26 |
+
from fontTools.varLib.mvar import MVAR_ENTRIES
|
| 27 |
+
from fontTools.varLib.iup import iup_delta
|
| 28 |
+
import fontTools.subset.cff
|
| 29 |
+
import os.path
|
| 30 |
+
import logging
|
| 31 |
+
from io import BytesIO
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
log = logging.getLogger("fontTools.varlib.mutator")

# map 'wdth' axis (1..200) to OS/2.usWidthClass (1..9), rounding to closest.
# Each usWidthClass bucket is keyed by the midpoint between two consecutive
# nominal width percentages.
percents = [50.0, 62.5, 75.0, 87.5, 100.0, 112.5, 125.0, 150.0, 200.0]
OS2_WIDTH_CLASS_VALUES = {
    (lo + hi) / 2: width_class
    for width_class, (lo, hi) in enumerate(zip(percents[:-1], percents[1:]), start=1)
}
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def interpolate_cff2_PrivateDict(topDict, interpolateFromDeltas):
    """Collapse blended CFF2 Private dict values to a static instance.

    For every FontDict in the top dict's FDArray, hint entries stored as
    blends (a list whose tail holds per-region deltas) are replaced in
    place with the single value interpolated at the target location by
    *interpolateFromDeltas*.
    """
    # Entries whose value is a list of blend lists (zones / stem snaps).
    pd_blend_lists = (
        "BlueValues",
        "OtherBlues",
        "FamilyBlues",
        "FamilyOtherBlues",
        "StemSnapH",
        "StemSnapV",
    )
    # Entries whose value is a single blend list.
    pd_blend_values = ("BlueScale", "BlueShift", "BlueFuzz", "StdHW", "StdVW")
    for fontDict in topDict.FDArray:
        pd = fontDict.Private
        # A Private dict without an explicit vsindex uses variation data
        # index 0 per the CFF2 spec.
        vsindex = pd.vsindex if (hasattr(pd, "vsindex")) else 0
        for key, value in pd.rawDict.items():
            if (key in pd_blend_values) and isinstance(value, list):
                # value == [default, delta1, delta2, ...]: fold the
                # interpolated delta into the default and round.
                delta = interpolateFromDeltas(vsindex, value[1:])
                pd.rawDict[key] = otRound(value[0] + delta)
            elif (key in pd_blend_lists) and isinstance(value[0], list):
                """If any argument in a BlueValues list is a blend list,
                then they all are. The first value of each list is an
                absolute value. The delta tuples are calculated from
                relative master values, hence we need to append all the
                deltas to date to each successive absolute value."""
                delta = 0
                for i, val_list in enumerate(value):
                    # Running sum: each entry's delta accumulates on top
                    # of the previous entries' deltas.
                    delta += otRound(interpolateFromDeltas(vsindex, val_list[1:]))
                    value[i] = val_list[0] + delta
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def interpolate_cff2_charstrings(topDict, interpolateFromDeltas, glyphOrder):
    """Flatten each CFF2 charstring to a static instance.

    'blend' operators are replaced by their interpolated scalar
    arguments, and 'vsindex' operators (which only select the variation
    region set used by subsequent blends) are consumed and discarded.
    """
    charstrings = topDict.CharStrings
    for gname in glyphOrder:
        # Interpolate charstring
        # e.g replace blend op args with regular args,
        # and use and discard vsindex op.
        charstring = charstrings[gname]
        new_program = []
        vsindex = 0
        # last_i marks the start of the next still-to-be-copied slice of
        # the original program.
        last_i = 0
        for i, token in enumerate(charstring.program):
            if token == "vsindex":
                # Remember the region set; drop the operator and its
                # single numeric argument (at i - 1) from the output.
                vsindex = charstring.program[i - 1]
                if last_i != 0:
                    new_program.extend(charstring.program[last_i : i - 1])
                last_i = i + 1
            elif token == "blend":
                num_regions = charstring.getNumRegions(vsindex)
                numMasters = 1 + num_regions
                num_args = charstring.program[i - 1]
                # The program list starting at program[i] is now:
                # ..args for following operations
                # num_args values from the default font
                # num_args tuples, each with numMasters-1 delta values
                # num_blend_args
                # 'blend'
                argi = i - (num_args * numMasters + 1)
                end_args = tuplei = argi + num_args
                while argi < end_args:
                    next_ti = tuplei + num_regions
                    deltas = charstring.program[tuplei:next_ti]
                    delta = interpolateFromDeltas(vsindex, deltas)
                    # Fold the interpolated delta into the default value
                    # in place; the raw delta tuples are skipped when the
                    # program is copied below.
                    charstring.program[argi] += otRound(delta)
                    tuplei = next_ti
                    argi += 1
                new_program.extend(charstring.program[last_i:end_args])
                last_i = i + 1
        if last_i != 0:
            # At least one operator was consumed: copy the trailing,
            # unmodified portion and install the flattened program.
            new_program.extend(charstring.program[last_i:])
            charstring.program = new_program
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def interpolate_cff2_metrics(varfont, topDict, glyphOrder, loc):
    """Interpolate hmtx advance widths and left side bearings for CFF2.

    Unlike TrueType glyphs, neither advance width nor bounding box
    info is stored in a CFF2 charstring. The width data exists only in
    the hmtx and HVAR tables. Since LSB data cannot be interpolated
    reliably from the master LSB values in the hmtx table, we traverse
    the charstring to determine the actual bound box.
    """
    charstrings = topDict.CharStrings
    boundsPen = BoundsPen(glyphOrder)
    hmtx = varfont["hmtx"]
    # BUGFIX: only build the VarStore instancer when the font actually has
    # an HVAR table. The previous code guarded the hvar_table assignment
    # but then dereferenced hvar_table.VarStore unconditionally, raising
    # AttributeError for fonts without HVAR.
    hvar_table = None
    varStoreInstancer = None
    if "HVAR" in varfont:
        hvar_table = varfont["HVAR"].table
        fvar = varfont["fvar"]
        varStoreInstancer = VarStoreInstancer(hvar_table.VarStore, fvar.axes, loc)

    for gid, gname in enumerate(glyphOrder):
        entry = list(hmtx[gname])
        # get width delta.
        if hvar_table:
            if hvar_table.AdvWidthMap:
                width_idx = hvar_table.AdvWidthMap.mapping[gname]
            else:
                # Without an AdvWidthMap, deltas are indexed by glyph ID.
                width_idx = gid
            width_delta = otRound(varStoreInstancer[width_idx])
        else:
            width_delta = 0

        # get LSB by tracing the charstring's actual bounding box.
        boundsPen.init()
        charstring = charstrings[gname]
        charstring.draw(boundsPen)
        if boundsPen.bounds is None:
            # Happens with non-marking glyphs
            lsb_delta = 0
        else:
            lsb = otRound(boundsPen.bounds[0])
            lsb_delta = entry[1] - lsb

        if lsb_delta or width_delta:
            if width_delta:
                # An advance width must never go negative.
                entry[0] = max(0, entry[0] + width_delta)
            if lsb_delta:
                entry[1] = lsb
            hmtx[gname] = tuple(entry)
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
def instantiateVariableFont(varfont, location, inplace=False, overlap=True):
    """Generate a static instance from a variable TTFont and a dictionary
    defining the desired location along the variable font's axes.
    The location values must be specified as user-space coordinates, e.g.:

    .. code-block::

        {'wght': 400, 'wdth': 100}

    By default, a new TTFont object is returned. If ``inplace`` is True, the
    input varfont is modified and reduced to a static font.

    When the overlap parameter is defined as True,
    OVERLAP_SIMPLE and OVERLAP_COMPOUND bits are set to 1.  See
    https://docs.microsoft.com/en-us/typography/opentype/spec/glyf
    """
    if not inplace:
        # make a copy to leave input varfont unmodified
        stream = BytesIO()
        varfont.save(stream)
        stream.seek(0)
        varfont = TTFont(stream)

    fvar = varfont["fvar"]
    axes = {a.axisTag: (a.minValue, a.defaultValue, a.maxValue) for a in fvar.axes}
    # Map user-space coordinates to the normalized -1..0..1 design space.
    loc = normalizeLocation(location, axes)
    if "avar" in varfont:
        # Apply the avar axis mappings on top of the linear normalization.
        maps = varfont["avar"].segments
        loc = {k: piecewiseLinearMap(v, maps[k]) for k, v in loc.items()}
    # Quantize to F2Dot14, to avoid surprise interpolations.
    loc = {k: floatToFixedToFloat(v, 14) for k, v in loc.items()}
    # Location is normalized now
    log.info("Normalized location: %s", loc)

    if "gvar" in varfont:
        log.info("Mutating glyf/gvar tables")
        gvar = varfont["gvar"]
        glyf = varfont["glyf"]
        hMetrics = varfont["hmtx"].metrics
        vMetrics = getattr(varfont.get("vmtx"), "metrics", None)
        # get list of glyph names in gvar sorted by component depth, so
        # that components are instantiated before the composites that
        # reference them.
        glyphnames = sorted(
            gvar.variations.keys(),
            key=lambda name: (
                (
                    glyf[name].getCompositeMaxpValues(glyf).maxComponentDepth
                    if glyf[name].isComposite()
                    else 0
                ),
                name,
            ),
        )
        for glyphname in glyphnames:
            variations = gvar.variations[glyphname]
            coordinates, _ = glyf._getCoordinatesAndControls(
                glyphname, hMetrics, vMetrics
            )
            origCoords, endPts = None, None
            for var in variations:
                scalar = supportScalar(loc, var.axes)
                if not scalar:
                    continue
                delta = var.coordinates
                if None in delta:
                    # Sparse deltas: infer the missing points via IUP
                    # from the original outline (fetched lazily once).
                    if origCoords is None:
                        origCoords, g = glyf._getCoordinatesAndControls(
                            glyphname, hMetrics, vMetrics
                        )
                    delta = iup_delta(delta, origCoords, g.endPts)
                coordinates += GlyphCoordinates(delta) * scalar
            glyf._setCoordinates(glyphname, coordinates, hMetrics, vMetrics)
    else:
        glyf = None

    if "DSIG" in varfont:
        # Outlines changed, so any digital signature is now invalid.
        del varfont["DSIG"]

    if "cvar" in varfont:
        log.info("Mutating cvt/cvar tables")
        cvar = varfont["cvar"]
        cvt = varfont["cvt "]
        deltas = {}
        for var in cvar.variations:
            scalar = supportScalar(loc, var.axes)
            if not scalar:
                continue
            for i, c in enumerate(var.coordinates):
                if c is not None:
                    deltas[i] = deltas.get(i, 0) + scalar * c
        for i, delta in deltas.items():
            cvt[i] += otRound(delta)

    if "CFF2" in varfont:
        log.info("Mutating CFF2 table")
        glyphOrder = varfont.getGlyphOrder()
        CFF2 = varfont["CFF2"]
        topDict = CFF2.cff.topDictIndex[0]
        vsInstancer = VarStoreInstancer(topDict.VarStore.otVarStore, fvar.axes, loc)
        interpolateFromDeltas = vsInstancer.interpolateFromDeltas
        interpolate_cff2_PrivateDict(topDict, interpolateFromDeltas)
        # Desubroutinize so that blend arguments appear inline in each
        # charstring before flattening them.
        CFF2.desubroutinize()
        interpolate_cff2_charstrings(topDict, interpolateFromDeltas, glyphOrder)
        interpolate_cff2_metrics(varfont, topDict, glyphOrder, loc)
        del topDict.rawDict["VarStore"]
        del topDict.VarStore

    if "MVAR" in varfont:
        log.info("Mutating MVAR table")
        mvar = varfont["MVAR"].table
        varStoreInstancer = VarStoreInstancer(mvar.VarStore, fvar.axes, loc)
        records = mvar.ValueRecord
        for rec in records:
            mvarTag = rec.ValueTag
            if mvarTag not in MVAR_ENTRIES:
                continue
            # Apply the interpolated delta to the target table attribute.
            tableTag, itemName = MVAR_ENTRIES[mvarTag]
            delta = otRound(varStoreInstancer[rec.VarIdx])
            if not delta:
                continue
            setattr(
                varfont[tableTag],
                itemName,
                getattr(varfont[tableTag], itemName) + delta,
            )

    log.info("Mutating FeatureVariations")
    for tableTag in "GSUB", "GPOS":
        if not tableTag in varfont:
            continue
        table = varfont[tableTag].table
        if not getattr(table, "FeatureVariations", None):
            continue
        variations = table.FeatureVariations
        for record in variations.FeatureVariationRecord:
            # A record applies only if every condition in its set holds
            # at the target location.
            applies = True
            for condition in record.ConditionSet.ConditionTable:
                if condition.Format == 1:
                    axisIdx = condition.AxisIndex
                    axisTag = fvar.axes[axisIdx].axisTag
                    Min = condition.FilterRangeMinValue
                    Max = condition.FilterRangeMaxValue
                    v = loc[axisTag]
                    if not (Min <= v <= Max):
                        applies = False
                else:
                    # Unknown condition format: treat as not matching.
                    applies = False
                if not applies:
                    break

            if applies:
                assert record.FeatureTableSubstitution.Version == 0x00010000
                # Bake the substituted feature tables in and stop at the
                # first matching record, per the spec's ordering.
                for rec in record.FeatureTableSubstitution.SubstitutionRecord:
                    table.FeatureList.FeatureRecord[rec.FeatureIndex].Feature = (
                        rec.Feature
                    )
                break
        del table.FeatureVariations

    if "GDEF" in varfont and varfont["GDEF"].table.Version >= 0x00010003:
        log.info("Mutating GDEF/GPOS/GSUB tables")
        gdef = varfont["GDEF"].table
        instancer = VarStoreInstancer(gdef.VarStore, fvar.axes, loc)

        merger = MutatorMerger(varfont, instancer)
        merger.mergeTables(varfont, [varfont], ["GDEF", "GPOS"])

        # Downgrade GDEF.
        del gdef.VarStore
        gdef.Version = 0x00010002
        if gdef.MarkGlyphSetsDef is None:
            del gdef.MarkGlyphSetsDef
            gdef.Version = 0x00010000

        # Drop GDEF entirely if nothing meaningful remains.
        if not (
            gdef.LigCaretList
            or gdef.MarkAttachClassDef
            or gdef.GlyphClassDef
            or gdef.AttachList
            or (gdef.Version >= 0x00010002 and gdef.MarkGlyphSetsDef)
        ):
            del varfont["GDEF"]

    addidef = False
    if glyf:
        for glyph in glyf.glyphs.values():
            if hasattr(glyph, "program"):
                instructions = glyph.program.getAssembly()
                # If GETVARIATION opcode is used in bytecode of any glyph add IDEF
                addidef = any(op.startswith("GETVARIATION") for op in instructions)
                if addidef:
                    break
        if overlap:
            for glyph_name in glyf.keys():
                glyph = glyf[glyph_name]
                # Set OVERLAP_COMPOUND bit for compound glyphs
                if glyph.isComposite():
                    glyph.components[0].flags |= OVERLAP_COMPOUND
                # Set OVERLAP_SIMPLE bit for simple glyphs
                elif glyph.numberOfContours > 0:
                    glyph.flags[0] |= flagOverlapSimple
    if addidef:
        # GETVARIATION must keep returning the (now fixed) normalized
        # location, so define opcode 145 to push it.
        log.info("Adding IDEF to fpgm table for GETVARIATION opcode")
        asm = []
        if "fpgm" in varfont:
            fpgm = varfont["fpgm"]
            asm = fpgm.program.getAssembly()
        else:
            fpgm = newTable("fpgm")
            fpgm.program = ttProgram.Program()
            varfont["fpgm"] = fpgm
        asm.append("PUSHB[000] 145")
        asm.append("IDEF[ ]")
        args = [str(len(loc))]
        for a in fvar.axes:
            args.append(str(floatToFixed(loc[a.axisTag], 14)))
        asm.append("NPUSHW[ ] " + " ".join(args))
        asm.append("ENDF[ ]")
        fpgm.program.fromAssembly(asm)

        # Change maxp attributes as IDEF is added
        if "maxp" in varfont:
            maxp = varfont["maxp"]
            setattr(
                maxp, "maxInstructionDefs", 1 + getattr(maxp, "maxInstructionDefs", 0)
            )
            setattr(
                maxp,
                "maxStackElements",
                max(len(loc), getattr(maxp, "maxStackElements", 0)),
            )

    if "name" in varfont:
        log.info("Pruning name table")
        # Drop name records that only described the variable axes and
        # named instances, which no longer exist in the static font.
        exclude = {a.axisNameID for a in fvar.axes}
        for i in fvar.instances:
            exclude.add(i.subfamilyNameID)
            exclude.add(i.postscriptNameID)
        if "ltag" in varfont:
            # Drop the whole 'ltag' table if all its language tags are referenced by
            # name records to be pruned.
            # TODO: prune unused ltag tags and re-enumerate langIDs accordingly
            excludedUnicodeLangIDs = [
                n.langID
                for n in varfont["name"].names
                if n.nameID in exclude and n.platformID == 0 and n.langID != 0xFFFF
            ]
            if set(excludedUnicodeLangIDs) == set(range(len((varfont["ltag"].tags)))):
                del varfont["ltag"]
        varfont["name"].names[:] = [
            n for n in varfont["name"].names if n.nameID not in exclude
        ]

    if "wght" in location and "OS/2" in varfont:
        # usWeightClass is clamped to the valid 1..1000 range.
        varfont["OS/2"].usWeightClass = otRound(max(1, min(location["wght"], 1000)))
    if "wdth" in location:
        wdth = location["wdth"]
        # Pick the usWidthClass bucket whose upper midpoint bound exceeds
        # the requested width percentage; anything wider maps to 9.
        for percent, widthClass in sorted(OS2_WIDTH_CLASS_VALUES.items()):
            if wdth < percent:
                varfont["OS/2"].usWidthClass = widthClass
                break
        else:
            varfont["OS/2"].usWidthClass = 9
    if "slnt" in location and "post" in varfont:
        # italicAngle is clamped to the spec's -90..90 degree range.
        varfont["post"].italicAngle = max(-90, min(location["slnt"], 90))

    log.info("Removing variable tables")
    for tag in ("avar", "cvar", "fvar", "gvar", "HVAR", "MVAR", "VVAR", "STAT"):
        if tag in varfont:
            del varfont[tag]

    return varfont
|
| 433 |
+
|
| 434 |
+
|
| 435 |
+
def main(args=None):
    """Instantiate a variation font"""
    from fontTools import configLogger
    import argparse

    parser = argparse.ArgumentParser(
        "fonttools varLib.mutator", description="Instantiate a variable font"
    )
    parser.add_argument("input", metavar="INPUT.ttf", help="Input variable TTF file.")
    parser.add_argument(
        "locargs",
        metavar="AXIS=LOC",
        nargs="*",
        help="List of space separated locations. A location consist in "
        "the name of a variation axis, followed by '=' and a number. E.g.: "
        " wght=700 wdth=80. The default is the location of the base master.",
    )
    parser.add_argument(
        "-o",
        "--output",
        metavar="OUTPUT.ttf",
        default=None,
        help="Output instance TTF file (default: INPUT-instance.ttf).",
    )
    parser.add_argument(
        "--no-recalc-timestamp",
        dest="recalc_timestamp",
        action="store_false",
        help="Don't set the output font's timestamp to the current time.",
    )
    # -v and -q are mutually exclusive verbosity switches.
    logging_group = parser.add_mutually_exclusive_group(required=False)
    logging_group.add_argument(
        "-v", "--verbose", action="store_true", help="Run more verbosely."
    )
    logging_group.add_argument(
        "-q", "--quiet", action="store_true", help="Turn verbosity off."
    )
    parser.add_argument(
        "--no-overlap",
        dest="overlap",
        action="store_false",
        help="Don't set OVERLAP_SIMPLE/OVERLAP_COMPOUND glyf flags.",
    )
    options = parser.parse_args(args)

    varfilename = options.input
    # Default output path: INPUT-instance.ttf next to the input file.
    outfile = (
        os.path.splitext(varfilename)[0] + "-instance.ttf"
        if not options.output
        else options.output
    )
    configLogger(
        level=("DEBUG" if options.verbose else "ERROR" if options.quiet else "INFO")
    )

    # Parse AXIS=VALUE pairs; axis tags shorter than 4 chars are padded
    # with spaces, as OpenType tags are always 4 bytes.
    loc = {}
    for arg in options.locargs:
        try:
            tag, val = arg.split("=")
            assert len(tag) <= 4
            loc[tag.ljust(4)] = float(val)
        except (ValueError, AssertionError):
            parser.error("invalid location argument format: %r" % arg)
    log.info("Location: %s", loc)

    log.info("Loading variable font")
    varfont = TTFont(varfilename, recalcTimestamp=options.recalc_timestamp)

    instantiateVariableFont(varfont, loc, inplace=True, overlap=options.overlap)

    log.info("Saving instance font %s", outfile)
    varfont.save(outfile)
|
| 507 |
+
|
| 508 |
+
|
| 509 |
+
if __name__ == "__main__":
    import sys

    # With command-line arguments, run the CLI; otherwise run the
    # module's doctests and exit with the number of failures.
    if len(sys.argv) > 1:
        sys.exit(main())
    import doctest

    sys.exit(doctest.testmod().failed)
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/varLib/mvar.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Mapping of OpenType MVAR value tags to the (table, attribute) pair the
# corresponding variation delta is applied to. Tags are the 4-byte value
# record tags defined in the MVAR table specification.
MVAR_ENTRIES = {
    "hasc": ("OS/2", "sTypoAscender"),  # horizontal ascender
    "hdsc": ("OS/2", "sTypoDescender"),  # horizontal descender
    "hlgp": ("OS/2", "sTypoLineGap"),  # horizontal line gap
    "hcla": ("OS/2", "usWinAscent"),  # horizontal clipping ascent
    "hcld": ("OS/2", "usWinDescent"),  # horizontal clipping descent
    "vasc": ("vhea", "ascent"),  # vertical ascender
    "vdsc": ("vhea", "descent"),  # vertical descender
    "vlgp": ("vhea", "lineGap"),  # vertical line gap
    "hcrs": ("hhea", "caretSlopeRise"),  # horizontal caret rise
    "hcrn": ("hhea", "caretSlopeRun"),  # horizontal caret run
    "hcof": ("hhea", "caretOffset"),  # horizontal caret offset
    "vcrs": ("vhea", "caretSlopeRise"),  # vertical caret rise
    "vcrn": ("vhea", "caretSlopeRun"),  # vertical caret run
    "vcof": ("vhea", "caretOffset"),  # vertical caret offset
    "xhgt": ("OS/2", "sxHeight"),  # x height
    "cpht": ("OS/2", "sCapHeight"),  # cap height
    "sbxs": ("OS/2", "ySubscriptXSize"),  # subscript em x size
    "sbys": ("OS/2", "ySubscriptYSize"),  # subscript em y size
    "sbxo": ("OS/2", "ySubscriptXOffset"),  # subscript em x offset
    "sbyo": ("OS/2", "ySubscriptYOffset"),  # subscript em y offset
    "spxs": ("OS/2", "ySuperscriptXSize"),  # superscript em x size
    "spys": ("OS/2", "ySuperscriptYSize"),  # superscript em y size
    "spxo": ("OS/2", "ySuperscriptXOffset"),  # superscript em x offset
    "spyo": ("OS/2", "ySuperscriptYOffset"),  # superscript em y offset
    "strs": ("OS/2", "yStrikeoutSize"),  # strikeout size
    "stro": ("OS/2", "yStrikeoutPosition"),  # strikeout offset
    "unds": ("post", "underlineThickness"),  # underline size
    "undo": ("post", "underlinePosition"),  # underline offset
    # The 'gasp' entries below are defined by the spec but not yet
    # supported here, because the target is an indexed sub-record rather
    # than a flat table attribute.
    #'gsp0': ('gasp', 'gaspRange[0].rangeMaxPPEM'), # gaspRange[0]
    #'gsp1': ('gasp', 'gaspRange[1].rangeMaxPPEM'), # gaspRange[1]
    #'gsp2': ('gasp', 'gaspRange[2].rangeMaxPPEM'), # gaspRange[2]
    #'gsp3': ('gasp', 'gaspRange[3].rangeMaxPPEM'), # gaspRange[3]
    #'gsp4': ('gasp', 'gaspRange[4].rangeMaxPPEM'), # gaspRange[4]
    #'gsp5': ('gasp', 'gaspRange[5].rangeMaxPPEM'), # gaspRange[5]
    #'gsp6': ('gasp', 'gaspRange[6].rangeMaxPPEM'), # gaspRange[6]
    #'gsp7': ('gasp', 'gaspRange[7].rangeMaxPPEM'), # gaspRange[7]
    #'gsp8': ('gasp', 'gaspRange[8].rangeMaxPPEM'), # gaspRange[8]
    #'gsp9': ('gasp', 'gaspRange[9].rangeMaxPPEM'), # gaspRange[9]
}
|
evalkit_tf437/lib/python3.10/site-packages/gradio/templates/node/build/client/_app/immutable/chunks/hls.CFPBCiRi.js.br
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:1f37e5693b0d00d90490c8f1c03ee182aef292a9bf3b0a00bdd8ee5675d34f4b
|
| 3 |
+
size 141745
|
evalkit_tf437/lib/python3.10/site-packages/httpx/__pycache__/__version__.cpython-310.pyc
ADDED
|
Binary file (282 Bytes). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/httpx/__pycache__/_decoders.cpython-310.pyc
ADDED
|
Binary file (10.7 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/httpx/__pycache__/_models.cpython-310.pyc
ADDED
|
Binary file (36.7 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/httpx/__pycache__/_status_codes.cpython-310.pyc
ADDED
|
Binary file (6.31 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/httpx/__pycache__/_utils.cpython-310.pyc
ADDED
|
Binary file (12.9 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/httpx/_transports/__pycache__/base.cpython-310.pyc
ADDED
|
Binary file (3.43 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/httpx/_urls.py
ADDED
|
@@ -0,0 +1,648 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import typing
|
| 4 |
+
from urllib.parse import parse_qs, unquote
|
| 5 |
+
|
| 6 |
+
import idna
|
| 7 |
+
|
| 8 |
+
from ._types import QueryParamTypes, RawURL
|
| 9 |
+
from ._urlparse import urlencode, urlparse
|
| 10 |
+
from ._utils import primitive_value_to_str
|
| 11 |
+
|
| 12 |
+
__all__ = ["URL", "QueryParams"]
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class URL:
    """
    url = httpx.URL("HTTPS://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink")

    assert url.scheme == "https"
    assert url.username == "jo@email.com"
    assert url.password == "a secret"
    assert url.userinfo == b"jo%40email.com:a%20secret"
    assert url.host == "müller.de"
    assert url.raw_host == b"xn--mller-kva.de"
    assert url.port == 1234
    assert url.netloc == b"xn--mller-kva.de:1234"
    assert url.path == "/pa th"
    assert url.query == b"?search=ab"
    assert url.raw_path == b"/pa%20th?search=ab"
    assert url.fragment == "anchorlink"

    The components of a URL are broken down like this:

       https://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink
    [scheme]   [  username  ] [password] [ host ][port][ path ] [ query ] [fragment]
            [       userinfo        ] [   netloc   ][    raw_path    ]

    Note that:

    * `url.scheme` is normalized to always be lowercased.

    * `url.host` is normalized to always be lowercased. Internationalized domain
      names are represented in unicode, without IDNA encoding applied. For instance:

      url = httpx.URL("http://中国.icom.museum")
      assert url.host == "中国.icom.museum"
      url = httpx.URL("http://xn--fiqs8s.icom.museum")
      assert url.host == "中国.icom.museum"

    * `url.raw_host` is normalized to always be lowercased, and is IDNA encoded.

      url = httpx.URL("http://中国.icom.museum")
      assert url.raw_host == b"xn--fiqs8s.icom.museum"
      url = httpx.URL("http://xn--fiqs8s.icom.museum")
      assert url.raw_host == b"xn--fiqs8s.icom.museum"

    * `url.port` is either None or an integer. URLs that include the default port for
      "http", "https", "ws", "wss", and "ftp" schemes have their port
      normalized to `None`.

      assert httpx.URL("http://example.com") == httpx.URL("http://example.com:80")
      assert httpx.URL("http://example.com").port is None
      assert httpx.URL("http://example.com:80").port is None

    * `url.userinfo` is raw bytes, without URL escaping. Usually you'll want to work
      with `url.username` and `url.password` instead, which handle the URL escaping.

    * `url.raw_path` is raw bytes of both the path and query, without URL escaping.
      This portion is used as the target when constructing HTTP requests. Usually you'll
      want to work with `url.path` instead.

    * `url.query` is raw bytes, without URL escaping. A URL query string portion can
      only be properly URL escaped when decoding the parameter names and values
      themselves.
    """

    def __init__(self, url: URL | str = "", **kwargs: typing.Any) -> None:
        """
        Build a URL from a string or from another URL, optionally overriding
        individual components via keyword arguments (scheme, host, port, ...).

        Raises TypeError for unknown keyword arguments, wrongly-typed component
        values, or a `url` argument that is neither `str` nor `URL`.
        """
        if kwargs:
            # Maps each supported keyword argument to the type it must have.
            # "params" maps to `object` because it accepts any QueryParams-like
            # input and is converted to a "query" string below.
            allowed = {
                "scheme": str,
                "username": str,
                "password": str,
                "userinfo": bytes,
                "host": str,
                "port": int,
                "netloc": bytes,
                "path": str,
                "query": bytes,
                "raw_path": bytes,
                "fragment": str,
                "params": object,
            }

            # Perform type checking for all supported keyword arguments.
            for key, value in kwargs.items():
                if key not in allowed:
                    message = f"{key!r} is an invalid keyword argument for URL()"
                    raise TypeError(message)
                if value is not None and not isinstance(value, allowed[key]):
                    expected = allowed[key].__name__
                    seen = type(value).__name__
                    message = f"Argument {key!r} must be {expected} but got {seen}"
                    raise TypeError(message)
                # bytes components (userinfo/netloc/query/raw_path) are
                # normalized to ASCII str before being passed to urlparse.
                if isinstance(value, bytes):
                    kwargs[key] = value.decode("ascii")

            if "params" in kwargs:
                # Replace any "params" keyword with the raw "query" instead.
                #
                # Ensure that empty params use `kwargs["query"] = None` rather
                # than `kwargs["query"] = ""`, so that generated URLs do not
                # include an empty trailing "?".
                params = kwargs.pop("params")
                kwargs["query"] = None if not params else str(QueryParams(params))

        if isinstance(url, str):
            self._uri_reference = urlparse(url, **kwargs)
        elif isinstance(url, URL):
            # Copying from an existing URL reuses its parsed representation,
            # applying any keyword overrides on top.
            self._uri_reference = url._uri_reference.copy_with(**kwargs)
        else:
            raise TypeError(
                "Invalid type for url. Expected str or httpx.URL,"
                f" got {type(url)}: {url!r}"
            )

    @property
    def scheme(self) -> str:
        """
        The URL scheme, such as "http", "https".
        Always normalised to lowercase.
        """
        return self._uri_reference.scheme

    @property
    def raw_scheme(self) -> bytes:
        """
        The raw bytes representation of the URL scheme, such as b"http", b"https".
        Always normalised to lowercase.
        """
        return self._uri_reference.scheme.encode("ascii")

    @property
    def userinfo(self) -> bytes:
        """
        The URL userinfo as a raw bytestring.
        For example: b"jo%40email.com:a%20secret".
        """
        return self._uri_reference.userinfo.encode("ascii")

    @property
    def username(self) -> str:
        """
        The URL username as a string, with URL decoding applied.
        For example: "jo@email.com"
        """
        # Userinfo is "<username>:<password>"; take the part before the colon.
        userinfo = self._uri_reference.userinfo
        return unquote(userinfo.partition(":")[0])

    @property
    def password(self) -> str:
        """
        The URL password as a string, with URL decoding applied.
        For example: "a secret"
        """
        # Take the part after the first colon; empty string if no password.
        userinfo = self._uri_reference.userinfo
        return unquote(userinfo.partition(":")[2])

    @property
    def host(self) -> str:
        """
        The URL host as a string.
        Always normalized to lowercase, with IDNA hosts decoded into unicode.

        Examples:

        url = httpx.URL("http://www.EXAMPLE.org")
        assert url.host == "www.example.org"

        url = httpx.URL("http://中国.icom.museum")
        assert url.host == "中国.icom.museum"

        url = httpx.URL("http://xn--fiqs8s.icom.museum")
        assert url.host == "中国.icom.museum"

        url = httpx.URL("https://[::ffff:192.168.0.1]")
        assert url.host == "::ffff:192.168.0.1"
        """
        host: str = self._uri_reference.host

        # Only hosts whose *first* label is punycode ("xn--" prefix) are
        # IDNA-decoded back to unicode here.
        if host.startswith("xn--"):
            host = idna.decode(host)

        return host

    @property
    def raw_host(self) -> bytes:
        """
        The raw bytes representation of the URL host.
        Always normalized to lowercase, and IDNA encoded.

        Examples:

        url = httpx.URL("http://www.EXAMPLE.org")
        assert url.raw_host == b"www.example.org"

        url = httpx.URL("http://中国.icom.museum")
        assert url.raw_host == b"xn--fiqs8s.icom.museum"

        url = httpx.URL("http://xn--fiqs8s.icom.museum")
        assert url.raw_host == b"xn--fiqs8s.icom.museum"

        url = httpx.URL("https://[::ffff:192.168.0.1]")
        assert url.raw_host == b"::ffff:192.168.0.1"
        """
        return self._uri_reference.host.encode("ascii")

    @property
    def port(self) -> int | None:
        """
        The URL port as an integer.

        Note that the URL class performs port normalization as per the WHATWG spec.
        Default ports for "http", "https", "ws", "wss", and "ftp" schemes are always
        treated as `None`.

        For example:

        assert httpx.URL("http://www.example.com") == httpx.URL("http://www.example.com:80")
        assert httpx.URL("http://www.example.com:80").port is None
        """
        return self._uri_reference.port

    @property
    def netloc(self) -> bytes:
        """
        Either `<host>` or `<host>:<port>` as bytes.
        Always normalized to lowercase, and IDNA encoded.

        This property may be used for generating the value of a request
        "Host" header.
        """
        return self._uri_reference.netloc.encode("ascii")

    @property
    def path(self) -> str:
        """
        The URL path as a string. Excluding the query string, and URL decoded.

        For example:

        url = httpx.URL("https://example.com/pa%20th")
        assert url.path == "/pa th"
        """
        # An empty path is presented as "/".
        path = self._uri_reference.path or "/"
        return unquote(path)

    @property
    def query(self) -> bytes:
        """
        The URL query string, as raw bytes, excluding the leading b"?".

        This is necessarily a bytewise interface, because we cannot
        perform URL decoding of this representation until we've parsed
        the keys and values into a QueryParams instance.

        For example:

        url = httpx.URL("https://example.com/?filter=some%20search%20terms")
        assert url.query == b"filter=some%20search%20terms"
        """
        query = self._uri_reference.query or ""
        return query.encode("ascii")

    @property
    def params(self) -> QueryParams:
        """
        The URL query parameters, neatly parsed and packaged into an immutable
        multidict representation.
        """
        return QueryParams(self._uri_reference.query)

    @property
    def raw_path(self) -> bytes:
        """
        The complete URL path and query string as raw bytes.
        Used as the target when constructing HTTP requests.

        For example:

        GET /users?search=some%20text HTTP/1.1
        Host: www.example.org
        Connection: close
        """
        path = self._uri_reference.path or "/"
        # A present-but-empty query still yields a trailing "?", which is
        # why URL() maps empty params to query=None rather than "".
        if self._uri_reference.query is not None:
            path += "?" + self._uri_reference.query
        return path.encode("ascii")

    @property
    def fragment(self) -> str:
        """
        The URL fragments, as used in HTML anchors.
        As a string, without the leading '#'.
        """
        return unquote(self._uri_reference.fragment or "")

    @property
    def raw(self) -> RawURL:
        """
        Provides the (scheme, host, port, target) for the outgoing request.

        In older versions of `httpx` this was used in the low-level transport API.
        We no longer use `RawURL`, and this property will be deprecated
        in a future release.
        """
        return RawURL(
            self.raw_scheme,
            self.raw_host,
            self.port,
            self.raw_path,
        )

    @property
    def is_absolute_url(self) -> bool:
        """
        Return `True` for absolute URLs such as 'http://example.com/path',
        and `False` for relative URLs such as '/path'.
        """
        # We don't use `.is_absolute` from `rfc3986` because it treats
        # URLs with a fragment portion as not absolute.
        # What we actually care about is if the URL provides
        # a scheme and hostname to which connections should be made.
        return bool(self._uri_reference.scheme and self._uri_reference.host)

    @property
    def is_relative_url(self) -> bool:
        """
        Return `False` for absolute URLs such as 'http://example.com/path',
        and `True` for relative URLs such as '/path'.
        """
        return not self.is_absolute_url

    def copy_with(self, **kwargs: typing.Any) -> URL:
        """
        Copy this URL, returning a new URL with some components altered.
        Accepts the same set of parameters as the components that are made
        available via properties on the `URL` class.

        For example:

        url = httpx.URL("https://www.example.com").copy_with(
            username="jo@gmail.com", password="a secret"
        )
        assert url == "https://jo%40email.com:a%20secret@www.example.com"
        """
        return URL(self, **kwargs)

    def copy_set_param(self, key: str, value: typing.Any = None) -> URL:
        # Copy, with the query parameter `key` set to `value` (replacing any
        # existing values for that key).
        return self.copy_with(params=self.params.set(key, value))

    def copy_add_param(self, key: str, value: typing.Any = None) -> URL:
        # Copy, with `value` appended to any existing values for `key`.
        return self.copy_with(params=self.params.add(key, value))

    def copy_remove_param(self, key: str) -> URL:
        # Copy, with all values for the query parameter `key` removed.
        return self.copy_with(params=self.params.remove(key))

    def copy_merge_params(self, params: QueryParamTypes) -> URL:
        # Copy, with `params` merged over the existing query parameters.
        return self.copy_with(params=self.params.merge(params))

    def join(self, url: URL | str) -> URL:
        """
        Return an absolute URL, using this URL as the base.

        Eg.

        url = httpx.URL("https://www.example.com/test")
        url = url.join("/new/path")
        assert url == "https://www.example.com/new/path"
        """
        from urllib.parse import urljoin

        return URL(urljoin(str(self), str(URL(url))))

    def __hash__(self) -> int:
        return hash(str(self))

    def __eq__(self, other: typing.Any) -> bool:
        # URLs compare equal to both URL instances and plain strings,
        # by comparing their normalized string forms.
        return isinstance(other, (URL, str)) and str(self) == str(URL(other))

    def __str__(self) -> str:
        return str(self._uri_reference)

    def __repr__(self) -> str:
        scheme, userinfo, host, port, path, query, fragment = self._uri_reference

        if ":" in userinfo:
            # Mask any password component.
            userinfo = f'{userinfo.split(":")[0]}:[secure]'

        # Reassemble the authority portion: [userinfo@]host[:port],
        # bracketing the host when it contains ":" (IPv6 literal).
        authority = "".join(
            [
                f"{userinfo}@" if userinfo else "",
                f"[{host}]" if ":" in host else host,
                f":{port}" if port is not None else "",
            ]
        )
        url = "".join(
            [
                f"{self.scheme}:" if scheme else "",
                f"//{authority}" if authority else "",
                path,
                f"?{query}" if query is not None else "",
                f"#{fragment}" if fragment is not None else "",
            ]
        )

        return f"{self.__class__.__name__}({url!r})"
|
| 418 |
+
|
| 419 |
+
|
| 420 |
+
class QueryParams(typing.Mapping[str, str]):
    """
    An immutable multi-dict of URL query parameters.

    A single key may map to several values; the plain mapping interface
    exposes only the first value per key, while `get_list` and
    `multi_items` expose all of them.
    """

    def __init__(self, *args: QueryParamTypes | None, **kwargs: typing.Any) -> None:
        assert len(args) < 2, "Too many arguments."
        assert not (args and kwargs), "Cannot mix named and unnamed arguments."

        source = args[0] if args else kwargs

        # bytes input is treated as an ASCII-encoded query string.
        if isinstance(source, bytes):
            source = source.decode("ascii")

        if source is None or isinstance(source, str):
            # Raw query string, e.g. "a=123&a=456&b=789".
            self._dict = parse_qs(source, keep_blank_values=True)
            return

        if isinstance(source, QueryParams):
            # Copy constructor: duplicate the underlying value lists.
            self._dict = {name: list(values) for name, values in source._dict.items()}
            return

        grouped: dict[typing.Any, list[typing.Any]] = {}
        if isinstance(source, (list, tuple)):
            # Sequence of (key, value) pairs; duplicate keys accumulate, e.g.
            # [("a", "123"), ("a", "456")] -> {"a": ["123", "456"]}.
            for pair in source:
                grouped.setdefault(pair[0], []).append(pair[1])
        else:
            # Mapping input; scalar values become single-element lists, e.g.
            # {"a": "123", "b": ["456", "789"]} -> {"a": ["123"], "b": ["456", "789"]}.
            grouped = {
                name: list(val) if isinstance(val, (list, tuple)) else [val]
                for name, val in source.items()
            }

        # Coerce keys and values to strings. `True`/`False` become the
        # JSON-like "true"/"false", and `None` becomes the empty string.
        self._dict = {
            str(name): [primitive_value_to_str(entry) for entry in values]
            for name, values in grouped.items()
        }

    def keys(self) -> typing.KeysView[str]:
        """
        Return all the keys in the query params.

        Usage:

        q = httpx.QueryParams("a=123&a=456&b=789")
        assert list(q.keys()) == ["a", "b"]
        """
        return self._dict.keys()

    def values(self) -> typing.ValuesView[str]:
        """
        Return all the values in the query params. If a key occurs more than once
        only the first item for that key is returned.

        Usage:

        q = httpx.QueryParams("a=123&a=456&b=789")
        assert list(q.values()) == ["123", "789"]
        """
        return {name: values[0] for name, values in self._dict.items()}.values()

    def items(self) -> typing.ItemsView[str, str]:
        """
        Return all items in the query params. If a key occurs more than once
        only the first item for that key is returned.

        Usage:

        q = httpx.QueryParams("a=123&a=456&b=789")
        assert list(q.items()) == [("a", "123"), ("b", "789")]
        """
        return {name: values[0] for name, values in self._dict.items()}.items()

    def multi_items(self) -> list[tuple[str, str]]:
        """
        Return all items in the query params. Allow duplicate keys to occur.

        Usage:

        q = httpx.QueryParams("a=123&a=456&b=789")
        assert list(q.multi_items()) == [("a", "123"), ("a", "456"), ("b", "789")]
        """
        return [
            (name, entry)
            for name, values in self._dict.items()
            for entry in values
        ]

    def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any:
        """
        Get a value from the query param for a given key. If the key occurs
        more than once, then only the first value is returned.

        Usage:

        q = httpx.QueryParams("a=123&a=456&b=789")
        assert q.get("a") == "123"
        """
        if key not in self._dict:
            return default
        return self._dict[str(key)][0]

    def get_list(self, key: str) -> list[str]:
        """
        Get all values from the query param for a given key.

        Usage:

        q = httpx.QueryParams("a=123&a=456&b=789")
        assert q.get_list("a") == ["123", "456"]
        """
        # Return a copy so callers cannot mutate internal state.
        return list(self._dict.get(str(key), []))

    def set(self, key: str, value: typing.Any = None) -> QueryParams:
        """
        Return a new QueryParams instance, setting the value of a key.

        Usage:

        q = httpx.QueryParams("a=123")
        q = q.set("a", "456")
        assert q == httpx.QueryParams("a=456")
        """
        updated = QueryParams()
        updated._dict = {**self._dict, str(key): [primitive_value_to_str(value)]}
        return updated

    def add(self, key: str, value: typing.Any = None) -> QueryParams:
        """
        Return a new QueryParams instance, setting or appending the value of a key.

        Usage:

        q = httpx.QueryParams("a=123")
        q = q.add("a", "456")
        assert q == httpx.QueryParams("a=123&a=456")
        """
        updated = QueryParams()
        updated._dict = dict(self._dict)
        updated._dict[str(key)] = updated.get_list(key) + [primitive_value_to_str(value)]
        return updated

    def remove(self, key: str) -> QueryParams:
        """
        Return a new QueryParams instance, removing the value of a key.

        Usage:

        q = httpx.QueryParams("a=123")
        q = q.remove("a")
        assert q == httpx.QueryParams("")
        """
        updated = QueryParams()
        removed = str(key)
        updated._dict = {
            name: values for name, values in self._dict.items() if name != removed
        }
        return updated

    def merge(self, params: QueryParamTypes | None = None) -> QueryParams:
        """
        Return a new QueryParams instance, updated with.

        Usage:

        q = httpx.QueryParams("a=123")
        q = q.merge({"b": "456"})
        assert q == httpx.QueryParams("a=123&b=456")

        q = httpx.QueryParams("a=123")
        q = q.merge({"a": "456", "b": "789"})
        assert q == httpx.QueryParams("a=456&b=789")
        """
        overrides = QueryParams(params)
        combined = dict(self._dict)
        combined.update(overrides._dict)
        overrides._dict = combined
        return overrides

    def __getitem__(self, key: typing.Any) -> str:
        return self._dict[key][0]

    def __contains__(self, key: typing.Any) -> bool:
        return key in self._dict

    def __iter__(self) -> typing.Iterator[typing.Any]:
        return iter(self.keys())

    def __len__(self) -> int:
        return len(self._dict)

    def __bool__(self) -> bool:
        return bool(self._dict)

    def __hash__(self) -> int:
        return hash(str(self))

    def __eq__(self, other: typing.Any) -> bool:
        # Order-insensitive comparison over all (key, value) pairs.
        return isinstance(other, self.__class__) and sorted(
            self.multi_items()
        ) == sorted(other.multi_items())

    def __str__(self) -> str:
        """
        Note that we use '%20' encoding for spaces, and treat '/' as a safe
        character.

        See https://github.com/encode/httpx/issues/2536 and
        https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode
        """
        return urlencode(self.multi_items())

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({str(self)!r})"

    def update(self, params: QueryParamTypes | None = None) -> None:
        raise RuntimeError(
            "QueryParams are immutable since 0.18.0. "
            "Use `q = q.merge(...)` to create an updated copy."
        )

    def __setitem__(self, key: str, value: str) -> None:
        raise RuntimeError(
            "QueryParams are immutable since 0.18.0. "
            "Use `q = q.set(key, value)` to create an updated copy."
        )
|
evalkit_tf437/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,202 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
Apache License
|
| 3 |
+
Version 2.0, January 2004
|
| 4 |
+
http://www.apache.org/licenses/
|
| 5 |
+
|
| 6 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
| 7 |
+
|
| 8 |
+
1. Definitions.
|
| 9 |
+
|
| 10 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
| 11 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
| 12 |
+
|
| 13 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
| 14 |
+
the copyright owner that is granting the License.
|
| 15 |
+
|
| 16 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
| 17 |
+
other entities that control, are controlled by, or are under common
|
| 18 |
+
control with that entity. For the purposes of this definition,
|
| 19 |
+
"control" means (i) the power, direct or indirect, to cause the
|
| 20 |
+
direction or management of such entity, whether by contract or
|
| 21 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
| 22 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
| 23 |
+
|
| 24 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
| 25 |
+
exercising permissions granted by this License.
|
| 26 |
+
|
| 27 |
+
"Source" form shall mean the preferred form for making modifications,
|
| 28 |
+
including but not limited to software source code, documentation
|
| 29 |
+
source, and configuration files.
|
| 30 |
+
|
| 31 |
+
"Object" form shall mean any form resulting from mechanical
|
| 32 |
+
transformation or translation of a Source form, including but
|
| 33 |
+
not limited to compiled object code, generated documentation,
|
| 34 |
+
and conversions to other media types.
|
| 35 |
+
|
| 36 |
+
"Work" shall mean the work of authorship, whether in Source or
|
| 37 |
+
Object form, made available under the License, as indicated by a
|
| 38 |
+
copyright notice that is included in or attached to the work
|
| 39 |
+
(an example is provided in the Appendix below).
|
| 40 |
+
|
| 41 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
| 42 |
+
form, that is based on (or derived from) the Work and for which the
|
| 43 |
+
editorial revisions, annotations, elaborations, or other modifications
|
| 44 |
+
represent, as a whole, an original work of authorship. For the purposes
|
| 45 |
+
of this License, Derivative Works shall not include works that remain
|
| 46 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
| 47 |
+
the Work and Derivative Works thereof.
|
| 48 |
+
|
| 49 |
+
"Contribution" shall mean any work of authorship, including
|
| 50 |
+
the original version of the Work and any modifications or additions
|
| 51 |
+
to that Work or Derivative Works thereof, that is intentionally
|
| 52 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
| 53 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
| 54 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
| 55 |
+
means any form of electronic, verbal, or written communication sent
|
| 56 |
+
to the Licensor or its representatives, including but not limited to
|
| 57 |
+
communication on electronic mailing lists, source code control systems,
|
| 58 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
| 59 |
+
Licensor for the purpose of discussing and improving the Work, but
|
| 60 |
+
excluding communication that is conspicuously marked or otherwise
|
| 61 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
| 62 |
+
|
| 63 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
| 64 |
+
on behalf of whom a Contribution has been received by Licensor and
|
| 65 |
+
subsequently incorporated within the Work.
|
| 66 |
+
|
| 67 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
| 68 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 69 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 70 |
+
copyright license to reproduce, prepare Derivative Works of,
|
| 71 |
+
publicly display, publicly perform, sublicense, and distribute the
|
| 72 |
+
Work and such Derivative Works in Source or Object form.
|
| 73 |
+
|
| 74 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
| 75 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 76 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 77 |
+
(except as stated in this section) patent license to make, have made,
|
| 78 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
| 79 |
+
where such license applies only to those patent claims licensable
|
| 80 |
+
by such Contributor that are necessarily infringed by their
|
| 81 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
| 82 |
+
with the Work to which such Contribution(s) was submitted. If You
|
| 83 |
+
institute patent litigation against any entity (including a
|
| 84 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
| 85 |
+
or a Contribution incorporated within the Work constitutes direct
|
| 86 |
+
or contributory patent infringement, then any patent licenses
|
| 87 |
+
granted to You under this License for that Work shall terminate
|
| 88 |
+
as of the date such litigation is filed.
|
| 89 |
+
|
| 90 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
| 91 |
+
Work or Derivative Works thereof in any medium, with or without
|
| 92 |
+
modifications, and in Source or Object form, provided that You
|
| 93 |
+
meet the following conditions:
|
| 94 |
+
|
| 95 |
+
(a) You must give any other recipients of the Work or
|
| 96 |
+
Derivative Works a copy of this License; and
|
| 97 |
+
|
| 98 |
+
(b) You must cause any modified files to carry prominent notices
|
| 99 |
+
stating that You changed the files; and
|
| 100 |
+
|
| 101 |
+
(c) You must retain, in the Source form of any Derivative Works
|
| 102 |
+
that You distribute, all copyright, patent, trademark, and
|
| 103 |
+
attribution notices from the Source form of the Work,
|
| 104 |
+
excluding those notices that do not pertain to any part of
|
| 105 |
+
the Derivative Works; and
|
| 106 |
+
|
| 107 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
| 108 |
+
distribution, then any Derivative Works that You distribute must
|
| 109 |
+
include a readable copy of the attribution notices contained
|
| 110 |
+
within such NOTICE file, excluding those notices that do not
|
| 111 |
+
pertain to any part of the Derivative Works, in at least one
|
| 112 |
+
of the following places: within a NOTICE text file distributed
|
| 113 |
+
as part of the Derivative Works; within the Source form or
|
| 114 |
+
documentation, if provided along with the Derivative Works; or,
|
| 115 |
+
within a display generated by the Derivative Works, if and
|
| 116 |
+
wherever such third-party notices normally appear. The contents
|
| 117 |
+
of the NOTICE file are for informational purposes only and
|
| 118 |
+
do not modify the License. You may add Your own attribution
|
| 119 |
+
notices within Derivative Works that You distribute, alongside
|
| 120 |
+
or as an addendum to the NOTICE text from the Work, provided
|
| 121 |
+
that such additional attribution notices cannot be construed
|
| 122 |
+
as modifying the License.
|
| 123 |
+
|
| 124 |
+
You may add Your own copyright statement to Your modifications and
|
| 125 |
+
may provide additional or different license terms and conditions
|
| 126 |
+
for use, reproduction, or distribution of Your modifications, or
|
| 127 |
+
for any such Derivative Works as a whole, provided Your use,
|
| 128 |
+
reproduction, and distribution of the Work otherwise complies with
|
| 129 |
+
the conditions stated in this License.
|
| 130 |
+
|
| 131 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
| 132 |
+
any Contribution intentionally submitted for inclusion in the Work
|
| 133 |
+
by You to the Licensor shall be under the terms and conditions of
|
| 134 |
+
this License, without any additional terms or conditions.
|
| 135 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
| 136 |
+
the terms of any separate license agreement you may have executed
|
| 137 |
+
with Licensor regarding such Contributions.
|
| 138 |
+
|
| 139 |
+
6. Trademarks. This License does not grant permission to use the trade
|
| 140 |
+
names, trademarks, service marks, or product names of the Licensor,
|
| 141 |
+
except as required for reasonable and customary use in describing the
|
| 142 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
| 143 |
+
|
| 144 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
| 145 |
+
agreed to in writing, Licensor provides the Work (and each
|
| 146 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
| 147 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
| 148 |
+
implied, including, without limitation, any warranties or conditions
|
| 149 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
| 150 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
| 151 |
+
appropriateness of using or redistributing the Work and assume any
|
| 152 |
+
risks associated with Your exercise of permissions under this License.
|
| 153 |
+
|
| 154 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
| 155 |
+
whether in tort (including negligence), contract, or otherwise,
|
| 156 |
+
unless required by applicable law (such as deliberate and grossly
|
| 157 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
| 158 |
+
liable to You for damages, including any direct, indirect, special,
|
| 159 |
+
incidental, or consequential damages of any character arising as a
|
| 160 |
+
result of this License or out of the use or inability to use the
|
| 161 |
+
Work (including but not limited to damages for loss of goodwill,
|
| 162 |
+
work stoppage, computer failure or malfunction, or any and all
|
| 163 |
+
other commercial damages or losses), even if such Contributor
|
| 164 |
+
has been advised of the possibility of such damages.
|
| 165 |
+
|
| 166 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
| 167 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
| 168 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
| 169 |
+
or other liability obligations and/or rights consistent with this
|
| 170 |
+
License. However, in accepting such obligations, You may act only
|
| 171 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
| 172 |
+
of any other Contributor, and only if You agree to indemnify,
|
| 173 |
+
defend, and hold each Contributor harmless for any liability
|
| 174 |
+
incurred by, or claims asserted against, such Contributor by reason
|
| 175 |
+
of your accepting any such warranty or additional liability.
|
| 176 |
+
|
| 177 |
+
END OF TERMS AND CONDITIONS
|
| 178 |
+
|
| 179 |
+
APPENDIX: How to apply the Apache License to your work.
|
| 180 |
+
|
| 181 |
+
To apply the Apache License to your work, attach the following
|
| 182 |
+
boilerplate notice, with the fields enclosed by brackets "[]"
|
| 183 |
+
replaced with your own identifying information. (Don't include
|
| 184 |
+
the brackets!) The text should be enclosed in the appropriate
|
| 185 |
+
comment syntax for the file format. We also recommend that a
|
| 186 |
+
file or class name and description of purpose be included on the
|
| 187 |
+
same "printed page" as the copyright notice for easier
|
| 188 |
+
identification within third-party archives.
|
| 189 |
+
|
| 190 |
+
Copyright [yyyy] [name of copyright owner]
|
| 191 |
+
|
| 192 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
| 193 |
+
you may not use this file except in compliance with the License.
|
| 194 |
+
You may obtain a copy of the License at
|
| 195 |
+
|
| 196 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
| 197 |
+
|
| 198 |
+
Unless required by applicable law or agreed to in writing, software
|
| 199 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
| 200 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 201 |
+
See the License for the specific language governing permissions and
|
| 202 |
+
limitations under the License.
|
evalkit_tf437/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,133 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: importlib_metadata
|
| 3 |
+
Version: 8.5.0
|
| 4 |
+
Summary: Read metadata from Python packages
|
| 5 |
+
Author-email: "Jason R. Coombs" <jaraco@jaraco.com>
|
| 6 |
+
Project-URL: Source, https://github.com/python/importlib_metadata
|
| 7 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 8 |
+
Classifier: Intended Audience :: Developers
|
| 9 |
+
Classifier: License :: OSI Approved :: Apache Software License
|
| 10 |
+
Classifier: Programming Language :: Python :: 3
|
| 11 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 12 |
+
Requires-Python: >=3.8
|
| 13 |
+
Description-Content-Type: text/x-rst
|
| 14 |
+
License-File: LICENSE
|
| 15 |
+
Requires-Dist: zipp >=3.20
|
| 16 |
+
Requires-Dist: typing-extensions >=3.6.4 ; python_version < "3.8"
|
| 17 |
+
Provides-Extra: check
|
| 18 |
+
Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'check'
|
| 19 |
+
Requires-Dist: pytest-ruff >=0.2.1 ; (sys_platform != "cygwin") and extra == 'check'
|
| 20 |
+
Provides-Extra: cover
|
| 21 |
+
Requires-Dist: pytest-cov ; extra == 'cover'
|
| 22 |
+
Provides-Extra: doc
|
| 23 |
+
Requires-Dist: sphinx >=3.5 ; extra == 'doc'
|
| 24 |
+
Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc'
|
| 25 |
+
Requires-Dist: rst.linker >=1.9 ; extra == 'doc'
|
| 26 |
+
Requires-Dist: furo ; extra == 'doc'
|
| 27 |
+
Requires-Dist: sphinx-lint ; extra == 'doc'
|
| 28 |
+
Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc'
|
| 29 |
+
Provides-Extra: enabler
|
| 30 |
+
Requires-Dist: pytest-enabler >=2.2 ; extra == 'enabler'
|
| 31 |
+
Provides-Extra: perf
|
| 32 |
+
Requires-Dist: ipython ; extra == 'perf'
|
| 33 |
+
Provides-Extra: test
|
| 34 |
+
Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test'
|
| 35 |
+
Requires-Dist: packaging ; extra == 'test'
|
| 36 |
+
Requires-Dist: pyfakefs ; extra == 'test'
|
| 37 |
+
Requires-Dist: flufl.flake8 ; extra == 'test'
|
| 38 |
+
Requires-Dist: pytest-perf >=0.9.2 ; extra == 'test'
|
| 39 |
+
Requires-Dist: jaraco.test >=5.4 ; extra == 'test'
|
| 40 |
+
Requires-Dist: importlib-resources >=1.3 ; (python_version < "3.9") and extra == 'test'
|
| 41 |
+
Provides-Extra: type
|
| 42 |
+
Requires-Dist: pytest-mypy ; extra == 'type'
|
| 43 |
+
|
| 44 |
+
.. image:: https://img.shields.io/pypi/v/importlib_metadata.svg
|
| 45 |
+
:target: https://pypi.org/project/importlib_metadata
|
| 46 |
+
|
| 47 |
+
.. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg
|
| 48 |
+
|
| 49 |
+
.. image:: https://github.com/python/importlib_metadata/actions/workflows/main.yml/badge.svg
|
| 50 |
+
:target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22
|
| 51 |
+
:alt: tests
|
| 52 |
+
|
| 53 |
+
.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
|
| 54 |
+
:target: https://github.com/astral-sh/ruff
|
| 55 |
+
:alt: Ruff
|
| 56 |
+
|
| 57 |
+
.. image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest
|
| 58 |
+
:target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest
|
| 59 |
+
|
| 60 |
+
.. image:: https://img.shields.io/badge/skeleton-2024-informational
|
| 61 |
+
:target: https://blog.jaraco.com/skeleton
|
| 62 |
+
|
| 63 |
+
.. image:: https://tidelift.com/badges/package/pypi/importlib-metadata
|
| 64 |
+
:target: https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=readme
|
| 65 |
+
|
| 66 |
+
Library to access the metadata for a Python package.
|
| 67 |
+
|
| 68 |
+
This package supplies third-party access to the functionality of
|
| 69 |
+
`importlib.metadata <https://docs.python.org/3/library/importlib.metadata.html>`_
|
| 70 |
+
including improvements added to subsequent Python versions.
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
Compatibility
|
| 74 |
+
=============
|
| 75 |
+
|
| 76 |
+
New features are introduced in this third-party library and later merged
|
| 77 |
+
into CPython. The following table indicates which versions of this library
|
| 78 |
+
were contributed to different versions in the standard library:
|
| 79 |
+
|
| 80 |
+
.. list-table::
|
| 81 |
+
:header-rows: 1
|
| 82 |
+
|
| 83 |
+
* - importlib_metadata
|
| 84 |
+
- stdlib
|
| 85 |
+
* - 7.0
|
| 86 |
+
- 3.13
|
| 87 |
+
* - 6.5
|
| 88 |
+
- 3.12
|
| 89 |
+
* - 4.13
|
| 90 |
+
- 3.11
|
| 91 |
+
* - 4.6
|
| 92 |
+
- 3.10
|
| 93 |
+
* - 1.4
|
| 94 |
+
- 3.8
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
Usage
|
| 98 |
+
=====
|
| 99 |
+
|
| 100 |
+
See the `online documentation <https://importlib-metadata.readthedocs.io/>`_
|
| 101 |
+
for usage details.
|
| 102 |
+
|
| 103 |
+
`Finder authors
|
| 104 |
+
<https://docs.python.org/3/reference/import.html#finders-and-loaders>`_ can
|
| 105 |
+
also add support for custom package installers. See the above documentation
|
| 106 |
+
for details.
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
Caveats
|
| 110 |
+
=======
|
| 111 |
+
|
| 112 |
+
This project primarily supports third-party packages installed by PyPA
|
| 113 |
+
tools (or other conforming packages). It does not support:
|
| 114 |
+
|
| 115 |
+
- Packages in the stdlib.
|
| 116 |
+
- Packages installed without metadata.
|
| 117 |
+
|
| 118 |
+
Project details
|
| 119 |
+
===============
|
| 120 |
+
|
| 121 |
+
* Project home: https://github.com/python/importlib_metadata
|
| 122 |
+
* Report bugs at: https://github.com/python/importlib_metadata/issues
|
| 123 |
+
* Code hosting: https://github.com/python/importlib_metadata
|
| 124 |
+
* Documentation: https://importlib-metadata.readthedocs.io/
|
| 125 |
+
|
| 126 |
+
For Enterprise
|
| 127 |
+
==============
|
| 128 |
+
|
| 129 |
+
Available as part of the Tidelift Subscription.
|
| 130 |
+
|
| 131 |
+
This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
|
| 132 |
+
|
| 133 |
+
`Learn more <https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=referral&utm_campaign=github>`_.
|
evalkit_tf437/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
importlib_metadata-8.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
importlib_metadata-8.5.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
|
| 3 |
+
importlib_metadata-8.5.0.dist-info/METADATA,sha256=HXikDpZut4rY2D4ZuhAxXnLUDb-f_XP_Cyp9iYmF4G0,4775
|
| 4 |
+
importlib_metadata-8.5.0.dist-info/RECORD,,
|
| 5 |
+
importlib_metadata-8.5.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 6 |
+
importlib_metadata-8.5.0.dist-info/WHEEL,sha256=cVxcB9AmuTcXqmwrtPhNK88dr7IR_b6qagTj0UvIEbY,91
|
| 7 |
+
importlib_metadata-8.5.0.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19
|
| 8 |
+
importlib_metadata/__init__.py,sha256=-Sk7aVqfmzLecdjSOpLKo1P_PegQanR__HsMMyEq0PI,35853
|
| 9 |
+
importlib_metadata/__pycache__/__init__.cpython-310.pyc,,
|
| 10 |
+
importlib_metadata/__pycache__/_adapters.cpython-310.pyc,,
|
| 11 |
+
importlib_metadata/__pycache__/_collections.cpython-310.pyc,,
|
| 12 |
+
importlib_metadata/__pycache__/_compat.cpython-310.pyc,,
|
| 13 |
+
importlib_metadata/__pycache__/_functools.cpython-310.pyc,,
|
| 14 |
+
importlib_metadata/__pycache__/_itertools.cpython-310.pyc,,
|
| 15 |
+
importlib_metadata/__pycache__/_meta.cpython-310.pyc,,
|
| 16 |
+
importlib_metadata/__pycache__/_text.cpython-310.pyc,,
|
| 17 |
+
importlib_metadata/__pycache__/diagnose.cpython-310.pyc,,
|
| 18 |
+
importlib_metadata/_adapters.py,sha256=wdyNWoVblu1r4z8v4t6iQEyjnqAujEyqWAp9wTCVluI,2317
|
| 19 |
+
importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743
|
| 20 |
+
importlib_metadata/_compat.py,sha256=VC5ZDLlT-BcshauCShdFJvMNLntJJfZzNK1meGa-enw,1313
|
| 21 |
+
importlib_metadata/_functools.py,sha256=bSbAqC9-2niWM9364FYBx9GWtetnJEfo4mdLv8uMl7c,2895
|
| 22 |
+
importlib_metadata/_itertools.py,sha256=nMvp9SfHAQ_JYwK4L2i64lr3GRXGlYlikGTVzWbys_E,5351
|
| 23 |
+
importlib_metadata/_meta.py,sha256=JzuqMG4za5MoaBPCPv61c26fUBdQPZ4by3pbaQA_E_o,1823
|
| 24 |
+
importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166
|
| 25 |
+
importlib_metadata/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 26 |
+
importlib_metadata/compat/__pycache__/__init__.cpython-310.pyc,,
|
| 27 |
+
importlib_metadata/compat/__pycache__/py311.cpython-310.pyc,,
|
| 28 |
+
importlib_metadata/compat/__pycache__/py39.cpython-310.pyc,,
|
| 29 |
+
importlib_metadata/compat/py311.py,sha256=uqm-K-uohyj1042TH4a9Er_I5o7667DvulcD-gC_fSA,608
|
| 30 |
+
importlib_metadata/compat/py39.py,sha256=cPkMv6-0ilK-0Jw_Tkn0xYbOKJZc4WJKQHow0c2T44w,1102
|
| 31 |
+
importlib_metadata/diagnose.py,sha256=nkSRMiowlmkhLYhKhvCg9glmt_11Cox-EmLzEbqYTa8,379
|
| 32 |
+
importlib_metadata/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
evalkit_tf437/lib/python3.10/site-packages/importlib_metadata-8.5.0.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: setuptools (74.1.2)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
| 5 |
+
|
evalkit_tf437/lib/python3.10/site-packages/markdown2-2.5.1.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: bdist_wheel (0.38.4)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py2-none-any
|
| 5 |
+
Tag: py3-none-any
|
| 6 |
+
|
evalkit_tf437/lib/python3.10/site-packages/markdown2-2.5.1.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
markdown2
|
evalkit_tf437/lib/python3.10/site-packages/nvidia_nvjitlink_cu12-12.6.77.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
evalkit_tf437/lib/python3.10/site-packages/nvidia_nvjitlink_cu12-12.6.77.dist-info/License.txt
ADDED
|
@@ -0,0 +1,1568 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
End User License Agreement
|
| 2 |
+
--------------------------
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
Preface
|
| 6 |
+
-------
|
| 7 |
+
|
| 8 |
+
The Software License Agreement in Chapter 1 and the Supplement
|
| 9 |
+
in Chapter 2 contain license terms and conditions that govern
|
| 10 |
+
the use of NVIDIA software. By accepting this agreement, you
|
| 11 |
+
agree to comply with all the terms and conditions applicable
|
| 12 |
+
to the product(s) included herein.
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
NVIDIA Driver
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
Description
|
| 19 |
+
|
| 20 |
+
This package contains the operating system driver and
|
| 21 |
+
fundamental system software components for NVIDIA GPUs.
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
NVIDIA CUDA Toolkit
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
Description
|
| 28 |
+
|
| 29 |
+
The NVIDIA CUDA Toolkit provides command-line and graphical
|
| 30 |
+
tools for building, debugging and optimizing the performance
|
| 31 |
+
of applications accelerated by NVIDIA GPUs, runtime and math
|
| 32 |
+
libraries, and documentation including programming guides,
|
| 33 |
+
user manuals, and API references.
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
Default Install Location of CUDA Toolkit
|
| 37 |
+
|
| 38 |
+
Windows platform:
|
| 39 |
+
|
| 40 |
+
%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v#.#
|
| 41 |
+
|
| 42 |
+
Linux platform:
|
| 43 |
+
|
| 44 |
+
/usr/local/cuda-#.#
|
| 45 |
+
|
| 46 |
+
Mac platform:
|
| 47 |
+
|
| 48 |
+
/Developer/NVIDIA/CUDA-#.#
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
NVIDIA CUDA Samples
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
Description
|
| 55 |
+
|
| 56 |
+
This package includes over 100+ CUDA examples that demonstrate
|
| 57 |
+
various CUDA programming principles, and efficient CUDA
|
| 58 |
+
implementation of algorithms in specific application domains.
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
Default Install Location of CUDA Samples
|
| 62 |
+
|
| 63 |
+
Windows platform:
|
| 64 |
+
|
| 65 |
+
%ProgramData%\NVIDIA Corporation\CUDA Samples\v#.#
|
| 66 |
+
|
| 67 |
+
Linux platform:
|
| 68 |
+
|
| 69 |
+
/usr/local/cuda-#.#/samples
|
| 70 |
+
|
| 71 |
+
and
|
| 72 |
+
|
| 73 |
+
$HOME/NVIDIA_CUDA-#.#_Samples
|
| 74 |
+
|
| 75 |
+
Mac platform:
|
| 76 |
+
|
| 77 |
+
/Developer/NVIDIA/CUDA-#.#/samples
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
NVIDIA Nsight Visual Studio Edition (Windows only)
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
Description
|
| 84 |
+
|
| 85 |
+
NVIDIA Nsight Development Platform, Visual Studio Edition is a
|
| 86 |
+
development environment integrated into Microsoft Visual
|
| 87 |
+
Studio that provides tools for debugging, profiling, analyzing
|
| 88 |
+
and optimizing your GPU computing and graphics applications.
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
Default Install Location of Nsight Visual Studio Edition
|
| 92 |
+
|
| 93 |
+
Windows platform:
|
| 94 |
+
|
| 95 |
+
%ProgramFiles(x86)%\NVIDIA Corporation\Nsight Visual Studio Edition #.#
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
1. License Agreement for NVIDIA Software Development Kits
|
| 99 |
+
---------------------------------------------------------
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
Release Date: July 26, 2018
|
| 103 |
+
---------------------------
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
Important NoticeRead before downloading, installing,
|
| 107 |
+
copying or using the licensed software:
|
| 108 |
+
-------------------------------------------------------
|
| 109 |
+
|
| 110 |
+
This license agreement, including exhibits attached
|
| 111 |
+
("Agreement”) is a legal agreement between you and NVIDIA
|
| 112 |
+
Corporation ("NVIDIA") and governs your use of a NVIDIA
|
| 113 |
+
software development kit (“SDK”).
|
| 114 |
+
|
| 115 |
+
Each SDK has its own set of software and materials, but here
|
| 116 |
+
is a description of the types of items that may be included in
|
| 117 |
+
a SDK: source code, header files, APIs, data sets and assets
|
| 118 |
+
(examples include images, textures, models, scenes, videos,
|
| 119 |
+
native API input/output files), binary software, sample code,
|
| 120 |
+
libraries, utility programs, programming code and
|
| 121 |
+
documentation.
|
| 122 |
+
|
| 123 |
+
This Agreement can be accepted only by an adult of legal age
|
| 124 |
+
of majority in the country in which the SDK is used.
|
| 125 |
+
|
| 126 |
+
If you are entering into this Agreement on behalf of a company
|
| 127 |
+
or other legal entity, you represent that you have the legal
|
| 128 |
+
authority to bind the entity to this Agreement, in which case
|
| 129 |
+
“you” will mean the entity you represent.
|
| 130 |
+
|
| 131 |
+
If you don’t have the required age or authority to accept
|
| 132 |
+
this Agreement, or if you don’t accept all the terms and
|
| 133 |
+
conditions of this Agreement, do not download, install or use
|
| 134 |
+
the SDK.
|
| 135 |
+
|
| 136 |
+
You agree to use the SDK only for purposes that are permitted
|
| 137 |
+
by (a) this Agreement, and (b) any applicable law, regulation
|
| 138 |
+
or generally accepted practices or guidelines in the relevant
|
| 139 |
+
jurisdictions.
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
1.1. License
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
1.1.1. License Grant
|
| 146 |
+
|
| 147 |
+
Subject to the terms of this Agreement, NVIDIA hereby grants
|
| 148 |
+
you a non-exclusive, non-transferable license, without the
|
| 149 |
+
right to sublicense (except as expressly provided in this
|
| 150 |
+
Agreement) to:
|
| 151 |
+
|
| 152 |
+
1. Install and use the SDK,
|
| 153 |
+
|
| 154 |
+
2. Modify and create derivative works of sample source code
|
| 155 |
+
delivered in the SDK, and
|
| 156 |
+
|
| 157 |
+
3. Distribute those portions of the SDK that are identified
|
| 158 |
+
in this Agreement as distributable, as incorporated in
|
| 159 |
+
object code format into a software application that meets
|
| 160 |
+
the distribution requirements indicated in this Agreement.
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
1.1.2. Distribution Requirements
|
| 164 |
+
|
| 165 |
+
These are the distribution requirements for you to exercise
|
| 166 |
+
the distribution grant:
|
| 167 |
+
|
| 168 |
+
1. Your application must have material additional
|
| 169 |
+
functionality, beyond the included portions of the SDK.
|
| 170 |
+
|
| 171 |
+
2. The distributable portions of the SDK shall only be
|
| 172 |
+
accessed by your application.
|
| 173 |
+
|
| 174 |
+
3. The following notice shall be included in modifications
|
| 175 |
+
and derivative works of sample source code distributed:
|
| 176 |
+
“This software contains source code provided by NVIDIA
|
| 177 |
+
Corporation.”
|
| 178 |
+
|
| 179 |
+
4. Unless a developer tool is identified in this Agreement
|
| 180 |
+
as distributable, it is delivered for your internal use
|
| 181 |
+
only.
|
| 182 |
+
|
| 183 |
+
5. The terms under which you distribute your application
|
| 184 |
+
must be consistent with the terms of this Agreement,
|
| 185 |
+
including (without limitation) terms relating to the
|
| 186 |
+
license grant and license restrictions and protection of
|
| 187 |
+
NVIDIA’s intellectual property rights. Additionally, you
|
| 188 |
+
agree that you will protect the privacy, security and
|
| 189 |
+
legal rights of your application users.
|
| 190 |
+
|
| 191 |
+
6. You agree to notify NVIDIA in writing of any known or
|
| 192 |
+
suspected distribution or use of the SDK not in compliance
|
| 193 |
+
with the requirements of this Agreement, and to enforce
|
| 194 |
+
the terms of your agreements with respect to distributed
|
| 195 |
+
SDK.
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
1.1.3. Authorized Users
|
| 199 |
+
|
| 200 |
+
You may allow employees and contractors of your entity or of
|
| 201 |
+
your subsidiary(ies) to access and use the SDK from your
|
| 202 |
+
secure network to perform work on your behalf.
|
| 203 |
+
|
| 204 |
+
If you are an academic institution you may allow users
|
| 205 |
+
enrolled or employed by the academic institution to access and
|
| 206 |
+
use the SDK from your secure network.
|
| 207 |
+
|
| 208 |
+
You are responsible for the compliance with the terms of this
|
| 209 |
+
Agreement by your authorized users. If you become aware that
|
| 210 |
+
your authorized users didn’t follow the terms of this
|
| 211 |
+
Agreement, you agree to take reasonable steps to resolve the
|
| 212 |
+
non-compliance and prevent new occurrences.
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
1.1.4. Pre-Release SDK
|
| 216 |
+
|
| 217 |
+
The SDK versions identified as alpha, beta, preview or
|
| 218 |
+
otherwise as pre-release, may not be fully functional, may
|
| 219 |
+
contain errors or design flaws, and may have reduced or
|
| 220 |
+
different security, privacy, accessibility, availability, and
|
| 221 |
+
reliability standards relative to commercial versions of
|
| 222 |
+
NVIDIA software and materials. Use of a pre-release SDK may
|
| 223 |
+
result in unexpected results, loss of data, project delays or
|
| 224 |
+
other unpredictable damage or loss.
|
| 225 |
+
|
| 226 |
+
You may use a pre-release SDK at your own risk, understanding
|
| 227 |
+
that pre-release SDKs are not intended for use in production
|
| 228 |
+
or business-critical systems.
|
| 229 |
+
|
| 230 |
+
NVIDIA may choose not to make available a commercial version
|
| 231 |
+
of any pre-release SDK. NVIDIA may also choose to abandon
|
| 232 |
+
development and terminate the availability of a pre-release
|
| 233 |
+
SDK at any time without liability.
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
1.1.5. Updates
|
| 237 |
+
|
| 238 |
+
NVIDIA may, at its option, make available patches, workarounds
|
| 239 |
+
or other updates to this SDK. Unless the updates are provided
|
| 240 |
+
with their separate governing terms, they are deemed part of
|
| 241 |
+
the SDK licensed to you as provided in this Agreement. You
|
| 242 |
+
agree that the form and content of the SDK that NVIDIA
|
| 243 |
+
provides may change without prior notice to you. While NVIDIA
|
| 244 |
+
generally maintains compatibility between versions, NVIDIA may
|
| 245 |
+
in some cases make changes that introduce incompatibilities in
|
| 246 |
+
future versions of the SDK.
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
1.1.6. Third Party Licenses
|
| 250 |
+
|
| 251 |
+
The SDK may come bundled with, or otherwise include or be
|
| 252 |
+
distributed with, third party software licensed by a NVIDIA
|
| 253 |
+
supplier and/or open source software provided under an open
|
| 254 |
+
source license. Use of third party software is subject to the
|
| 255 |
+
third-party license terms, or in the absence of third party
|
| 256 |
+
terms, the terms of this Agreement. Copyright to third party
|
| 257 |
+
software is held by the copyright holders indicated in the
|
| 258 |
+
third-party software or license.
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
1.1.7. Reservation of Rights
|
| 262 |
+
|
| 263 |
+
NVIDIA reserves all rights, title, and interest in and to the
|
| 264 |
+
SDK, not expressly granted to you under this Agreement.
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
1.2. Limitations
|
| 268 |
+
|
| 269 |
+
The following license limitations apply to your use of the
|
| 270 |
+
SDK:
|
| 271 |
+
|
| 272 |
+
1. You may not reverse engineer, decompile or disassemble,
|
| 273 |
+
or remove copyright or other proprietary notices from any
|
| 274 |
+
portion of the SDK or copies of the SDK.
|
| 275 |
+
|
| 276 |
+
2. Except as expressly provided in this Agreement, you may
|
| 277 |
+
not copy, sell, rent, sublicense, transfer, distribute,
|
| 278 |
+
modify, or create derivative works of any portion of the
|
| 279 |
+
SDK. For clarity, you may not distribute or sublicense the
|
| 280 |
+
SDK as a stand-alone product.
|
| 281 |
+
|
| 282 |
+
3. Unless you have an agreement with NVIDIA for this
|
| 283 |
+
purpose, you may not indicate that an application created
|
| 284 |
+
with the SDK is sponsored or endorsed by NVIDIA.
|
| 285 |
+
|
| 286 |
+
4. You may not bypass, disable, or circumvent any
|
| 287 |
+
encryption, security, digital rights management or
|
| 288 |
+
authentication mechanism in the SDK.
|
| 289 |
+
|
| 290 |
+
5. You may not use the SDK in any manner that would cause it
|
| 291 |
+
to become subject to an open source software license. As
|
| 292 |
+
examples, licenses that require as a condition of use,
|
| 293 |
+
modification, and/or distribution that the SDK be:
|
| 294 |
+
|
| 295 |
+
a. Disclosed or distributed in source code form;
|
| 296 |
+
|
| 297 |
+
b. Licensed for the purpose of making derivative works;
|
| 298 |
+
or
|
| 299 |
+
|
| 300 |
+
c. Redistributable at no charge.
|
| 301 |
+
|
| 302 |
+
6. Unless you have an agreement with NVIDIA for this
|
| 303 |
+
purpose, you may not use the SDK with any system or
|
| 304 |
+
application where the use or failure of the system or
|
| 305 |
+
application can reasonably be expected to threaten or
|
| 306 |
+
result in personal injury, death, or catastrophic loss.
|
| 307 |
+
Examples include use in avionics, navigation, military,
|
| 308 |
+
medical, life support or other life critical applications.
|
| 309 |
+
NVIDIA does not design, test or manufacture the SDK for
|
| 310 |
+
these critical uses and NVIDIA shall not be liable to you
|
| 311 |
+
or any third party, in whole or in part, for any claims or
|
| 312 |
+
damages arising from such uses.
|
| 313 |
+
|
| 314 |
+
7. You agree to defend, indemnify and hold harmless NVIDIA
|
| 315 |
+
and its affiliates, and their respective employees,
|
| 316 |
+
contractors, agents, officers and directors, from and
|
| 317 |
+
against any and all claims, damages, obligations, losses,
|
| 318 |
+
liabilities, costs or debt, fines, restitutions and
|
| 319 |
+
expenses (including but not limited to attorney’s fees
|
| 320 |
+
and costs incident to establishing the right of
|
| 321 |
+
indemnification) arising out of or related to your use of
|
| 322 |
+
the SDK outside of the scope of this Agreement, or not in
|
| 323 |
+
compliance with its terms.
|
| 324 |
+
|
| 325 |
+
|
| 326 |
+
1.3. Ownership
|
| 327 |
+
|
| 328 |
+
1. NVIDIA or its licensors hold all rights, title and
|
| 329 |
+
interest in and to the SDK and its modifications and
|
| 330 |
+
derivative works, including their respective intellectual
|
| 331 |
+
property rights, subject to your rights described in this
|
| 332 |
+
section. This SDK may include software and materials from
|
| 333 |
+
NVIDIA’s licensors, and these licensors are intended
|
| 334 |
+
third party beneficiaries that may enforce this Agreement
|
| 335 |
+
with respect to their intellectual property rights.
|
| 336 |
+
|
| 337 |
+
2. You hold all rights, title and interest in and to your
|
| 338 |
+
applications and your derivative works of the sample
|
| 339 |
+
source code delivered in the SDK, including their
|
| 340 |
+
respective intellectual property rights, subject to
|
| 341 |
+
NVIDIA’s rights described in this section.
|
| 342 |
+
|
| 343 |
+
3. You may, but don’t have to, provide to NVIDIA
|
| 344 |
+
suggestions, feature requests or other feedback regarding
|
| 345 |
+
the SDK, including possible enhancements or modifications
|
| 346 |
+
to the SDK. For any feedback that you voluntarily provide,
|
| 347 |
+
you hereby grant NVIDIA and its affiliates a perpetual,
|
| 348 |
+
non-exclusive, worldwide, irrevocable license to use,
|
| 349 |
+
reproduce, modify, license, sublicense (through multiple
|
| 350 |
+
tiers of sublicensees), and distribute (through multiple
|
| 351 |
+
tiers of distributors) it without the payment of any
|
| 352 |
+
royalties or fees to you. NVIDIA will use feedback at its
|
| 353 |
+
choice. NVIDIA is constantly looking for ways to improve
|
| 354 |
+
its products, so you may send feedback to NVIDIA through
|
| 355 |
+
the developer portal at https://developer.nvidia.com.
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
1.4. No Warranties
|
| 359 |
+
|
| 360 |
+
THE SDK IS PROVIDED BY NVIDIA “AS IS” AND “WITH ALL
|
| 361 |
+
FAULTS.” TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND
|
| 362 |
+
ITS AFFILIATES EXPRESSLY DISCLAIM ALL WARRANTIES OF ANY KIND
|
| 363 |
+
OR NATURE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING,
|
| 364 |
+
BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS
|
| 365 |
+
FOR A PARTICULAR PURPOSE, TITLE, NON-INFRINGEMENT, OR THE
|
| 366 |
+
ABSENCE OF ANY DEFECTS THEREIN, WHETHER LATENT OR PATENT. NO
|
| 367 |
+
WARRANTY IS MADE ON THE BASIS OF TRADE USAGE, COURSE OF
|
| 368 |
+
DEALING OR COURSE OF TRADE.
|
| 369 |
+
|
| 370 |
+
|
| 371 |
+
1.5. Limitation of Liability
|
| 372 |
+
|
| 373 |
+
TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS
|
| 374 |
+
AFFILIATES SHALL NOT BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
|
| 375 |
+
PUNITIVE OR CONSEQUENTIAL DAMAGES, OR ANY LOST PROFITS, LOSS
|
| 376 |
+
OF USE, LOSS OF DATA OR LOSS OF GOODWILL, OR THE COSTS OF
|
| 377 |
+
PROCURING SUBSTITUTE PRODUCTS, ARISING OUT OF OR IN CONNECTION
|
| 378 |
+
WITH THIS AGREEMENT OR THE USE OR PERFORMANCE OF THE SDK,
|
| 379 |
+
WHETHER SUCH LIABILITY ARISES FROM ANY CLAIM BASED UPON BREACH
|
| 380 |
+
OF CONTRACT, BREACH OF WARRANTY, TORT (INCLUDING NEGLIGENCE),
|
| 381 |
+
PRODUCT LIABILITY OR ANY OTHER CAUSE OF ACTION OR THEORY OF
|
| 382 |
+
LIABILITY. IN NO EVENT WILL NVIDIA’S AND ITS AFFILIATES
|
| 383 |
+
TOTAL CUMULATIVE LIABILITY UNDER OR ARISING OUT OF THIS
|
| 384 |
+
AGREEMENT EXCEED US$10.00. THE NATURE OF THE LIABILITY OR THE
|
| 385 |
+
NUMBER OF CLAIMS OR SUITS SHALL NOT ENLARGE OR EXTEND THIS
|
| 386 |
+
LIMIT.
|
| 387 |
+
|
| 388 |
+
These exclusions and limitations of liability shall apply
|
| 389 |
+
regardless if NVIDIA or its affiliates have been advised of
|
| 390 |
+
the possibility of such damages, and regardless of whether a
|
| 391 |
+
remedy fails its essential purpose. These exclusions and
|
| 392 |
+
limitations of liability form an essential basis of the
|
| 393 |
+
bargain between the parties, and, absent any of these
|
| 394 |
+
exclusions or limitations of liability, the provisions of this
|
| 395 |
+
Agreement, including, without limitation, the economic terms,
|
| 396 |
+
would be substantially different.
|
| 397 |
+
|
| 398 |
+
|
| 399 |
+
1.6. Termination
|
| 400 |
+
|
| 401 |
+
1. This Agreement will continue to apply until terminated by
|
| 402 |
+
either you or NVIDIA as described below.
|
| 403 |
+
|
| 404 |
+
2. If you want to terminate this Agreement, you may do so by
|
| 405 |
+
stopping to use the SDK.
|
| 406 |
+
|
| 407 |
+
3. NVIDIA may, at any time, terminate this Agreement if:
|
| 408 |
+
|
| 409 |
+
a. (i) you fail to comply with any term of this
|
| 410 |
+
Agreement and the non-compliance is not fixed within
|
| 411 |
+
thirty (30) days following notice from NVIDIA (or
|
| 412 |
+
immediately if you violate NVIDIA’s intellectual
|
| 413 |
+
property rights);
|
| 414 |
+
|
| 415 |
+
b. (ii) you commence or participate in any legal
|
| 416 |
+
proceeding against NVIDIA with respect to the SDK; or
|
| 417 |
+
|
| 418 |
+
c. (iii) NVIDIA decides to no longer provide the SDK in
|
| 419 |
+
a country or, in NVIDIA’s sole discretion, the
|
| 420 |
+
continued use of it is no longer commercially viable.
|
| 421 |
+
|
| 422 |
+
4. Upon any termination of this Agreement, you agree to
|
| 423 |
+
promptly discontinue use of the SDK and destroy all copies
|
| 424 |
+
in your possession or control. Your prior distributions in
|
| 425 |
+
accordance with this Agreement are not affected by the
|
| 426 |
+
termination of this Agreement. Upon written request, you
|
| 427 |
+
will certify in writing that you have complied with your
|
| 428 |
+
commitments under this section. Upon any termination of
|
| 429 |
+
this Agreement all provisions survive except for the
|
| 430 |
+
license grant provisions.
|
| 431 |
+
|
| 432 |
+
|
| 433 |
+
1.7. General
|
| 434 |
+
|
| 435 |
+
If you wish to assign this Agreement or your rights and
|
| 436 |
+
obligations, including by merger, consolidation, dissolution
|
| 437 |
+
or operation of law, contact NVIDIA to ask for permission. Any
|
| 438 |
+
attempted assignment not approved by NVIDIA in writing shall
|
| 439 |
+
be void and of no effect. NVIDIA may assign, delegate or
|
| 440 |
+
transfer this Agreement and its rights and obligations, and if
|
| 441 |
+
to a non-affiliate you will be notified.
|
| 442 |
+
|
| 443 |
+
You agree to cooperate with NVIDIA and provide reasonably
|
| 444 |
+
requested information to verify your compliance with this
|
| 445 |
+
Agreement.
|
| 446 |
+
|
| 447 |
+
This Agreement will be governed in all respects by the laws of
|
| 448 |
+
the United States and of the State of Delaware as those laws
|
| 449 |
+
are applied to contracts entered into and performed entirely
|
| 450 |
+
within Delaware by Delaware residents, without regard to the
|
| 451 |
+
conflicts of laws principles. The United Nations Convention on
|
| 452 |
+
Contracts for the International Sale of Goods is specifically
|
| 453 |
+
disclaimed. You agree to all terms of this Agreement in the
|
| 454 |
+
English language.
|
| 455 |
+
|
| 456 |
+
The state or federal courts residing in Santa Clara County,
|
| 457 |
+
California shall have exclusive jurisdiction over any dispute
|
| 458 |
+
or claim arising out of this Agreement. Notwithstanding this,
|
| 459 |
+
you agree that NVIDIA shall still be allowed to apply for
|
| 460 |
+
injunctive remedies or an equivalent type of urgent legal
|
| 461 |
+
relief in any jurisdiction.
|
| 462 |
+
|
| 463 |
+
If any court of competent jurisdiction determines that any
|
| 464 |
+
provision of this Agreement is illegal, invalid or
|
| 465 |
+
unenforceable, such provision will be construed as limited to
|
| 466 |
+
the extent necessary to be consistent with and fully
|
| 467 |
+
enforceable under the law and the remaining provisions will
|
| 468 |
+
remain in full force and effect. Unless otherwise specified,
|
| 469 |
+
remedies are cumulative.
|
| 470 |
+
|
| 471 |
+
Each party acknowledges and agrees that the other is an
|
| 472 |
+
independent contractor in the performance of this Agreement.
|
| 473 |
+
|
| 474 |
+
The SDK has been developed entirely at private expense and is
|
| 475 |
+
“commercial items” consisting of “commercial computer
|
| 476 |
+
software” and “commercial computer software
|
| 477 |
+
documentation” provided with RESTRICTED RIGHTS. Use,
|
| 478 |
+
duplication or disclosure by the U.S. Government or a U.S.
|
| 479 |
+
Government subcontractor is subject to the restrictions in
|
| 480 |
+
this Agreement pursuant to DFARS 227.7202-3(a) or as set forth
|
| 481 |
+
in subparagraphs (c)(1) and (2) of the Commercial Computer
|
| 482 |
+
Software - Restricted Rights clause at FAR 52.227-19, as
|
| 483 |
+
applicable. Contractor/manufacturer is NVIDIA, 2788 San Tomas
|
| 484 |
+
Expressway, Santa Clara, CA 95051.
|
| 485 |
+
|
| 486 |
+
The SDK is subject to United States export laws and
|
| 487 |
+
regulations. You agree that you will not ship, transfer or
|
| 488 |
+
export the SDK into any country, or use the SDK in any manner,
|
| 489 |
+
prohibited by the United States Bureau of Industry and
|
| 490 |
+
Security or economic sanctions regulations administered by the
|
| 491 |
+
U.S. Department of Treasury’s Office of Foreign Assets
|
| 492 |
+
Control (OFAC), or any applicable export laws, restrictions or
|
| 493 |
+
regulations. These laws include restrictions on destinations,
|
| 494 |
+
end users and end use. By accepting this Agreement, you
|
| 495 |
+
confirm that you are not a resident or citizen of any country
|
| 496 |
+
currently embargoed by the U.S. and that you are not otherwise
|
| 497 |
+
prohibited from receiving the SDK.
|
| 498 |
+
|
| 499 |
+
Any notice delivered by NVIDIA to you under this Agreement
|
| 500 |
+
will be delivered via mail, email or fax. You agree that any
|
| 501 |
+
notices that NVIDIA sends you electronically will satisfy any
|
| 502 |
+
legal communication requirements. Please direct your legal
|
| 503 |
+
notices or other correspondence to NVIDIA Corporation, 2788
|
| 504 |
+
San Tomas Expressway, Santa Clara, California 95051, United
|
| 505 |
+
States of America, Attention: Legal Department.
|
| 506 |
+
|
| 507 |
+
This Agreement and any exhibits incorporated into this
|
| 508 |
+
Agreement constitute the entire agreement of the parties with
|
| 509 |
+
respect to the subject matter of this Agreement and supersede
|
| 510 |
+
all prior negotiations or documentation exchanged between the
|
| 511 |
+
parties relating to this SDK license. Any additional and/or
|
| 512 |
+
conflicting terms on documents issued by you are null, void,
|
| 513 |
+
and invalid. Any amendment or waiver under this Agreement
|
| 514 |
+
shall be in writing and signed by representatives of both
|
| 515 |
+
parties.
|
| 516 |
+
|
| 517 |
+
|
| 518 |
+
2. CUDA Toolkit Supplement to Software License Agreement for
|
| 519 |
+
NVIDIA Software Development Kits
|
| 520 |
+
------------------------------------------------------------
|
| 521 |
+
|
| 522 |
+
|
| 523 |
+
Release date: August 16, 2018
|
| 524 |
+
-----------------------------
|
| 525 |
+
|
| 526 |
+
The terms in this supplement govern your use of the NVIDIA
|
| 527 |
+
CUDA Toolkit SDK under the terms of your license agreement
|
| 528 |
+
(“Agreement”) as modified by this supplement. Capitalized
|
| 529 |
+
terms used but not defined below have the meaning assigned to
|
| 530 |
+
them in the Agreement.
|
| 531 |
+
|
| 532 |
+
This supplement is an exhibit to the Agreement and is
|
| 533 |
+
incorporated as an integral part of the Agreement. In the
|
| 534 |
+
event of conflict between the terms in this supplement and the
|
| 535 |
+
terms in the Agreement, the terms in this supplement govern.
|
| 536 |
+
|
| 537 |
+
|
| 538 |
+
2.1. License Scope
|
| 539 |
+
|
| 540 |
+
The SDK is licensed for you to develop applications only for
|
| 541 |
+
use in systems with NVIDIA GPUs.
|
| 542 |
+
|
| 543 |
+
|
| 544 |
+
2.2. Distribution
|
| 545 |
+
|
| 546 |
+
The portions of the SDK that are distributable under the
|
| 547 |
+
Agreement are listed in Attachment A.
|
| 548 |
+
|
| 549 |
+
|
| 550 |
+
2.3. Operating Systems
|
| 551 |
+
|
| 552 |
+
Those portions of the SDK designed exclusively for use on the
|
| 553 |
+
Linux or FreeBSD operating systems, or other operating systems
|
| 554 |
+
derived from the source code to these operating systems, may
|
| 555 |
+
be copied and redistributed for use in accordance with this
|
| 556 |
+
Agreement, provided that the object code files are not
|
| 557 |
+
modified in any way (except for unzipping of compressed
|
| 558 |
+
files).
|
| 559 |
+
|
| 560 |
+
|
| 561 |
+
2.4. Audio and Video Encoders and Decoders
|
| 562 |
+
|
| 563 |
+
You acknowledge and agree that it is your sole responsibility
|
| 564 |
+
to obtain any additional third-party licenses required to
|
| 565 |
+
make, have made, use, have used, sell, import, and offer for
|
| 566 |
+
sale your products or services that include or incorporate any
|
| 567 |
+
third-party software and content relating to audio and/or
|
| 568 |
+
video encoders and decoders from, including but not limited
|
| 569 |
+
to, Microsoft, Thomson, Fraunhofer IIS, Sisvel S.p.A.,
|
| 570 |
+
MPEG-LA, and Coding Technologies. NVIDIA does not grant to you
|
| 571 |
+
under this Agreement any necessary patent or other rights with
|
| 572 |
+
respect to any audio and/or video encoders and decoders.
|
| 573 |
+
|
| 574 |
+
|
| 575 |
+
2.5. Licensing
|
| 576 |
+
|
| 577 |
+
If the distribution terms in this Agreement are not suitable
|
| 578 |
+
for your organization, or for any questions regarding this
|
| 579 |
+
Agreement, please contact NVIDIA at
|
| 580 |
+
nvidia-compute-license-questions@nvidia.com.
|
| 581 |
+
|
| 582 |
+
|
| 583 |
+
2.6. Attachment A
|
| 584 |
+
|
| 585 |
+
The following portions of the SDK are distributable under the
|
| 586 |
+
Agreement:
|
| 587 |
+
|
| 588 |
+
Component
|
| 589 |
+
|
| 590 |
+
CUDA Runtime
|
| 591 |
+
|
| 592 |
+
Windows
|
| 593 |
+
|
| 594 |
+
cudart.dll, cudart_static.lib, cudadevrt.lib
|
| 595 |
+
|
| 596 |
+
Mac OSX
|
| 597 |
+
|
| 598 |
+
libcudart.dylib, libcudart_static.a, libcudadevrt.a
|
| 599 |
+
|
| 600 |
+
Linux
|
| 601 |
+
|
| 602 |
+
libcudart.so, libcudart_static.a, libcudadevrt.a
|
| 603 |
+
|
| 604 |
+
Android
|
| 605 |
+
|
| 606 |
+
libcudart.so, libcudart_static.a, libcudadevrt.a
|
| 607 |
+
|
| 608 |
+
Component
|
| 609 |
+
|
| 610 |
+
CUDA FFT Library
|
| 611 |
+
|
| 612 |
+
Windows
|
| 613 |
+
|
| 614 |
+
cufft.dll, cufftw.dll, cufft.lib, cufftw.lib
|
| 615 |
+
|
| 616 |
+
Mac OSX
|
| 617 |
+
|
| 618 |
+
libcufft.dylib, libcufft_static.a, libcufftw.dylib,
|
| 619 |
+
libcufftw_static.a
|
| 620 |
+
|
| 621 |
+
Linux
|
| 622 |
+
|
| 623 |
+
libcufft.so, libcufft_static.a, libcufftw.so,
|
| 624 |
+
libcufftw_static.a
|
| 625 |
+
|
| 626 |
+
Android
|
| 627 |
+
|
| 628 |
+
libcufft.so, libcufft_static.a, libcufftw.so,
|
| 629 |
+
libcufftw_static.a
|
| 630 |
+
|
| 631 |
+
Component
|
| 632 |
+
|
| 633 |
+
CUDA BLAS Library
|
| 634 |
+
|
| 635 |
+
Windows
|
| 636 |
+
|
| 637 |
+
cublas.dll, cublasLt.dll
|
| 638 |
+
|
| 639 |
+
Mac OSX
|
| 640 |
+
|
| 641 |
+
libcublas.dylib, libcublasLt.dylib, libcublas_static.a,
|
| 642 |
+
libcublasLt_static.a
|
| 643 |
+
|
| 644 |
+
Linux
|
| 645 |
+
|
| 646 |
+
libcublas.so, libcublasLt.so, libcublas_static.a,
|
| 647 |
+
libcublasLt_static.a
|
| 648 |
+
|
| 649 |
+
Android
|
| 650 |
+
|
| 651 |
+
libcublas.so, libcublasLt.so, libcublas_static.a,
|
| 652 |
+
libcublasLt_static.a
|
| 653 |
+
|
| 654 |
+
Component
|
| 655 |
+
|
| 656 |
+
NVIDIA "Drop-in" BLAS Library
|
| 657 |
+
|
| 658 |
+
Windows
|
| 659 |
+
|
| 660 |
+
nvblas.dll
|
| 661 |
+
|
| 662 |
+
Mac OSX
|
| 663 |
+
|
| 664 |
+
libnvblas.dylib
|
| 665 |
+
|
| 666 |
+
Linux
|
| 667 |
+
|
| 668 |
+
libnvblas.so
|
| 669 |
+
|
| 670 |
+
Component
|
| 671 |
+
|
| 672 |
+
CUDA Sparse Matrix Library
|
| 673 |
+
|
| 674 |
+
Windows
|
| 675 |
+
|
| 676 |
+
cusparse.dll, cusparse.lib
|
| 677 |
+
|
| 678 |
+
Mac OSX
|
| 679 |
+
|
| 680 |
+
libcusparse.dylib, libcusparse_static.a
|
| 681 |
+
|
| 682 |
+
Linux
|
| 683 |
+
|
| 684 |
+
libcusparse.so, libcusparse_static.a
|
| 685 |
+
|
| 686 |
+
Android
|
| 687 |
+
|
| 688 |
+
libcusparse.so, libcusparse_static.a
|
| 689 |
+
|
| 690 |
+
Component
|
| 691 |
+
|
| 692 |
+
CUDA Linear Solver Library
|
| 693 |
+
|
| 694 |
+
Windows
|
| 695 |
+
|
| 696 |
+
cusolver.dll, cusolver.lib
|
| 697 |
+
|
| 698 |
+
Mac OSX
|
| 699 |
+
|
| 700 |
+
libcusolver.dylib, libcusolver_static.a
|
| 701 |
+
|
| 702 |
+
Linux
|
| 703 |
+
|
| 704 |
+
libcusolver.so, libcusolver_static.a
|
| 705 |
+
|
| 706 |
+
Android
|
| 707 |
+
|
| 708 |
+
libcusolver.so, libcusolver_static.a
|
| 709 |
+
|
| 710 |
+
Component
|
| 711 |
+
|
| 712 |
+
CUDA Random Number Generation Library
|
| 713 |
+
|
| 714 |
+
Windows
|
| 715 |
+
|
| 716 |
+
curand.dll, curand.lib
|
| 717 |
+
|
| 718 |
+
Mac OSX
|
| 719 |
+
|
| 720 |
+
libcurand.dylib, libcurand_static.a
|
| 721 |
+
|
| 722 |
+
Linux
|
| 723 |
+
|
| 724 |
+
libcurand.so, libcurand_static.a
|
| 725 |
+
|
| 726 |
+
Android
|
| 727 |
+
|
| 728 |
+
libcurand.so, libcurand_static.a
|
| 729 |
+
|
| 730 |
+
Component
|
| 731 |
+
|
| 732 |
+
CUDA Accelerated Graph Library
|
| 733 |
+
|
| 734 |
+
Component
|
| 735 |
+
|
| 736 |
+
NVIDIA Performance Primitives Library
|
| 737 |
+
|
| 738 |
+
Windows
|
| 739 |
+
|
| 740 |
+
nppc.dll, nppc.lib, nppial.dll, nppial.lib, nppicc.dll,
|
| 741 |
+
nppicc.lib, nppicom.dll, nppicom.lib, nppidei.dll,
|
| 742 |
+
nppidei.lib, nppif.dll, nppif.lib, nppig.dll, nppig.lib,
|
| 743 |
+
nppim.dll, nppim.lib, nppist.dll, nppist.lib, nppisu.dll,
|
| 744 |
+
nppisu.lib, nppitc.dll, nppitc.lib, npps.dll, npps.lib
|
| 745 |
+
|
| 746 |
+
Mac OSX
|
| 747 |
+
|
| 748 |
+
libnppc.dylib, libnppc_static.a, libnppial.dylib,
|
| 749 |
+
libnppial_static.a, libnppicc.dylib, libnppicc_static.a,
|
| 750 |
+
libnppicom.dylib, libnppicom_static.a, libnppidei.dylib,
|
| 751 |
+
libnppidei_static.a, libnppif.dylib, libnppif_static.a,
|
| 752 |
+
libnppig.dylib, libnppig_static.a, libnppim.dylib,
|
| 753 |
+
libnppisu_static.a, libnppitc.dylib, libnppitc_static.a,
|
| 754 |
+
libnpps.dylib, libnpps_static.a
|
| 755 |
+
|
| 756 |
+
Linux
|
| 757 |
+
|
| 758 |
+
libnppc.so, libnppc_static.a, libnppial.so,
|
| 759 |
+
libnppial_static.a, libnppicc.so, libnppicc_static.a,
|
| 760 |
+
libnppicom.so, libnppicom_static.a, libnppidei.so,
|
| 761 |
+
libnppidei_static.a, libnppif.so, libnppif_static.a
|
| 762 |
+
libnppig.so, libnppig_static.a, libnppim.so,
|
| 763 |
+
libnppim_static.a, libnppist.so, libnppist_static.a,
|
| 764 |
+
libnppisu.so, libnppisu_static.a, libnppitc.so
|
| 765 |
+
libnppitc_static.a, libnpps.so, libnpps_static.a
|
| 766 |
+
|
| 767 |
+
Android
|
| 768 |
+
|
| 769 |
+
libnppc.so, libnppc_static.a, libnppial.so,
|
| 770 |
+
libnppial_static.a, libnppicc.so, libnppicc_static.a,
|
| 771 |
+
libnppicom.so, libnppicom_static.a, libnppidei.so,
|
| 772 |
+
libnppidei_static.a, libnppif.so, libnppif_static.a
|
| 773 |
+
libnppig.so, libnppig_static.a, libnppim.so,
|
| 774 |
+
libnppim_static.a, libnppist.so, libnppist_static.a,
|
| 775 |
+
libnppisu.so, libnppisu_static.a, libnppitc.so
|
| 776 |
+
libnppitc_static.a, libnpps.so, libnpps_static.a
|
| 777 |
+
|
| 778 |
+
Component
|
| 779 |
+
|
| 780 |
+
NVIDIA JPEG Library
|
| 781 |
+
|
| 782 |
+
Linux
|
| 783 |
+
|
| 784 |
+
libnvjpeg.so, libnvjpeg_static.a
|
| 785 |
+
|
| 786 |
+
Component
|
| 787 |
+
|
| 788 |
+
Internal common library required for statically linking to
|
| 789 |
+
cuBLAS, cuSPARSE, cuFFT, cuRAND, nvJPEG and NPP
|
| 790 |
+
|
| 791 |
+
Mac OSX
|
| 792 |
+
|
| 793 |
+
libculibos.a
|
| 794 |
+
|
| 795 |
+
Linux
|
| 796 |
+
|
| 797 |
+
libculibos.a
|
| 798 |
+
|
| 799 |
+
Component
|
| 800 |
+
|
| 801 |
+
NVIDIA Runtime Compilation Library and Header
|
| 802 |
+
|
| 803 |
+
All
|
| 804 |
+
|
| 805 |
+
nvrtc.h
|
| 806 |
+
|
| 807 |
+
Windows
|
| 808 |
+
|
| 809 |
+
nvrtc.dll, nvrtc-builtins.dll
|
| 810 |
+
|
| 811 |
+
Mac OSX
|
| 812 |
+
|
| 813 |
+
libnvrtc.dylib, libnvrtc-builtins.dylib
|
| 814 |
+
|
| 815 |
+
Linux
|
| 816 |
+
|
| 817 |
+
libnvrtc.so, libnvrtc-builtins.so
|
| 818 |
+
|
| 819 |
+
Component
|
| 820 |
+
|
| 821 |
+
NVIDIA Optimizing Compiler Library
|
| 822 |
+
|
| 823 |
+
Windows
|
| 824 |
+
|
| 825 |
+
nvvm.dll
|
| 826 |
+
|
| 827 |
+
Mac OSX
|
| 828 |
+
|
| 829 |
+
libnvvm.dylib
|
| 830 |
+
|
| 831 |
+
Linux
|
| 832 |
+
|
| 833 |
+
libnvvm.so
|
| 834 |
+
|
| 835 |
+
Component
|
| 836 |
+
|
| 837 |
+
NVIDIA Common Device Math Functions Library
|
| 838 |
+
|
| 839 |
+
Windows
|
| 840 |
+
|
| 841 |
+
libdevice.10.bc
|
| 842 |
+
|
| 843 |
+
Mac OSX
|
| 844 |
+
|
| 845 |
+
libdevice.10.bc
|
| 846 |
+
|
| 847 |
+
Linux
|
| 848 |
+
|
| 849 |
+
libdevice.10.bc
|
| 850 |
+
|
| 851 |
+
Component
|
| 852 |
+
|
| 853 |
+
CUDA Occupancy Calculation Header Library
|
| 854 |
+
|
| 855 |
+
All
|
| 856 |
+
|
| 857 |
+
cuda_occupancy.h
|
| 858 |
+
|
| 859 |
+
Component
|
| 860 |
+
|
| 861 |
+
CUDA Half Precision Headers
|
| 862 |
+
|
| 863 |
+
All
|
| 864 |
+
|
| 865 |
+
cuda_fp16.h, cuda_fp16.hpp
|
| 866 |
+
|
| 867 |
+
Component
|
| 868 |
+
|
| 869 |
+
CUDA Profiling Tools Interface (CUPTI) Library
|
| 870 |
+
|
| 871 |
+
Windows
|
| 872 |
+
|
| 873 |
+
cupti.dll
|
| 874 |
+
|
| 875 |
+
Mac OSX
|
| 876 |
+
|
| 877 |
+
libcupti.dylib
|
| 878 |
+
|
| 879 |
+
Linux
|
| 880 |
+
|
| 881 |
+
libcupti.so
|
| 882 |
+
|
| 883 |
+
Component
|
| 884 |
+
|
| 885 |
+
NVIDIA Tools Extension Library
|
| 886 |
+
|
| 887 |
+
Windows
|
| 888 |
+
|
| 889 |
+
nvToolsExt.dll, nvToolsExt.lib
|
| 890 |
+
|
| 891 |
+
Mac OSX
|
| 892 |
+
|
| 893 |
+
libnvToolsExt.dylib
|
| 894 |
+
|
| 895 |
+
Linux
|
| 896 |
+
|
| 897 |
+
libnvToolsExt.so
|
| 898 |
+
|
| 899 |
+
Component
|
| 900 |
+
|
| 901 |
+
NVIDIA CUDA Driver Libraries
|
| 902 |
+
|
| 903 |
+
Linux
|
| 904 |
+
|
| 905 |
+
libcuda.so, libnvidia-fatbinaryloader.so,
|
| 906 |
+
libnvidia-ptxjitcompiler.so
|
| 907 |
+
|
| 908 |
+
The NVIDIA CUDA Driver Libraries are only distributable in
|
| 909 |
+
applications that meet this criteria:
|
| 910 |
+
|
| 911 |
+
1. The application was developed starting from a NVIDIA CUDA
|
| 912 |
+
container obtained from Docker Hub or the NVIDIA GPU
|
| 913 |
+
Cloud, and
|
| 914 |
+
|
| 915 |
+
2. The resulting application is packaged as a Docker
|
| 916 |
+
container and distributed to users on Docker Hub or the
|
| 917 |
+
NVIDIA GPU Cloud only.
|
| 918 |
+
|
| 919 |
+
|
| 920 |
+
2.7. Attachment B
|
| 921 |
+
|
| 922 |
+
|
| 923 |
+
Additional Licensing Obligations
|
| 924 |
+
|
| 925 |
+
The following third party components included in the SOFTWARE
|
| 926 |
+
are licensed to Licensee pursuant to the following terms and
|
| 927 |
+
conditions:
|
| 928 |
+
|
| 929 |
+
1. Licensee's use of the GDB third party component is
|
| 930 |
+
subject to the terms and conditions of GNU GPL v3:
|
| 931 |
+
|
| 932 |
+
This product includes copyrighted third-party software licensed
|
| 933 |
+
under the terms of the GNU General Public License v3 ("GPL v3").
|
| 934 |
+
All third-party software packages are copyright by their respective
|
| 935 |
+
authors. GPL v3 terms and conditions are hereby incorporated into
|
| 936 |
+
the Agreement by this reference: http://www.gnu.org/licenses/gpl.txt
|
| 937 |
+
|
| 938 |
+
Consistent with these licensing requirements, the software
|
| 939 |
+
listed below is provided under the terms of the specified
|
| 940 |
+
open source software licenses. To obtain source code for
|
| 941 |
+
software provided under licenses that require
|
| 942 |
+
redistribution of source code, including the GNU General
|
| 943 |
+
Public License (GPL) and GNU Lesser General Public License
|
| 944 |
+
(LGPL), contact oss-requests@nvidia.com. This offer is
|
| 945 |
+
valid for a period of three (3) years from the date of the
|
| 946 |
+
distribution of this product by NVIDIA CORPORATION.
|
| 947 |
+
|
| 948 |
+
Component License
|
| 949 |
+
CUDA-GDB GPL v3
|
| 950 |
+
|
| 951 |
+
2. Licensee represents and warrants that any and all third
|
| 952 |
+
party licensing and/or royalty payment obligations in
|
| 953 |
+
connection with Licensee's use of the H.264 video codecs
|
| 954 |
+
are solely the responsibility of Licensee.
|
| 955 |
+
|
| 956 |
+
3. Licensee's use of the Thrust library is subject to the
|
| 957 |
+
terms and conditions of the Apache License Version 2.0.
|
| 958 |
+
All third-party software packages are copyright by their
|
| 959 |
+
respective authors. Apache License Version 2.0 terms and
|
| 960 |
+
conditions are hereby incorporated into the Agreement by
|
| 961 |
+
this reference.
|
| 962 |
+
http://www.apache.org/licenses/LICENSE-2.0.html
|
| 963 |
+
|
| 964 |
+
In addition, Licensee acknowledges the following notice:
|
| 965 |
+
Thrust includes source code from the Boost Iterator,
|
| 966 |
+
Tuple, System, and Random Number libraries.
|
| 967 |
+
|
| 968 |
+
Boost Software License - Version 1.0 - August 17th, 2003
|
| 969 |
+
. . . .
|
| 970 |
+
|
| 971 |
+
Permission is hereby granted, free of charge, to any person or
|
| 972 |
+
organization obtaining a copy of the software and accompanying
|
| 973 |
+
documentation covered by this license (the "Software") to use,
|
| 974 |
+
reproduce, display, distribute, execute, and transmit the Software,
|
| 975 |
+
and to prepare derivative works of the Software, and to permit
|
| 976 |
+
third-parties to whom the Software is furnished to do so, all
|
| 977 |
+
subject to the following:
|
| 978 |
+
|
| 979 |
+
The copyright notices in the Software and this entire statement,
|
| 980 |
+
including the above license grant, this restriction and the following
|
| 981 |
+
disclaimer, must be included in all copies of the Software, in whole
|
| 982 |
+
or in part, and all derivative works of the Software, unless such
|
| 983 |
+
copies or derivative works are solely in the form of machine-executable
|
| 984 |
+
object code generated by a source language processor.
|
| 985 |
+
|
| 986 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 987 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 988 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
|
| 989 |
+
NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
|
| 990 |
+
ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR
|
| 991 |
+
OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING
|
| 992 |
+
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
| 993 |
+
OTHER DEALINGS IN THE SOFTWARE.
|
| 994 |
+
|
| 995 |
+
4. Licensee's use of the LLVM third party component is
|
| 996 |
+
subject to the following terms and conditions:
|
| 997 |
+
|
| 998 |
+
======================================================
|
| 999 |
+
LLVM Release License
|
| 1000 |
+
======================================================
|
| 1001 |
+
University of Illinois/NCSA
|
| 1002 |
+
Open Source License
|
| 1003 |
+
|
| 1004 |
+
Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign.
|
| 1005 |
+
All rights reserved.
|
| 1006 |
+
|
| 1007 |
+
Developed by:
|
| 1008 |
+
|
| 1009 |
+
LLVM Team
|
| 1010 |
+
|
| 1011 |
+
University of Illinois at Urbana-Champaign
|
| 1012 |
+
|
| 1013 |
+
http://llvm.org
|
| 1014 |
+
|
| 1015 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 1016 |
+
of this software and associated documentation files (the "Software"), to
|
| 1017 |
+
deal with the Software without restriction, including without limitation the
|
| 1018 |
+
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
| 1019 |
+
sell copies of the Software, and to permit persons to whom the Software is
|
| 1020 |
+
furnished to do so, subject to the following conditions:
|
| 1021 |
+
|
| 1022 |
+
* Redistributions of source code must retain the above copyright notice,
|
| 1023 |
+
this list of conditions and the following disclaimers.
|
| 1024 |
+
|
| 1025 |
+
* Redistributions in binary form must reproduce the above copyright
|
| 1026 |
+
notice, this list of conditions and the following disclaimers in the
|
| 1027 |
+
documentation and/or other materials provided with the distribution.
|
| 1028 |
+
|
| 1029 |
+
* Neither the names of the LLVM Team, University of Illinois at Urbana-
|
| 1030 |
+
Champaign, nor the names of its contributors may be used to endorse or
|
| 1031 |
+
promote products derived from this Software without specific prior
|
| 1032 |
+
written permission.
|
| 1033 |
+
|
| 1034 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 1035 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 1036 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
| 1037 |
+
THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
|
| 1038 |
+
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
| 1039 |
+
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
| 1040 |
+
DEALINGS WITH THE SOFTWARE.
|
| 1041 |
+
|
| 1042 |
+
5. Licensee's use (e.g. nvprof) of the PCRE third party
|
| 1043 |
+
component is subject to the following terms and
|
| 1044 |
+
conditions:
|
| 1045 |
+
|
| 1046 |
+
------------
|
| 1047 |
+
PCRE LICENCE
|
| 1048 |
+
------------
|
| 1049 |
+
PCRE is a library of functions to support regular expressions whose syntax
|
| 1050 |
+
and semantics are as close as possible to those of the Perl 5 language.
|
| 1051 |
+
Release 8 of PCRE is distributed under the terms of the "BSD" licence, as
|
| 1052 |
+
specified below. The documentation for PCRE, supplied in the "doc"
|
| 1053 |
+
directory, is distributed under the same terms as the software itself. The
|
| 1054 |
+
basic library functions are written in C and are freestanding. Also
|
| 1055 |
+
included in the distribution is a set of C++ wrapper functions, and a just-
|
| 1056 |
+
in-time compiler that can be used to optimize pattern matching. These are
|
| 1057 |
+
both optional features that can be omitted when the library is built.
|
| 1058 |
+
|
| 1059 |
+
THE BASIC LIBRARY FUNCTIONS
|
| 1060 |
+
---------------------------
|
| 1061 |
+
Written by: Philip Hazel
|
| 1062 |
+
Email local part: ph10
|
| 1063 |
+
Email domain: cam.ac.uk
|
| 1064 |
+
University of Cambridge Computing Service,
|
| 1065 |
+
Cambridge, England.
|
| 1066 |
+
Copyright (c) 1997-2012 University of Cambridge
|
| 1067 |
+
All rights reserved.
|
| 1068 |
+
|
| 1069 |
+
PCRE JUST-IN-TIME COMPILATION SUPPORT
|
| 1070 |
+
-------------------------------------
|
| 1071 |
+
Written by: Zoltan Herczeg
|
| 1072 |
+
Email local part: hzmester
|
| 1073 |
+
Emain domain: freemail.hu
|
| 1074 |
+
Copyright(c) 2010-2012 Zoltan Herczeg
|
| 1075 |
+
All rights reserved.
|
| 1076 |
+
|
| 1077 |
+
STACK-LESS JUST-IN-TIME COMPILER
|
| 1078 |
+
--------------------------------
|
| 1079 |
+
Written by: Zoltan Herczeg
|
| 1080 |
+
Email local part: hzmester
|
| 1081 |
+
Emain domain: freemail.hu
|
| 1082 |
+
Copyright(c) 2009-2012 Zoltan Herczeg
|
| 1083 |
+
All rights reserved.
|
| 1084 |
+
|
| 1085 |
+
THE C++ WRAPPER FUNCTIONS
|
| 1086 |
+
-------------------------
|
| 1087 |
+
Contributed by: Google Inc.
|
| 1088 |
+
Copyright (c) 2007-2012, Google Inc.
|
| 1089 |
+
All rights reserved.
|
| 1090 |
+
|
| 1091 |
+
THE "BSD" LICENCE
|
| 1092 |
+
-----------------
|
| 1093 |
+
Redistribution and use in source and binary forms, with or without
|
| 1094 |
+
modification, are permitted provided that the following conditions are met:
|
| 1095 |
+
|
| 1096 |
+
* Redistributions of source code must retain the above copyright notice,
|
| 1097 |
+
this list of conditions and the following disclaimer.
|
| 1098 |
+
|
| 1099 |
+
* Redistributions in binary form must reproduce the above copyright
|
| 1100 |
+
notice, this list of conditions and the following disclaimer in the
|
| 1101 |
+
documentation and/or other materials provided with the distribution.
|
| 1102 |
+
|
| 1103 |
+
* Neither the name of the University of Cambridge nor the name of Google
|
| 1104 |
+
Inc. nor the names of their contributors may be used to endorse or
|
| 1105 |
+
promote products derived from this software without specific prior
|
| 1106 |
+
written permission.
|
| 1107 |
+
|
| 1108 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
| 1109 |
+
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
| 1110 |
+
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
| 1111 |
+
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
|
| 1112 |
+
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
| 1113 |
+
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
| 1114 |
+
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
| 1115 |
+
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
| 1116 |
+
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
| 1117 |
+
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 1118 |
+
POSSIBILITY OF SUCH DAMAGE.
|
| 1119 |
+
|
| 1120 |
+
6. Some of the cuBLAS library routines were written by or
|
| 1121 |
+
derived from code written by Vasily Volkov and are subject
|
| 1122 |
+
to the Modified Berkeley Software Distribution License as
|
| 1123 |
+
follows:
|
| 1124 |
+
|
| 1125 |
+
Copyright (c) 2007-2009, Regents of the University of California
|
| 1126 |
+
|
| 1127 |
+
All rights reserved.
|
| 1128 |
+
|
| 1129 |
+
Redistribution and use in source and binary forms, with or without
|
| 1130 |
+
modification, are permitted provided that the following conditions are
|
| 1131 |
+
met:
|
| 1132 |
+
* Redistributions of source code must retain the above copyright
|
| 1133 |
+
notice, this list of conditions and the following disclaimer.
|
| 1134 |
+
* Redistributions in binary form must reproduce the above
|
| 1135 |
+
copyright notice, this list of conditions and the following
|
| 1136 |
+
disclaimer in the documentation and/or other materials provided
|
| 1137 |
+
with the distribution.
|
| 1138 |
+
* Neither the name of the University of California, Berkeley nor
|
| 1139 |
+
the names of its contributors may be used to endorse or promote
|
| 1140 |
+
products derived from this software without specific prior
|
| 1141 |
+
written permission.
|
| 1142 |
+
|
| 1143 |
+
THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
|
| 1144 |
+
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
| 1145 |
+
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
| 1146 |
+
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
|
| 1147 |
+
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
| 1148 |
+
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
| 1149 |
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
|
| 1150 |
+
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
|
| 1151 |
+
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
|
| 1152 |
+
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 1153 |
+
POSSIBILITY OF SUCH DAMAGE.
|
| 1154 |
+
|
| 1155 |
+
7. Some of the cuBLAS library routines were written by or
|
| 1156 |
+
derived from code written by Davide Barbieri and are
|
| 1157 |
+
subject to the Modified Berkeley Software Distribution
|
| 1158 |
+
License as follows:
|
| 1159 |
+
|
| 1160 |
+
Copyright (c) 2008-2009 Davide Barbieri @ University of Rome Tor Vergata.
|
| 1161 |
+
|
| 1162 |
+
All rights reserved.
|
| 1163 |
+
|
| 1164 |
+
Redistribution and use in source and binary forms, with or without
|
| 1165 |
+
modification, are permitted provided that the following conditions are
|
| 1166 |
+
met:
|
| 1167 |
+
* Redistributions of source code must retain the above copyright
|
| 1168 |
+
notice, this list of conditions and the following disclaimer.
|
| 1169 |
+
* Redistributions in binary form must reproduce the above
|
| 1170 |
+
copyright notice, this list of conditions and the following
|
| 1171 |
+
disclaimer in the documentation and/or other materials provided
|
| 1172 |
+
with the distribution.
|
| 1173 |
+
* The name of the author may not be used to endorse or promote
|
| 1174 |
+
products derived from this software without specific prior
|
| 1175 |
+
written permission.
|
| 1176 |
+
|
| 1177 |
+
THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
|
| 1178 |
+
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
| 1179 |
+
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
| 1180 |
+
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
|
| 1181 |
+
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
| 1182 |
+
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
| 1183 |
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
|
| 1184 |
+
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
|
| 1185 |
+
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
|
| 1186 |
+
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
| 1187 |
+
POSSIBILITY OF SUCH DAMAGE.
|
| 1188 |
+
|
| 1189 |
+
8. Some of the cuBLAS library routines were derived from
|
| 1190 |
+
code developed by the University of Tennessee and are
|
| 1191 |
+
subject to the Modified Berkeley Software Distribution
|
| 1192 |
+
License as follows:
|
| 1193 |
+
|
| 1194 |
+
Copyright (c) 2010 The University of Tennessee.
|
| 1195 |
+
|
| 1196 |
+
All rights reserved.
|
| 1197 |
+
|
| 1198 |
+
Redistribution and use in source and binary forms, with or without
|
| 1199 |
+
modification, are permitted provided that the following conditions are
|
| 1200 |
+
met:
|
| 1201 |
+
* Redistributions of source code must retain the above copyright
|
| 1202 |
+
notice, this list of conditions and the following disclaimer.
|
| 1203 |
+
* Redistributions in binary form must reproduce the above
|
| 1204 |
+
copyright notice, this list of conditions and the following
|
| 1205 |
+
disclaimer listed in this license in the documentation and/or
|
| 1206 |
+
other materials provided with the distribution.
|
| 1207 |
+
* Neither the name of the copyright holders nor the names of its
|
| 1208 |
+
contributors may be used to endorse or promote products derived
|
| 1209 |
+
from this software without specific prior written permission.
|
| 1210 |
+
|
| 1211 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1212 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1213 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1214 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1215 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1216 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1217 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1218 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1219 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1220 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1221 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1222 |
+
|
| 1223 |
+
9. Some of the cuBLAS library routines were written by or
|
| 1224 |
+
derived from code written by Jonathan Hogg and are subject
|
| 1225 |
+
to the Modified Berkeley Software Distribution License as
|
| 1226 |
+
follows:
|
| 1227 |
+
|
| 1228 |
+
Copyright (c) 2012, The Science and Technology Facilities Council (STFC).
|
| 1229 |
+
|
| 1230 |
+
All rights reserved.
|
| 1231 |
+
|
| 1232 |
+
Redistribution and use in source and binary forms, with or without
|
| 1233 |
+
modification, are permitted provided that the following conditions are
|
| 1234 |
+
met:
|
| 1235 |
+
* Redistributions of source code must retain the above copyright
|
| 1236 |
+
notice, this list of conditions and the following disclaimer.
|
| 1237 |
+
* Redistributions in binary form must reproduce the above
|
| 1238 |
+
copyright notice, this list of conditions and the following
|
| 1239 |
+
disclaimer in the documentation and/or other materials provided
|
| 1240 |
+
with the distribution.
|
| 1241 |
+
* Neither the name of the STFC nor the names of its contributors
|
| 1242 |
+
may be used to endorse or promote products derived from this
|
| 1243 |
+
software without specific prior written permission.
|
| 1244 |
+
|
| 1245 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1246 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1247 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1248 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE STFC BE
|
| 1249 |
+
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
| 1250 |
+
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
| 1251 |
+
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
|
| 1252 |
+
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
| 1253 |
+
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
|
| 1254 |
+
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
|
| 1255 |
+
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1256 |
+
|
| 1257 |
+
10. Some of the cuBLAS library routines were written by or
|
| 1258 |
+
derived from code written by Ahmad M. Abdelfattah, David
|
| 1259 |
+
Keyes, and Hatem Ltaief, and are subject to the Apache
|
| 1260 |
+
License, Version 2.0, as follows:
|
| 1261 |
+
|
| 1262 |
+
-- (C) Copyright 2013 King Abdullah University of Science and Technology
|
| 1263 |
+
Authors:
|
| 1264 |
+
Ahmad Abdelfattah (ahmad.ahmad@kaust.edu.sa)
|
| 1265 |
+
David Keyes (david.keyes@kaust.edu.sa)
|
| 1266 |
+
Hatem Ltaief (hatem.ltaief@kaust.edu.sa)
|
| 1267 |
+
|
| 1268 |
+
Redistribution and use in source and binary forms, with or without
|
| 1269 |
+
modification, are permitted provided that the following conditions
|
| 1270 |
+
are met:
|
| 1271 |
+
|
| 1272 |
+
* Redistributions of source code must retain the above copyright
|
| 1273 |
+
notice, this list of conditions and the following disclaimer.
|
| 1274 |
+
* Redistributions in binary form must reproduce the above copyright
|
| 1275 |
+
notice, this list of conditions and the following disclaimer in the
|
| 1276 |
+
documentation and/or other materials provided with the distribution.
|
| 1277 |
+
* Neither the name of the King Abdullah University of Science and
|
| 1278 |
+
Technology nor the names of its contributors may be used to endorse
|
| 1279 |
+
or promote products derived from this software without specific prior
|
| 1280 |
+
written permission.
|
| 1281 |
+
|
| 1282 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1283 |
+
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1284 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1285 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1286 |
+
HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1287 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1288 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1289 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1290 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1291 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1292 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
|
| 1293 |
+
|
| 1294 |
+
11. Some of the cuSPARSE library routines were written by or
|
| 1295 |
+
derived from code written by Li-Wen Chang and are subject
|
| 1296 |
+
to the NCSA Open Source License as follows:
|
| 1297 |
+
|
| 1298 |
+
Copyright (c) 2012, University of Illinois.
|
| 1299 |
+
|
| 1300 |
+
All rights reserved.
|
| 1301 |
+
|
| 1302 |
+
Developed by: IMPACT Group, University of Illinois, http://impact.crhc.illinois.edu
|
| 1303 |
+
|
| 1304 |
+
Permission is hereby granted, free of charge, to any person obtaining
|
| 1305 |
+
a copy of this software and associated documentation files (the
|
| 1306 |
+
"Software"), to deal with the Software without restriction, including
|
| 1307 |
+
without limitation the rights to use, copy, modify, merge, publish,
|
| 1308 |
+
distribute, sublicense, and/or sell copies of the Software, and to
|
| 1309 |
+
permit persons to whom the Software is furnished to do so, subject to
|
| 1310 |
+
the following conditions:
|
| 1311 |
+
* Redistributions of source code must retain the above copyright
|
| 1312 |
+
notice, this list of conditions and the following disclaimer.
|
| 1313 |
+
* Redistributions in binary form must reproduce the above
|
| 1314 |
+
copyright notice, this list of conditions and the following
|
| 1315 |
+
disclaimers in the documentation and/or other materials provided
|
| 1316 |
+
with the distribution.
|
| 1317 |
+
* Neither the names of IMPACT Group, University of Illinois, nor
|
| 1318 |
+
the names of its contributors may be used to endorse or promote
|
| 1319 |
+
products derived from this Software without specific prior
|
| 1320 |
+
written permission.
|
| 1321 |
+
|
| 1322 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 1323 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 1324 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
| 1325 |
+
NONINFRINGEMENT. IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT
|
| 1326 |
+
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
| 1327 |
+
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
| 1328 |
+
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE
|
| 1329 |
+
SOFTWARE.
|
| 1330 |
+
|
| 1331 |
+
12. Some of the cuRAND library routines were written by or
|
| 1332 |
+
derived from code written by Mutsuo Saito and Makoto
|
| 1333 |
+
Matsumoto and are subject to the following license:
|
| 1334 |
+
|
| 1335 |
+
Copyright (c) 2009, 2010 Mutsuo Saito, Makoto Matsumoto and Hiroshima
|
| 1336 |
+
University. All rights reserved.
|
| 1337 |
+
|
| 1338 |
+
Copyright (c) 2011 Mutsuo Saito, Makoto Matsumoto, Hiroshima
|
| 1339 |
+
University and University of Tokyo. All rights reserved.
|
| 1340 |
+
|
| 1341 |
+
Redistribution and use in source and binary forms, with or without
|
| 1342 |
+
modification, are permitted provided that the following conditions are
|
| 1343 |
+
met:
|
| 1344 |
+
* Redistributions of source code must retain the above copyright
|
| 1345 |
+
notice, this list of conditions and the following disclaimer.
|
| 1346 |
+
* Redistributions in binary form must reproduce the above
|
| 1347 |
+
copyright notice, this list of conditions and the following
|
| 1348 |
+
disclaimer in the documentation and/or other materials provided
|
| 1349 |
+
with the distribution.
|
| 1350 |
+
* Neither the name of the Hiroshima University nor the names of
|
| 1351 |
+
its contributors may be used to endorse or promote products
|
| 1352 |
+
derived from this software without specific prior written
|
| 1353 |
+
permission.
|
| 1354 |
+
|
| 1355 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1356 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1357 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1358 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1359 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1360 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1361 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1362 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1363 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1364 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1365 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1366 |
+
|
| 1367 |
+
13. Some of the cuRAND library routines were derived from
|
| 1368 |
+
code developed by D. E. Shaw Research and are subject to
|
| 1369 |
+
the following license:
|
| 1370 |
+
|
| 1371 |
+
Copyright 2010-2011, D. E. Shaw Research.
|
| 1372 |
+
|
| 1373 |
+
All rights reserved.
|
| 1374 |
+
|
| 1375 |
+
Redistribution and use in source and binary forms, with or without
|
| 1376 |
+
modification, are permitted provided that the following conditions are
|
| 1377 |
+
met:
|
| 1378 |
+
* Redistributions of source code must retain the above copyright
|
| 1379 |
+
notice, this list of conditions, and the following disclaimer.
|
| 1380 |
+
* Redistributions in binary form must reproduce the above
|
| 1381 |
+
copyright notice, this list of conditions, and the following
|
| 1382 |
+
disclaimer in the documentation and/or other materials provided
|
| 1383 |
+
with the distribution.
|
| 1384 |
+
* Neither the name of D. E. Shaw Research nor the names of its
|
| 1385 |
+
contributors may be used to endorse or promote products derived
|
| 1386 |
+
from this software without specific prior written permission.
|
| 1387 |
+
|
| 1388 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1389 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1390 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1391 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1392 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1393 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1394 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1395 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1396 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1397 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1398 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1399 |
+
|
| 1400 |
+
14. Some of the Math library routines were written by or
|
| 1401 |
+
derived from code developed by Norbert Juffa and are
|
| 1402 |
+
subject to the following license:
|
| 1403 |
+
|
| 1404 |
+
Copyright (c) 2015-2017, Norbert Juffa
|
| 1405 |
+
All rights reserved.
|
| 1406 |
+
|
| 1407 |
+
Redistribution and use in source and binary forms, with or without
|
| 1408 |
+
modification, are permitted provided that the following conditions
|
| 1409 |
+
are met:
|
| 1410 |
+
|
| 1411 |
+
1. Redistributions of source code must retain the above copyright
|
| 1412 |
+
notice, this list of conditions and the following disclaimer.
|
| 1413 |
+
|
| 1414 |
+
2. Redistributions in binary form must reproduce the above copyright
|
| 1415 |
+
notice, this list of conditions and the following disclaimer in the
|
| 1416 |
+
documentation and/or other materials provided with the distribution.
|
| 1417 |
+
|
| 1418 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1419 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1420 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1421 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1422 |
+
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1423 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1424 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1425 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1426 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1427 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1428 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1429 |
+
|
| 1430 |
+
15. Licensee's use of the lz4 third party component is
|
| 1431 |
+
subject to the following terms and conditions:
|
| 1432 |
+
|
| 1433 |
+
Copyright (C) 2011-2013, Yann Collet.
|
| 1434 |
+
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
|
| 1435 |
+
|
| 1436 |
+
Redistribution and use in source and binary forms, with or without
|
| 1437 |
+
modification, are permitted provided that the following conditions are
|
| 1438 |
+
met:
|
| 1439 |
+
|
| 1440 |
+
* Redistributions of source code must retain the above copyright
|
| 1441 |
+
notice, this list of conditions and the following disclaimer.
|
| 1442 |
+
* Redistributions in binary form must reproduce the above
|
| 1443 |
+
copyright notice, this list of conditions and the following disclaimer
|
| 1444 |
+
in the documentation and/or other materials provided with the
|
| 1445 |
+
distribution.
|
| 1446 |
+
|
| 1447 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 1448 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 1449 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 1450 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 1451 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 1452 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 1453 |
+
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 1454 |
+
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 1455 |
+
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 1456 |
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 1457 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 1458 |
+
|
| 1459 |
+
16. The NPP library uses code from the Boost Math Toolkit,
|
| 1460 |
+
and is subject to the following license:
|
| 1461 |
+
|
| 1462 |
+
Boost Software License - Version 1.0 - August 17th, 2003
|
| 1463 |
+
. . . .
|
| 1464 |
+
|
| 1465 |
+
Permission is hereby granted, free of charge, to any person or
|
| 1466 |
+
organization obtaining a copy of the software and accompanying
|
| 1467 |
+
documentation covered by this license (the "Software") to use,
|
| 1468 |
+
reproduce, display, distribute, execute, and transmit the Software,
|
| 1469 |
+
and to prepare derivative works of the Software, and to permit
|
| 1470 |
+
third-parties to whom the Software is furnished to do so, all
|
| 1471 |
+
subject to the following:
|
| 1472 |
+
|
| 1473 |
+
The copyright notices in the Software and this entire statement,
|
| 1474 |
+
including the above license grant, this restriction and the following
|
| 1475 |
+
disclaimer, must be included in all copies of the Software, in whole
|
| 1476 |
+
or in part, and all derivative works of the Software, unless such
|
| 1477 |
+
copies or derivative works are solely in the form of machine-executable
|
| 1478 |
+
object code generated by a source language processor.
|
| 1479 |
+
|
| 1480 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 1481 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 1482 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
|
| 1483 |
+
NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
|
| 1484 |
+
ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR
|
| 1485 |
+
OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING
|
| 1486 |
+
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
| 1487 |
+
OTHER DEALINGS IN THE SOFTWARE.
|
| 1488 |
+
|
| 1489 |
+
17. Portions of the Nsight Eclipse Edition is subject to the
|
| 1490 |
+
following license:
|
| 1491 |
+
|
| 1492 |
+
The Eclipse Foundation makes available all content in this plug-in
|
| 1493 |
+
("Content"). Unless otherwise indicated below, the Content is provided
|
| 1494 |
+
to you under the terms and conditions of the Eclipse Public License
|
| 1495 |
+
Version 1.0 ("EPL"). A copy of the EPL is available at http://
|
| 1496 |
+
www.eclipse.org/legal/epl-v10.html. For purposes of the EPL, "Program"
|
| 1497 |
+
will mean the Content.
|
| 1498 |
+
|
| 1499 |
+
If you did not receive this Content directly from the Eclipse
|
| 1500 |
+
Foundation, the Content is being redistributed by another party
|
| 1501 |
+
("Redistributor") and different terms and conditions may apply to your
|
| 1502 |
+
use of any object code in the Content. Check the Redistributor's
|
| 1503 |
+
license that was provided with the Content. If no such license exists,
|
| 1504 |
+
contact the Redistributor. Unless otherwise indicated below, the terms
|
| 1505 |
+
and conditions of the EPL still apply to any source code in the
|
| 1506 |
+
Content and such source code may be obtained at http://www.eclipse.org.
|
| 1507 |
+
|
| 1508 |
+
18. Some of the cuBLAS library routines uses code from
|
| 1509 |
+
OpenAI, which is subject to the following license:
|
| 1510 |
+
|
| 1511 |
+
License URL
|
| 1512 |
+
https://github.com/openai/openai-gemm/blob/master/LICENSE
|
| 1513 |
+
|
| 1514 |
+
License Text
|
| 1515 |
+
The MIT License
|
| 1516 |
+
|
| 1517 |
+
Copyright (c) 2016 OpenAI (http://openai.com), 2016 Google Inc.
|
| 1518 |
+
|
| 1519 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 1520 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 1521 |
+
in the Software without restriction, including without limitation the rights
|
| 1522 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 1523 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 1524 |
+
furnished to do so, subject to the following conditions:
|
| 1525 |
+
|
| 1526 |
+
The above copyright notice and this permission notice shall be included in
|
| 1527 |
+
all copies or substantial portions of the Software.
|
| 1528 |
+
|
| 1529 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 1530 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 1531 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 1532 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 1533 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 1534 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
| 1535 |
+
THE SOFTWARE.
|
| 1536 |
+
|
| 1537 |
+
19. Licensee's use of the Visual Studio Setup Configuration
|
| 1538 |
+
Samples is subject to the following license:
|
| 1539 |
+
|
| 1540 |
+
The MIT License (MIT)
|
| 1541 |
+
Copyright (C) Microsoft Corporation. All rights reserved.
|
| 1542 |
+
|
| 1543 |
+
Permission is hereby granted, free of charge, to any person
|
| 1544 |
+
obtaining a copy of this software and associated documentation
|
| 1545 |
+
files (the "Software"), to deal in the Software without restriction,
|
| 1546 |
+
including without limitation the rights to use, copy, modify, merge,
|
| 1547 |
+
publish, distribute, sublicense, and/or sell copies of the Software,
|
| 1548 |
+
and to permit persons to whom the Software is furnished to do so,
|
| 1549 |
+
subject to the following conditions:
|
| 1550 |
+
|
| 1551 |
+
The above copyright notice and this permission notice shall be included
|
| 1552 |
+
in all copies or substantial portions of the Software.
|
| 1553 |
+
|
| 1554 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
| 1555 |
+
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 1556 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 1557 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 1558 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 1559 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
| 1560 |
+
|
| 1561 |
+
20. Licensee's use of linmath.h header for CPU functions for
|
| 1562 |
+
GL vector/matrix operations from lunarG is subject to the
|
| 1563 |
+
Apache License Version 2.0.
|
| 1564 |
+
|
| 1565 |
+
21. The DX12-CUDA sample uses the d3dx12.h header, which is
|
| 1566 |
+
subject to the MIT license .
|
| 1567 |
+
|
| 1568 |
+
-----------------
|
evalkit_tf437/lib/python3.10/site-packages/nvidia_nvjitlink_cu12-12.6.77.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: setuptools (74.1.2)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-manylinux2014_x86_64
|
| 5 |
+
|
evalkit_tf437/lib/python3.10/site-packages/nvidia_nvjitlink_cu12-12.6.77.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
nvidia
|
evalkit_tf437/lib/python3.10/site-packages/oauthlib-3.2.2.dist-info/METADATA
ADDED
|
@@ -0,0 +1,179 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: oauthlib
|
| 3 |
+
Version: 3.2.2
|
| 4 |
+
Summary: A generic, spec-compliant, thorough implementation of the OAuth request-signing logic
|
| 5 |
+
Home-page: https://github.com/oauthlib/oauthlib
|
| 6 |
+
Author: The OAuthlib Community
|
| 7 |
+
Author-email: idan@gazit.me
|
| 8 |
+
Maintainer: Ib Lundgren
|
| 9 |
+
Maintainer-email: ib.lundgren@gmail.com
|
| 10 |
+
License: BSD
|
| 11 |
+
Platform: any
|
| 12 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 13 |
+
Classifier: Environment :: Web Environment
|
| 14 |
+
Classifier: Intended Audience :: Developers
|
| 15 |
+
Classifier: License :: OSI Approved
|
| 16 |
+
Classifier: License :: OSI Approved :: BSD License
|
| 17 |
+
Classifier: Operating System :: MacOS
|
| 18 |
+
Classifier: Operating System :: POSIX
|
| 19 |
+
Classifier: Operating System :: POSIX :: Linux
|
| 20 |
+
Classifier: Programming Language :: Python
|
| 21 |
+
Classifier: Programming Language :: Python :: 3
|
| 22 |
+
Classifier: Programming Language :: Python :: 3.6
|
| 23 |
+
Classifier: Programming Language :: Python :: 3.7
|
| 24 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 25 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 26 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 27 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 28 |
+
Classifier: Programming Language :: Python :: Implementation
|
| 29 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 30 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
| 31 |
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
| 32 |
+
Requires-Python: >=3.6
|
| 33 |
+
Description-Content-Type: text/x-rst
|
| 34 |
+
License-File: LICENSE
|
| 35 |
+
Provides-Extra: rsa
|
| 36 |
+
Requires-Dist: cryptography (>=3.0.0) ; extra == 'rsa'
|
| 37 |
+
Provides-Extra: signals
|
| 38 |
+
Requires-Dist: blinker (>=1.4.0) ; extra == 'signals'
|
| 39 |
+
Provides-Extra: signedtoken
|
| 40 |
+
Requires-Dist: cryptography (>=3.0.0) ; extra == 'signedtoken'
|
| 41 |
+
Requires-Dist: pyjwt (<3,>=2.0.0) ; extra == 'signedtoken'
|
| 42 |
+
|
| 43 |
+
OAuthLib - Python Framework for OAuth1 & OAuth2
|
| 44 |
+
===============================================
|
| 45 |
+
|
| 46 |
+
*A generic, spec-compliant, thorough implementation of the OAuth request-signing
|
| 47 |
+
logic for Python 3.6+.*
|
| 48 |
+
|
| 49 |
+
.. image:: https://app.travis-ci.com/oauthlib/oauthlib.svg?branch=master
|
| 50 |
+
:target: https://app.travis-ci.com/oauthlib/oauthlib
|
| 51 |
+
:alt: Travis
|
| 52 |
+
.. image:: https://coveralls.io/repos/oauthlib/oauthlib/badge.svg?branch=master
|
| 53 |
+
:target: https://coveralls.io/r/oauthlib/oauthlib
|
| 54 |
+
:alt: Coveralls
|
| 55 |
+
.. image:: https://img.shields.io/pypi/pyversions/oauthlib.svg
|
| 56 |
+
:target: https://pypi.org/project/oauthlib/
|
| 57 |
+
:alt: Download from PyPI
|
| 58 |
+
.. image:: https://img.shields.io/pypi/l/oauthlib.svg
|
| 59 |
+
:target: https://pypi.org/project/oauthlib/
|
| 60 |
+
:alt: License
|
| 61 |
+
.. image:: https://app.fossa.io/api/projects/git%2Bgithub.com%2Foauthlib%2Foauthlib.svg?type=shield
|
| 62 |
+
:target: https://app.fossa.io/projects/git%2Bgithub.com%2Foauthlib%2Foauthlib?ref=badge_shield
|
| 63 |
+
:alt: FOSSA Status
|
| 64 |
+
.. image:: https://img.shields.io/readthedocs/oauthlib.svg
|
| 65 |
+
:target: https://oauthlib.readthedocs.io/en/latest/index.html
|
| 66 |
+
:alt: Read the Docs
|
| 67 |
+
.. image:: https://badges.gitter.im/oauthlib/oauthlib.svg
|
| 68 |
+
:target: https://gitter.im/oauthlib/Lobby
|
| 69 |
+
:alt: Chat on Gitter
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
.. image:: https://raw.githubusercontent.com/oauthlib/oauthlib/8d71b161fd145d11c40d55c9ab66ac134a303253/docs/logo/oauthlib-banner-700x192.png
|
| 73 |
+
:target: https://github.com/oauthlib/oauthlib/
|
| 74 |
+
:alt: OAuth + Python = OAuthlib Python Framework
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
OAuth often seems complicated and difficult-to-implement. There are several
|
| 78 |
+
prominent libraries for handling OAuth requests, but they all suffer from one or
|
| 79 |
+
both of the following:
|
| 80 |
+
|
| 81 |
+
1. They predate the `OAuth 1.0 spec`_, AKA RFC 5849.
|
| 82 |
+
2. They predate the `OAuth 2.0 spec`_, AKA RFC 6749.
|
| 83 |
+
3. They assume the usage of a specific HTTP request library.
|
| 84 |
+
|
| 85 |
+
.. _`OAuth 1.0 spec`: https://tools.ietf.org/html/rfc5849
|
| 86 |
+
.. _`OAuth 2.0 spec`: https://tools.ietf.org/html/rfc6749
|
| 87 |
+
|
| 88 |
+
OAuthLib is a framework which implements the logic of OAuth1 or OAuth2 without
|
| 89 |
+
assuming a specific HTTP request object or web framework. Use it to graft OAuth
|
| 90 |
+
client support onto your favorite HTTP library, or provide support onto your
|
| 91 |
+
favourite web framework. If you're a maintainer of such a library, write a thin
|
| 92 |
+
veneer on top of OAuthLib and get OAuth support for very little effort.
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
Documentation
|
| 96 |
+
--------------
|
| 97 |
+
|
| 98 |
+
Full documentation is available on `Read the Docs`_. All contributions are very
|
| 99 |
+
welcome! The documentation is still quite sparse, please open an issue for what
|
| 100 |
+
you'd like to know, or discuss it in our `Gitter community`_, or even better, send a
|
| 101 |
+
pull request!
|
| 102 |
+
|
| 103 |
+
.. _`Gitter community`: https://gitter.im/oauthlib/Lobby
|
| 104 |
+
.. _`Read the Docs`: https://oauthlib.readthedocs.io/en/latest/index.html
|
| 105 |
+
|
| 106 |
+
Interested in making OAuth requests?
|
| 107 |
+
------------------------------------
|
| 108 |
+
|
| 109 |
+
Then you might be more interested in using `requests`_ which has OAuthLib
|
| 110 |
+
powered OAuth support provided by the `requests-oauthlib`_ library.
|
| 111 |
+
|
| 112 |
+
.. _`requests`: https://github.com/requests/requests
|
| 113 |
+
.. _`requests-oauthlib`: https://github.com/requests/requests-oauthlib
|
| 114 |
+
|
| 115 |
+
Which web frameworks are supported?
|
| 116 |
+
-----------------------------------
|
| 117 |
+
|
| 118 |
+
The following packages provide OAuth support using OAuthLib.
|
| 119 |
+
|
| 120 |
+
- For Django there is `django-oauth-toolkit`_, which includes `Django REST framework`_ support.
|
| 121 |
+
- For Flask there is `flask-oauthlib`_ and `Flask-Dance`_.
|
| 122 |
+
- For Pyramid there is `pyramid-oauthlib`_.
|
| 123 |
+
- For Bottle there is `bottle-oauthlib`_.
|
| 124 |
+
|
| 125 |
+
If you have written an OAuthLib package that supports your favorite framework,
|
| 126 |
+
please open a Pull Request, updating the documentation.
|
| 127 |
+
|
| 128 |
+
.. _`django-oauth-toolkit`: https://github.com/evonove/django-oauth-toolkit
|
| 129 |
+
.. _`flask-oauthlib`: https://github.com/lepture/flask-oauthlib
|
| 130 |
+
.. _`Django REST framework`: http://django-rest-framework.org
|
| 131 |
+
.. _`Flask-Dance`: https://github.com/singingwolfboy/flask-dance
|
| 132 |
+
.. _`pyramid-oauthlib`: https://github.com/tilgovi/pyramid-oauthlib
|
| 133 |
+
.. _`bottle-oauthlib`: https://github.com/thomsonreuters/bottle-oauthlib
|
| 134 |
+
|
| 135 |
+
Using OAuthLib? Please get in touch!
|
| 136 |
+
------------------------------------
|
| 137 |
+
Patching OAuth support onto an http request framework? Creating an OAuth
|
| 138 |
+
provider extension for a web framework? Simply using OAuthLib to Get Things Done
|
| 139 |
+
or to learn?
|
| 140 |
+
|
| 141 |
+
No matter which we'd love to hear from you in our `Gitter community`_ or if you have
|
| 142 |
+
anything in particular you would like to have, change or comment on don't
|
| 143 |
+
hesitate for a second to send a pull request or open an issue. We might be quite
|
| 144 |
+
busy and therefore slow to reply but we love feedback!
|
| 145 |
+
|
| 146 |
+
Chances are you have run into something annoying that you wish there was
|
| 147 |
+
documentation for, if you wish to gain eternal fame and glory, and a drink if we
|
| 148 |
+
have the pleasure to run into each other, please send a docs pull request =)
|
| 149 |
+
|
| 150 |
+
.. _`Gitter community`: https://gitter.im/oauthlib/Lobby
|
| 151 |
+
|
| 152 |
+
License
|
| 153 |
+
-------
|
| 154 |
+
|
| 155 |
+
OAuthLib is yours to use and abuse according to the terms of the BSD license.
|
| 156 |
+
Check the LICENSE file for full details.
|
| 157 |
+
|
| 158 |
+
Credits
|
| 159 |
+
-------
|
| 160 |
+
|
| 161 |
+
OAuthLib has been started and maintained several years by Idan Gazit and other
|
| 162 |
+
amazing `AUTHORS`_. Thanks to their wonderful work, the open-source `community`_
|
| 163 |
+
creation has been possible and the project can stay active and reactive to users
|
| 164 |
+
requests.
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
.. _`AUTHORS`: https://github.com/oauthlib/oauthlib/blob/master/AUTHORS
|
| 168 |
+
.. _`community`: https://github.com/oauthlib/
|
| 169 |
+
|
| 170 |
+
Changelog
|
| 171 |
+
---------
|
| 172 |
+
|
| 173 |
+
*OAuthLib is in active development, with the core of both OAuth1 and OAuth2
|
| 174 |
+
completed, for providers as well as clients.* See `supported features`_ for
|
| 175 |
+
details.
|
| 176 |
+
|
| 177 |
+
.. _`supported features`: https://oauthlib.readthedocs.io/en/latest/feature_matrix.html
|
| 178 |
+
|
| 179 |
+
For a full changelog see ``CHANGELOG.rst``.
|
evalkit_tf437/lib/python3.10/site-packages/oauthlib-3.2.2.dist-info/RECORD
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
oauthlib-3.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
oauthlib-3.2.2.dist-info/LICENSE,sha256=PR4S2KxSwLbBSK9tKR9yQAuHIO0WwKxKiYaLbRSxyTk,1530
|
| 3 |
+
oauthlib-3.2.2.dist-info/METADATA,sha256=zEM0Qha7hvi0hlwugHjMoKISoAJG8X-SeMDK3CECSG4,7454
|
| 4 |
+
oauthlib-3.2.2.dist-info/RECORD,,
|
| 5 |
+
oauthlib-3.2.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 6 |
+
oauthlib-3.2.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
|
| 7 |
+
oauthlib-3.2.2.dist-info/top_level.txt,sha256=gz2py0fFs1AhG1O7KpHPcIXOgXOwdIiCaSnmLkiR12Q,9
|
| 8 |
+
oauthlib/__init__.py,sha256=XeBZVT7i8Hx3nnEIb0MaNWrFQmxS3sc4SvGkOKhnht4,686
|
| 9 |
+
oauthlib/__pycache__/__init__.cpython-310.pyc,,
|
| 10 |
+
oauthlib/__pycache__/common.cpython-310.pyc,,
|
| 11 |
+
oauthlib/__pycache__/signals.cpython-310.pyc,,
|
| 12 |
+
oauthlib/__pycache__/uri_validate.cpython-310.pyc,,
|
| 13 |
+
oauthlib/common.py,sha256=VL5A-cXOkRbjMu7E79OxDymV7tLOt0Cm2y1xchEw_Ig,13377
|
| 14 |
+
oauthlib/oauth1/__init__.py,sha256=47hEQ7s_FZXLyUt6XVE-DPC8vUMVsJl7_-HCkNM2IlM,822
|
| 15 |
+
oauthlib/oauth1/__pycache__/__init__.cpython-310.pyc,,
|
| 16 |
+
oauthlib/oauth1/rfc5849/__init__.py,sha256=-5sJHDG3JRZQRJYlCjkj3CP2jZgqEg0OY5pVIxE4mxE,16744
|
| 17 |
+
oauthlib/oauth1/rfc5849/__pycache__/__init__.cpython-310.pyc,,
|
| 18 |
+
oauthlib/oauth1/rfc5849/__pycache__/errors.cpython-310.pyc,,
|
| 19 |
+
oauthlib/oauth1/rfc5849/__pycache__/parameters.cpython-310.pyc,,
|
| 20 |
+
oauthlib/oauth1/rfc5849/__pycache__/request_validator.cpython-310.pyc,,
|
| 21 |
+
oauthlib/oauth1/rfc5849/__pycache__/signature.cpython-310.pyc,,
|
| 22 |
+
oauthlib/oauth1/rfc5849/__pycache__/utils.cpython-310.pyc,,
|
| 23 |
+
oauthlib/oauth1/rfc5849/endpoints/__init__.py,sha256=SeIECziJ-Sv_NCGnowG3P9UnX_VdFNldRqRywEaJvxY,327
|
| 24 |
+
oauthlib/oauth1/rfc5849/endpoints/__pycache__/__init__.cpython-310.pyc,,
|
| 25 |
+
oauthlib/oauth1/rfc5849/endpoints/__pycache__/access_token.cpython-310.pyc,,
|
| 26 |
+
oauthlib/oauth1/rfc5849/endpoints/__pycache__/authorization.cpython-310.pyc,,
|
| 27 |
+
oauthlib/oauth1/rfc5849/endpoints/__pycache__/base.cpython-310.pyc,,
|
| 28 |
+
oauthlib/oauth1/rfc5849/endpoints/__pycache__/pre_configured.cpython-310.pyc,,
|
| 29 |
+
oauthlib/oauth1/rfc5849/endpoints/__pycache__/request_token.cpython-310.pyc,,
|
| 30 |
+
oauthlib/oauth1/rfc5849/endpoints/__pycache__/resource.cpython-310.pyc,,
|
| 31 |
+
oauthlib/oauth1/rfc5849/endpoints/__pycache__/signature_only.cpython-310.pyc,,
|
| 32 |
+
oauthlib/oauth1/rfc5849/endpoints/access_token.py,sha256=CRgLV5DqDiwVvbo8MiHySbTEKrxETJV29-VGu-2kQ7Y,9347
|
| 33 |
+
oauthlib/oauth1/rfc5849/endpoints/authorization.py,sha256=zbU7TzO6nB6853UIqtTkhxUV-JTHOdOc-CvdQsIQKWk,6724
|
| 34 |
+
oauthlib/oauth1/rfc5849/endpoints/base.py,sha256=W0IxgaFM7sNFpsis-mo3Ky59wbLl2PNlYjIeqUCvyak,11637
|
| 35 |
+
oauthlib/oauth1/rfc5849/endpoints/pre_configured.py,sha256=Ie5oBUq_JTsXQdvfWhcMRjhH3OOxS_mRHbKBQ9TpsGg,543
|
| 36 |
+
oauthlib/oauth1/rfc5849/endpoints/request_token.py,sha256=1eljiIUPkObutaNDD6J7Kx5Ens1bknqHIEnnEkQGF7k,9291
|
| 37 |
+
oauthlib/oauth1/rfc5849/endpoints/resource.py,sha256=F6f2AecZ1fTdrC7DOERrIFUp2YQ5MLq8-a6VbQLM2ds,7374
|
| 38 |
+
oauthlib/oauth1/rfc5849/endpoints/signature_only.py,sha256=MX5zV66v4-wrR4cu7OmOd_GF3L8ysM60HmEiHtRR0l8,3327
|
| 39 |
+
oauthlib/oauth1/rfc5849/errors.py,sha256=WPvKVjPlgkCYp6TXvcwC8VETkhsZBzphKCkTJKDPNfM,2474
|
| 40 |
+
oauthlib/oauth1/rfc5849/parameters.py,sha256=Abnxpix_Yy7P3A3vbkrV2bkFxtnR5TRTTKdOu9MKydo,4802
|
| 41 |
+
oauthlib/oauth1/rfc5849/request_validator.py,sha256=7Tt1uyt4LAWhKCMrQc9GR_EShZyckPigDXkxDNvxiBE,30987
|
| 42 |
+
oauthlib/oauth1/rfc5849/signature.py,sha256=WY0ArNQg-9HBU2Hl4cpfFfW0Y3WUsW7XRqgL4w5jLmE,32076
|
| 43 |
+
oauthlib/oauth1/rfc5849/utils.py,sha256=IapG_jM6iMe4e0DYWWds1jp-wce2Lf_cuhFrtCP_2ls,2613
|
| 44 |
+
oauthlib/oauth2/__init__.py,sha256=uPkdHF2NEpIM6Ybz-jPPEKU5e56eHptaOz2NPwppyys,1597
|
| 45 |
+
oauthlib/oauth2/__pycache__/__init__.cpython-310.pyc,,
|
| 46 |
+
oauthlib/oauth2/rfc6749/__init__.py,sha256=sJcxfdG6HTloXzhkG8-PTJTVQWoCeNtnw6ODNCJNw58,404
|
| 47 |
+
oauthlib/oauth2/rfc6749/__pycache__/__init__.cpython-310.pyc,,
|
| 48 |
+
oauthlib/oauth2/rfc6749/__pycache__/errors.cpython-310.pyc,,
|
| 49 |
+
oauthlib/oauth2/rfc6749/__pycache__/parameters.cpython-310.pyc,,
|
| 50 |
+
oauthlib/oauth2/rfc6749/__pycache__/request_validator.cpython-310.pyc,,
|
| 51 |
+
oauthlib/oauth2/rfc6749/__pycache__/tokens.cpython-310.pyc,,
|
| 52 |
+
oauthlib/oauth2/rfc6749/__pycache__/utils.cpython-310.pyc,,
|
| 53 |
+
oauthlib/oauth2/rfc6749/clients/__init__.py,sha256=TuYtiErfo0_Ej0816tIv5rBsrwA9BjYz3tu_ZM0X364,504
|
| 54 |
+
oauthlib/oauth2/rfc6749/clients/__pycache__/__init__.cpython-310.pyc,,
|
| 55 |
+
oauthlib/oauth2/rfc6749/clients/__pycache__/backend_application.cpython-310.pyc,,
|
| 56 |
+
oauthlib/oauth2/rfc6749/clients/__pycache__/base.cpython-310.pyc,,
|
| 57 |
+
oauthlib/oauth2/rfc6749/clients/__pycache__/legacy_application.cpython-310.pyc,,
|
| 58 |
+
oauthlib/oauth2/rfc6749/clients/__pycache__/mobile_application.cpython-310.pyc,,
|
| 59 |
+
oauthlib/oauth2/rfc6749/clients/__pycache__/service_application.cpython-310.pyc,,
|
| 60 |
+
oauthlib/oauth2/rfc6749/clients/__pycache__/web_application.cpython-310.pyc,,
|
| 61 |
+
oauthlib/oauth2/rfc6749/clients/backend_application.py,sha256=2kEw6T5Ii2TMSpvHlvi697_eMV9fXjkjqc8DY5sG310,3224
|
| 62 |
+
oauthlib/oauth2/rfc6749/clients/base.py,sha256=6ZW6ewf4SdS2BBGc-rZunfsGYalOqeucToNN8j8f7lc,26652
|
| 63 |
+
oauthlib/oauth2/rfc6749/clients/legacy_application.py,sha256=9V-PGgToIoQcvmG14g9WiQjsDgWs7OnvLpZfmiA2Z24,4032
|
| 64 |
+
oauthlib/oauth2/rfc6749/clients/mobile_application.py,sha256=eos0OLJmy-e5FFRHD-sjzDjMu7DlQxqayslZfbBtTNE,8878
|
| 65 |
+
oauthlib/oauth2/rfc6749/clients/service_application.py,sha256=wksSW2I-sth3ykXps_CssI0m7GC20lOh5jIl_mIjdQU,7812
|
| 66 |
+
oauthlib/oauth2/rfc6749/clients/web_application.py,sha256=RMk0_wzV-36TdMtcoeZ0I-lSB5sHWDpyWFTg5JFYVFM,12088
|
| 67 |
+
oauthlib/oauth2/rfc6749/endpoints/__init__.py,sha256=RL_txhULl35A74dbvlJ7nvqwp3GMCSCpg_4TvjoO-Xk,553
|
| 68 |
+
oauthlib/oauth2/rfc6749/endpoints/__pycache__/__init__.cpython-310.pyc,,
|
| 69 |
+
oauthlib/oauth2/rfc6749/endpoints/__pycache__/authorization.cpython-310.pyc,,
|
| 70 |
+
oauthlib/oauth2/rfc6749/endpoints/__pycache__/base.cpython-310.pyc,,
|
| 71 |
+
oauthlib/oauth2/rfc6749/endpoints/__pycache__/introspect.cpython-310.pyc,,
|
| 72 |
+
oauthlib/oauth2/rfc6749/endpoints/__pycache__/metadata.cpython-310.pyc,,
|
| 73 |
+
oauthlib/oauth2/rfc6749/endpoints/__pycache__/pre_configured.cpython-310.pyc,,
|
| 74 |
+
oauthlib/oauth2/rfc6749/endpoints/__pycache__/resource.cpython-310.pyc,,
|
| 75 |
+
oauthlib/oauth2/rfc6749/endpoints/__pycache__/revocation.cpython-310.pyc,,
|
| 76 |
+
oauthlib/oauth2/rfc6749/endpoints/__pycache__/token.cpython-310.pyc,,
|
| 77 |
+
oauthlib/oauth2/rfc6749/endpoints/authorization.py,sha256=2N2Cb_TQtpUPcqDIclsJnZERtaMKmH9uSgGoMZLFnUI,4584
|
| 78 |
+
oauthlib/oauth2/rfc6749/endpoints/base.py,sha256=fUhCGaftD5bm5PstA6L2CqUNb9kHDpUj4_BsvLRbi4w,4130
|
| 79 |
+
oauthlib/oauth2/rfc6749/endpoints/introspect.py,sha256=zMkbHNJUC0Ww4aqs-Px_Yil_bNPSDDcMlZVA4C1nOb0,4947
|
| 80 |
+
oauthlib/oauth2/rfc6749/endpoints/metadata.py,sha256=NOqXwoD34n94pC3IczakrpXMrZSPDruLMUl2_2MjW6Q,10530
|
| 81 |
+
oauthlib/oauth2/rfc6749/endpoints/pre_configured.py,sha256=ChhORao78XGGlnikJsLb6d_FZvKaLGBUM-te-84NeJ8,11954
|
| 82 |
+
oauthlib/oauth2/rfc6749/endpoints/resource.py,sha256=vpXoovgpmByY-IuW0PDccS5IJGFoFiLVjLLUpGFmXX4,3248
|
| 83 |
+
oauthlib/oauth2/rfc6749/endpoints/revocation.py,sha256=68Ukipz7UOdeBCmO5KTRo0vwbUFd8tTG22Ck0hFlumw,5212
|
| 84 |
+
oauthlib/oauth2/rfc6749/endpoints/token.py,sha256=iJDlaSkVR8U6s1_T9fiyVnLgfCgOWsq9PFDcmzL74H4,4595
|
| 85 |
+
oauthlib/oauth2/rfc6749/errors.py,sha256=5EE4Qs3ru34d33wqaFo-WGOofLLYK1jTov9sqG92CW0,12947
|
| 86 |
+
oauthlib/oauth2/rfc6749/grant_types/__init__.py,sha256=im_XwEWmw3dhmzcdfyhkN38xZopBhL3cRShmmCtqQs0,368
|
| 87 |
+
oauthlib/oauth2/rfc6749/grant_types/__pycache__/__init__.cpython-310.pyc,,
|
| 88 |
+
oauthlib/oauth2/rfc6749/grant_types/__pycache__/authorization_code.cpython-310.pyc,,
|
| 89 |
+
oauthlib/oauth2/rfc6749/grant_types/__pycache__/base.cpython-310.pyc,,
|
| 90 |
+
oauthlib/oauth2/rfc6749/grant_types/__pycache__/client_credentials.cpython-310.pyc,,
|
| 91 |
+
oauthlib/oauth2/rfc6749/grant_types/__pycache__/implicit.cpython-310.pyc,,
|
| 92 |
+
oauthlib/oauth2/rfc6749/grant_types/__pycache__/refresh_token.cpython-310.pyc,,
|
| 93 |
+
oauthlib/oauth2/rfc6749/grant_types/__pycache__/resource_owner_password_credentials.cpython-310.pyc,,
|
| 94 |
+
oauthlib/oauth2/rfc6749/grant_types/authorization_code.py,sha256=RPXH4qdy0byAFjgsEvD0iiOxSrGkbEj7hkweeT8pZRQ,26102
|
| 95 |
+
oauthlib/oauth2/rfc6749/grant_types/base.py,sha256=x5Q6Fm9iScvbkiXU6J_dUzxpXJq-qb_N-TNPYuIiObU,10969
|
| 96 |
+
oauthlib/oauth2/rfc6749/grant_types/client_credentials.py,sha256=Wr0CpWDVmHrIfOBPTYp9RxnISTfYdp5SjSaRAu77vUY,5079
|
| 97 |
+
oauthlib/oauth2/rfc6749/grant_types/implicit.py,sha256=hYAEYOwToxo3eNpGRC9SyJue93tu37jZVL7MYiaErDs,16852
|
| 98 |
+
oauthlib/oauth2/rfc6749/grant_types/refresh_token.py,sha256=3oYMvW0gVhIrjs17RP_SFyZrTNi6hjVrEh45Z1GcIA0,5891
|
| 99 |
+
oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.py,sha256=9FsDbrSNNylWKkEvgdafJDzlNncTRCOhIZODo-f4ZIM,8516
|
| 100 |
+
oauthlib/oauth2/rfc6749/parameters.py,sha256=BUGD3N_H9pBRctUYEDCD7dwncqjcHv7hW1ZcHMMQJb4,19016
|
| 101 |
+
oauthlib/oauth2/rfc6749/request_validator.py,sha256=BZj1OURgQh_sxG5agbSpLTeo_IVxvYUqqDKkoAGqCbM,28851
|
| 102 |
+
oauthlib/oauth2/rfc6749/tokens.py,sha256=RMcY7F4fCAoxZbyUUDN9el_B_XRp4ELrY3pj5awA1Do,11116
|
| 103 |
+
oauthlib/oauth2/rfc6749/utils.py,sha256=EKlU_U-FcYkdd8PvXo1irtHTqBXF7gKqdFKBadteZ64,2207
|
| 104 |
+
oauthlib/oauth2/rfc8628/__init__.py,sha256=yfG2QHuDxrp7_9HNKPEeXYXA_qBVZqiRrhI7q2cG4NM,232
|
| 105 |
+
oauthlib/oauth2/rfc8628/__pycache__/__init__.cpython-310.pyc,,
|
| 106 |
+
oauthlib/oauth2/rfc8628/clients/__init__.py,sha256=indCdGycy9cekvLOBxYbCwtyezEVhl3uKZzoShml-aY,201
|
| 107 |
+
oauthlib/oauth2/rfc8628/clients/__pycache__/__init__.cpython-310.pyc,,
|
| 108 |
+
oauthlib/oauth2/rfc8628/clients/__pycache__/device.cpython-310.pyc,,
|
| 109 |
+
oauthlib/oauth2/rfc8628/clients/device.py,sha256=0QgAfokzgzh6Jx5Nbnkwi2F67N7HruEAc_ZRCWr-JH0,4052
|
| 110 |
+
oauthlib/openid/__init__.py,sha256=qZQCKCdQt40myte_nxSYrWvzf1VVADqRl8om0-t6LzE,162
|
| 111 |
+
oauthlib/openid/__pycache__/__init__.cpython-310.pyc,,
|
| 112 |
+
oauthlib/openid/connect/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 113 |
+
oauthlib/openid/connect/__pycache__/__init__.cpython-310.pyc,,
|
| 114 |
+
oauthlib/openid/connect/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 115 |
+
oauthlib/openid/connect/core/__pycache__/__init__.cpython-310.pyc,,
|
| 116 |
+
oauthlib/openid/connect/core/__pycache__/exceptions.cpython-310.pyc,,
|
| 117 |
+
oauthlib/openid/connect/core/__pycache__/request_validator.cpython-310.pyc,,
|
| 118 |
+
oauthlib/openid/connect/core/__pycache__/tokens.cpython-310.pyc,,
|
| 119 |
+
oauthlib/openid/connect/core/endpoints/__init__.py,sha256=nQ6mGniUaM9X1ENG0tZlPgWgbLdlFESWGK-5_e8mp5Y,229
|
| 120 |
+
oauthlib/openid/connect/core/endpoints/__pycache__/__init__.cpython-310.pyc,,
|
| 121 |
+
oauthlib/openid/connect/core/endpoints/__pycache__/pre_configured.cpython-310.pyc,,
|
| 122 |
+
oauthlib/openid/connect/core/endpoints/__pycache__/userinfo.cpython-310.pyc,,
|
| 123 |
+
oauthlib/openid/connect/core/endpoints/pre_configured.py,sha256=p4Bq4HHUTvCBYXlTHr3PXktABKjHFGC3yBmwxWifzKc,5426
|
| 124 |
+
oauthlib/openid/connect/core/endpoints/userinfo.py,sha256=kc1Q3DN8xByk3Qe_S0LAlmJR2MkXnCnNFqLqVr8y3zU,4096
|
| 125 |
+
oauthlib/openid/connect/core/exceptions.py,sha256=uMMjE7VMc16jyL7TIhpbCx48_MsHD2C_atoMIemBKVA,4790
|
| 126 |
+
oauthlib/openid/connect/core/grant_types/__init__.py,sha256=geSZh6OFlupoC2tg9Bqqsnd31nu1-EheWNobzu86ZqU,426
|
| 127 |
+
oauthlib/openid/connect/core/grant_types/__pycache__/__init__.cpython-310.pyc,,
|
| 128 |
+
oauthlib/openid/connect/core/grant_types/__pycache__/authorization_code.cpython-310.pyc,,
|
| 129 |
+
oauthlib/openid/connect/core/grant_types/__pycache__/base.cpython-310.pyc,,
|
| 130 |
+
oauthlib/openid/connect/core/grant_types/__pycache__/dispatchers.cpython-310.pyc,,
|
| 131 |
+
oauthlib/openid/connect/core/grant_types/__pycache__/hybrid.cpython-310.pyc,,
|
| 132 |
+
oauthlib/openid/connect/core/grant_types/__pycache__/implicit.cpython-310.pyc,,
|
| 133 |
+
oauthlib/openid/connect/core/grant_types/__pycache__/refresh_token.cpython-310.pyc,,
|
| 134 |
+
oauthlib/openid/connect/core/grant_types/authorization_code.py,sha256=WOlS5RlSjIk2VNNmC5O4svxfTeUJiXpL3o5Mqn5EULk,1441
|
| 135 |
+
oauthlib/openid/connect/core/grant_types/base.py,sha256=o0Nn_k6X0VB6m6iKpvlm-QMbtWcYzGagX6Uq2XaBS20,15385
|
| 136 |
+
oauthlib/openid/connect/core/grant_types/dispatchers.py,sha256=RsIHHV8ne8fvlU1bL0jIUNb2pb2DbtJ1RYGV2_Z5GVQ,3978
|
| 137 |
+
oauthlib/openid/connect/core/grant_types/hybrid.py,sha256=PHWBazxe3qpJq02rpU93jaK7URxI_r1zl0Ee4ibcaPA,2742
|
| 138 |
+
oauthlib/openid/connect/core/grant_types/implicit.py,sha256=UICxnDNoePZfTUbL5QCBWA231o8XIQEnxocSrPp9gzw,1971
|
| 139 |
+
oauthlib/openid/connect/core/grant_types/refresh_token.py,sha256=8X0i1EHLgBIrlqP10rwJ5lXWO3f8iupmfn2E6DlLmnw,1035
|
| 140 |
+
oauthlib/openid/connect/core/request_validator.py,sha256=-lo1BnAhMWVkCj2Qhpn22LbV6CDmx4Nh4tCOntCg9tQ,13767
|
| 141 |
+
oauthlib/openid/connect/core/tokens.py,sha256=j8WPb5T8SbmprFUJdM2DjafNU_fa1eohkjUeaJagn0s,1605
|
| 142 |
+
oauthlib/signals.py,sha256=_PKDXWqKW6X3IbQUxGqW4eJ5Yi3p8jdOqXPAKfI956E,1489
|
| 143 |
+
oauthlib/uri_validate.py,sha256=1LwAANg5vBjaypKgbDF_qrt_u7rJPlfg1hTrD9oanEU,6112
|
evalkit_tf437/lib/python3.10/site-packages/oauthlib-3.2.2.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: bdist_wheel (0.37.1)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
| 5 |
+
|
evalkit_tf437/lib/python3.10/site-packages/opencv_python-4.10.0.84.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
evalkit_tf437/lib/python3.10/site-packages/opencv_python-4.10.0.84.dist-info/LICENSE-3RD-PARTY.txt
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
evalkit_tf437/lib/python3.10/site-packages/opencv_python-4.10.0.84.dist-info/LICENSE.txt
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
MIT License
|
| 2 |
+
|
| 3 |
+
Copyright (c) Olli-Pekka Heinisuo
|
| 4 |
+
|
| 5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 6 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 7 |
+
in the Software without restriction, including without limitation the rights
|
| 8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 9 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 10 |
+
furnished to do so, subject to the following conditions:
|
| 11 |
+
|
| 12 |
+
The above copyright notice and this permission notice shall be included in all
|
| 13 |
+
copies or substantial portions of the Software.
|
| 14 |
+
|
| 15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 21 |
+
SOFTWARE.
|
evalkit_tf437/lib/python3.10/site-packages/opencv_python-4.10.0.84.dist-info/METADATA
ADDED
|
@@ -0,0 +1,305 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: opencv-python
|
| 3 |
+
Version: 4.10.0.84
|
| 4 |
+
Summary: Wrapper package for OpenCV python bindings.
|
| 5 |
+
Home-page: https://github.com/opencv/opencv-python
|
| 6 |
+
Maintainer: OpenCV Team
|
| 7 |
+
License: Apache 2.0
|
| 8 |
+
Platform: UNKNOWN
|
| 9 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 10 |
+
Classifier: Environment :: Console
|
| 11 |
+
Classifier: Intended Audience :: Developers
|
| 12 |
+
Classifier: Intended Audience :: Education
|
| 13 |
+
Classifier: Intended Audience :: Information Technology
|
| 14 |
+
Classifier: Intended Audience :: Science/Research
|
| 15 |
+
Classifier: License :: OSI Approved :: Apache Software License
|
| 16 |
+
Classifier: Operating System :: MacOS
|
| 17 |
+
Classifier: Operating System :: Microsoft :: Windows
|
| 18 |
+
Classifier: Operating System :: POSIX
|
| 19 |
+
Classifier: Operating System :: Unix
|
| 20 |
+
Classifier: Programming Language :: Python
|
| 21 |
+
Classifier: Programming Language :: Python :: 3
|
| 22 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 23 |
+
Classifier: Programming Language :: Python :: 3.6
|
| 24 |
+
Classifier: Programming Language :: Python :: 3.7
|
| 25 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 26 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 27 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 28 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 29 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 30 |
+
Classifier: Programming Language :: C++
|
| 31 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 32 |
+
Classifier: Topic :: Scientific/Engineering
|
| 33 |
+
Classifier: Topic :: Scientific/Engineering :: Image Recognition
|
| 34 |
+
Classifier: Topic :: Software Development
|
| 35 |
+
Requires-Python: >=3.6
|
| 36 |
+
Description-Content-Type: text/markdown
|
| 37 |
+
License-File: LICENSE-3RD-PARTY.txt
|
| 38 |
+
License-File: LICENSE.txt
|
| 39 |
+
Requires-Dist: numpy >=1.13.3 ; python_version < "3.7"
|
| 40 |
+
Requires-Dist: numpy >=1.21.0 ; python_version <= "3.9" and platform_system == "Darwin" and platform_machine == "arm64"
|
| 41 |
+
Requires-Dist: numpy >=1.21.2 ; python_version >= "3.10"
|
| 42 |
+
Requires-Dist: numpy >=1.21.4 ; python_version >= "3.10" and platform_system == "Darwin"
|
| 43 |
+
Requires-Dist: numpy >=1.23.5 ; python_version >= "3.11"
|
| 44 |
+
Requires-Dist: numpy >=1.26.0 ; python_version >= "3.12"
|
| 45 |
+
Requires-Dist: numpy >=1.19.3 ; python_version >= "3.6" and platform_system == "Linux" and platform_machine == "aarch64"
|
| 46 |
+
Requires-Dist: numpy >=1.17.0 ; python_version >= "3.7"
|
| 47 |
+
Requires-Dist: numpy >=1.17.3 ; python_version >= "3.8"
|
| 48 |
+
Requires-Dist: numpy >=1.19.3 ; python_version >= "3.9"
|
| 49 |
+
|
| 50 |
+
[](http://pepy.tech/project/opencv-python)
|
| 51 |
+
|
| 52 |
+
### Keep OpenCV Free
|
| 53 |
+
|
| 54 |
+
OpenCV is raising funds to keep the library free for everyone, and we need the support of the entire community to do it. [Donate to OpenCV on Github](https://github.com/sponsors/opencv) to show your support.
|
| 55 |
+
|
| 56 |
+
- [OpenCV on Wheels](#opencv-on-wheels)
|
| 57 |
+
- [Installation and Usage](#installation-and-usage)
|
| 58 |
+
- [Frequently Asked Questions](#frequently-asked-questions)
|
| 59 |
+
- [Documentation for opencv-python](#documentation-for-opencv-python)
|
| 60 |
+
- [CI build process](#ci-build-process)
|
| 61 |
+
- [Manual builds](#manual-builds)
|
| 62 |
+
- [Manual debug builds](#manual-debug-builds)
|
| 63 |
+
- [Source distributions](#source-distributions)
|
| 64 |
+
- [Licensing](#licensing)
|
| 65 |
+
- [Versioning](#versioning)
|
| 66 |
+
- [Releases](#releases)
|
| 67 |
+
- [Development builds](#development-builds)
|
| 68 |
+
- [Manylinux wheels](#manylinux-wheels)
|
| 69 |
+
- [Supported Python versions](#supported-python-versions)
|
| 70 |
+
- [Backward compatibility](#backward-compatibility)
|
| 71 |
+
|
| 72 |
+
## OpenCV on Wheels
|
| 73 |
+
|
| 74 |
+
Pre-built CPU-only OpenCV packages for Python.
|
| 75 |
+
|
| 76 |
+
Check the manual build section if you wish to compile the bindings from source to enable additional modules such as CUDA.
|
| 77 |
+
|
| 78 |
+
### Installation and Usage
|
| 79 |
+
|
| 80 |
+
1. If you have previous/other manually installed (= not installed via ``pip``) version of OpenCV installed (e.g. cv2 module in the root of Python's site-packages), remove it before installation to avoid conflicts.
|
| 81 |
+
2. Make sure that your `pip` version is up-to-date (19.3 is the minimum supported version): `pip install --upgrade pip`. Check version with `pip -V`. For example Linux distributions ship usually with very old `pip` versions which cause a lot of unexpected problems especially with the `manylinux` format.
|
| 82 |
+
3. Select the correct package for your environment:
|
| 83 |
+
|
| 84 |
+
There are four different packages (see options 1, 2, 3 and 4 below) and you should **SELECT ONLY ONE OF THEM**. Do not install multiple different packages in the same environment. There is no plugin architecture: all the packages use the same namespace (`cv2`). If you installed multiple different packages in the same environment, uninstall them all with ``pip uninstall`` and reinstall only one package.
|
| 85 |
+
|
| 86 |
+
**a.** Packages for standard desktop environments (Windows, macOS, almost any GNU/Linux distribution)
|
| 87 |
+
|
| 88 |
+
- Option 1 - Main modules package: ``pip install opencv-python``
|
| 89 |
+
- Option 2 - Full package (contains both main modules and contrib/extra modules): ``pip install opencv-contrib-python`` (check contrib/extra modules listing from [OpenCV documentation](https://docs.opencv.org/master/))
|
| 90 |
+
|
| 91 |
+
**b.** Packages for server (headless) environments (such as Docker, cloud environments etc.), no GUI library dependencies
|
| 92 |
+
|
| 93 |
+
These packages are smaller than the two other packages above because they do not contain any GUI functionality (not compiled with Qt / other GUI components). This means that the packages avoid a heavy dependency chain to X11 libraries and you will have for example smaller Docker images as a result. You should always use these packages if you do not use `cv2.imshow` et al. or you are using some other package (such as PyQt) than OpenCV to create your GUI.
|
| 94 |
+
|
| 95 |
+
- Option 3 - Headless main modules package: ``pip install opencv-python-headless``
|
| 96 |
+
- Option 4 - Headless full package (contains both main modules and contrib/extra modules): ``pip install opencv-contrib-python-headless`` (check contrib/extra modules listing from [OpenCV documentation](https://docs.opencv.org/master/))
|
| 97 |
+
|
| 98 |
+
4. Import the package:
|
| 99 |
+
|
| 100 |
+
``import cv2``
|
| 101 |
+
|
| 102 |
+
All packages contain Haar cascade files. ``cv2.data.haarcascades`` can be used as a shortcut to the data folder. For example:
|
| 103 |
+
|
| 104 |
+
``cv2.CascadeClassifier(cv2.data.haarcascades + "haarcascade_frontalface_default.xml")``
|
| 105 |
+
|
| 106 |
+
5. Read [OpenCV documentation](https://docs.opencv.org/master/)
|
| 107 |
+
|
| 108 |
+
6. Before opening a new issue, read the FAQ below and have a look at the other issues which are already open.
|
| 109 |
+
|
| 110 |
+
Frequently Asked Questions
|
| 111 |
+
--------------------------
|
| 112 |
+
|
| 113 |
+
**Q: Do I need to install also OpenCV separately?**
|
| 114 |
+
|
| 115 |
+
A: No, the packages are special wheel binary packages and they already contain statically built OpenCV binaries.
|
| 116 |
+
|
| 117 |
+
**Q: Pip install fails with ``ModuleNotFoundError: No module named 'skbuild'``?**
|
| 118 |
+
|
| 119 |
+
Since ``opencv-python`` version 4.3.0.\*, ``manylinux1`` wheels were replaced by ``manylinux2014`` wheels. If your pip is too old, it will try to use the new source distribution introduced in 4.3.0.38 to manually build OpenCV because it does not know how to install ``manylinux2014`` wheels. However, source build will also fail because of too old ``pip`` because it does not understand build dependencies in ``pyproject.toml``. To use the new ``manylinux2014`` pre-built wheels (or to build from source), your ``pip`` version must be >= 19.3. Please upgrade ``pip`` with ``pip install --upgrade pip``.
|
| 120 |
+
|
| 121 |
+
**Q: Import fails on Windows: ``ImportError: DLL load failed: The specified module could not be found.``?**
|
| 122 |
+
|
| 123 |
+
A: If the import fails on Windows, make sure you have [Visual C++ redistributable 2015](https://www.microsoft.com/en-us/download/details.aspx?id=48145) installed. If you are using older Windows version than Windows 10 and latest system updates are not installed, [Universal C Runtime](https://support.microsoft.com/en-us/help/2999226/update-for-universal-c-runtime-in-windows) might be also required.
|
| 124 |
+
|
| 125 |
+
Windows N and KN editions do not include Media Feature Pack which is required by OpenCV. If you are using Windows N or KN edition, please install also [Windows Media Feature Pack](https://support.microsoft.com/en-us/help/3145500/media-feature-pack-list-for-windows-n-editions).
|
| 126 |
+
|
| 127 |
+
If you have Windows Server 2012+, media DLLs are probably missing too; please install the Feature called "Media Foundation" in the Server Manager. Beware, some posts advise to install "Windows Server Essentials Media Pack", but this one requires the "Windows Server Essentials Experience" role, and this role will deeply affect your Windows Server configuration (by enforcing active directory integration etc.); so just installing the "Media Foundation" should be a safer choice.
|
| 128 |
+
|
| 129 |
+
If the above does not help, check if you are using Anaconda. Old Anaconda versions have a bug which causes the error, see [this issue](https://github.com/opencv/opencv-python/issues/36) for a manual fix.
|
| 130 |
+
|
| 131 |
+
If you still encounter the error after you have checked all the previous solutions, download [Dependencies](https://github.com/lucasg/Dependencies) and open the ``cv2.pyd`` (located usually at ``C:\Users\username\AppData\Local\Programs\Python\PythonXX\Lib\site-packages\cv2``) file with it to debug missing DLL issues.
|
| 132 |
+
|
| 133 |
+
**Q: I have some other import errors?**
|
| 134 |
+
|
| 135 |
+
A: Make sure you have removed old manual installations of OpenCV Python bindings (cv2.so or cv2.pyd in site-packages).
|
| 136 |
+
|
| 137 |
+
**Q: Function foo() or method bar() returns wrong result, throws exception or crashes interpreter. What should I do?**
|
| 138 |
+
|
| 139 |
+
A: The repository contains only OpenCV-Python package build scripts, but not OpenCV itself. Python bindings for OpenCV are developed in official OpenCV repository and it's the best place to report issues. Also please check [OpenCV wiki](https://github.com/opencv/opencv/wiki) and [the official OpenCV forum](https://forum.opencv.org/) before file new bugs.
|
| 140 |
+
|
| 141 |
+
**Q: Why the packages do not include non-free algorithms?**
|
| 142 |
+
|
| 143 |
+
A: Non-free algorithms such as SURF are not included in these packages because they are patented / non-free and therefore cannot be distributed as built binaries. Note that SIFT is included in the builds due to patent expiration since OpenCV versions 4.3.0 and 3.4.10. See this issue for more info: https://github.com/skvark/opencv-python/issues/126
|
| 144 |
+
|
| 145 |
+
**Q: Why the package and import are different (opencv-python vs. cv2)?**
|
| 146 |
+
|
| 147 |
+
A: It's easier for users to understand ``opencv-python`` than ``cv2`` and it makes it easier to find the package with search engines. `cv2` (old interface in old OpenCV versions was named as `cv`) is the name that OpenCV developers chose when they created the binding generators. This is kept as the import name to be consistent with different kind of tutorials around the internet. Changing the import name or behaviour would be also confusing to experienced users who are accustomed to the ``import cv2``.
|
| 148 |
+
|
| 149 |
+
## Documentation for opencv-python
|
| 150 |
+
|
| 151 |
+
[](https://github.com/opencv/opencv-python/actions/workflows/build_wheels_windows.yml)
|
| 152 |
+
[](https://github.com/opencv/opencv-python/actions/workflows/build_wheels_linux.yml)
|
| 153 |
+
[](https://github.com/opencv/opencv-python/actions/workflows/build_wheels_macos.yml)
|
| 154 |
+
|
| 155 |
+
The aim of this repository is to provide means to package each new [OpenCV release](https://github.com/opencv/opencv/releases) for the most used Python versions and platforms.
|
| 156 |
+
|
| 157 |
+
### CI build process
|
| 158 |
+
|
| 159 |
+
The project is structured like a normal Python package with a standard ``setup.py`` file.
|
| 160 |
+
The build process for a single entry in the build matrices is as follows (see for example `.github/workflows/build_wheels_linux.yml` file):
|
| 161 |
+
|
| 162 |
+
0. In Linux and MacOS build: get OpenCV's optional C dependencies that we compile against
|
| 163 |
+
|
| 164 |
+
1. Checkout repository and submodules
|
| 165 |
+
|
| 166 |
+
- OpenCV is included as submodule and the version is updated
|
| 167 |
+
manually by maintainers when a new OpenCV release has been made
|
| 168 |
+
- Contrib modules are also included as a submodule
|
| 169 |
+
|
| 170 |
+
2. Find OpenCV version from the sources
|
| 171 |
+
|
| 172 |
+
3. Build OpenCV
|
| 173 |
+
|
| 174 |
+
- tests are disabled, otherwise build time increases too much
|
| 175 |
+
- there are 4 build matrix entries for each build combination: with and without contrib modules, with and without GUI (headless)
|
| 176 |
+
- Linux builds run in manylinux Docker containers (CentOS 5)
|
| 177 |
+
- source distributions are separate entries in the build matrix
|
| 178 |
+
|
| 179 |
+
4. Rearrange OpenCV's build result, add our custom files and generate wheel
|
| 180 |
+
|
| 181 |
+
5. Linux and macOS wheels are transformed with auditwheel and delocate, correspondingly
|
| 182 |
+
|
| 183 |
+
6. Install the generated wheel
|
| 184 |
+
7. Test that Python can import the library and run some sanity checks
|
| 185 |
+
8. Use twine to upload the generated wheel to PyPI (only in release builds)
|
| 186 |
+
|
| 187 |
+
Steps 1--4 are handled by ``pip wheel``.
|
| 188 |
+
|
| 189 |
+
The build can be customized with environment variables. In addition to any variables that OpenCV's build accepts, we recognize:
|
| 190 |
+
|
| 191 |
+
- ``CI_BUILD``. Set to ``1`` to emulate the CI environment build behaviour. Used only in CI builds to force certain build flags on in ``setup.py``. Do not use this unless you know what you are doing.
|
| 192 |
+
- ``ENABLE_CONTRIB`` and ``ENABLE_HEADLESS``. Set to ``1`` to build the contrib and/or headless version
|
| 193 |
+
- ``ENABLE_JAVA``, Set to ``1`` to enable the Java client build. This is disabled by default.
|
| 194 |
+
- ``CMAKE_ARGS``. Additional arguments for OpenCV's CMake invocation. You can use this to make a custom build.
|
| 195 |
+
|
| 196 |
+
See the next section for more info about manual builds outside the CI environment.
|
| 197 |
+
|
| 198 |
+
### Manual builds
|
| 199 |
+
|
| 200 |
+
If some dependency is not enabled in the pre-built wheels, you can also run the build locally to create a custom wheel.
|
| 201 |
+
|
| 202 |
+
1. Clone this repository: `git clone --recursive https://github.com/opencv/opencv-python.git`
|
| 203 |
+
2. ``cd opencv-python``
|
| 204 |
+
- you can use `git` to checkout some other version of OpenCV in the `opencv` and `opencv_contrib` submodules if needed
|
| 205 |
+
3. Add custom Cmake flags if needed, for example: `export CMAKE_ARGS="-DSOME_FLAG=ON -DSOME_OTHER_FLAG=OFF"` (in Windows you need to set environment variables differently depending on Command Line or PowerShell)
|
| 206 |
+
4. Select the package flavor which you wish to build with `ENABLE_CONTRIB` and `ENABLE_HEADLESS`: i.e. `export ENABLE_CONTRIB=1` if you wish to build `opencv-contrib-python`
|
| 207 |
+
5. Run ``pip wheel . --verbose``. NOTE: make sure you have the latest ``pip`` version, the ``pip wheel`` command replaces the old ``python setup.py bdist_wheel`` command which does not support ``pyproject.toml``.
|
| 208 |
+
- this might take anything from 5 minutes to over 2 hours depending on your hardware
|
| 209 |
+
6. Pip will print fresh will location at the end of build procedure. If you use old approach with `setup.py` file wheel package will be placed in `dist` folder. Package is ready and you can do with that whatever you wish.
|
| 210 |
+
- Optional: on Linux use some of the `manylinux` images as a build hosts if maximum portability is needed and run `auditwheel` for the wheel after build
|
| 211 |
+
- Optional: on macOS use ``delocate`` (same as ``auditwheel`` but for macOS) for better portability
|
| 212 |
+
|
| 213 |
+
#### Manual debug builds
|
| 214 |
+
|
| 215 |
+
In order to build `opencv-python` in an unoptimized debug build, you need to side-step the normal process a bit.
|
| 216 |
+
|
| 217 |
+
1. Install the packages `scikit-build` and `numpy` via pip.
|
| 218 |
+
2. Run the command `python setup.py bdist_wheel --build-type=Debug`.
|
| 219 |
+
3. Install the generated wheel file in the `dist/` folder with `pip install dist/wheelname.whl`.
|
| 220 |
+
|
| 221 |
+
If you would like the build produce all compiler commands, then the following combination of flags and environment variables has been tested to work on Linux:
|
| 222 |
+
```
|
| 223 |
+
export CMAKE_ARGS='-DCMAKE_VERBOSE_MAKEFILE=ON'
|
| 224 |
+
export VERBOSE=1
|
| 225 |
+
|
| 226 |
+
python3 setup.py bdist_wheel --build-type=Debug
|
| 227 |
+
```
|
| 228 |
+
|
| 229 |
+
See this issue for more discussion: https://github.com/opencv/opencv-python/issues/424
|
| 230 |
+
|
| 231 |
+
#### Source distributions
|
| 232 |
+
|
| 233 |
+
Since OpenCV version 4.3.0, also source distributions are provided in PyPI. This means that if your system is not compatible with any of the wheels in PyPI, ``pip`` will attempt to build OpenCV from sources. If you need a OpenCV version which is not available in PyPI as a source distribution, please follow the manual build guidance above instead of this one.
|
| 234 |
+
|
| 235 |
+
You can also force ``pip`` to build the wheels from the source distribution. Some examples:
|
| 236 |
+
|
| 237 |
+
- ``pip install --no-binary opencv-python opencv-python``
|
| 238 |
+
- ``pip install --no-binary :all: opencv-python``
|
| 239 |
+
|
| 240 |
+
If you need contrib modules or headless version, just change the package name (step 4 in the previous section is not needed). However, any additional CMake flags can be provided via environment variables as described in step 3 of the manual build section. If none are provided, OpenCV's CMake scripts will attempt to find and enable any suitable dependencies. Headless distributions have hard coded CMake flags which disable all possible GUI dependencies.
|
| 241 |
+
|
| 242 |
+
On slow systems such as Raspberry Pi the full build may take several hours. On a 8-core Ryzen 7 3700X the build takes about 6 minutes.
|
| 243 |
+
|
| 244 |
+
### Licensing
|
| 245 |
+
|
| 246 |
+
Opencv-python package (scripts in this repository) is available under MIT license.
|
| 247 |
+
|
| 248 |
+
OpenCV itself is available under [Apache 2](https://github.com/opencv/opencv/blob/master/LICENSE) license.
|
| 249 |
+
|
| 250 |
+
Third party package licenses are at [LICENSE-3RD-PARTY.txt](https://github.com/opencv/opencv-python/blob/master/LICENSE-3RD-PARTY.txt).
|
| 251 |
+
|
| 252 |
+
All wheels ship with [FFmpeg](http://ffmpeg.org) licensed under the [LGPLv2.1](http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html).
|
| 253 |
+
|
| 254 |
+
Non-headless Linux wheels ship with [Qt 5](http://doc.qt.io/qt-5/lgpl.html) licensed under the [LGPLv3](http://www.gnu.org/licenses/lgpl-3.0.html).
|
| 255 |
+
|
| 256 |
+
The packages include also other binaries. Full list of licenses can be found from [LICENSE-3RD-PARTY.txt](https://github.com/opencv/opencv-python/blob/master/LICENSE-3RD-PARTY.txt).
|
| 257 |
+
|
| 258 |
+
### Versioning
|
| 259 |
+
|
| 260 |
+
``find_version.py`` script searches for the version information from OpenCV sources and appends also a revision number specific to this repository to the version string. It saves the version information to ``version.py`` file under ``cv2`` in addition to some other flags.
|
| 261 |
+
|
| 262 |
+
### Releases
|
| 263 |
+
|
| 264 |
+
A release is made and uploaded to PyPI when a new tag is pushed to master branch. These tags differentiate packages (this repo might have modifications but OpenCV version stays same) and should be incremented sequentially. In practice, release version numbers look like this:
|
| 265 |
+
|
| 266 |
+
``cv_major.cv_minor.cv_revision.package_revision`` e.g. ``3.1.0.0``
|
| 267 |
+
|
| 268 |
+
The master branch follows OpenCV master branch releases. 3.4 branch follows OpenCV 3.4 bugfix releases.
|
| 269 |
+
|
| 270 |
+
### Development builds
|
| 271 |
+
|
| 272 |
+
Every commit to the master branch of this repo will be built. Possible build artifacts use local version identifiers:
|
| 273 |
+
|
| 274 |
+
``cv_major.cv_minor.cv_revision+git_hash_of_this_repo`` e.g. ``3.1.0+14a8d39``
|
| 275 |
+
|
| 276 |
+
These artifacts can't be and will not be uploaded to PyPI.
|
| 277 |
+
|
| 278 |
+
### Manylinux wheels
|
| 279 |
+
|
| 280 |
+
Linux wheels are built using [manylinux2014](https://github.com/pypa/manylinux). These wheels should work out of the box for most of the distros (which use GNU C standard library) out there since they are built against an old version of glibc.
|
| 281 |
+
|
| 282 |
+
The default ``manylinux2014`` images have been extended with some OpenCV dependencies. See [Docker folder](https://github.com/skvark/opencv-python/tree/master/docker) for more info.
|
| 283 |
+
|
| 284 |
+
### Supported Python versions
|
| 285 |
+
|
| 286 |
+
Python 3.x compatible pre-built wheels are provided for the officially supported Python versions (not in EOL):
|
| 287 |
+
|
| 288 |
+
- 3.7
|
| 289 |
+
- 3.8
|
| 290 |
+
- 3.9
|
| 291 |
+
- 3.10
|
| 292 |
+
- 3.11
|
| 293 |
+
- 3.12
|
| 294 |
+
|
| 295 |
+
### Backward compatibility
|
| 296 |
+
|
| 297 |
+
Starting from 4.2.0 and 3.4.9 builds the macOS Travis build environment was updated to XCode 9.4. The change effectively dropped support for older than 10.13 macOS versions.
|
| 298 |
+
|
| 299 |
+
Starting from 4.3.0 and 3.4.10 builds the Linux build environment was updated from `manylinux1` to `manylinux2014`. This dropped support for old Linux distributions.
|
| 300 |
+
|
| 301 |
+
Starting from version 4.7.0 the Mac OS GitHub Actions build environment was update to version 11. Mac OS 10.x support depricated. See https://github.com/actions/runner-images/issues/5583
|
| 302 |
+
|
| 303 |
+
Starting from version 4.9.0 the Mac OS GitHub Actions build environment was update to version 12. Mac OS 10.x support depricated by Brew and most of used packages.
|
| 304 |
+
|
| 305 |
+
|
evalkit_tf437/lib/python3.10/site-packages/opencv_python-4.10.0.84.dist-info/RECORD
ADDED
|
@@ -0,0 +1,144 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
cv2/Error/__init__.pyi,sha256=A6NKtoMeZAvZWHC6DrJiwMVChY7LLxFfvuZ2dW4KSm8,4076
|
| 2 |
+
cv2/LICENSE-3RD-PARTY.txt,sha256=T8PBE9U-ldoPPEM2VaZwZ7cxDlOvMettKA6UzGkno2M,152360
|
| 3 |
+
cv2/LICENSE.txt,sha256=CdcZBY54Kse8cbohyUThE2zeK7lXwOiIEh8CGNa18Cw,1070
|
| 4 |
+
cv2/__init__.py,sha256=k2vZTFpd6_AhL8dRr3nToWNlLz6FAlnfIVnbaqPtitg,6612
|
| 5 |
+
cv2/__init__.pyi,sha256=OhpFobK-D08EJnTFveROVi0u4TwA5_7wuDpCCN4M01k,297966
|
| 6 |
+
cv2/__pycache__/__init__.cpython-310.pyc,,
|
| 7 |
+
cv2/__pycache__/config-3.cpython-310.pyc,,
|
| 8 |
+
cv2/__pycache__/config.cpython-310.pyc,,
|
| 9 |
+
cv2/__pycache__/load_config_py2.cpython-310.pyc,,
|
| 10 |
+
cv2/__pycache__/load_config_py3.cpython-310.pyc,,
|
| 11 |
+
cv2/__pycache__/version.cpython-310.pyc,,
|
| 12 |
+
cv2/aruco/__init__.pyi,sha256=XOaNz4SbfQ0UFH8guZ9WgTybx8gekTOWr8452Yjz54E,13995
|
| 13 |
+
cv2/barcode/__init__.pyi,sha256=19t0bbiTB8nxuT0DyqcTwEWGBynXm6NkaZg646flAL0,1441
|
| 14 |
+
cv2/config-3.py,sha256=mnqt9yS4IgAfXpY7Af1ON11F4su-Mo0sp7QqRAwIOhw,724
|
| 15 |
+
cv2/config.py,sha256=l04tQJbuGpqaNB3xvzPhaXNoO_GsczAG3if_LyO8WE0,111
|
| 16 |
+
cv2/cuda/__init__.pyi,sha256=gNkBAoEdrvkxwo4brAXNBCU_RDWixz575CWi2YEvYK4,16036
|
| 17 |
+
cv2/cv2.abi3.so,sha256=xBOUM7irWmVsE7EmEChZnYJyVMISaN0XMSxvbK4efcQ,65859185
|
| 18 |
+
cv2/data/__init__.py,sha256=125Pcte_OtB55ZxjWg5ko8ugpnogZ1sRMyP48dtBCMw,70
|
| 19 |
+
cv2/data/__pycache__/__init__.cpython-310.pyc,,
|
| 20 |
+
cv2/data/haarcascade_eye.xml,sha256=ccxk_DBaNV3GAGeID2-71D3RVb1j7jhEZhob2jSy_Yw,341406
|
| 21 |
+
cv2/data/haarcascade_eye_tree_eyeglasses.xml,sha256=4y-cZ5NcM-nRMx6xT6WFVP8Xg1wDdCZjvLl6iS6Talc,601661
|
| 22 |
+
cv2/data/haarcascade_frontalcatface.xml,sha256=rCusk07yQoTviisunY5X7vhKwdaUO00R5cnoWE3Aacg,411388
|
| 23 |
+
cv2/data/haarcascade_frontalcatface_extended.xml,sha256=_9DR0o8H0DdsidtMmEUAnChVzHbIz_dj1TMdyTYdqFQ,382918
|
| 24 |
+
cv2/data/haarcascade_frontalface_alt.xml,sha256=YoHfE0Wcwhj_BH0Csq44WbEv8UqT_-iVL3sz-te5aXs,676709
|
| 25 |
+
cv2/data/haarcascade_frontalface_alt2.xml,sha256=ewyWfZq7373gJeuceGlH0VG2QmBA0HqPlWLtj9kHJLQ,540616
|
| 26 |
+
cv2/data/haarcascade_frontalface_alt_tree.xml,sha256=Dl7kfswTJp1U3XpV-LU3UhZ8Ulh3IId3MjiPsHigSAo,2689040
|
| 27 |
+
cv2/data/haarcascade_frontalface_default.xml,sha256=D31FJ4ROtRTUpJSOgi2pD7sWo0oLu7xq3GSYdHpar7A,930127
|
| 28 |
+
cv2/data/haarcascade_fullbody.xml,sha256=BBdFxx7vG1yGrvIk8XznWwQtMzFMyPZ1dCT4vYzTCqE,476827
|
| 29 |
+
cv2/data/haarcascade_lefteye_2splits.xml,sha256=dMMjx4yBR1_JFY-sv7hmuwzKBr5B9XHfR9SsjQH5zkw,195369
|
| 30 |
+
cv2/data/haarcascade_license_plate_rus_16stages.xml,sha256=TRxEv3obxOIE-iWwRu0Kz_1_cTzBP-KVi2l3Elxg3eo,47775
|
| 31 |
+
cv2/data/haarcascade_lowerbody.xml,sha256=HmluHHxmxDmuIpz_-IcfQgN8NX6eHgkKK1nrwfj_XLs,395322
|
| 32 |
+
cv2/data/haarcascade_profileface.xml,sha256=s5pKO-RVOdsUan_B0-dhopLBluuIQhGF5qYVswVeYS0,828514
|
| 33 |
+
cv2/data/haarcascade_righteye_2splits.xml,sha256=TPDXK-pzB-mvfrmdSsvhXXEBpnwi_Nz77v1pKtN893Y,196170
|
| 34 |
+
cv2/data/haarcascade_russian_plate_number.xml,sha256=gUy1lUaCr1cOWDYfnl-LW1E6QRJ3a7nsrO-fDkymwtc,75482
|
| 35 |
+
cv2/data/haarcascade_smile.xml,sha256=TKHzBOq9C1rjAYDIGstT4Walhn5b4Xsxa9PzLP34fYo,188506
|
| 36 |
+
cv2/data/haarcascade_upperbody.xml,sha256=cyirT9sVkvU9mNfqWxudkOAa9dlfISrzeMfrV5BIu18,785819
|
| 37 |
+
cv2/detail/__init__.pyi,sha256=FXndW6oxsE46hjgKBezLvqJ_iEAcOCmNOAZSpbSM_-8,22374
|
| 38 |
+
cv2/dnn/__init__.pyi,sha256=v_SSO59MvE3Ys1To0zcO0QpJVK9XANaJf8JUxgjtjqI,22811
|
| 39 |
+
cv2/fisheye/__init__.pyi,sha256=Nbxh4ounDQfzsAxkM_hJAPp7zxiIO9ZNqke0JjFG3hs,8520
|
| 40 |
+
cv2/flann/__init__.pyi,sha256=ZxYG07bhFyFRA2d1lbPmAm_KEknsTcE1_NNw_Ksz1HQ,2677
|
| 41 |
+
cv2/gapi/__init__.py,sha256=6WBAjfq1FCiRADgYXGAKITHdBB6t0_jZ8hkTU8Biz-M,10298
|
| 42 |
+
cv2/gapi/__init__.pyi,sha256=zCLTsHvmbiGmlDUXPWqOGdgFcj66_iw7FXiTr4Y91m0,14636
|
| 43 |
+
cv2/gapi/__pycache__/__init__.cpython-310.pyc,,
|
| 44 |
+
cv2/gapi/core/__init__.pyi,sha256=_3OM_ITOrZomn7gs4HM-DRk8ngbjWkdr26KrmH3t4ks,142
|
| 45 |
+
cv2/gapi/core/cpu/__init__.pyi,sha256=MfRTDEPtcQekGnrvoaSSadxyylXPfa2lz8ucAkzjmh8,93
|
| 46 |
+
cv2/gapi/core/fluid/__init__.pyi,sha256=MfRTDEPtcQekGnrvoaSSadxyylXPfa2lz8ucAkzjmh8,93
|
| 47 |
+
cv2/gapi/core/ocl/__init__.pyi,sha256=MfRTDEPtcQekGnrvoaSSadxyylXPfa2lz8ucAkzjmh8,93
|
| 48 |
+
cv2/gapi/ie/__init__.pyi,sha256=rbOXOU39Wpt9Lhh1o1qr7Zj7qljqAu6aqoYsm4433yQ,1117
|
| 49 |
+
cv2/gapi/ie/detail/__init__.pyi,sha256=hGTS3yIiIq1B-djXgSQIPmeF7VDyeyucUuZOnd4O0OQ,269
|
| 50 |
+
cv2/gapi/imgproc/__init__.pyi,sha256=UUtPJcDK_UaE_TKN8K9Oz1TEChCQHDDB_eTI08mVXmU,71
|
| 51 |
+
cv2/gapi/imgproc/fluid/__init__.pyi,sha256=MfRTDEPtcQekGnrvoaSSadxyylXPfa2lz8ucAkzjmh8,93
|
| 52 |
+
cv2/gapi/oak/__init__.pyi,sha256=Tb7YXytKxnBFZZ8qTqHSZsDEpRt2937NXtbOQK23Ksc,1734
|
| 53 |
+
cv2/gapi/onnx/__init__.pyi,sha256=XAQ4M2p7kcm0gSL_2OJkjoI8h5AzlHQh6xDQEX7z5e4,1344
|
| 54 |
+
cv2/gapi/onnx/ep/__init__.pyi,sha256=dUYUbcjIjWtx7peQLPKU60qUzMqEH8On9mU4lsdXbmQ,1357
|
| 55 |
+
cv2/gapi/ot/__init__.pyi,sha256=XTMT90lnElxl_KfhFi5xDwQWvB0g5N8tf7Cgb8VHcAY,720
|
| 56 |
+
cv2/gapi/ot/cpu/__init__.pyi,sha256=MfRTDEPtcQekGnrvoaSSadxyylXPfa2lz8ucAkzjmh8,93
|
| 57 |
+
cv2/gapi/ov/__init__.pyi,sha256=3BqKzC_lV-wzhwu2cawCBvGbMG_zxt5D6anjhORXvuM,2647
|
| 58 |
+
cv2/gapi/own/__init__.pyi,sha256=GzL91pOQQNsGcBGmZ_XDAXaLoF4N9qVgj_IaYzduSNc,69
|
| 59 |
+
cv2/gapi/own/detail/__init__.pyi,sha256=sTC8JFcjDcVxnaFfFc-VmuxjHBg6RMzfafFHtS8yrFU,140
|
| 60 |
+
cv2/gapi/render/__init__.pyi,sha256=S4FWzy_CJqqs3dPYl3bXJoLQSGeVZdoBK7EmHvbPVOM,66
|
| 61 |
+
cv2/gapi/render/ocv/__init__.pyi,sha256=MfRTDEPtcQekGnrvoaSSadxyylXPfa2lz8ucAkzjmh8,93
|
| 62 |
+
cv2/gapi/streaming/__init__.pyi,sha256=qIOndKlPMevrSglTW-vVugzy_n7nITT6lr_zrlUv9cI,813
|
| 63 |
+
cv2/gapi/video/__init__.pyi,sha256=V0Emspufw7x2-knfd7kE8LnLjY_ujIz_TaxR_oIyAps,150
|
| 64 |
+
cv2/gapi/wip/__init__.pyi,sha256=f7mz60ehM9yrK0_Vt28NP--WietDE65EjM5O91LVx5M,1086
|
| 65 |
+
cv2/gapi/wip/draw/__init__.pyi,sha256=x2BhywI5C-uMHF1H6L9AwrgjRtKHFr032TOnqtE9a9Q,3162
|
| 66 |
+
cv2/gapi/wip/gst/__init__.pyi,sha256=8VtSKP9duTmY7ETAACwzVEWP9xdDW0pW82UtL_8Z7Aw,467
|
| 67 |
+
cv2/gapi/wip/onevpl/__init__.pyi,sha256=eLbVPey7JCU5YdRSUH6lLlD1eT-1s7YqZrQh6xNdIlo,397
|
| 68 |
+
cv2/ipp/__init__.pyi,sha256=WSHVIqIT97vmudtuJjhOJYiZ0iBdYx4AtB0iJqtdD0o,223
|
| 69 |
+
cv2/load_config_py2.py,sha256=xP_h2pObzfbN8tONV7CAQmGh94fQ-0t0HysrXDDlt_Q,151
|
| 70 |
+
cv2/load_config_py3.py,sha256=A9wfETdKZnybfbEN1SdtZAsMLVsueGa0zO93JzK9OFI,262
|
| 71 |
+
cv2/mat_wrapper/__init__.py,sha256=i2JwY6kmDL_s7YXzIl-JZuWCMVYkRi4F6j60W3j4P9A,1124
|
| 72 |
+
cv2/mat_wrapper/__pycache__/__init__.cpython-310.pyc,,
|
| 73 |
+
cv2/misc/__init__.py,sha256=yr9PkxKslxRc87hhtIJRn5RommP9jaqksYr-ZDuj7cU,37
|
| 74 |
+
cv2/misc/__pycache__/__init__.cpython-310.pyc,,
|
| 75 |
+
cv2/misc/__pycache__/version.cpython-310.pyc,,
|
| 76 |
+
cv2/misc/version.py,sha256=iTExq1jwGgAv3jtYQHRI8pSpmfzPsjkG9brsH0bdYhk,90
|
| 77 |
+
cv2/ml/__init__.pyi,sha256=KGiSrNBU8YWqJzhV3owS_b_nKl_40EXwdGrmC1e41J4,22803
|
| 78 |
+
cv2/ocl/__init__.pyi,sha256=qv_ilpHZosfPEMHEEqqQLe6cJpsb9PiiwIZMbd---ho,5527
|
| 79 |
+
cv2/ogl/__init__.pyi,sha256=KxTX9DHYyXg2ipvOJiFeAsRivAjmvBkqeiLZV-0snII,1472
|
| 80 |
+
cv2/parallel/__init__.pyi,sha256=tc5nNoWrTkD7VAfhbajumKF79LBolpqlKjYX-lY2__8,129
|
| 81 |
+
cv2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 82 |
+
cv2/qt/fonts/DejaVuSans-Bold.ttf,sha256=5JIhu_F8A2EnQUMWn35vFthxX2XUnx2fIW6z1mFAAwg,672300
|
| 83 |
+
cv2/qt/fonts/DejaVuSans-BoldOblique.ttf,sha256=c1891zgbb04MpRm3JiDHWqlT7o7om4jL5POMo8I9ank,611212
|
| 84 |
+
cv2/qt/fonts/DejaVuSans-ExtraLight.ttf,sha256=kI1uyALygVXI3oYZK1p3qftBeS8HLgNSbwU2wjTz6aA,345204
|
| 85 |
+
cv2/qt/fonts/DejaVuSans-Oblique.ttf,sha256=O8nAL-_K3VF-WhWLLzQjPdNU1n9DAkhtiOhLykZ9HUM,611556
|
| 86 |
+
cv2/qt/fonts/DejaVuSans.ttf,sha256=FdotjxLmlQABscyCJcG6ct3OGTiDfTdwL_Ppv215vV4,720012
|
| 87 |
+
cv2/qt/fonts/DejaVuSansCondensed-Bold.ttf,sha256=-GxtQKUv_itA8Z0Llyykuc40f8BNz8TQtOkneocSwN0,631992
|
| 88 |
+
cv2/qt/fonts/DejaVuSansCondensed-BoldOblique.ttf,sha256=ZldpOhjs7-4mZ9nQ7LGrtoUk1ieZm9g2UDnBngS0I4E,580168
|
| 89 |
+
cv2/qt/fonts/DejaVuSansCondensed-Oblique.ttf,sha256=SPmU6BKEZmq0v4nvTXMIWwf65sLH4oggqyQ-mUHEgp4,576004
|
| 90 |
+
cv2/qt/fonts/DejaVuSansCondensed.ttf,sha256=afE1XJ7vCj0RpsBvPL8dRuq_2tzJk1iaO-k6RO2GeLQ,643852
|
| 91 |
+
cv2/qt/plugins/platforms/libqxcb.so,sha256=7HhQQTyC8dbU57JkVYKssT5yGaQgv3CNcQv5gowmyzY,29313
|
| 92 |
+
cv2/samples/__init__.pyi,sha256=cjSW5vo2oMpIWHwP-3IY4hWjlKUTz8gd1MX7pLOCWKo,324
|
| 93 |
+
cv2/segmentation/__init__.pyi,sha256=jwKBUCRaXhHAM3FdzpLuGucGfNLWxWu5CDfLOpkcan4,1739
|
| 94 |
+
cv2/typing/__init__.py,sha256=sWWvL-Dx0gZaxy7xgu9Tg4d4_NIpVnt6XYPggdO1-2Y,5256
|
| 95 |
+
cv2/typing/__pycache__/__init__.cpython-310.pyc,,
|
| 96 |
+
cv2/utils/__init__.py,sha256=fuw4GHHOXsxxKc-AadAEOKQq_I1Gr4G3yMlRvAbTP30,330
|
| 97 |
+
cv2/utils/__init__.pyi,sha256=q7PpnVUH597R_sF7AGrsRVDOIGKflT0b77ll-mkmb7g,3592
|
| 98 |
+
cv2/utils/__pycache__/__init__.cpython-310.pyc,,
|
| 99 |
+
cv2/utils/fs/__init__.pyi,sha256=lu2cK1Dbd7wRTOTju_kVVCvU4mNB5v5hSVpBxSXXvJg,87
|
| 100 |
+
cv2/utils/nested/__init__.pyi,sha256=n2J3aSxC2MrPKaKb4igY_d49luuuQqW7A_YTx6eZz9Q,573
|
| 101 |
+
cv2/version.py,sha256=hXBqbFBstDpTra5kKI-U1LIU68W0rcuCSw80FuvXTzI,93
|
| 102 |
+
cv2/videoio_registry/__init__.pyi,sha256=h-7AlM3cFG5xxcPwZiVQ3n3ibe7BpGPlhgDcWOqZPA4,783
|
| 103 |
+
opencv_python-4.10.0.84.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 104 |
+
opencv_python-4.10.0.84.dist-info/LICENSE-3RD-PARTY.txt,sha256=T8PBE9U-ldoPPEM2VaZwZ7cxDlOvMettKA6UzGkno2M,152360
|
| 105 |
+
opencv_python-4.10.0.84.dist-info/LICENSE.txt,sha256=CdcZBY54Kse8cbohyUThE2zeK7lXwOiIEh8CGNa18Cw,1070
|
| 106 |
+
opencv_python-4.10.0.84.dist-info/METADATA,sha256=FNehjB0CiaUMOgXtL287MF_XY5jQZu7qyl3fm0pPR0w,20259
|
| 107 |
+
opencv_python-4.10.0.84.dist-info/RECORD,,
|
| 108 |
+
opencv_python-4.10.0.84.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 109 |
+
opencv_python-4.10.0.84.dist-info/WHEEL,sha256=EAtJEfUIJ_UiIhsbDQlddQslMIo1TSnut_vtG8YV5KA,142
|
| 110 |
+
opencv_python-4.10.0.84.dist-info/top_level.txt,sha256=SY8vrf_sYOg99OP9euhz7q36pPy_2VK5vbeEWXwwSoc,4
|
| 111 |
+
opencv_python.libs/libQt5Core-e6d3f451.so.5.15.13,sha256=ar-dm8wYktpSOPmQQWhcpl564zXk_ZE9AsSMW6RgGug,7403809
|
| 112 |
+
opencv_python.libs/libQt5Gui-5dea4132.so.5.15.13,sha256=PFINZkfTRm9ImCGmfO3wMpB4vaszHVIwF9idigbZBiY,8873169
|
| 113 |
+
opencv_python.libs/libQt5Test-d435aae7.so.5.15.13,sha256=OQMLlNnt76CRyIrJFsv2THkSidYr_eaWmjnpK96q-_8,428265
|
| 114 |
+
opencv_python.libs/libQt5Widgets-e392eaf8.so.5.15.13,sha256=txw7I4UIrB83wazf4PWQ1GS5RSGqTuDI8rRcDqY0Zxo,8930793
|
| 115 |
+
opencv_python.libs/libQt5XcbQpa-3bf8a55a.so.5.15.13,sha256=IxXxw8TF_3UfnbBOj9zqpLzICiqc7aRo827pWi2Y6t4,1837473
|
| 116 |
+
opencv_python.libs/libX11-xcb-0e257303.so.1.0.0,sha256=qM9S9n-2hEv6GdR_37Z4GgKvPKav54up-1ijFLk2XXI,8873
|
| 117 |
+
opencv_python.libs/libXau-00ec42fe.so.6.0.0,sha256=JjysEtjYterX3CORw1X-n8k5lA4eoi7ZjuVqjLYc5oQ,17049
|
| 118 |
+
opencv_python.libs/libavcodec-9aae324f.so.59.37.100,sha256=gqg2Ki-6C7bWuPVlxHLyrjD6-g9oNsdwGg6kZB2tBhY,13448513
|
| 119 |
+
opencv_python.libs/libavformat-3ff1be5b.so.59.27.100,sha256=_A2syd44-eJSf4nnEmfX337E9XT5WstE8IOb2bfs8Gg,2571489
|
| 120 |
+
opencv_python.libs/libavutil-a0a0531e.so.57.28.100,sha256=_HhiKqfwZH7fZ95HlYWD9p3ANOucUPLvqFPHvhxTq6Y,844673
|
| 121 |
+
opencv_python.libs/libcrypto-337dac8b.so.1.1,sha256=0opzjndvX1wXs1d7FrbfwJMTIGBhJ2nQPPQEjroQt6o,3481345
|
| 122 |
+
opencv_python.libs/libgfortran-91cc3cb1.so.3.0.0,sha256=VePrZzBsL_F-b4oIEOqg3LJulM2DkkxQZdUEDoeBRgg,1259665
|
| 123 |
+
opencv_python.libs/libopenblas-r0-f650aae0.3.3.so,sha256=eewCtT9XPNcRaonwTDl0cwGOf9oFcgs1TUNQXBnUeVg,37325001
|
| 124 |
+
opencv_python.libs/libpng16-1bde1c40.so.16.43.0,sha256=02j5YLlUW3rzjlXdakRnHd852_9hWJ6dbvZ-Kwoex2Y,1105201
|
| 125 |
+
opencv_python.libs/libquadmath-96973f99.so.0.0.0,sha256=k0wi3tDn0WnE1GeIdslgUa3z2UVF2pYvYLQWWbB12js,247609
|
| 126 |
+
opencv_python.libs/libssl-28bef1ac.so.1.1,sha256=ztxM3ZFLkgmYMbZoTqNGqj_ycgrn64a6Wa9Ni66AWmU,736177
|
| 127 |
+
opencv_python.libs/libswresample-2ec4394e.so.4.7.100,sha256=53S-M_Gn06zoAaUbYkdaMuLvXEWu2Mv1_YLkiW2oJ9I,132417
|
| 128 |
+
opencv_python.libs/libswscale-2c3c8be7.so.6.7.100,sha256=Lp2HzwvDYmIHUUay0z4VqLo5jICmVQr3Z4uD1C1IXVA,619945
|
| 129 |
+
opencv_python.libs/libvpx-c3a7933e.so.9.0.0,sha256=IGHYF4IPzg_AB5f9LeyGhur0ZGy4xgi_j_cJBUbdVF8,3508265
|
| 130 |
+
opencv_python.libs/libxcb-icccm-413c9f41.so.4.0.0,sha256=KrtUIHu46x9mIwMEkEYflhOFmYFjvUB3Ok1Dn9936eI,24377
|
| 131 |
+
opencv_python.libs/libxcb-image-e82a276d.so.0.0.0,sha256=QYC_KsToCXKQ2u87uOb2WJmK6Z-S4yynjqYWiI3stTY,25601
|
| 132 |
+
opencv_python.libs/libxcb-keysyms-21015570.so.1.0.0,sha256=PjX3WLcXNZucKONqtqBW4wPbmcaukPVyLPu2JCXZ7QQ,13209
|
| 133 |
+
opencv_python.libs/libxcb-randr-a96a5a87.so.0.1.0,sha256=LZmVHqS5soTrAUfIJ4cy0BKHrBk0Q8cy7IBJFbhsHvY,93921
|
| 134 |
+
opencv_python.libs/libxcb-render-637b984a.so.0.0.0,sha256=COOiubLk9Kv2S4wVA5QaRzgllJYpLLGXjYQAKM3hs2c,78105
|
| 135 |
+
opencv_python.libs/libxcb-render-util-43ce00f5.so.0.0.0,sha256=N0OPbas7C-jZx7kb3--foJiJPc5odPSj-hdma1yRG2E,22161
|
| 136 |
+
opencv_python.libs/libxcb-shape-25c2b258.so.0.0.0,sha256=8xHTe9DQmFzk-5HtT33th8bvgCroLJiEvXdAiN3i1io,21769
|
| 137 |
+
opencv_python.libs/libxcb-shm-7a199f70.so.0.0.0,sha256=XrF9nlIKkNrLG9HkXnn_XIeIHPwr20hRrTWETbzVGwE,21377
|
| 138 |
+
opencv_python.libs/libxcb-sync-89374f40.so.1.0.0,sha256=-w1wV0pfEQbSmW-QGzsRSADRNReahcQtlYgqIjKgHeE,35673
|
| 139 |
+
opencv_python.libs/libxcb-util-4d666913.so.1.0.0,sha256=44mg7PRdg-AK2vHz0GT1yzW0iN8d_GUFvhFGlrLtMo8,26281
|
| 140 |
+
opencv_python.libs/libxcb-xfixes-9be3ba6f.so.0.0.0,sha256=n5_94_1LwyIvg9S1I1dbu6a3ROBn28MQgT-maLnRtFM,45337
|
| 141 |
+
opencv_python.libs/libxcb-xinerama-ae147f87.so.0.0.0,sha256=iUXAB0Ox6t7vVAJOQEzTK4GVjW3AbnHOFsWyxml6RNo,17529
|
| 142 |
+
opencv_python.libs/libxcb-xkb-9ba31ab3.so.1.0.0,sha256=4toATK-D72nN4FjDv7ZCXjkMpU1Giroj5hr2ebVlOjk,157921
|
| 143 |
+
opencv_python.libs/libxkbcommon-71ae2972.so.0.0.0,sha256=H8s4pka9HOHar2gq0pty5lv99noGM1snj46Z0LdTAhI,269865
|
| 144 |
+
opencv_python.libs/libxkbcommon-x11-c65ed502.so.0.0.0,sha256=NLByawCP4Fm3AgQmUIDl2zSvrMCvKhJpitbDiuEWbVQ,48097
|
evalkit_tf437/lib/python3.10/site-packages/opencv_python-4.10.0.84.dist-info/REQUESTED
ADDED
|
File without changes
|
evalkit_tf437/lib/python3.10/site-packages/opencv_python-4.10.0.84.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: skbuild 0.17.6
|
| 3 |
+
Root-Is-Purelib: false
|
| 4 |
+
Tag: cp37-abi3-manylinux_2_17_x86_64
|
| 5 |
+
Tag: cp37-abi3-manylinux2014_x86_64
|
| 6 |
+
|
evalkit_tf437/lib/python3.10/site-packages/packaging-24.2.dist-info/LICENSE.BSD
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright (c) Donald Stufft and individual contributors.
|
| 2 |
+
All rights reserved.
|
| 3 |
+
|
| 4 |
+
Redistribution and use in source and binary forms, with or without
|
| 5 |
+
modification, are permitted provided that the following conditions are met:
|
| 6 |
+
|
| 7 |
+
1. Redistributions of source code must retain the above copyright notice,
|
| 8 |
+
this list of conditions and the following disclaimer.
|
| 9 |
+
|
| 10 |
+
2. Redistributions in binary form must reproduce the above copyright
|
| 11 |
+
notice, this list of conditions and the following disclaimer in the
|
| 12 |
+
documentation and/or other materials provided with the distribution.
|
| 13 |
+
|
| 14 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
| 15 |
+
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
| 16 |
+
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
| 17 |
+
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
| 18 |
+
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
| 19 |
+
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
| 20 |
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
| 21 |
+
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
| 22 |
+
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 23 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
evalkit_tf437/lib/python3.10/site-packages/pygments/__init__.py
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Pygments
|
| 3 |
+
~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Pygments is a syntax highlighting package written in Python.
|
| 6 |
+
|
| 7 |
+
It is a generic syntax highlighter for general use in all kinds of software
|
| 8 |
+
such as forum systems, wikis or other applications that need to prettify
|
| 9 |
+
source code. Highlights are:
|
| 10 |
+
|
| 11 |
+
* a wide range of common languages and markup formats is supported
|
| 12 |
+
* special attention is paid to details, increasing quality by a fair amount
|
| 13 |
+
* support for new languages and formats are added easily
|
| 14 |
+
* a number of output formats, presently HTML, LaTeX, RTF, SVG, all image
|
| 15 |
+
formats that PIL supports, and ANSI sequences
|
| 16 |
+
* it is usable as a command-line tool and as a library
|
| 17 |
+
* ... and it highlights even Brainfuck!
|
| 18 |
+
|
| 19 |
+
The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``.
|
| 20 |
+
|
| 21 |
+
.. _Pygments master branch:
|
| 22 |
+
https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
|
| 23 |
+
|
| 24 |
+
:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
|
| 25 |
+
:license: BSD, see LICENSE for details.
|
| 26 |
+
"""
|
| 27 |
+
from io import StringIO, BytesIO
|
| 28 |
+
|
| 29 |
+
__version__ = '2.18.0'
|
| 30 |
+
__docformat__ = 'restructuredtext'
|
| 31 |
+
|
| 32 |
+
__all__ = ['lex', 'format', 'highlight']
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def lex(code, lexer):
    """Run *lexer* (a ``Lexer`` instance) over *code* and return the
    resulting iterable of ``(tokentype, value)`` pairs.

    This is a thin wrapper around ``lexer.get_tokens()``.
    """
    try:
        tokens = lexer.get_tokens(code)
    except TypeError:
        # A frequent caller mistake is passing the lexer *class* instead
        # of an instance; detect that case and raise a clearer message.
        from pygments.lexer import RegexLexer
        if isinstance(lexer, type) and issubclass(lexer, RegexLexer):
            raise TypeError('lex() argument must be a lexer instance, '
                            'not a class')
        raise
    return tokens
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def format(tokens, formatter, outfile=None):  # pylint: disable=redefined-builtin
    """Render *tokens* (an iterable of tokens) through *formatter*
    (a `Formatter` instance).

    When *outfile* is a valid file-like object (anything with a ``write``
    method) the output is written to it and ``None`` is returned;
    otherwise the formatted result is returned as a string (or bytes for
    encoding-aware formatters).
    """
    try:
        if outfile:
            formatter.format(tokens, outfile)
        else:
            # Encoding-aware formatters emit bytes, the rest emit text.
            buffer = BytesIO() if getattr(formatter, 'encoding', None) else StringIO()
            formatter.format(tokens, buffer)
            return buffer.getvalue()
    except TypeError:
        # A frequent caller mistake is passing the formatter *class*
        # instead of an instance; detect that case for a clearer message.
        from pygments.formatter import Formatter
        if isinstance(formatter, type) and issubclass(formatter, Formatter):
            raise TypeError('format() argument must be a formatter instance, '
                            'not a class')
        raise
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def highlight(code, lexer, formatter, outfile=None):
    """Highlight *code* in one step.

    This is the most high-level highlighting function; it simply combines
    `lex` and `format` into a single call.
    """
    tokens = lex(code, lexer)
    return format(tokens, formatter, outfile)
|
evalkit_tf437/lib/python3.10/site-packages/pygments/filter.py
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.filter
|
| 3 |
+
~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Module that implements the default filter.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def apply_filters(stream, filters, lexer=None):
    """
    Lazily pipe *stream* through every filter in *filters*, in order.
    If *lexer* is given it is forwarded to each filter, otherwise the
    filters receive ``None``.
    """
    def _pipe(flt, inner):
        # Generator wrapper: the filter is only consulted once the
        # resulting stream is actually iterated.
        yield from flt.filter(lexer, inner)

    current = stream
    for flt in filters:
        current = _pipe(flt, current)
    return current
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def simplefilter(f):
    """
    Decorator that converts a function into a filter::

        @simplefilter
        def lowercase(self, lexer, stream, options):
            for ttype, value in stream:
                yield ttype, value.lower()
    """
    namespace = {
        '__module__': getattr(f, '__module__'),
        '__doc__': f.__doc__,
        'function': f,
    }
    # Build a FunctionFilter subclass dynamically, bound to *f* and named
    # after it so reprs and tracebacks stay readable.
    return type(f.__name__, (FunctionFilter,), namespace)
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class Filter:
    """
    Base filter class.  Subclass it, or use the `simplefilter` decorator,
    to create your own filters.
    """

    def __init__(self, **options):
        # Keep the raw option mapping for subclasses to interpret.
        self.options = options

    def filter(self, lexer, stream):
        raise NotImplementedError()
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class FunctionFilter(Filter):
    """
    Abstract helper used by `simplefilter`: it adapts a plain function
    into a filter class.  `simplefilter` creates concrete subclasses of
    this class on the fly with the ``function`` attribute bound.
    """
    function = None

    def __init__(self, **options):
        # A bare FunctionFilter with no bound function is unusable;
        # reject it early with a clear error.
        if not hasattr(self, 'function'):
            raise TypeError(f'{self.__class__.__name__!r} used without bound function')
        Filter.__init__(self, **options)

    def filter(self, lexer, stream):
        # pylint: disable=not-callable
        yield from self.function(lexer, stream, self.options)
|
evalkit_tf437/lib/python3.10/site-packages/pygments/formatters/__pycache__/other.cpython-310.pyc
ADDED
|
Binary file (4.74 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/pygments/formatters/img.py
ADDED
|
@@ -0,0 +1,685 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.formatters.img
|
| 3 |
+
~~~~~~~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Formatter for Pixmap output.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
import os
|
| 11 |
+
import sys
|
| 12 |
+
|
| 13 |
+
from pygments.formatter import Formatter
|
| 14 |
+
from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
|
| 15 |
+
get_choice_opt
|
| 16 |
+
|
| 17 |
+
import subprocess
|
| 18 |
+
|
| 19 |
+
# Import this carefully
|
| 20 |
+
try:
|
| 21 |
+
from PIL import Image, ImageDraw, ImageFont
|
| 22 |
+
pil_available = True
|
| 23 |
+
except ImportError:
|
| 24 |
+
pil_available = False
|
| 25 |
+
|
| 26 |
+
try:
|
| 27 |
+
import _winreg
|
| 28 |
+
except ImportError:
|
| 29 |
+
try:
|
| 30 |
+
import winreg as _winreg
|
| 31 |
+
except ImportError:
|
| 32 |
+
_winreg = None
|
| 33 |
+
|
| 34 |
+
__all__ = ['ImageFormatter', 'GifImageFormatter', 'JpgImageFormatter',
|
| 35 |
+
'BmpImageFormatter']
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
# For some unknown reason every font calls it something different
|
| 39 |
+
STYLES = {
|
| 40 |
+
'NORMAL': ['', 'Roman', 'Book', 'Normal', 'Regular', 'Medium'],
|
| 41 |
+
'ITALIC': ['Oblique', 'Italic'],
|
| 42 |
+
'BOLD': ['Bold'],
|
| 43 |
+
'BOLDITALIC': ['Bold Oblique', 'Bold Italic'],
|
| 44 |
+
}
|
| 45 |
+
|
| 46 |
+
# A sane default for modern systems
|
| 47 |
+
DEFAULT_FONT_NAME_NIX = 'DejaVu Sans Mono'
|
| 48 |
+
DEFAULT_FONT_NAME_WIN = 'Courier New'
|
| 49 |
+
DEFAULT_FONT_NAME_MAC = 'Menlo'
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class PilNotAvailable(ImportError):
    """Raised when the Python Imaging Library (Pillow) cannot be imported."""
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
class FontNotFound(Exception):
    """Raised when none of the requested fonts can be located."""
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class FontManager:
    """
    Manages a set of fonts: normal, italic, bold, etc...

    Resolution strategy: if ``font_name`` is a file path or file-like
    object, that single (possibly variable) font is used for every style;
    otherwise the font is looked up per-platform (fontconfig on *nix, the
    font directories on macOS, the registry on Windows).
    """

    def __init__(self, font_name, font_size=14):
        self.font_name = font_name
        self.font_size = font_size
        self.fonts = {}        # style name ('NORMAL', 'BOLD', ...) -> ImageFont
        self.encoding = None
        self.variable = False  # True when one (variable) font file serves all styles
        if hasattr(font_name, 'read') or os.path.isfile(font_name):
            # Direct font file / file-like object: load once and reuse the
            # same font object for every style (styles are selected later
            # via variation names in get_style()).
            font = ImageFont.truetype(font_name, self.font_size)
            self.variable = True
            for style in STYLES:
                self.fonts[style] = font

            return

        # Named font: resolve per platform; fall back to a sane default
        # family when no name was given.
        if sys.platform.startswith('win'):
            if not font_name:
                self.font_name = DEFAULT_FONT_NAME_WIN
            self._create_win()
        elif sys.platform.startswith('darwin'):
            if not font_name:
                self.font_name = DEFAULT_FONT_NAME_MAC
            self._create_mac()
        else:
            if not font_name:
                self.font_name = DEFAULT_FONT_NAME_NIX
            self._create_nix()

    def _get_nix_font_path(self, name, style):
        # Ask fontconfig for the file path of *name* in *style*.
        # Returns the first usable path, or None (also implicitly None
        # when fc-list exits non-zero).
        proc = subprocess.Popen(['fc-list', f"{name}:style={style}", 'file'],
                                stdout=subprocess.PIPE, stderr=None)
        stdout, _ = proc.communicate()
        if proc.returncode == 0:
            lines = stdout.splitlines()
            for line in lines:
                # fc-list may emit warnings on stdout; skip them.
                if line.startswith(b'Fontconfig warning:'):
                    continue
                path = line.decode().strip().strip(':')
                if path:
                    return path
            return None

    def _create_nix(self):
        # Resolve fonts via fontconfig.  The NORMAL style is mandatory;
        # missing bold/italic variants degrade to BOLD or NORMAL.
        for name in STYLES['NORMAL']:
            path = self._get_nix_font_path(self.font_name, name)
            if path is not None:
                self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
                break
        else:
            raise FontNotFound(f'No usable fonts named: "{self.font_name}"')
        for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
            for stylename in STYLES[style]:
                path = self._get_nix_font_path(self.font_name, stylename)
                if path is not None:
                    self.fonts[style] = ImageFont.truetype(path, self.font_size)
                    break
            else:
                # Variant not found: fall back (BOLDITALIC -> BOLD,
                # everything else -> NORMAL).
                if style == 'BOLDITALIC':
                    self.fonts[style] = self.fonts['BOLD']
                else:
                    self.fonts[style] = self.fonts['NORMAL']

    def _get_mac_font_path(self, font_map, name, style):
        # font_map keys are lower-cased file stems, e.g. "menlo bold".
        return font_map.get((name + ' ' + style).strip().lower())

    def _create_mac(self):
        # Build a name -> path map from the standard macOS font folders,
        # then resolve styles the same way as on *nix.
        font_map = {}
        for font_dir in (os.path.join(os.getenv("HOME"), 'Library/Fonts/'),
                         '/Library/Fonts/', '/System/Library/Fonts/'):
            font_map.update(
                (os.path.splitext(f)[0].lower(), os.path.join(font_dir, f))
                for f in os.listdir(font_dir)
                if f.lower().endswith(('ttf', 'ttc')))

        for name in STYLES['NORMAL']:
            path = self._get_mac_font_path(font_map, self.font_name, name)
            if path is not None:
                self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
                break
        else:
            raise FontNotFound(f'No usable fonts named: "{self.font_name}"')
        for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
            for stylename in STYLES[style]:
                path = self._get_mac_font_path(font_map, self.font_name, stylename)
                if path is not None:
                    self.fonts[style] = ImageFont.truetype(path, self.font_size)
                    break
            else:
                # Variant not found: fall back (BOLDITALIC -> BOLD,
                # everything else -> NORMAL).
                if style == 'BOLDITALIC':
                    self.fonts[style] = self.fonts['BOLD']
                else:
                    self.fonts[style] = self.fonts['NORMAL']

    def _lookup_win(self, key, basename, styles, fail=False):
        # Query the opened registry *key* for a font value, trying each
        # style alias with and without the " (TrueType)" suffix.
        for suffix in ('', ' (TrueType)'):
            for style in styles:
                try:
                    valname = '{}{}{}'.format(basename, style and ' '+style, suffix)
                    val, _ = _winreg.QueryValueEx(key, valname)
                    return val
                except OSError:
                    continue
        else:
            if fail:
                raise FontNotFound(f'Font {basename} ({styles[0]}) not found in registry')
            return None

    def _create_win(self):
        # Try both the NT and non-NT font registry keys, under both
        # HKEY_CURRENT_USER and HKEY_LOCAL_MACHINE.
        lookuperror = None
        keynames = [ (_winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows NT\CurrentVersion\Fonts'),
                     (_winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows\CurrentVersion\Fonts'),
                     (_winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows NT\CurrentVersion\Fonts'),
                     (_winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows\CurrentVersion\Fonts') ]
        for keyname in keynames:
            try:
                key = _winreg.OpenKey(*keyname)
                try:
                    path = self._lookup_win(key, self.font_name, STYLES['NORMAL'], True)
                    self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
                    for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
                        path = self._lookup_win(key, self.font_name, STYLES[style])
                        if path:
                            self.fonts[style] = ImageFont.truetype(path, self.font_size)
                        else:
                            # Variant missing: degrade as on other platforms.
                            if style == 'BOLDITALIC':
                                self.fonts[style] = self.fonts['BOLD']
                            else:
                                self.fonts[style] = self.fonts['NORMAL']
                    return
                except FontNotFound as err:
                    lookuperror = err
                finally:
                    _winreg.CloseKey(key)
            except OSError:
                pass
        else:
            # If we get here, we checked all registry keys and had no luck
            # We can be in one of two situations now:
            # * All key lookups failed. In this case lookuperror is None and we
            #   will raise a generic error
            # * At least one lookup failed with a FontNotFound error. In this
            #   case, we will raise that as a more specific error
            if lookuperror:
                raise lookuperror
            raise FontNotFound('Can\'t open Windows font registry key')

    def get_char_size(self):
        """
        Get the character size.
        """
        # 'M' is used as a representative glyph; assumes a monospace font.
        return self.get_text_size('M')

    def get_text_size(self, text):
        """
        Get the text size (width, height).
        """
        font = self.fonts['NORMAL']
        if hasattr(font, 'getbbox'):  # Pillow >= 9.2.0
            return font.getbbox(text)[2:4]
        else:
            # Pillow < 9.2.0: getsize() was removed in later releases.
            return font.getsize(text)

    def get_font(self, bold, oblique):
        """
        Get the font based on bold and italic flags.
        """
        if bold and oblique:
            if self.variable:
                return self.get_style('BOLDITALIC')

            return self.fonts['BOLDITALIC']
        elif bold:
            if self.variable:
                return self.get_style('BOLD')

            return self.fonts['BOLD']
        elif oblique:
            if self.variable:
                return self.get_style('ITALIC')

            return self.fonts['ITALIC']
        else:
            if self.variable:
                return self.get_style('NORMAL')

            return self.fonts['NORMAL']

    def get_style(self, style):
        """
        Get the specified style of the font if it is a variable font.
        If not found, return the normal font.
        """
        font = self.fonts[style]
        for style_name in STYLES[style]:
            try:
                font.set_variation_by_name(style_name)
                return font
            except ValueError:
                # This alias is not a named variation of the font; try next.
                pass
            except OSError:
                # Not a variable font at all; use it unchanged.
                return font

        return font
|
| 267 |
+
|
| 268 |
+
|
| 269 |
+
class ImageFormatter(Formatter):
|
| 270 |
+
"""
|
| 271 |
+
Create a PNG image from source code. This uses the Python Imaging Library to
|
| 272 |
+
generate a pixmap from the source code.
|
| 273 |
+
|
| 274 |
+
.. versionadded:: 0.10
|
| 275 |
+
|
| 276 |
+
Additional options accepted:
|
| 277 |
+
|
| 278 |
+
`image_format`
|
| 279 |
+
An image format to output to that is recognised by PIL, these include:
|
| 280 |
+
|
| 281 |
+
* "PNG" (default)
|
| 282 |
+
* "JPEG"
|
| 283 |
+
* "BMP"
|
| 284 |
+
* "GIF"
|
| 285 |
+
|
| 286 |
+
`line_pad`
|
| 287 |
+
The extra spacing (in pixels) between each line of text.
|
| 288 |
+
|
| 289 |
+
Default: 2
|
| 290 |
+
|
| 291 |
+
`font_name`
|
| 292 |
+
The font name to be used as the base font from which others, such as
|
| 293 |
+
bold and italic fonts will be generated. This really should be a
|
| 294 |
+
monospace font to look sane.
|
| 295 |
+
If a filename or a file-like object is specified, the user must
|
| 296 |
+
provide different styles of the font.
|
| 297 |
+
|
| 298 |
+
Default: "Courier New" on Windows, "Menlo" on Mac OS, and
|
| 299 |
+
"DejaVu Sans Mono" on \\*nix
|
| 300 |
+
|
| 301 |
+
`font_size`
|
| 302 |
+
The font size in points to be used.
|
| 303 |
+
|
| 304 |
+
Default: 14
|
| 305 |
+
|
| 306 |
+
`image_pad`
|
| 307 |
+
The padding, in pixels to be used at each edge of the resulting image.
|
| 308 |
+
|
| 309 |
+
Default: 10
|
| 310 |
+
|
| 311 |
+
`line_numbers`
|
| 312 |
+
Whether line numbers should be shown: True/False
|
| 313 |
+
|
| 314 |
+
Default: True
|
| 315 |
+
|
| 316 |
+
`line_number_start`
|
| 317 |
+
The line number of the first line.
|
| 318 |
+
|
| 319 |
+
Default: 1
|
| 320 |
+
|
| 321 |
+
`line_number_step`
|
| 322 |
+
The step used when printing line numbers.
|
| 323 |
+
|
| 324 |
+
Default: 1
|
| 325 |
+
|
| 326 |
+
`line_number_bg`
|
| 327 |
+
The background colour (in "#123456" format) of the line number bar, or
|
| 328 |
+
None to use the style background color.
|
| 329 |
+
|
| 330 |
+
Default: "#eed"
|
| 331 |
+
|
| 332 |
+
`line_number_fg`
|
| 333 |
+
The text color of the line numbers (in "#123456"-like format).
|
| 334 |
+
|
| 335 |
+
Default: "#886"
|
| 336 |
+
|
| 337 |
+
`line_number_chars`
|
| 338 |
+
The number of columns of line numbers allowable in the line number
|
| 339 |
+
margin.
|
| 340 |
+
|
| 341 |
+
Default: 2
|
| 342 |
+
|
| 343 |
+
`line_number_bold`
|
| 344 |
+
Whether line numbers will be bold: True/False
|
| 345 |
+
|
| 346 |
+
Default: False
|
| 347 |
+
|
| 348 |
+
`line_number_italic`
|
| 349 |
+
Whether line numbers will be italicized: True/False
|
| 350 |
+
|
| 351 |
+
Default: False
|
| 352 |
+
|
| 353 |
+
`line_number_separator`
|
| 354 |
+
Whether a line will be drawn between the line number area and the
|
| 355 |
+
source code area: True/False
|
| 356 |
+
|
| 357 |
+
Default: True
|
| 358 |
+
|
| 359 |
+
`line_number_pad`
|
| 360 |
+
The horizontal padding (in pixels) between the line number margin, and
|
| 361 |
+
the source code area.
|
| 362 |
+
|
| 363 |
+
Default: 6
|
| 364 |
+
|
| 365 |
+
`hl_lines`
|
| 366 |
+
Specify a list of lines to be highlighted.
|
| 367 |
+
|
| 368 |
+
.. versionadded:: 1.2
|
| 369 |
+
|
| 370 |
+
Default: empty list
|
| 371 |
+
|
| 372 |
+
`hl_color`
|
| 373 |
+
Specify the color for highlighting lines.
|
| 374 |
+
|
| 375 |
+
.. versionadded:: 1.2
|
| 376 |
+
|
| 377 |
+
Default: highlight color of the selected style
|
| 378 |
+
"""
|
| 379 |
+
|
| 380 |
+
# Required by the pygments mapper
|
| 381 |
+
name = 'img'
|
| 382 |
+
aliases = ['img', 'IMG', 'png']
|
| 383 |
+
filenames = ['*.png']
|
| 384 |
+
|
| 385 |
+
unicodeoutput = False
|
| 386 |
+
|
| 387 |
+
default_image_format = 'png'
|
| 388 |
+
|
| 389 |
+
def __init__(self, **options):
    """
    See the class docstring for explanation of options.
    """
    if not pil_available:
        raise PilNotAvailable(
            'Python Imaging Library is required for this formatter')
    Formatter.__init__(self, **options)
    # latin1 lets pygments.format() hand us text unmolested.
    self.encoding = 'latin1'
    # Token type -> style dict lookup table.
    self.styles = dict(self.style)
    if self.style.background_color is None:
        self.background_color = '#fff'
    else:
        self.background_color = self.style.background_color
    # Image options.
    self.image_format = get_choice_opt(
        options, 'image_format', ['png', 'jpeg', 'gif', 'bmp'],
        self.default_image_format, normcase=True)
    self.image_pad = get_int_opt(options, 'image_pad', 10)
    self.line_pad = get_int_opt(options, 'line_pad', 2)
    # Fonts.
    fontsize = get_int_opt(options, 'font_size', 14)
    self.fonts = FontManager(options.get('font_name', ''), fontsize)
    self.fontw, self.fonth = self.fonts.get_char_size()
    # Line number options.
    self.line_number_fg = options.get('line_number_fg', '#886')
    self.line_number_bg = options.get('line_number_bg', '#eed')
    self.line_number_chars = get_int_opt(options, 'line_number_chars', 2)
    self.line_number_bold = get_bool_opt(options, 'line_number_bold', False)
    self.line_number_italic = get_bool_opt(options, 'line_number_italic', False)
    self.line_number_pad = get_int_opt(options, 'line_number_pad', 6)
    self.line_numbers = get_bool_opt(options, 'line_numbers', True)
    self.line_number_separator = get_bool_opt(options, 'line_number_separator', True)
    self.line_number_step = get_int_opt(options, 'line_number_step', 1)
    self.line_number_start = get_int_opt(options, 'line_number_start', 1)
    if self.line_numbers:
        self.line_number_width = (self.fontw * self.line_number_chars +
                                  self.line_number_pad * 2)
    else:
        self.line_number_width = 0
    # Highlighted lines: non-integer entries are silently dropped.
    self.hl_lines = []
    for item in get_list_opt(options, 'hl_lines', []):
        try:
            self.hl_lines.append(int(item))
        except ValueError:
            pass
    self.hl_color = options.get('hl_color',
                                self.style.highlight_color) or '#f90'
    # Deferred drawing commands, collected before the pixmap size is known.
    self.drawables = []
|
| 444 |
+
|
| 445 |
+
def get_style_defs(self, arg=''):
    """Unsupported: image output has no external stylesheet."""
    raise NotImplementedError('The -S option is meaningless for the image '
                              'formatter. Use -O style=<stylename> instead.')
|
| 448 |
+
|
| 449 |
+
def _get_line_height(self):
    """Pixel height of one rendered line (glyph height plus padding)."""
    return self.line_pad + self.fonth
|
| 454 |
+
|
| 455 |
+
def _get_line_y(self, lineno):
    """Pixel Y coordinate of the top of (zero-based) line *lineno*."""
    return self.image_pad + lineno * self._get_line_height()
|
| 460 |
+
|
| 461 |
+
def _get_char_width(self):
    """
    Get the width of a character.
    """
    # Monospace assumption: every glyph occupies the same cell width,
    # measured once from 'M' in __init__.
    return self.fontw
|
| 466 |
+
|
| 467 |
+
def _get_char_x(self, linelength):
    """Pixel X coordinate where text at pixel offset *linelength* starts."""
    # Text begins after the left padding and the line-number gutter.
    return self.image_pad + self.line_number_width + linelength
|
| 472 |
+
|
| 473 |
+
def _get_text_pos(self, linelength, lineno):
    """Pixel (x, y) position for text at the given offset and line."""
    x = self._get_char_x(linelength)
    y = self._get_line_y(lineno)
    return x, y
|
| 478 |
+
|
| 479 |
+
def _get_linenumber_pos(self, lineno):
    """Pixel (x, y) position where the number of line *lineno* starts."""
    return self.image_pad, self._get_line_y(lineno)
|
| 484 |
+
|
| 485 |
+
def _get_text_color(self, style):
    """Foreground fill color for a token style, defaulting to black."""
    color = style['color']
    return '#' + color if color is not None else '#000'
|
| 494 |
+
|
| 495 |
+
def _get_text_bg_color(self, style):
    """Background fill color for a token style, or None for transparent."""
    bgcolor = style['bgcolor']
    if bgcolor is None:
        return None
    return '#' + bgcolor
|
| 504 |
+
|
| 505 |
+
def _get_style_font(self, style):
    """
    Get the correct font for the style.
    """
    # Delegates bold/italic selection to the FontManager.
    return self.fonts.get_font(style['bold'], style['italic'])
|
| 510 |
+
|
| 511 |
+
def _get_image_size(self, maxlinelength, maxlineno):
    """Total pixel (width, height) needed for the rendered pixmap."""
    width = self._get_char_x(maxlinelength) + self.image_pad
    height = self._get_line_y(maxlineno) + self.image_pad
    return width, height
|
| 517 |
+
|
| 518 |
+
def _draw_linenumber(self, posno, lineno):
    """Queue a drawable rendering line number *lineno* at row *posno*."""
    label = str(lineno).rjust(self.line_number_chars)
    number_font = self.fonts.get_font(self.line_number_bold,
                                      self.line_number_italic)
    self._draw_text(
        self._get_linenumber_pos(posno),
        label,
        font=number_font,
        text_fg=self.line_number_fg,
        text_bg=None,
    )
|
| 530 |
+
|
| 531 |
+
def _draw_text(self, pos, text, font, text_fg, text_bg):
    """
    Remember a single drawable tuple to paint later.
    """
    # Drawing is deferred until format() knows the final image size.
    self.drawables.append((pos, text, font, text_fg, text_bg))
|
| 536 |
+
|
| 537 |
+
def _create_drawables(self, tokensource):
    """
    Create drawables for the token content.

    Walks the token stream, splits each token value into physical lines,
    and records one deferred drawing command per non-empty line segment.
    Also tracks the widest line in pixels and the total line count, which
    later determine the image size (stored on self.maxlinelength,
    self.maxcharno, self.maxlineno).
    """
    lineno = charno = maxcharno = 0
    maxlinelength = linelength = 0
    for ttype, value in tokensource:
        # Fall back to the nearest ancestor token type with a style entry.
        while ttype not in self.styles:
            ttype = ttype.parent
        style = self.styles[ttype]
        # TODO: make sure tab expansion happens earlier in the chain.  It
        # really ought to be done on the input, as to do it right here is
        # quite complex.
        value = value.expandtabs(4)
        # keepends=True so trailing '\n' marks where to advance a line.
        lines = value.splitlines(True)
        # print lines
        for i, line in enumerate(lines):
            temp = line.rstrip('\n')
            if temp:
                self._draw_text(
                    self._get_text_pos(linelength, lineno),
                    temp,
                    font = self._get_style_font(style),
                    text_fg = self._get_text_color(style),
                    text_bg = self._get_text_bg_color(style),
                )
                # Advance the cursor by the segment's rendered width.
                temp_width, _ = self.fonts.get_text_size(temp)
                linelength += temp_width
                maxlinelength = max(maxlinelength, linelength)
                charno += len(temp)
                maxcharno = max(maxcharno, charno)
            if line.endswith('\n'):
                # add a line for each extra line in the value
                linelength = 0
                charno = 0
                lineno += 1
    self.maxlinelength = maxlinelength
    self.maxcharno = maxcharno
    self.maxlineno = lineno
|
| 576 |
+
|
| 577 |
+
def _draw_line_numbers(self):
    """Queue drawables for every line number that should be shown."""
    if not self.line_numbers:
        return
    for row in range(self.maxlineno):
        number = row + self.line_number_start
        # Only every line_number_step-th number is printed.
        if number % self.line_number_step == 0:
            self._draw_linenumber(row, number)
|
| 587 |
+
|
| 588 |
+
def _paint_line_number_bg(self, im):
    """Paint the line-number gutter background (and separator) onto *im*."""
    # NOTE(review): gating on line_number_fg (not bg) mirrors the original
    # behavior — confirm against upstream before changing.
    if not self.line_numbers or self.line_number_fg is None:
        return
    draw = ImageDraw.Draw(im)
    gutter_h = im.size[-1]  # full image height
    gutter_w = self.image_pad + self.line_number_width - self.line_number_pad
    draw.rectangle([(0, 0), (gutter_w, gutter_h)], fill=self.line_number_bg)
    if self.line_number_separator:
        draw.line([(gutter_w, 0), (gutter_w, gutter_h)],
                  fill=self.line_number_fg)
    del draw
|
| 604 |
+
|
| 605 |
+
def format(self, tokensource, outfile):
    """
    Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
    tuples and write it into ``outfile``.

    This implementation calculates where it should draw each token on the
    pixmap, then calculates the required pixmap size and draws the items.
    """
    self._create_drawables(tokensource)
    self._draw_line_numbers()
    im = Image.new(
        'RGB',
        self._get_image_size(self.maxlinelength, self.maxlineno),
        self.background_color
    )
    self._paint_line_number_bg(im)
    draw = ImageDraw.Draw(im)
    # Highlight
    if self.hl_lines:
        # Highlight bars start just right of the line-number gutter.
        x = self.image_pad + self.line_number_width - self.line_number_pad + 1
        recth = self._get_line_height()
        rectw = im.size[0] - x
        for linenumber in self.hl_lines:
            # hl_lines are 1-based; _get_line_y is 0-based.
            y = self._get_line_y(linenumber - 1)
            draw.rectangle([(x, y), (x + rectw, y + recth)],
                           fill=self.hl_color)
    for pos, value, font, text_fg, text_bg in self.drawables:
        if text_bg:
            # see deprecations https://pillow.readthedocs.io/en/stable/releasenotes/9.2.0.html#font-size-and-offset-methods
            if hasattr(draw, 'textsize'):
                text_size = draw.textsize(text=value, font=font)
            else:
                text_size = font.getbbox(value)[2:]
            # Paint the token's background box before the text itself.
            draw.rectangle([pos[0], pos[1], pos[0] + text_size[0], pos[1] + text_size[1]], fill=text_bg)
        draw.text(pos, value, font=font, fill=text_fg)
    im.save(outfile, self.image_format.upper())
|
| 641 |
+
|
| 642 |
+
|
| 643 |
+
# Add one formatter per format, so that the "-f gif" option gives the correct result
|
| 644 |
+
# when used in pygmentize.
|
| 645 |
+
|
| 646 |
+
class GifImageFormatter(ImageFormatter):
    """
    Create a GIF image from source code, rendered to a pixmap via the
    Python Imaging Library.

    .. versionadded:: 1.0
    """

    name = 'img_gif'
    aliases = ['gif']
    filenames = ['*.gif']
    default_image_format = 'gif'
|
| 658 |
+
|
| 659 |
+
|
| 660 |
+
class JpgImageFormatter(ImageFormatter):
    """
    Create a JPEG image from source code, rendered to a pixmap via the
    Python Imaging Library.

    .. versionadded:: 1.0
    """

    name = 'img_jpg'
    aliases = ['jpg', 'jpeg']
    filenames = ['*.jpg']
    default_image_format = 'jpeg'
|
| 672 |
+
|
| 673 |
+
|
| 674 |
+
class BmpImageFormatter(ImageFormatter):
    """
    Create a bitmap image from source code, rendered to a pixmap via the
    Python Imaging Library.

    .. versionadded:: 1.0
    """

    name = 'img_bmp'
    aliases = ['bmp', 'bitmap']
    filenames = ['*.bmp']
    default_image_format = 'bmp'
|
evalkit_tf437/lib/python3.10/site-packages/pygments/formatters/latex.py
ADDED
|
@@ -0,0 +1,518 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.formatters.latex
|
| 3 |
+
~~~~~~~~~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Formatter for LaTeX fancyvrb output.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from io import StringIO
|
| 12 |
+
|
| 13 |
+
from pygments.formatter import Formatter
|
| 14 |
+
from pygments.lexer import Lexer, do_insertions
|
| 15 |
+
from pygments.token import Token, STANDARD_TYPES
|
| 16 |
+
from pygments.util import get_bool_opt, get_int_opt
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
__all__ = ['LatexFormatter']
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def escape_tex(text, commandprefix):
    """Escape *text* so it renders literally inside a fancyvrb Verbatim.

    Each special character becomes a zero-argument macro call
    ``\\<commandprefix>Zxx{}`` (the macros are defined by STYLE_TEMPLATE).
    Backslash and braces are routed through private placeholder bytes
    first so the macro replacement text itself is never re-escaped.
    """
    # Stage 1: hide the characters that appear inside replacement macros.
    text = (text.replace('\\', '\x00')
                .replace('{', '\x01')
                .replace('}', '\x02'))
    # Stage 2: expand placeholders, then the remaining special characters.
    _SPECIALS = [
        ('\x00', 'Zbs'), ('\x01', 'Zob'), ('\x02', 'Zcb'),
        ('^', 'Zca'), ('_', 'Zus'), ('&', 'Zam'),
        ('<', 'Zlt'), ('>', 'Zgt'), ('#', 'Zsh'),
        ('%', 'Zpc'), ('$', 'Zdl'), ('-', 'Zhy'),
        ("'", 'Zsq'), ('"', 'Zdq'), ('~', 'Zti'),
    ]
    for char, macro in _SPECIALS:
        text = text.replace(char, f'\\{commandprefix}{macro}{{}}')
    return text
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
DOC_TEMPLATE = r'''
|
| 44 |
+
\documentclass{%(docclass)s}
|
| 45 |
+
\usepackage{fancyvrb}
|
| 46 |
+
\usepackage{color}
|
| 47 |
+
\usepackage[%(encoding)s]{inputenc}
|
| 48 |
+
%(preamble)s
|
| 49 |
+
|
| 50 |
+
%(styledefs)s
|
| 51 |
+
|
| 52 |
+
\begin{document}
|
| 53 |
+
|
| 54 |
+
\section*{%(title)s}
|
| 55 |
+
|
| 56 |
+
%(code)s
|
| 57 |
+
\end{document}
|
| 58 |
+
'''
|
| 59 |
+
|
| 60 |
+
## Small explanation of the mess below :)
|
| 61 |
+
#
|
| 62 |
+
# The previous version of the LaTeX formatter just assigned a command to
|
| 63 |
+
# each token type defined in the current style. That obviously is
|
| 64 |
+
# problematic if the highlighted code is produced for a different style
|
| 65 |
+
# than the style commands themselves.
|
| 66 |
+
#
|
| 67 |
+
# This version works much like the HTML formatter which assigns multiple
|
| 68 |
+
# CSS classes to each <span> tag, from the most specific to the least
|
| 69 |
+
# specific token type, thus falling back to the parent token type if one
|
| 70 |
+
# is not defined. Here, the classes are there too and use the same short
|
| 71 |
+
# forms given in token.STANDARD_TYPES.
|
| 72 |
+
#
|
| 73 |
+
# Highlighted code now only uses one custom command, which by default is
|
| 74 |
+
# \PY and selectable by the commandprefix option (and in addition the
|
| 75 |
+
# escapes \PYZat, \PYZlb and \PYZrb which haven't been renamed for
|
| 76 |
+
# backwards compatibility purposes).
|
| 77 |
+
#
|
| 78 |
+
# \PY has two arguments: the classes, separated by +, and the text to
|
| 79 |
+
# render in that style. The classes are resolved into the respective
|
| 80 |
+
# style commands by magic, which serves to ignore unknown classes.
|
| 81 |
+
#
|
| 82 |
+
# The magic macros are:
|
| 83 |
+
# * \PY@it, \PY@bf, etc. are unconditionally wrapped around the text
|
| 84 |
+
# to render in \PY@do. Their definition determines the style.
|
| 85 |
+
# * \PY@reset resets \PY@it etc. to do nothing.
|
| 86 |
+
# * \PY@toks parses the list of classes, using magic inspired by the
|
| 87 |
+
# keyval package (but modified to use plusses instead of commas
|
| 88 |
+
# because fancyvrb redefines commas inside its environments).
|
| 89 |
+
# * \PY@tok processes one class, calling the \PY@tok@classname command
|
| 90 |
+
# if it exists.
|
| 91 |
+
# * \PY@tok@classname sets the \PY@it etc. to reflect the chosen style
|
| 92 |
+
# for its class.
|
| 93 |
+
# * \PY resets the style, parses the classnames and then calls \PY@do.
|
| 94 |
+
#
|
| 95 |
+
# Tip: to read this code, print it out in substituted form using e.g.
|
| 96 |
+
# >>> print STYLE_TEMPLATE % {'cp': 'PY'}
|
| 97 |
+
|
| 98 |
+
STYLE_TEMPLATE = r'''
|
| 99 |
+
\makeatletter
|
| 100 |
+
\def\%(cp)s@reset{\let\%(cp)s@it=\relax \let\%(cp)s@bf=\relax%%
|
| 101 |
+
\let\%(cp)s@ul=\relax \let\%(cp)s@tc=\relax%%
|
| 102 |
+
\let\%(cp)s@bc=\relax \let\%(cp)s@ff=\relax}
|
| 103 |
+
\def\%(cp)s@tok#1{\csname %(cp)s@tok@#1\endcsname}
|
| 104 |
+
\def\%(cp)s@toks#1+{\ifx\relax#1\empty\else%%
|
| 105 |
+
\%(cp)s@tok{#1}\expandafter\%(cp)s@toks\fi}
|
| 106 |
+
\def\%(cp)s@do#1{\%(cp)s@bc{\%(cp)s@tc{\%(cp)s@ul{%%
|
| 107 |
+
\%(cp)s@it{\%(cp)s@bf{\%(cp)s@ff{#1}}}}}}}
|
| 108 |
+
\def\%(cp)s#1#2{\%(cp)s@reset\%(cp)s@toks#1+\relax+\%(cp)s@do{#2}}
|
| 109 |
+
|
| 110 |
+
%(styles)s
|
| 111 |
+
|
| 112 |
+
\def\%(cp)sZbs{\char`\\}
|
| 113 |
+
\def\%(cp)sZus{\char`\_}
|
| 114 |
+
\def\%(cp)sZob{\char`\{}
|
| 115 |
+
\def\%(cp)sZcb{\char`\}}
|
| 116 |
+
\def\%(cp)sZca{\char`\^}
|
| 117 |
+
\def\%(cp)sZam{\char`\&}
|
| 118 |
+
\def\%(cp)sZlt{\char`\<}
|
| 119 |
+
\def\%(cp)sZgt{\char`\>}
|
| 120 |
+
\def\%(cp)sZsh{\char`\#}
|
| 121 |
+
\def\%(cp)sZpc{\char`\%%}
|
| 122 |
+
\def\%(cp)sZdl{\char`\$}
|
| 123 |
+
\def\%(cp)sZhy{\char`\-}
|
| 124 |
+
\def\%(cp)sZsq{\char`\'}
|
| 125 |
+
\def\%(cp)sZdq{\char`\"}
|
| 126 |
+
\def\%(cp)sZti{\char`\~}
|
| 127 |
+
%% for compatibility with earlier versions
|
| 128 |
+
\def\%(cp)sZat{@}
|
| 129 |
+
\def\%(cp)sZlb{[}
|
| 130 |
+
\def\%(cp)sZrb{]}
|
| 131 |
+
\makeatother
|
| 132 |
+
'''
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def _get_ttype_name(ttype):
    """Return the short command name for a token type.

    Unknown subtypes are named after their nearest ancestor that has a
    STANDARD_TYPES entry, with the missing path components appended.
    """
    name = STANDARD_TYPES.get(ttype)
    if name:
        return name
    suffix = ''
    # Climb toward the root until a mapped ancestor is found.
    while name is None:
        suffix = ttype[-1] + suffix
        ttype = ttype.parent
        name = STANDARD_TYPES.get(ttype)
    return name + suffix
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
class LatexFormatter(Formatter):
|
| 148 |
+
r"""
|
| 149 |
+
Format tokens as LaTeX code. This needs the `fancyvrb` and `color`
|
| 150 |
+
standard packages.
|
| 151 |
+
|
| 152 |
+
Without the `full` option, code is formatted as one ``Verbatim``
|
| 153 |
+
environment, like this:
|
| 154 |
+
|
| 155 |
+
.. sourcecode:: latex
|
| 156 |
+
|
| 157 |
+
\begin{Verbatim}[commandchars=\\\{\}]
|
| 158 |
+
\PY{k}{def }\PY{n+nf}{foo}(\PY{n}{bar}):
|
| 159 |
+
\PY{k}{pass}
|
| 160 |
+
\end{Verbatim}
|
| 161 |
+
|
| 162 |
+
Wrapping can be disabled using the `nowrap` option.
|
| 163 |
+
|
| 164 |
+
The special command used here (``\PY``) and all the other macros it needs
|
| 165 |
+
are output by the `get_style_defs` method.
|
| 166 |
+
|
| 167 |
+
With the `full` option, a complete LaTeX document is output, including
|
| 168 |
+
the command definitions in the preamble.
|
| 169 |
+
|
| 170 |
+
The `get_style_defs()` method of a `LatexFormatter` returns a string
|
| 171 |
+
containing ``\def`` commands defining the macros needed inside the
|
| 172 |
+
``Verbatim`` environments.
|
| 173 |
+
|
| 174 |
+
Additional options accepted:
|
| 175 |
+
|
| 176 |
+
`nowrap`
|
| 177 |
+
If set to ``True``, don't wrap the tokens at all, not even inside a
|
| 178 |
+
``\begin{Verbatim}`` environment. This disables most other options
|
| 179 |
+
(default: ``False``).
|
| 180 |
+
|
| 181 |
+
`style`
|
| 182 |
+
The style to use, can be a string or a Style subclass (default:
|
| 183 |
+
``'default'``).
|
| 184 |
+
|
| 185 |
+
`full`
|
| 186 |
+
Tells the formatter to output a "full" document, i.e. a complete
|
| 187 |
+
self-contained document (default: ``False``).
|
| 188 |
+
|
| 189 |
+
`title`
|
| 190 |
+
If `full` is true, the title that should be used to caption the
|
| 191 |
+
document (default: ``''``).
|
| 192 |
+
|
| 193 |
+
`docclass`
|
| 194 |
+
If the `full` option is enabled, this is the document class to use
|
| 195 |
+
(default: ``'article'``).
|
| 196 |
+
|
| 197 |
+
`preamble`
|
| 198 |
+
If the `full` option is enabled, this can be further preamble commands,
|
| 199 |
+
e.g. ``\usepackage`` (default: ``''``).
|
| 200 |
+
|
| 201 |
+
`linenos`
|
| 202 |
+
If set to ``True``, output line numbers (default: ``False``).
|
| 203 |
+
|
| 204 |
+
`linenostart`
|
| 205 |
+
The line number for the first line (default: ``1``).
|
| 206 |
+
|
| 207 |
+
`linenostep`
|
| 208 |
+
If set to a number n > 1, only every nth line number is printed.
|
| 209 |
+
|
| 210 |
+
`verboptions`
|
| 211 |
+
Additional options given to the Verbatim environment (see the *fancyvrb*
|
| 212 |
+
docs for possible values) (default: ``''``).
|
| 213 |
+
|
| 214 |
+
`commandprefix`
|
| 215 |
+
The LaTeX commands used to produce colored output are constructed
|
| 216 |
+
using this prefix and some letters (default: ``'PY'``).
|
| 217 |
+
|
| 218 |
+
.. versionadded:: 0.7
|
| 219 |
+
.. versionchanged:: 0.10
|
| 220 |
+
The default is now ``'PY'`` instead of ``'C'``.
|
| 221 |
+
|
| 222 |
+
`texcomments`
|
| 223 |
+
If set to ``True``, enables LaTeX comment lines. That is, LaTex markup
|
| 224 |
+
in comment tokens is not escaped so that LaTeX can render it (default:
|
| 225 |
+
``False``).
|
| 226 |
+
|
| 227 |
+
.. versionadded:: 1.2
|
| 228 |
+
|
| 229 |
+
`mathescape`
|
| 230 |
+
If set to ``True``, enables LaTeX math mode escape in comments. That
|
| 231 |
+
is, ``'$...$'`` inside a comment will trigger math mode (default:
|
| 232 |
+
``False``).
|
| 233 |
+
|
| 234 |
+
.. versionadded:: 1.2
|
| 235 |
+
|
| 236 |
+
`escapeinside`
|
| 237 |
+
If set to a string of length 2, enables escaping to LaTeX. Text
|
| 238 |
+
delimited by these 2 characters is read as LaTeX code and
|
| 239 |
+
typeset accordingly. It has no effect in string literals. It has
|
| 240 |
+
no effect in comments if `texcomments` or `mathescape` is
|
| 241 |
+
set. (default: ``''``).
|
| 242 |
+
|
| 243 |
+
.. versionadded:: 2.0
|
| 244 |
+
|
| 245 |
+
`envname`
|
| 246 |
+
Allows you to pick an alternative environment name replacing Verbatim.
|
| 247 |
+
The alternate environment still has to support Verbatim's option syntax.
|
| 248 |
+
(default: ``'Verbatim'``).
|
| 249 |
+
|
| 250 |
+
.. versionadded:: 2.0
|
| 251 |
+
"""
|
| 252 |
+
name = 'LaTeX'
|
| 253 |
+
aliases = ['latex', 'tex']
|
| 254 |
+
filenames = ['*.tex']
|
| 255 |
+
|
| 256 |
+
def __init__(self, **options):
    """Read and normalize all LaTeX formatter options."""
    Formatter.__init__(self, **options)
    self.nowrap = get_bool_opt(options, 'nowrap', False)
    self.docclass = options.get('docclass', 'article')
    self.preamble = options.get('preamble', '')
    self.linenos = get_bool_opt(options, 'linenos', False)
    # Negative start/step values make no sense; clamp via abs().
    self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
    self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
    self.verboptions = options.get('verboptions', '')
    self.nobackground = get_bool_opt(options, 'nobackground', False)
    self.commandprefix = options.get('commandprefix', 'PY')
    self.texcomments = get_bool_opt(options, 'texcomments', False)
    self.mathescape = get_bool_opt(options, 'mathescape', False)
    # escapeinside must be exactly two characters (left and right
    # delimiters); anything else disables the feature.
    escapeinside = options.get('escapeinside', '')
    if len(escapeinside) == 2:
        self.escapeinside = escapeinside
        self.left, self.right = escapeinside
    else:
        self.escapeinside = ''
    self.envname = options.get('envname', 'Verbatim')

    self._create_stylesheet()
|
| 278 |
+
|
| 279 |
+
def _create_stylesheet(self):
    """Build the style lookup tables for this formatter's style.

    Populates ``self.ttype2name`` (token type -> short command name) and
    ``self.cmd2def`` (short name -> LaTeX replacement text for the
    corresponding ``\\<prefix>@tok@<name>`` macro).
    """
    t2n = self.ttype2name = {Token: ''}
    c2d = self.cmd2def = {}
    cp = self.commandprefix

    def rgbcolor(col):
        # Convert 'rrggbb' hex to the 'r,g,b' float triplet expected by
        # \textcolor[rgb]; a missing color falls back to white.
        if col:
            return ','.join(['%.2f' % (int(col[i] + col[i + 1], 16) / 255.0)
                             for i in (0, 2, 4)])
        else:
            return '1,1,1'

    for ttype, ndef in self.style:
        name = _get_ttype_name(ttype)
        # '$$' is a placeholder for the command prefix, substituted at
        # the end; '##1' survives .format() as the macro argument '#1'.
        cmndef = ''
        if ndef['bold']:
            cmndef += r'\let\$$@bf=\textbf'
        if ndef['italic']:
            cmndef += r'\let\$$@it=\textit'
        if ndef['underline']:
            cmndef += r'\let\$$@ul=\underline'
        if ndef['roman']:
            cmndef += r'\let\$$@ff=\textrm'
        if ndef['sans']:
            cmndef += r'\let\$$@ff=\textsf'
        if ndef['mono']:
            cmndef += r'\let\$$@ff=\textsf'
        if ndef['color']:
            cmndef += (r'\def\$$@tc##1{{\textcolor[rgb]{{{}}}{{##1}}}}'.format(rgbcolor(ndef['color'])))
        if ndef['border']:
            cmndef += (r'\def\$$@bc##1{{{{\setlength{{\fboxsep}}{{\string -\fboxrule}}'
                       r'\fcolorbox[rgb]{{{}}}{{{}}}{{\strut ##1}}}}}}'.format(rgbcolor(ndef['border']),
                                                                              rgbcolor(ndef['bgcolor'])))
        elif ndef['bgcolor']:
            cmndef += (r'\def\$$@bc##1{{{{\setlength{{\fboxsep}}{{0pt}}'
                       r'\colorbox[rgb]{{{}}}{{\strut ##1}}}}}}'.format(rgbcolor(ndef['bgcolor'])))
        # Token types with no styling at all get no macro.
        if cmndef == '':
            continue
        cmndef = cmndef.replace('$$', cp)
        t2n[ttype] = name
        c2d[name] = cmndef
|
| 320 |
+
|
| 321 |
+
def get_style_defs(self, arg=''):
|
| 322 |
+
"""
|
| 323 |
+
Return the command sequences needed to define the commands
|
| 324 |
+
used to format text in the verbatim environment. ``arg`` is ignored.
|
| 325 |
+
"""
|
| 326 |
+
cp = self.commandprefix
|
| 327 |
+
styles = []
|
| 328 |
+
for name, definition in self.cmd2def.items():
|
| 329 |
+
styles.append(rf'\@namedef{{{cp}@tok@{name}}}{{{definition}}}')
|
| 330 |
+
return STYLE_TEMPLATE % {'cp': self.commandprefix,
|
| 331 |
+
'styles': '\n'.join(styles)}
|
| 332 |
+
|
| 333 |
+
def format_unencoded(self, tokensource, outfile):
|
| 334 |
+
# TODO: add support for background colors
|
| 335 |
+
t2n = self.ttype2name
|
| 336 |
+
cp = self.commandprefix
|
| 337 |
+
|
| 338 |
+
if self.full:
|
| 339 |
+
realoutfile = outfile
|
| 340 |
+
outfile = StringIO()
|
| 341 |
+
|
| 342 |
+
if not self.nowrap:
|
| 343 |
+
outfile.write('\\begin{' + self.envname + '}[commandchars=\\\\\\{\\}')
|
| 344 |
+
if self.linenos:
|
| 345 |
+
start, step = self.linenostart, self.linenostep
|
| 346 |
+
outfile.write(',numbers=left' +
|
| 347 |
+
(start and ',firstnumber=%d' % start or '') +
|
| 348 |
+
(step and ',stepnumber=%d' % step or ''))
|
| 349 |
+
if self.mathescape or self.texcomments or self.escapeinside:
|
| 350 |
+
outfile.write(',codes={\\catcode`\\$=3\\catcode`\\^=7'
|
| 351 |
+
'\\catcode`\\_=8\\relax}')
|
| 352 |
+
if self.verboptions:
|
| 353 |
+
outfile.write(',' + self.verboptions)
|
| 354 |
+
outfile.write(']\n')
|
| 355 |
+
|
| 356 |
+
for ttype, value in tokensource:
|
| 357 |
+
if ttype in Token.Comment:
|
| 358 |
+
if self.texcomments:
|
| 359 |
+
# Try to guess comment starting lexeme and escape it ...
|
| 360 |
+
start = value[0:1]
|
| 361 |
+
for i in range(1, len(value)):
|
| 362 |
+
if start[0] != value[i]:
|
| 363 |
+
break
|
| 364 |
+
start += value[i]
|
| 365 |
+
|
| 366 |
+
value = value[len(start):]
|
| 367 |
+
start = escape_tex(start, cp)
|
| 368 |
+
|
| 369 |
+
# ... but do not escape inside comment.
|
| 370 |
+
value = start + value
|
| 371 |
+
elif self.mathescape:
|
| 372 |
+
# Only escape parts not inside a math environment.
|
| 373 |
+
parts = value.split('$')
|
| 374 |
+
in_math = False
|
| 375 |
+
for i, part in enumerate(parts):
|
| 376 |
+
if not in_math:
|
| 377 |
+
parts[i] = escape_tex(part, cp)
|
| 378 |
+
in_math = not in_math
|
| 379 |
+
value = '$'.join(parts)
|
| 380 |
+
elif self.escapeinside:
|
| 381 |
+
text = value
|
| 382 |
+
value = ''
|
| 383 |
+
while text:
|
| 384 |
+
a, sep1, text = text.partition(self.left)
|
| 385 |
+
if sep1:
|
| 386 |
+
b, sep2, text = text.partition(self.right)
|
| 387 |
+
if sep2:
|
| 388 |
+
value += escape_tex(a, cp) + b
|
| 389 |
+
else:
|
| 390 |
+
value += escape_tex(a + sep1 + b, cp)
|
| 391 |
+
else:
|
| 392 |
+
value += escape_tex(a, cp)
|
| 393 |
+
else:
|
| 394 |
+
value = escape_tex(value, cp)
|
| 395 |
+
elif ttype not in Token.Escape:
|
| 396 |
+
value = escape_tex(value, cp)
|
| 397 |
+
styles = []
|
| 398 |
+
while ttype is not Token:
|
| 399 |
+
try:
|
| 400 |
+
styles.append(t2n[ttype])
|
| 401 |
+
except KeyError:
|
| 402 |
+
# not in current style
|
| 403 |
+
styles.append(_get_ttype_name(ttype))
|
| 404 |
+
ttype = ttype.parent
|
| 405 |
+
styleval = '+'.join(reversed(styles))
|
| 406 |
+
if styleval:
|
| 407 |
+
spl = value.split('\n')
|
| 408 |
+
for line in spl[:-1]:
|
| 409 |
+
if line:
|
| 410 |
+
outfile.write(f"\\{cp}{{{styleval}}}{{{line}}}")
|
| 411 |
+
outfile.write('\n')
|
| 412 |
+
if spl[-1]:
|
| 413 |
+
outfile.write(f"\\{cp}{{{styleval}}}{{{spl[-1]}}}")
|
| 414 |
+
else:
|
| 415 |
+
outfile.write(value)
|
| 416 |
+
|
| 417 |
+
if not self.nowrap:
|
| 418 |
+
outfile.write('\\end{' + self.envname + '}\n')
|
| 419 |
+
|
| 420 |
+
if self.full:
|
| 421 |
+
encoding = self.encoding or 'utf8'
|
| 422 |
+
# map known existings encodings from LaTeX distribution
|
| 423 |
+
encoding = {
|
| 424 |
+
'utf_8': 'utf8',
|
| 425 |
+
'latin_1': 'latin1',
|
| 426 |
+
'iso_8859_1': 'latin1',
|
| 427 |
+
}.get(encoding.replace('-', '_'), encoding)
|
| 428 |
+
realoutfile.write(DOC_TEMPLATE %
|
| 429 |
+
dict(docclass = self.docclass,
|
| 430 |
+
preamble = self.preamble,
|
| 431 |
+
title = self.title,
|
| 432 |
+
encoding = encoding,
|
| 433 |
+
styledefs = self.get_style_defs(),
|
| 434 |
+
code = outfile.getvalue()))
|
| 435 |
+
|
| 436 |
+
|
| 437 |
+
class LatexEmbeddedLexer(Lexer):
|
| 438 |
+
"""
|
| 439 |
+
This lexer takes one lexer as argument, the lexer for the language
|
| 440 |
+
being formatted, and the left and right delimiters for escaped text.
|
| 441 |
+
|
| 442 |
+
First everything is scanned using the language lexer to obtain
|
| 443 |
+
strings and comments. All other consecutive tokens are merged and
|
| 444 |
+
the resulting text is scanned for escaped segments, which are given
|
| 445 |
+
the Token.Escape type. Finally text that is not escaped is scanned
|
| 446 |
+
again with the language lexer.
|
| 447 |
+
"""
|
| 448 |
+
def __init__(self, left, right, lang, **options):
|
| 449 |
+
self.left = left
|
| 450 |
+
self.right = right
|
| 451 |
+
self.lang = lang
|
| 452 |
+
Lexer.__init__(self, **options)
|
| 453 |
+
|
| 454 |
+
def get_tokens_unprocessed(self, text):
|
| 455 |
+
# find and remove all the escape tokens (replace with an empty string)
|
| 456 |
+
# this is very similar to DelegatingLexer.get_tokens_unprocessed.
|
| 457 |
+
buffered = ''
|
| 458 |
+
insertions = []
|
| 459 |
+
insertion_buf = []
|
| 460 |
+
for i, t, v in self._find_safe_escape_tokens(text):
|
| 461 |
+
if t is None:
|
| 462 |
+
if insertion_buf:
|
| 463 |
+
insertions.append((len(buffered), insertion_buf))
|
| 464 |
+
insertion_buf = []
|
| 465 |
+
buffered += v
|
| 466 |
+
else:
|
| 467 |
+
insertion_buf.append((i, t, v))
|
| 468 |
+
if insertion_buf:
|
| 469 |
+
insertions.append((len(buffered), insertion_buf))
|
| 470 |
+
return do_insertions(insertions,
|
| 471 |
+
self.lang.get_tokens_unprocessed(buffered))
|
| 472 |
+
|
| 473 |
+
def _find_safe_escape_tokens(self, text):
|
| 474 |
+
""" find escape tokens that are not in strings or comments """
|
| 475 |
+
for i, t, v in self._filter_to(
|
| 476 |
+
self.lang.get_tokens_unprocessed(text),
|
| 477 |
+
lambda t: t in Token.Comment or t in Token.String
|
| 478 |
+
):
|
| 479 |
+
if t is None:
|
| 480 |
+
for i2, t2, v2 in self._find_escape_tokens(v):
|
| 481 |
+
yield i + i2, t2, v2
|
| 482 |
+
else:
|
| 483 |
+
yield i, None, v
|
| 484 |
+
|
| 485 |
+
def _filter_to(self, it, pred):
|
| 486 |
+
""" Keep only the tokens that match `pred`, merge the others together """
|
| 487 |
+
buf = ''
|
| 488 |
+
idx = 0
|
| 489 |
+
for i, t, v in it:
|
| 490 |
+
if pred(t):
|
| 491 |
+
if buf:
|
| 492 |
+
yield idx, None, buf
|
| 493 |
+
buf = ''
|
| 494 |
+
yield i, t, v
|
| 495 |
+
else:
|
| 496 |
+
if not buf:
|
| 497 |
+
idx = i
|
| 498 |
+
buf += v
|
| 499 |
+
if buf:
|
| 500 |
+
yield idx, None, buf
|
| 501 |
+
|
| 502 |
+
def _find_escape_tokens(self, text):
|
| 503 |
+
""" Find escape tokens within text, give token=None otherwise """
|
| 504 |
+
index = 0
|
| 505 |
+
while text:
|
| 506 |
+
a, sep1, text = text.partition(self.left)
|
| 507 |
+
if a:
|
| 508 |
+
yield index, None, a
|
| 509 |
+
index += len(a)
|
| 510 |
+
if sep1:
|
| 511 |
+
b, sep2, text = text.partition(self.right)
|
| 512 |
+
if sep2:
|
| 513 |
+
yield index + len(sep1), Token.Escape, b
|
| 514 |
+
index += len(sep1) + len(b) + len(sep2)
|
| 515 |
+
else:
|
| 516 |
+
yield index, Token.Error, sep1
|
| 517 |
+
index += len(sep1)
|
| 518 |
+
text = b
|
evalkit_tf437/lib/python3.10/site-packages/pygments/formatters/svg.py
ADDED
|
@@ -0,0 +1,185 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.formatters.svg
|
| 3 |
+
~~~~~~~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Formatter for SVG output.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from pygments.formatter import Formatter
|
| 12 |
+
from pygments.token import Comment
|
| 13 |
+
from pygments.util import get_bool_opt, get_int_opt
|
| 14 |
+
|
| 15 |
+
__all__ = ['SvgFormatter']
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def escape_html(text):
|
| 19 |
+
"""Escape &, <, > as well as single and double quotes for HTML."""
|
| 20 |
+
return text.replace('&', '&'). \
|
| 21 |
+
replace('<', '<'). \
|
| 22 |
+
replace('>', '>'). \
|
| 23 |
+
replace('"', '"'). \
|
| 24 |
+
replace("'", ''')
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class2style = {}
|
| 28 |
+
|
| 29 |
+
class SvgFormatter(Formatter):
|
| 30 |
+
"""
|
| 31 |
+
Format tokens as an SVG graphics file. This formatter is still experimental.
|
| 32 |
+
Each line of code is a ``<text>`` element with explicit ``x`` and ``y``
|
| 33 |
+
coordinates containing ``<tspan>`` elements with the individual token styles.
|
| 34 |
+
|
| 35 |
+
By default, this formatter outputs a full SVG document including doctype
|
| 36 |
+
declaration and the ``<svg>`` root element.
|
| 37 |
+
|
| 38 |
+
.. versionadded:: 0.9
|
| 39 |
+
|
| 40 |
+
Additional options accepted:
|
| 41 |
+
|
| 42 |
+
`nowrap`
|
| 43 |
+
Don't wrap the SVG ``<text>`` elements in ``<svg><g>`` elements and
|
| 44 |
+
don't add a XML declaration and a doctype. If true, the `fontfamily`
|
| 45 |
+
and `fontsize` options are ignored. Defaults to ``False``.
|
| 46 |
+
|
| 47 |
+
`fontfamily`
|
| 48 |
+
The value to give the wrapping ``<g>`` element's ``font-family``
|
| 49 |
+
attribute, defaults to ``"monospace"``.
|
| 50 |
+
|
| 51 |
+
`fontsize`
|
| 52 |
+
The value to give the wrapping ``<g>`` element's ``font-size``
|
| 53 |
+
attribute, defaults to ``"14px"``.
|
| 54 |
+
|
| 55 |
+
`linenos`
|
| 56 |
+
If ``True``, add line numbers (default: ``False``).
|
| 57 |
+
|
| 58 |
+
`linenostart`
|
| 59 |
+
The line number for the first line (default: ``1``).
|
| 60 |
+
|
| 61 |
+
`linenostep`
|
| 62 |
+
If set to a number n > 1, only every nth line number is printed.
|
| 63 |
+
|
| 64 |
+
`linenowidth`
|
| 65 |
+
Maximum width devoted to line numbers (default: ``3*ystep``, sufficient
|
| 66 |
+
for up to 4-digit line numbers. Increase width for longer code blocks).
|
| 67 |
+
|
| 68 |
+
`xoffset`
|
| 69 |
+
Starting offset in X direction, defaults to ``0``.
|
| 70 |
+
|
| 71 |
+
`yoffset`
|
| 72 |
+
Starting offset in Y direction, defaults to the font size if it is given
|
| 73 |
+
in pixels, or ``20`` else. (This is necessary since text coordinates
|
| 74 |
+
refer to the text baseline, not the top edge.)
|
| 75 |
+
|
| 76 |
+
`ystep`
|
| 77 |
+
Offset to add to the Y coordinate for each subsequent line. This should
|
| 78 |
+
roughly be the text size plus 5. It defaults to that value if the text
|
| 79 |
+
size is given in pixels, or ``25`` else.
|
| 80 |
+
|
| 81 |
+
`spacehack`
|
| 82 |
+
Convert spaces in the source to `` ``, which are non-breaking
|
| 83 |
+
spaces. SVG provides the ``xml:space`` attribute to control how
|
| 84 |
+
whitespace inside tags is handled, in theory, the ``preserve`` value
|
| 85 |
+
could be used to keep all whitespace as-is. However, many current SVG
|
| 86 |
+
viewers don't obey that rule, so this option is provided as a workaround
|
| 87 |
+
and defaults to ``True``.
|
| 88 |
+
"""
|
| 89 |
+
name = 'SVG'
|
| 90 |
+
aliases = ['svg']
|
| 91 |
+
filenames = ['*.svg']
|
| 92 |
+
|
| 93 |
+
def __init__(self, **options):
|
| 94 |
+
Formatter.__init__(self, **options)
|
| 95 |
+
self.nowrap = get_bool_opt(options, 'nowrap', False)
|
| 96 |
+
self.fontfamily = options.get('fontfamily', 'monospace')
|
| 97 |
+
self.fontsize = options.get('fontsize', '14px')
|
| 98 |
+
self.xoffset = get_int_opt(options, 'xoffset', 0)
|
| 99 |
+
fs = self.fontsize.strip()
|
| 100 |
+
if fs.endswith('px'):
|
| 101 |
+
fs = fs[:-2].strip()
|
| 102 |
+
try:
|
| 103 |
+
int_fs = int(fs)
|
| 104 |
+
except ValueError:
|
| 105 |
+
int_fs = 20
|
| 106 |
+
self.yoffset = get_int_opt(options, 'yoffset', int_fs)
|
| 107 |
+
self.ystep = get_int_opt(options, 'ystep', int_fs + 5)
|
| 108 |
+
self.spacehack = get_bool_opt(options, 'spacehack', True)
|
| 109 |
+
self.linenos = get_bool_opt(options,'linenos',False)
|
| 110 |
+
self.linenostart = get_int_opt(options,'linenostart',1)
|
| 111 |
+
self.linenostep = get_int_opt(options,'linenostep',1)
|
| 112 |
+
self.linenowidth = get_int_opt(options,'linenowidth', 3*self.ystep)
|
| 113 |
+
self._stylecache = {}
|
| 114 |
+
|
| 115 |
+
def format_unencoded(self, tokensource, outfile):
|
| 116 |
+
"""
|
| 117 |
+
Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
|
| 118 |
+
tuples and write it into ``outfile``.
|
| 119 |
+
|
| 120 |
+
For our implementation we put all lines in their own 'line group'.
|
| 121 |
+
"""
|
| 122 |
+
x = self.xoffset
|
| 123 |
+
y = self.yoffset
|
| 124 |
+
if not self.nowrap:
|
| 125 |
+
if self.encoding:
|
| 126 |
+
outfile.write(f'<?xml version="1.0" encoding="{self.encoding}"?>\n')
|
| 127 |
+
else:
|
| 128 |
+
outfile.write('<?xml version="1.0"?>\n')
|
| 129 |
+
outfile.write('<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" '
|
| 130 |
+
'"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/'
|
| 131 |
+
'svg10.dtd">\n')
|
| 132 |
+
outfile.write('<svg xmlns="http://www.w3.org/2000/svg">\n')
|
| 133 |
+
outfile.write(f'<g font-family="{self.fontfamily}" font-size="{self.fontsize}">\n')
|
| 134 |
+
|
| 135 |
+
counter = self.linenostart
|
| 136 |
+
counter_step = self.linenostep
|
| 137 |
+
counter_style = self._get_style(Comment)
|
| 138 |
+
line_x = x
|
| 139 |
+
|
| 140 |
+
if self.linenos:
|
| 141 |
+
if counter % counter_step == 0:
|
| 142 |
+
outfile.write(f'<text x="{x+self.linenowidth}" y="{y}" {counter_style} text-anchor="end">{counter}</text>')
|
| 143 |
+
line_x += self.linenowidth + self.ystep
|
| 144 |
+
counter += 1
|
| 145 |
+
|
| 146 |
+
outfile.write(f'<text x="{line_x}" y="{y}" xml:space="preserve">')
|
| 147 |
+
for ttype, value in tokensource:
|
| 148 |
+
style = self._get_style(ttype)
|
| 149 |
+
tspan = style and '<tspan' + style + '>' or ''
|
| 150 |
+
tspanend = tspan and '</tspan>' or ''
|
| 151 |
+
value = escape_html(value)
|
| 152 |
+
if self.spacehack:
|
| 153 |
+
value = value.expandtabs().replace(' ', ' ')
|
| 154 |
+
parts = value.split('\n')
|
| 155 |
+
for part in parts[:-1]:
|
| 156 |
+
outfile.write(tspan + part + tspanend)
|
| 157 |
+
y += self.ystep
|
| 158 |
+
outfile.write('</text>\n')
|
| 159 |
+
if self.linenos and counter % counter_step == 0:
|
| 160 |
+
outfile.write(f'<text x="{x+self.linenowidth}" y="{y}" text-anchor="end" {counter_style}>{counter}</text>')
|
| 161 |
+
|
| 162 |
+
counter += 1
|
| 163 |
+
outfile.write(f'<text x="{line_x}" y="{y}" ' 'xml:space="preserve">')
|
| 164 |
+
outfile.write(tspan + parts[-1] + tspanend)
|
| 165 |
+
outfile.write('</text>')
|
| 166 |
+
|
| 167 |
+
if not self.nowrap:
|
| 168 |
+
outfile.write('</g></svg>\n')
|
| 169 |
+
|
| 170 |
+
def _get_style(self, tokentype):
|
| 171 |
+
if tokentype in self._stylecache:
|
| 172 |
+
return self._stylecache[tokentype]
|
| 173 |
+
otokentype = tokentype
|
| 174 |
+
while not self.style.styles_token(tokentype):
|
| 175 |
+
tokentype = tokentype.parent
|
| 176 |
+
value = self.style.style_for_token(tokentype)
|
| 177 |
+
result = ''
|
| 178 |
+
if value['color']:
|
| 179 |
+
result = ' fill="#' + value['color'] + '"'
|
| 180 |
+
if value['bold']:
|
| 181 |
+
result += ' font-weight="bold"'
|
| 182 |
+
if value['italic']:
|
| 183 |
+
result += ' font-style="italic"'
|
| 184 |
+
self._stylecache[otokentype] = result
|
| 185 |
+
return result
|
evalkit_tf437/lib/python3.10/site-packages/pygments/formatters/terminal.py
ADDED
|
@@ -0,0 +1,127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.formatters.terminal
|
| 3 |
+
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Formatter for terminal output with ANSI sequences.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from pygments.formatter import Formatter
|
| 12 |
+
from pygments.token import Keyword, Name, Comment, String, Error, \
|
| 13 |
+
Number, Operator, Generic, Token, Whitespace
|
| 14 |
+
from pygments.console import ansiformat
|
| 15 |
+
from pygments.util import get_choice_opt
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
__all__ = ['TerminalFormatter']
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
#: Map token types to a tuple of color values for light and dark
|
| 22 |
+
#: backgrounds.
|
| 23 |
+
TERMINAL_COLORS = {
|
| 24 |
+
Token: ('', ''),
|
| 25 |
+
|
| 26 |
+
Whitespace: ('gray', 'brightblack'),
|
| 27 |
+
Comment: ('gray', 'brightblack'),
|
| 28 |
+
Comment.Preproc: ('cyan', 'brightcyan'),
|
| 29 |
+
Keyword: ('blue', 'brightblue'),
|
| 30 |
+
Keyword.Type: ('cyan', 'brightcyan'),
|
| 31 |
+
Operator.Word: ('magenta', 'brightmagenta'),
|
| 32 |
+
Name.Builtin: ('cyan', 'brightcyan'),
|
| 33 |
+
Name.Function: ('green', 'brightgreen'),
|
| 34 |
+
Name.Namespace: ('_cyan_', '_brightcyan_'),
|
| 35 |
+
Name.Class: ('_green_', '_brightgreen_'),
|
| 36 |
+
Name.Exception: ('cyan', 'brightcyan'),
|
| 37 |
+
Name.Decorator: ('brightblack', 'gray'),
|
| 38 |
+
Name.Variable: ('red', 'brightred'),
|
| 39 |
+
Name.Constant: ('red', 'brightred'),
|
| 40 |
+
Name.Attribute: ('cyan', 'brightcyan'),
|
| 41 |
+
Name.Tag: ('brightblue', 'brightblue'),
|
| 42 |
+
String: ('yellow', 'yellow'),
|
| 43 |
+
Number: ('blue', 'brightblue'),
|
| 44 |
+
|
| 45 |
+
Generic.Deleted: ('brightred', 'brightred'),
|
| 46 |
+
Generic.Inserted: ('green', 'brightgreen'),
|
| 47 |
+
Generic.Heading: ('**', '**'),
|
| 48 |
+
Generic.Subheading: ('*magenta*', '*brightmagenta*'),
|
| 49 |
+
Generic.Prompt: ('**', '**'),
|
| 50 |
+
Generic.Error: ('brightred', 'brightred'),
|
| 51 |
+
|
| 52 |
+
Error: ('_brightred_', '_brightred_'),
|
| 53 |
+
}
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
class TerminalFormatter(Formatter):
|
| 57 |
+
r"""
|
| 58 |
+
Format tokens with ANSI color sequences, for output in a text console.
|
| 59 |
+
Color sequences are terminated at newlines, so that paging the output
|
| 60 |
+
works correctly.
|
| 61 |
+
|
| 62 |
+
The `get_style_defs()` method doesn't do anything special since there is
|
| 63 |
+
no support for common styles.
|
| 64 |
+
|
| 65 |
+
Options accepted:
|
| 66 |
+
|
| 67 |
+
`bg`
|
| 68 |
+
Set to ``"light"`` or ``"dark"`` depending on the terminal's background
|
| 69 |
+
(default: ``"light"``).
|
| 70 |
+
|
| 71 |
+
`colorscheme`
|
| 72 |
+
A dictionary mapping token types to (lightbg, darkbg) color names or
|
| 73 |
+
``None`` (default: ``None`` = use builtin colorscheme).
|
| 74 |
+
|
| 75 |
+
`linenos`
|
| 76 |
+
Set to ``True`` to have line numbers on the terminal output as well
|
| 77 |
+
(default: ``False`` = no line numbers).
|
| 78 |
+
"""
|
| 79 |
+
name = 'Terminal'
|
| 80 |
+
aliases = ['terminal', 'console']
|
| 81 |
+
filenames = []
|
| 82 |
+
|
| 83 |
+
def __init__(self, **options):
|
| 84 |
+
Formatter.__init__(self, **options)
|
| 85 |
+
self.darkbg = get_choice_opt(options, 'bg',
|
| 86 |
+
['light', 'dark'], 'light') == 'dark'
|
| 87 |
+
self.colorscheme = options.get('colorscheme', None) or TERMINAL_COLORS
|
| 88 |
+
self.linenos = options.get('linenos', False)
|
| 89 |
+
self._lineno = 0
|
| 90 |
+
|
| 91 |
+
def format(self, tokensource, outfile):
|
| 92 |
+
return Formatter.format(self, tokensource, outfile)
|
| 93 |
+
|
| 94 |
+
def _write_lineno(self, outfile):
|
| 95 |
+
self._lineno += 1
|
| 96 |
+
outfile.write("%s%04d: " % (self._lineno != 1 and '\n' or '', self._lineno))
|
| 97 |
+
|
| 98 |
+
def _get_color(self, ttype):
|
| 99 |
+
# self.colorscheme is a dict containing usually generic types, so we
|
| 100 |
+
# have to walk the tree of dots. The base Token type must be a key,
|
| 101 |
+
# even if it's empty string, as in the default above.
|
| 102 |
+
colors = self.colorscheme.get(ttype)
|
| 103 |
+
while colors is None:
|
| 104 |
+
ttype = ttype.parent
|
| 105 |
+
colors = self.colorscheme.get(ttype)
|
| 106 |
+
return colors[self.darkbg]
|
| 107 |
+
|
| 108 |
+
def format_unencoded(self, tokensource, outfile):
|
| 109 |
+
if self.linenos:
|
| 110 |
+
self._write_lineno(outfile)
|
| 111 |
+
|
| 112 |
+
for ttype, value in tokensource:
|
| 113 |
+
color = self._get_color(ttype)
|
| 114 |
+
|
| 115 |
+
for line in value.splitlines(True):
|
| 116 |
+
if color:
|
| 117 |
+
outfile.write(ansiformat(color, line.rstrip('\n')))
|
| 118 |
+
else:
|
| 119 |
+
outfile.write(line.rstrip('\n'))
|
| 120 |
+
if line.endswith('\n'):
|
| 121 |
+
if self.linenos:
|
| 122 |
+
self._write_lineno(outfile)
|
| 123 |
+
else:
|
| 124 |
+
outfile.write('\n')
|
| 125 |
+
|
| 126 |
+
if self.linenos:
|
| 127 |
+
outfile.write("\n")
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexer.py
ADDED
|
@@ -0,0 +1,961 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.lexer
|
| 3 |
+
~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Base lexer classes.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import re
|
| 12 |
+
import sys
|
| 13 |
+
import time
|
| 14 |
+
|
| 15 |
+
from pygments.filter import apply_filters, Filter
|
| 16 |
+
from pygments.filters import get_filter_by_name
|
| 17 |
+
from pygments.token import Error, Text, Other, Whitespace, _TokenType
|
| 18 |
+
from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
|
| 19 |
+
make_analysator, Future, guess_decode
|
| 20 |
+
from pygments.regexopt import regex_opt
|
| 21 |
+
|
| 22 |
+
__all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer',
|
| 23 |
+
'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this',
|
| 24 |
+
'default', 'words', 'line_re']
|
| 25 |
+
|
| 26 |
+
line_re = re.compile('.*?\n')
|
| 27 |
+
|
| 28 |
+
_encoding_map = [(b'\xef\xbb\xbf', 'utf-8'),
|
| 29 |
+
(b'\xff\xfe\0\0', 'utf-32'),
|
| 30 |
+
(b'\0\0\xfe\xff', 'utf-32be'),
|
| 31 |
+
(b'\xff\xfe', 'utf-16'),
|
| 32 |
+
(b'\xfe\xff', 'utf-16be')]
|
| 33 |
+
|
| 34 |
+
_default_analyse = staticmethod(lambda x: 0.0)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class LexerMeta(type):
|
| 38 |
+
"""
|
| 39 |
+
This metaclass automagically converts ``analyse_text`` methods into
|
| 40 |
+
static methods which always return float values.
|
| 41 |
+
"""
|
| 42 |
+
|
| 43 |
+
def __new__(mcs, name, bases, d):
|
| 44 |
+
if 'analyse_text' in d:
|
| 45 |
+
d['analyse_text'] = make_analysator(d['analyse_text'])
|
| 46 |
+
return type.__new__(mcs, name, bases, d)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class Lexer(metaclass=LexerMeta):
|
| 50 |
+
"""
|
| 51 |
+
Lexer for a specific language.
|
| 52 |
+
|
| 53 |
+
See also :doc:`lexerdevelopment`, a high-level guide to writing
|
| 54 |
+
lexers.
|
| 55 |
+
|
| 56 |
+
Lexer classes have attributes used for choosing the most appropriate
|
| 57 |
+
lexer based on various criteria.
|
| 58 |
+
|
| 59 |
+
.. autoattribute:: name
|
| 60 |
+
:no-value:
|
| 61 |
+
.. autoattribute:: aliases
|
| 62 |
+
:no-value:
|
| 63 |
+
.. autoattribute:: filenames
|
| 64 |
+
:no-value:
|
| 65 |
+
.. autoattribute:: alias_filenames
|
| 66 |
+
.. autoattribute:: mimetypes
|
| 67 |
+
:no-value:
|
| 68 |
+
.. autoattribute:: priority
|
| 69 |
+
|
| 70 |
+
Lexers included in Pygments should have two additional attributes:
|
| 71 |
+
|
| 72 |
+
.. autoattribute:: url
|
| 73 |
+
:no-value:
|
| 74 |
+
.. autoattribute:: version_added
|
| 75 |
+
:no-value:
|
| 76 |
+
|
| 77 |
+
Lexers included in Pygments may have additional attributes:
|
| 78 |
+
|
| 79 |
+
.. autoattribute:: _example
|
| 80 |
+
:no-value:
|
| 81 |
+
|
| 82 |
+
You can pass options to the constructor. The basic options recognized
|
| 83 |
+
by all lexers and processed by the base `Lexer` class are:
|
| 84 |
+
|
| 85 |
+
``stripnl``
|
| 86 |
+
Strip leading and trailing newlines from the input (default: True).
|
| 87 |
+
``stripall``
|
| 88 |
+
Strip all leading and trailing whitespace from the input
|
| 89 |
+
(default: False).
|
| 90 |
+
``ensurenl``
|
| 91 |
+
Make sure that the input ends with a newline (default: True). This
|
| 92 |
+
is required for some lexers that consume input linewise.
|
| 93 |
+
|
| 94 |
+
.. versionadded:: 1.3
|
| 95 |
+
|
| 96 |
+
``tabsize``
|
| 97 |
+
If given and greater than 0, expand tabs in the input (default: 0).
|
| 98 |
+
``encoding``
|
| 99 |
+
If given, must be an encoding name. This encoding will be used to
|
| 100 |
+
convert the input string to Unicode, if it is not already a Unicode
|
| 101 |
+
string (default: ``'guess'``, which uses a simple UTF-8 / Locale /
|
| 102 |
+
Latin1 detection. Can also be ``'chardet'`` to use the chardet
|
| 103 |
+
library, if it is installed.
|
| 104 |
+
``inencoding``
|
| 105 |
+
Overrides the ``encoding`` if given.
|
| 106 |
+
"""
|
| 107 |
+
|
| 108 |
+
#: Full name of the lexer, in human-readable form
|
| 109 |
+
name = None
|
| 110 |
+
|
| 111 |
+
#: A list of short, unique identifiers that can be used to look
|
| 112 |
+
#: up the lexer from a list, e.g., using `get_lexer_by_name()`.
|
| 113 |
+
aliases = []
|
| 114 |
+
|
| 115 |
+
#: A list of `fnmatch` patterns that match filenames which contain
|
| 116 |
+
#: content for this lexer. The patterns in this list should be unique among
|
| 117 |
+
#: all lexers.
|
| 118 |
+
filenames = []
|
| 119 |
+
|
| 120 |
+
#: A list of `fnmatch` patterns that match filenames which may or may not
|
| 121 |
+
#: contain content for this lexer. This list is used by the
|
| 122 |
+
#: :func:`.guess_lexer_for_filename()` function, to determine which lexers
|
| 123 |
+
#: are then included in guessing the correct one. That means that
|
| 124 |
+
#: e.g. every lexer for HTML and a template language should include
|
| 125 |
+
#: ``\*.html`` in this list.
|
| 126 |
+
alias_filenames = []
|
| 127 |
+
|
| 128 |
+
#: A list of MIME types for content that can be lexed with this lexer.
|
| 129 |
+
mimetypes = []
|
| 130 |
+
|
| 131 |
+
#: Priority, should multiple lexers match and no content is provided
|
| 132 |
+
priority = 0
|
| 133 |
+
|
| 134 |
+
#: URL of the language specification/definition. Used in the Pygments
|
| 135 |
+
#: documentation. Set to an empty string to disable.
|
| 136 |
+
url = None
|
| 137 |
+
|
| 138 |
+
#: Version of Pygments in which the lexer was added.
|
| 139 |
+
version_added = None
|
| 140 |
+
|
| 141 |
+
#: Example file name. Relative to the ``tests/examplefiles`` directory.
|
| 142 |
+
#: This is used by the documentation generator to show an example.
|
| 143 |
+
_example = None
|
| 144 |
+
|
| 145 |
+
def __init__(self, **options):
    """
    This constructor takes arbitrary options as keyword arguments.
    Every subclass must first process its own options and then call
    the `Lexer` constructor, since it processes the basic
    options like `stripnl`.

    An example looks like this:

    .. sourcecode:: python

       def __init__(self, **options):
           self.compress = options.get('compress', '')
           Lexer.__init__(self, **options)

    As these options must all be specifiable as strings (due to the
    command line usage), there are various utility functions
    available to help with that, see `Utilities`_.
    """
    self.options = options
    # Options understood by every lexer.
    self.stripnl = get_bool_opt(options, 'stripnl', True)
    self.stripall = get_bool_opt(options, 'stripall', False)
    self.ensurenl = get_bool_opt(options, 'ensurenl', True)
    self.tabsize = get_int_opt(options, 'tabsize', 0)
    # 'inencoding', when given, overrides 'encoding'.
    self.encoding = options.get('encoding', 'guess')
    self.encoding = options.get('inencoding') or self.encoding
    self.filters = []
    for flt in get_list_opt(options, 'filters', ()):
        self.add_filter(flt)
|
| 174 |
+
|
| 175 |
+
def __repr__(self):
|
| 176 |
+
if self.options:
|
| 177 |
+
return f'<pygments.lexers.{self.__class__.__name__} with {self.options!r}>'
|
| 178 |
+
else:
|
| 179 |
+
return f'<pygments.lexers.{self.__class__.__name__}>'
|
| 180 |
+
|
| 181 |
+
def add_filter(self, filter_, **options):
    """
    Add a new stream filter to this lexer.
    """
    # Accept either a ready Filter instance or a filter name to resolve.
    if not isinstance(filter_, Filter):
        filter_ = get_filter_by_name(filter_, **options)
    self.filters.append(filter_)
|
| 188 |
+
|
| 189 |
+
def analyse_text(text):
    """
    A static method which is called for lexer guessing.

    It should analyse the text and return a float in the range
    from ``0.0`` to ``1.0``. If it returns ``0.0``, the lexer
    will not be selected as the most probable one, if it returns
    ``1.0``, it will be selected immediately. This is used by
    `guess_lexer`.

    The `LexerMeta` metaclass automatically wraps this function so
    that it works like a static method (no ``self`` or ``cls``
    parameter) and the return value is automatically converted to
    `float`. If the return value is an object that is boolean `False`
    it's the same as if the return values was ``0.0``.
    """
|
| 205 |
+
|
| 206 |
+
def _preprocess_lexer_input(self, text):
|
| 207 |
+
"""Apply preprocessing such as decoding the input, removing BOM and normalizing newlines."""
|
| 208 |
+
|
| 209 |
+
if not isinstance(text, str):
|
| 210 |
+
if self.encoding == 'guess':
|
| 211 |
+
text, _ = guess_decode(text)
|
| 212 |
+
elif self.encoding == 'chardet':
|
| 213 |
+
try:
|
| 214 |
+
import chardet
|
| 215 |
+
except ImportError as e:
|
| 216 |
+
raise ImportError('To enable chardet encoding guessing, '
|
| 217 |
+
'please install the chardet library '
|
| 218 |
+
'from http://chardet.feedparser.org/') from e
|
| 219 |
+
# check for BOM first
|
| 220 |
+
decoded = None
|
| 221 |
+
for bom, encoding in _encoding_map:
|
| 222 |
+
if text.startswith(bom):
|
| 223 |
+
decoded = text[len(bom):].decode(encoding, 'replace')
|
| 224 |
+
break
|
| 225 |
+
# no BOM found, so use chardet
|
| 226 |
+
if decoded is None:
|
| 227 |
+
enc = chardet.detect(text[:1024]) # Guess using first 1KB
|
| 228 |
+
decoded = text.decode(enc.get('encoding') or 'utf-8',
|
| 229 |
+
'replace')
|
| 230 |
+
text = decoded
|
| 231 |
+
else:
|
| 232 |
+
text = text.decode(self.encoding)
|
| 233 |
+
if text.startswith('\ufeff'):
|
| 234 |
+
text = text[len('\ufeff'):]
|
| 235 |
+
else:
|
| 236 |
+
if text.startswith('\ufeff'):
|
| 237 |
+
text = text[len('\ufeff'):]
|
| 238 |
+
|
| 239 |
+
# text now *is* a unicode string
|
| 240 |
+
text = text.replace('\r\n', '\n')
|
| 241 |
+
text = text.replace('\r', '\n')
|
| 242 |
+
if self.stripall:
|
| 243 |
+
text = text.strip()
|
| 244 |
+
elif self.stripnl:
|
| 245 |
+
text = text.strip('\n')
|
| 246 |
+
if self.tabsize > 0:
|
| 247 |
+
text = text.expandtabs(self.tabsize)
|
| 248 |
+
if self.ensurenl and not text.endswith('\n'):
|
| 249 |
+
text += '\n'
|
| 250 |
+
|
| 251 |
+
return text
|
| 252 |
+
|
| 253 |
+
def get_tokens(self, text, unfiltered=False):
    """
    This method is the basic interface of a lexer. It is called by
    the `highlight()` function. It must process the text and return an
    iterable of ``(tokentype, value)`` pairs from `text`.

    Normally, you don't need to override this method. The default
    implementation processes the options recognized by all lexers
    (`stripnl`, `stripall` and so on), and then yields all tokens
    from `get_tokens_unprocessed()`, with the ``index`` dropped.

    If `unfiltered` is set to `True`, the filtering mechanism is
    bypassed even if filters are defined.
    """
    text = self._preprocess_lexer_input(text)

    def token_stream():
        # Drop the index component of each unprocessed token.
        for _, ttype, value in self.get_tokens_unprocessed(text):
            yield ttype, value

    stream = token_stream()
    if not unfiltered:
        stream = apply_filters(stream, self.filters, self)
    return stream
|
| 276 |
+
|
| 277 |
+
def get_tokens_unprocessed(self, text):
    """
    This method should process the text and return an iterable of
    ``(index, tokentype, value)`` tuples where ``index`` is the starting
    position of the token within the input text.

    It must be overridden by subclasses. It is recommended to
    implement it as a generator to maximize effectiveness.
    """
    # Abstract by design: concrete lexers must supply the tokenizer.
    raise NotImplementedError
|
| 287 |
+
|
| 288 |
+
|
| 289 |
+
class DelegatingLexer(Lexer):
    """
    This lexer takes two lexer as arguments. A root lexer and
    a language lexer. First everything is scanned using the language
    lexer, afterwards all ``Other`` tokens are lexed using the root
    lexer.

    The lexers from the ``template`` lexer package use this base lexer.
    """

    def __init__(self, _root_lexer, _language_lexer, _needle=Other, **options):
        self.root_lexer = _root_lexer(**options)
        self.language_lexer = _language_lexer(**options)
        self.needle = _needle
        Lexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        # Gather all "needle" text (usually ``Other``) into one buffer for
        # the root lexer, recording where the language-lexer token runs
        # must be re-inserted afterwards.
        buffered = ''
        insertions = []
        lng_buffer = []
        for i, t, v in self.language_lexer.get_tokens_unprocessed(text):
            if t is self.needle:
                if lng_buffer:
                    insertions.append((len(buffered), lng_buffer))
                    lng_buffer = []
                buffered += v
            else:
                lng_buffer.append((i, t, v))
        if lng_buffer:
            insertions.append((len(buffered), lng_buffer))
        return do_insertions(insertions,
                             self.root_lexer.get_tokens_unprocessed(buffered))
|
| 321 |
+
|
| 322 |
+
|
| 323 |
+
# ------------------------------------------------------------------------------
|
| 324 |
+
# RegexLexer and ExtendedRegexLexer
|
| 325 |
+
#
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
class include(str):  # pylint: disable=invalid-name
    """
    Indicates that a state should include rules from another state.
    """
    pass
|
| 333 |
+
|
| 334 |
+
|
| 335 |
+
class _inherit:
|
| 336 |
+
"""
|
| 337 |
+
Indicates the a state should inherit from its superclass.
|
| 338 |
+
"""
|
| 339 |
+
def __repr__(self):
|
| 340 |
+
return 'inherit'
|
| 341 |
+
|
| 342 |
+
inherit = _inherit() # pylint: disable=invalid-name
|
| 343 |
+
|
| 344 |
+
|
| 345 |
+
class combined(tuple):  # pylint: disable=invalid-name
    """
    Indicates a state combined from multiple states.
    """

    def __new__(cls, *args):
        # Pack the positional arguments themselves into the tuple.
        return tuple.__new__(cls, args)

    def __init__(self, *args):
        # tuple.__init__ doesn't do anything
        pass
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
class _PseudoMatch:
|
| 359 |
+
"""
|
| 360 |
+
A pseudo match object constructed from a string.
|
| 361 |
+
"""
|
| 362 |
+
|
| 363 |
+
def __init__(self, start, text):
|
| 364 |
+
self._text = text
|
| 365 |
+
self._start = start
|
| 366 |
+
|
| 367 |
+
def start(self, arg=None):
|
| 368 |
+
return self._start
|
| 369 |
+
|
| 370 |
+
def end(self, arg=None):
|
| 371 |
+
return self._start + len(self._text)
|
| 372 |
+
|
| 373 |
+
def group(self, arg=None):
|
| 374 |
+
if arg:
|
| 375 |
+
raise IndexError('No such group')
|
| 376 |
+
return self._text
|
| 377 |
+
|
| 378 |
+
def groups(self):
|
| 379 |
+
return (self._text,)
|
| 380 |
+
|
| 381 |
+
def groupdict(self):
|
| 382 |
+
return {}
|
| 383 |
+
|
| 384 |
+
|
| 385 |
+
def bygroups(*args):
    """
    Callback that yields multiple actions for each group in the match.
    """
    def callback(lexer, match, ctx=None):
        for i, action in enumerate(args):
            if action is None:
                continue
            elif type(action) is _TokenType:
                # Plain token type: emit the group text directly
                # (skipping empty groups).
                data = match.group(i + 1)
                if data:
                    yield match.start(i + 1), action, data
            else:
                # Callable action: delegate, feeding it a pseudo-match
                # scoped to this group.
                data = match.group(i + 1)
                if data is not None:
                    if ctx:
                        ctx.pos = match.start(i + 1)
                    for item in action(lexer,
                                       _PseudoMatch(match.start(i + 1), data), ctx):
                        if item:
                            yield item
        if ctx:
            ctx.pos = match.end()
    return callback
|
| 409 |
+
|
| 410 |
+
|
| 411 |
+
class _This:
|
| 412 |
+
"""
|
| 413 |
+
Special singleton used for indicating the caller class.
|
| 414 |
+
Used by ``using``.
|
| 415 |
+
"""
|
| 416 |
+
|
| 417 |
+
this = _This()
|
| 418 |
+
|
| 419 |
+
|
| 420 |
+
def using(_other, **kwargs):
    """
    Callback that processes the match with a different lexer.

    The keyword arguments are forwarded to the lexer, except `state` which
    is handled separately.

    `state` specifies the state that the new lexer will start in, and can
    be an enumerable such as ('root', 'inline', 'string') or a simple
    string which is assumed to be on top of the root state.

    Note: For that to work, `_other` must not be an `ExtendedRegexLexer`.
    """
    gt_kwargs = {}
    if 'state' in kwargs:
        s = kwargs.pop('state')
        # A sequence is used as the full stack; a bare string sits on root.
        if isinstance(s, (list, tuple)):
            gt_kwargs['stack'] = s
        else:
            gt_kwargs['stack'] = ('root', s)

    if _other is this:
        def callback(lexer, match, ctx=None):
            # If keyword arguments are given, the callback has to create
            # a new lexer instance; otherwise the current one is reused.
            if kwargs:
                # XXX: cache that somehow
                kwargs.update(lexer.options)
                lx = lexer.__class__(**kwargs)
            else:
                lx = lexer
            s = match.start()
            for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
                yield i + s, t, v
            if ctx:
                ctx.pos = match.end()
    else:
        def callback(lexer, match, ctx=None):
            # XXX: cache that somehow
            kwargs.update(lexer.options)
            lx = _other(**kwargs)

            s = match.start()
            for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
                yield i + s, t, v
            if ctx:
                ctx.pos = match.end()
    return callback
|
| 468 |
+
|
| 469 |
+
|
| 470 |
+
class default:
    """
    Indicates a state or state action (e.g. #pop) to apply.
    For example default('#pop') is equivalent to ('', Token, '#pop')
    Note that state tuples may be used as well.

    .. versionadded:: 2.0
    """
    def __init__(self, state):
        self.state = state
|
| 480 |
+
|
| 481 |
+
|
| 482 |
+
class words(Future):
    """
    Indicates a list of literal words that is transformed into an optimized
    regex that matches any of the words.

    .. versionadded:: 2.0
    """
    def __init__(self, words, prefix='', suffix=''):
        self.words = words
        self.prefix = prefix
        self.suffix = suffix

    def get(self):
        # Resolve lazily into a single alternation regex.
        return regex_opt(self.words, prefix=self.prefix, suffix=self.suffix)
|
| 496 |
+
|
| 497 |
+
|
| 498 |
+
class RegexLexerMeta(LexerMeta):
    """
    Metaclass for RegexLexer, creates the self._tokens attribute from
    self.tokens on the first instantiation.
    """

    def _process_regex(cls, regex, rflags, state):
        """Preprocess the regular expression component of a token definition."""
        if isinstance(regex, Future):
            regex = regex.get()
        return re.compile(regex, rflags).match

    def _process_token(cls, token):
        """Preprocess the token component of a token definition."""
        assert type(token) is _TokenType or callable(token), \
            f'token type must be simple type or callable, not {token!r}'
        return token

    def _process_new_state(cls, new_state, unprocessed, processed):
        """Preprocess the state transition action of a token definition."""
        if isinstance(new_state, str):
            # an existing state or a special action string
            if new_state == '#pop':
                return -1
            elif new_state in unprocessed:
                return (new_state,)
            elif new_state == '#push':
                return new_state
            elif new_state[:5] == '#pop:':
                # '#pop:n' pops n states at once
                return -int(new_state[5:])
            else:
                assert False, f'unknown new state {new_state!r}'
        elif isinstance(new_state, combined):
            # combine a new anonymous state from existing ones
            tmp_state = '_tmp_%d' % cls._tmpname
            cls._tmpname += 1
            itokens = []
            for istate in new_state:
                assert istate != new_state, f'circular state ref {istate!r}'
                itokens.extend(cls._process_state(unprocessed,
                                                  processed, istate))
            processed[tmp_state] = itokens
            return (tmp_state,)
        elif isinstance(new_state, tuple):
            # push more than one state
            for istate in new_state:
                assert (istate in unprocessed or
                        istate in ('#pop', '#push')), \
                    'unknown new state ' + istate
            return new_state
        else:
            assert False, f'unknown new state def {new_state!r}'

    def _process_state(cls, unprocessed, processed, state):
        """Preprocess a single state definition."""
        assert isinstance(state, str), f"wrong state name {state!r}"
        assert state[0] != '#', f"invalid state name {state!r}"
        if state in processed:
            return processed[state]
        tokens = processed[state] = []
        rflags = cls.flags
        for tdef in unprocessed[state]:
            if isinstance(tdef, include):
                # it's a state reference; splice in that state's rules
                assert tdef != state, f"circular state reference {state!r}"
                tokens.extend(cls._process_state(unprocessed, processed,
                                                 str(tdef)))
                continue
            if isinstance(tdef, _inherit):
                # should be processed already, but may not in the case of:
                # 1. the state has no counterpart in any parent
                # 2. the state includes more than one 'inherit'
                continue
            if isinstance(tdef, default):
                # a default transition: empty match, no token
                new_state = cls._process_new_state(tdef.state, unprocessed, processed)
                tokens.append((re.compile('').match, None, new_state))
                continue

            assert type(tdef) is tuple, f"wrong rule def {tdef!r}"

            try:
                rex = cls._process_regex(tdef[0], rflags, state)
            except Exception as err:
                raise ValueError(f"uncompilable regex {tdef[0]!r} in state {state!r} of {cls!r}: {err}") from err

            token = cls._process_token(tdef[1])

            if len(tdef) == 2:
                new_state = None
            else:
                new_state = cls._process_new_state(tdef[2],
                                                   unprocessed, processed)

            tokens.append((rex, token, new_state))
        return tokens

    def process_tokendef(cls, name, tokendefs=None):
        """Preprocess a dictionary of token definitions."""
        processed = cls._all_tokens[name] = {}
        tokendefs = tokendefs or cls.tokens[name]
        for state in list(tokendefs):
            cls._process_state(tokendefs, processed, state)
        return processed

    def get_tokendefs(cls):
        """
        Merge tokens from superclasses in MRO order, returning a single tokendef
        dictionary.

        Any state that is not defined by a subclass will be inherited
        automatically. States that *are* defined by subclasses will, by
        default, override that state in the superclass. If a subclass wishes to
        inherit definitions from a superclass, it can use the special value
        "inherit", which will cause the superclass' state definition to be
        included at that point in the state.
        """
        tokens = {}
        inheritable = {}
        for c in cls.__mro__:
            toks = c.__dict__.get('tokens', {})

            for state, items in toks.items():
                curitems = tokens.get(state)
                if curitems is None:
                    # N.b. because this is assigned by reference, sufficiently
                    # deep hierarchies are processed incrementally (e.g. for
                    # A(B), B(C), C(RegexLexer), B will be premodified so X(B)
                    # will not see any inherits in B).
                    tokens[state] = items
                    try:
                        inherit_ndx = items.index(inherit)
                    except ValueError:
                        continue
                    inheritable[state] = inherit_ndx
                    continue

                inherit_ndx = inheritable.pop(state, None)
                if inherit_ndx is None:
                    continue

                # Replace the "inherit" value with the items
                curitems[inherit_ndx:inherit_ndx+1] = items
                try:
                    # N.b. this is the index in items (that is, the superclass
                    # copy), so offset required when storing below.
                    new_inh_ndx = items.index(inherit)
                except ValueError:
                    pass
                else:
                    inheritable[state] = inherit_ndx + new_inh_ndx

        return tokens

    def __call__(cls, *args, **kwds):
        """Instantiate cls after preprocessing its token definitions."""
        if '_tokens' not in cls.__dict__:
            cls._all_tokens = {}
            cls._tmpname = 0
            if hasattr(cls, 'token_variants') and cls.token_variants:
                # don't process yet
                pass
            else:
                cls._tokens = cls.process_tokendef('', cls.get_tokendefs())

        return type.__call__(cls, *args, **kwds)
|
| 663 |
+
|
| 664 |
+
|
| 665 |
+
class RegexLexer(Lexer, metaclass=RegexLexerMeta):
    """
    Base for simple stateful regular expression-based lexers.
    Simplifies the lexing process so that you need only
    provide a list of states and regular expressions.
    """

    #: Flags for compiling the regular expressions.
    #: Defaults to MULTILINE.
    flags = re.MULTILINE

    #: At all time there is a stack of states. Initially, the stack contains
    #: a single state 'root'. The top of the stack is called "the current state".
    #:
    #: Dict of ``{'state': [(regex, tokentype, new_state), ...], ...}``
    #:
    #: ``new_state`` can be omitted to signify no state transition.
    #: If ``new_state`` is a string, it is pushed on the stack. This ensure
    #: the new current state is ``new_state``.
    #: If ``new_state`` is a tuple of strings, all of those strings are pushed
    #: on the stack and the current state will be the last element of the list.
    #: ``new_state`` can also be ``combined('state1', 'state2', ...)``
    #: to signify a new, anonymous state combined from the rules of two
    #: or more existing ones.
    #: Furthermore, it can be '#pop' to signify going back one step in
    #: the state stack, or '#push' to push the current state on the stack
    #: again. Note that if you push while in a combined state, the combined
    #: state itself is pushed, and not only the state in which the rule is
    #: defined.
    #:
    #: The tuple can also be replaced with ``include('state')``, in which
    #: case the rules from the state named by the string are included in the
    #: current one.
    tokens = {}

    def get_tokens_unprocessed(self, text, stack=('root',)):
        """
        Split ``text`` into (tokentype, text) pairs.

        ``stack`` is the initial stack (default: ``['root']``)
        """
        pos = 0
        tokendefs = self._tokens
        state_stack = list(stack)
        rules = tokendefs[state_stack[-1]]
        while 1:
            for rexmatch, action, new_state in rules:
                m = rexmatch(text, pos)
                if not m:
                    continue
                if action is not None:
                    if type(action) is _TokenType:
                        yield pos, action, m.group()
                    else:
                        yield from action(self, m)
                pos = m.end()
                if new_state is not None:
                    # state transition
                    if isinstance(new_state, tuple):
                        for state in new_state:
                            if state == '#pop':
                                if len(state_stack) > 1:
                                    state_stack.pop()
                            elif state == '#push':
                                state_stack.append(state_stack[-1])
                            else:
                                state_stack.append(state)
                    elif isinstance(new_state, int):
                        # pop, but keep at least one state on the stack
                        # (random code leading to unexpected pops should
                        # not allow exceptions)
                        if abs(new_state) >= len(state_stack):
                            del state_stack[1:]
                        else:
                            del state_stack[new_state:]
                    elif new_state == '#push':
                        state_stack.append(state_stack[-1])
                    else:
                        assert False, f"wrong state def: {new_state!r}"
                    rules = tokendefs[state_stack[-1]]
                break
            else:
                # We are here only if all state tokens have been considered
                # and there was not a match on any of them.
                try:
                    if text[pos] == '\n':
                        # at EOL, reset state to "root"
                        state_stack = ['root']
                        rules = tokendefs['root']
                        yield pos, Whitespace, '\n'
                        pos += 1
                        continue
                    yield pos, Error, text[pos]
                    pos += 1
                except IndexError:
                    break
|
| 760 |
+
|
| 761 |
+
|
| 762 |
+
class LexerContext:
    """
    A helper object that holds lexer position data.
    """

    def __init__(self, text, pos, stack=None, end=None):
        self.text = text
        self.pos = pos
        self.end = end or len(text)  # end=0 not supported ;-)
        self.stack = stack or ['root']

    def __repr__(self):
        return f'LexerContext({self.text!r}, {self.pos!r}, {self.stack!r})'
|
| 775 |
+
|
| 776 |
+
|
| 777 |
+
class ExtendedRegexLexer(RegexLexer):
    """
    A RegexLexer that uses a context object to store its state.
    """

    def get_tokens_unprocessed(self, text=None, context=None):
        """
        Split ``text`` into (tokentype, text) pairs.
        If ``context`` is given, use this lexer context instead.
        """
        tokendefs = self._tokens
        if not context:
            ctx = LexerContext(text, 0)
            statetokens = tokendefs['root']
        else:
            ctx = context
            statetokens = tokendefs[ctx.stack[-1]]
            text = ctx.text
        while 1:
            for rexmatch, action, new_state in statetokens:
                m = rexmatch(text, ctx.pos, ctx.end)
                if not m:
                    continue
                if action is not None:
                    if type(action) is _TokenType:
                        yield ctx.pos, action, m.group()
                        ctx.pos = m.end()
                    else:
                        yield from action(self, m, ctx)
                        if not new_state:
                            # altered the state stack?
                            statetokens = tokendefs[ctx.stack[-1]]
                        # CAUTION: callback must set ctx.pos!
                if new_state is not None:
                    # state transition
                    if isinstance(new_state, tuple):
                        for state in new_state:
                            if state == '#pop':
                                if len(ctx.stack) > 1:
                                    ctx.stack.pop()
                            elif state == '#push':
                                ctx.stack.append(ctx.stack[-1])
                            else:
                                ctx.stack.append(state)
                    elif isinstance(new_state, int):
                        # see RegexLexer for why this check is made
                        if abs(new_state) >= len(ctx.stack):
                            del ctx.stack[1:]
                        else:
                            del ctx.stack[new_state:]
                    elif new_state == '#push':
                        ctx.stack.append(ctx.stack[-1])
                    else:
                        assert False, f"wrong state def: {new_state!r}"
                    statetokens = tokendefs[ctx.stack[-1]]
                break
            else:
                try:
                    if ctx.pos >= ctx.end:
                        break
                    if text[ctx.pos] == '\n':
                        # at EOL, reset state to "root"
                        ctx.stack = ['root']
                        statetokens = tokendefs['root']
                        yield ctx.pos, Text, '\n'
                        ctx.pos += 1
                        continue
                    yield ctx.pos, Error, text[ctx.pos]
                    ctx.pos += 1
                except IndexError:
                    break
|
| 847 |
+
|
| 848 |
+
|
| 849 |
+
def do_insertions(insertions, tokens):
    """
    Helper for lexers which must combine the results of several
    sublexers.

    ``insertions`` is a list of ``(index, itokens)`` pairs.
    Each ``itokens`` iterable should be inserted at position
    ``index`` into the token stream given by the ``tokens``
    argument.

    The result is a combined token stream.

    TODO: clean up the code here.
    """
    insertions = iter(insertions)
    try:
        index, itokens = next(insertions)
    except StopIteration:
        # no insertions at all: pass the stream through unchanged
        yield from tokens
        return

    realpos = None
    insleft = True

    # Walk the base token stream, splitting tokens wherever an
    # insertion point falls inside them.
    for i, t, v in tokens:
        # first iteration: anchor output positions at the first token
        if realpos is None:
            realpos = i
        oldi = 0
        while insleft and i + len(v) >= index:
            # emit the part of the current token before the insertion
            tmpval = v[oldi:index - i]
            if tmpval:
                yield realpos, t, tmpval
                realpos += len(tmpval)
            # emit the inserted tokens themselves
            for it_index, it_token, it_value in itokens:
                yield realpos, it_token, it_value
                realpos += len(it_value)
            oldi = index - i
            try:
                index, itokens = next(insertions)
            except StopIteration:
                insleft = False
                break  # not strictly necessary
        # emit whatever is left of the current token
        if oldi < len(v):
            yield realpos, t, v[oldi:]
            realpos += len(v) - oldi

    # leftover insertions after the base stream is exhausted
    while insleft:
        # no normal tokens, set realpos to zero
        realpos = realpos or 0
        for p, t, v in itokens:
            yield realpos, t, v
            realpos += len(v)
        try:
            index, itokens = next(insertions)
        except StopIteration:
            insleft = False
            break  # not strictly necessary
|
| 911 |
+
|
| 912 |
+
|
| 913 |
+
class ProfilingRegexLexerMeta(RegexLexerMeta):
    """Metaclass for ProfilingRegexLexer, collects regex timing info."""

    def _process_regex(cls, regex, rflags, state):
        # Resolve a lazy `words` spec into its alternation regex first.
        if isinstance(regex, words):
            rex = regex_opt(regex.words, prefix=regex.prefix,
                            suffix=regex.suffix)
        else:
            rex = regex
        compiled = re.compile(rex, rflags)

        def match_func(text, pos, endpos=sys.maxsize):
            # Accumulate (call count, total time) per (state, regex).
            info = cls._prof_data[-1].setdefault((state, rex), [0, 0.0])
            t0 = time.time()
            res = compiled.match(text, pos, endpos)
            t1 = time.time()
            info[0] += 1
            info[1] += t1 - t0
            return res
        return match_func
|
| 933 |
+
|
| 934 |
+
|
| 935 |
+
class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta):
    """Drop-in replacement for RegexLexer that does profiling of its regexes."""

    _prof_data = []
    _prof_sort_index = 4  # defaults to time per call

    def get_tokens_unprocessed(self, text, stack=('root',)):
        # this needs to be a stack, since using(this) will produce nested calls
        self.__class__._prof_data.append({})
        yield from RegexLexer.get_tokens_unprocessed(self, text, stack)
        rawdata = self.__class__._prof_data.pop()
        # Rows: (state, trimmed regex, ncalls, total ms, ms per call),
        # sorted by the configured column, descending.
        data = sorted(((s, repr(r).strip('u\'').replace('\\\\', '\\')[:65],
                        n, 1000 * t, 1000 * t / n)
                       for ((s, r), (n, t)) in rawdata.items()),
                      key=lambda x: x[self._prof_sort_index],
                      reverse=True)
        sum_total = sum(x[3] for x in data)

        print()
        print('Profiling result for %s lexing %d chars in %.3f ms' %
              (self.__class__.__name__, len(text), sum_total))
        print('=' * 110)
        print('%-20s %-64s ncalls  tottime  percall' % ('state', 'regex'))
        print('-' * 110)
        for d in data:
            print('%-20s %-65s %5d %8.4f %8.4f' % d)
        print('=' * 110)
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/_tsql_builtins.py
ADDED
|
@@ -0,0 +1,1003 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.lexers._tsql_builtins
|
| 3 |
+
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
These are manually translated lists from https://msdn.microsoft.com.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
# See https://msdn.microsoft.com/en-us/library/ms174986.aspx.
|
| 12 |
+
OPERATORS = (
|
| 13 |
+
'!<',
|
| 14 |
+
'!=',
|
| 15 |
+
'!>',
|
| 16 |
+
'<',
|
| 17 |
+
'<=',
|
| 18 |
+
'<>',
|
| 19 |
+
'=',
|
| 20 |
+
'>',
|
| 21 |
+
'>=',
|
| 22 |
+
'+',
|
| 23 |
+
'+=',
|
| 24 |
+
'-',
|
| 25 |
+
'-=',
|
| 26 |
+
'*',
|
| 27 |
+
'*=',
|
| 28 |
+
'/',
|
| 29 |
+
'/=',
|
| 30 |
+
'%',
|
| 31 |
+
'%=',
|
| 32 |
+
'&',
|
| 33 |
+
'&=',
|
| 34 |
+
'|',
|
| 35 |
+
'|=',
|
| 36 |
+
'^',
|
| 37 |
+
'^=',
|
| 38 |
+
'~',
|
| 39 |
+
'::',
|
| 40 |
+
)
|
| 41 |
+
|
| 42 |
+
OPERATOR_WORDS = (
|
| 43 |
+
'all',
|
| 44 |
+
'and',
|
| 45 |
+
'any',
|
| 46 |
+
'between',
|
| 47 |
+
'except',
|
| 48 |
+
'exists',
|
| 49 |
+
'in',
|
| 50 |
+
'intersect',
|
| 51 |
+
'like',
|
| 52 |
+
'not',
|
| 53 |
+
'or',
|
| 54 |
+
'some',
|
| 55 |
+
'union',
|
| 56 |
+
)
|
| 57 |
+
|
| 58 |
+
_KEYWORDS_SERVER = (
|
| 59 |
+
'add',
|
| 60 |
+
'all',
|
| 61 |
+
'alter',
|
| 62 |
+
'and',
|
| 63 |
+
'any',
|
| 64 |
+
'as',
|
| 65 |
+
'asc',
|
| 66 |
+
'authorization',
|
| 67 |
+
'backup',
|
| 68 |
+
'begin',
|
| 69 |
+
'between',
|
| 70 |
+
'break',
|
| 71 |
+
'browse',
|
| 72 |
+
'bulk',
|
| 73 |
+
'by',
|
| 74 |
+
'cascade',
|
| 75 |
+
'case',
|
| 76 |
+
'catch',
|
| 77 |
+
'check',
|
| 78 |
+
'checkpoint',
|
| 79 |
+
'close',
|
| 80 |
+
'clustered',
|
| 81 |
+
'coalesce',
|
| 82 |
+
'collate',
|
| 83 |
+
'column',
|
| 84 |
+
'commit',
|
| 85 |
+
'compute',
|
| 86 |
+
'constraint',
|
| 87 |
+
'contains',
|
| 88 |
+
'containstable',
|
| 89 |
+
'continue',
|
| 90 |
+
'convert',
|
| 91 |
+
'create',
|
| 92 |
+
'cross',
|
| 93 |
+
'current',
|
| 94 |
+
'current_date',
|
| 95 |
+
'current_time',
|
| 96 |
+
'current_timestamp',
|
| 97 |
+
'current_user',
|
| 98 |
+
'cursor',
|
| 99 |
+
'database',
|
| 100 |
+
'dbcc',
|
| 101 |
+
'deallocate',
|
| 102 |
+
'declare',
|
| 103 |
+
'default',
|
| 104 |
+
'delete',
|
| 105 |
+
'deny',
|
| 106 |
+
'desc',
|
| 107 |
+
'disk',
|
| 108 |
+
'distinct',
|
| 109 |
+
'distributed',
|
| 110 |
+
'double',
|
| 111 |
+
'drop',
|
| 112 |
+
'dump',
|
| 113 |
+
'else',
|
| 114 |
+
'end',
|
| 115 |
+
'errlvl',
|
| 116 |
+
'escape',
|
| 117 |
+
'except',
|
| 118 |
+
'exec',
|
| 119 |
+
'execute',
|
| 120 |
+
'exists',
|
| 121 |
+
'exit',
|
| 122 |
+
'external',
|
| 123 |
+
'fetch',
|
| 124 |
+
'file',
|
| 125 |
+
'fillfactor',
|
| 126 |
+
'for',
|
| 127 |
+
'foreign',
|
| 128 |
+
'freetext',
|
| 129 |
+
'freetexttable',
|
| 130 |
+
'from',
|
| 131 |
+
'full',
|
| 132 |
+
'function',
|
| 133 |
+
'goto',
|
| 134 |
+
'grant',
|
| 135 |
+
'group',
|
| 136 |
+
'having',
|
| 137 |
+
'holdlock',
|
| 138 |
+
'identity',
|
| 139 |
+
'identity_insert',
|
| 140 |
+
'identitycol',
|
| 141 |
+
'if',
|
| 142 |
+
'in',
|
| 143 |
+
'index',
|
| 144 |
+
'inner',
|
| 145 |
+
'insert',
|
| 146 |
+
'intersect',
|
| 147 |
+
'into',
|
| 148 |
+
'is',
|
| 149 |
+
'join',
|
| 150 |
+
'key',
|
| 151 |
+
'kill',
|
| 152 |
+
'left',
|
| 153 |
+
'like',
|
| 154 |
+
'lineno',
|
| 155 |
+
'load',
|
| 156 |
+
'merge',
|
| 157 |
+
'national',
|
| 158 |
+
'nocheck',
|
| 159 |
+
'nonclustered',
|
| 160 |
+
'not',
|
| 161 |
+
'null',
|
| 162 |
+
'nullif',
|
| 163 |
+
'of',
|
| 164 |
+
'off',
|
| 165 |
+
'offsets',
|
| 166 |
+
'on',
|
| 167 |
+
'open',
|
| 168 |
+
'opendatasource',
|
| 169 |
+
'openquery',
|
| 170 |
+
'openrowset',
|
| 171 |
+
'openxml',
|
| 172 |
+
'option',
|
| 173 |
+
'or',
|
| 174 |
+
'order',
|
| 175 |
+
'outer',
|
| 176 |
+
'over',
|
| 177 |
+
'percent',
|
| 178 |
+
'pivot',
|
| 179 |
+
'plan',
|
| 180 |
+
'precision',
|
| 181 |
+
'primary',
|
| 182 |
+
'print',
|
| 183 |
+
'proc',
|
| 184 |
+
'procedure',
|
| 185 |
+
'public',
|
| 186 |
+
'raiserror',
|
| 187 |
+
'read',
|
| 188 |
+
'readtext',
|
| 189 |
+
'reconfigure',
|
| 190 |
+
'references',
|
| 191 |
+
'replication',
|
| 192 |
+
'restore',
|
| 193 |
+
'restrict',
|
| 194 |
+
'return',
|
| 195 |
+
'revert',
|
| 196 |
+
'revoke',
|
| 197 |
+
'right',
|
| 198 |
+
'rollback',
|
| 199 |
+
'rowcount',
|
| 200 |
+
'rowguidcol',
|
| 201 |
+
'rule',
|
| 202 |
+
'save',
|
| 203 |
+
'schema',
|
| 204 |
+
'securityaudit',
|
| 205 |
+
'select',
|
| 206 |
+
'semantickeyphrasetable',
|
| 207 |
+
'semanticsimilaritydetailstable',
|
| 208 |
+
'semanticsimilaritytable',
|
| 209 |
+
'session_user',
|
| 210 |
+
'set',
|
| 211 |
+
'setuser',
|
| 212 |
+
'shutdown',
|
| 213 |
+
'some',
|
| 214 |
+
'statistics',
|
| 215 |
+
'system_user',
|
| 216 |
+
'table',
|
| 217 |
+
'tablesample',
|
| 218 |
+
'textsize',
|
| 219 |
+
'then',
|
| 220 |
+
'throw',
|
| 221 |
+
'to',
|
| 222 |
+
'top',
|
| 223 |
+
'tran',
|
| 224 |
+
'transaction',
|
| 225 |
+
'trigger',
|
| 226 |
+
'truncate',
|
| 227 |
+
'try',
|
| 228 |
+
'try_convert',
|
| 229 |
+
'tsequal',
|
| 230 |
+
'union',
|
| 231 |
+
'unique',
|
| 232 |
+
'unpivot',
|
| 233 |
+
'update',
|
| 234 |
+
'updatetext',
|
| 235 |
+
'use',
|
| 236 |
+
'user',
|
| 237 |
+
'values',
|
| 238 |
+
'varying',
|
| 239 |
+
'view',
|
| 240 |
+
'waitfor',
|
| 241 |
+
'when',
|
| 242 |
+
'where',
|
| 243 |
+
'while',
|
| 244 |
+
'with',
|
| 245 |
+
'within',
|
| 246 |
+
'writetext',
|
| 247 |
+
)
|
| 248 |
+
|
| 249 |
+
_KEYWORDS_FUTURE = (
|
| 250 |
+
'absolute',
|
| 251 |
+
'action',
|
| 252 |
+
'admin',
|
| 253 |
+
'after',
|
| 254 |
+
'aggregate',
|
| 255 |
+
'alias',
|
| 256 |
+
'allocate',
|
| 257 |
+
'are',
|
| 258 |
+
'array',
|
| 259 |
+
'asensitive',
|
| 260 |
+
'assertion',
|
| 261 |
+
'asymmetric',
|
| 262 |
+
'at',
|
| 263 |
+
'atomic',
|
| 264 |
+
'before',
|
| 265 |
+
'binary',
|
| 266 |
+
'bit',
|
| 267 |
+
'blob',
|
| 268 |
+
'boolean',
|
| 269 |
+
'both',
|
| 270 |
+
'breadth',
|
| 271 |
+
'call',
|
| 272 |
+
'called',
|
| 273 |
+
'cardinality',
|
| 274 |
+
'cascaded',
|
| 275 |
+
'cast',
|
| 276 |
+
'catalog',
|
| 277 |
+
'char',
|
| 278 |
+
'character',
|
| 279 |
+
'class',
|
| 280 |
+
'clob',
|
| 281 |
+
'collation',
|
| 282 |
+
'collect',
|
| 283 |
+
'completion',
|
| 284 |
+
'condition',
|
| 285 |
+
'connect',
|
| 286 |
+
'connection',
|
| 287 |
+
'constraints',
|
| 288 |
+
'constructor',
|
| 289 |
+
'corr',
|
| 290 |
+
'corresponding',
|
| 291 |
+
'covar_pop',
|
| 292 |
+
'covar_samp',
|
| 293 |
+
'cube',
|
| 294 |
+
'cume_dist',
|
| 295 |
+
'current_catalog',
|
| 296 |
+
'current_default_transform_group',
|
| 297 |
+
'current_path',
|
| 298 |
+
'current_role',
|
| 299 |
+
'current_schema',
|
| 300 |
+
'current_transform_group_for_type',
|
| 301 |
+
'cycle',
|
| 302 |
+
'data',
|
| 303 |
+
'date',
|
| 304 |
+
'day',
|
| 305 |
+
'dec',
|
| 306 |
+
'decimal',
|
| 307 |
+
'deferrable',
|
| 308 |
+
'deferred',
|
| 309 |
+
'depth',
|
| 310 |
+
'deref',
|
| 311 |
+
'describe',
|
| 312 |
+
'descriptor',
|
| 313 |
+
'destroy',
|
| 314 |
+
'destructor',
|
| 315 |
+
'deterministic',
|
| 316 |
+
'diagnostics',
|
| 317 |
+
'dictionary',
|
| 318 |
+
'disconnect',
|
| 319 |
+
'domain',
|
| 320 |
+
'dynamic',
|
| 321 |
+
'each',
|
| 322 |
+
'element',
|
| 323 |
+
'end-exec',
|
| 324 |
+
'equals',
|
| 325 |
+
'every',
|
| 326 |
+
'exception',
|
| 327 |
+
'false',
|
| 328 |
+
'filter',
|
| 329 |
+
'first',
|
| 330 |
+
'float',
|
| 331 |
+
'found',
|
| 332 |
+
'free',
|
| 333 |
+
'fulltexttable',
|
| 334 |
+
'fusion',
|
| 335 |
+
'general',
|
| 336 |
+
'get',
|
| 337 |
+
'global',
|
| 338 |
+
'go',
|
| 339 |
+
'grouping',
|
| 340 |
+
'hold',
|
| 341 |
+
'host',
|
| 342 |
+
'hour',
|
| 343 |
+
'ignore',
|
| 344 |
+
'immediate',
|
| 345 |
+
'indicator',
|
| 346 |
+
'initialize',
|
| 347 |
+
'initially',
|
| 348 |
+
'inout',
|
| 349 |
+
'input',
|
| 350 |
+
'int',
|
| 351 |
+
'integer',
|
| 352 |
+
'intersection',
|
| 353 |
+
'interval',
|
| 354 |
+
'isolation',
|
| 355 |
+
'iterate',
|
| 356 |
+
'language',
|
| 357 |
+
'large',
|
| 358 |
+
'last',
|
| 359 |
+
'lateral',
|
| 360 |
+
'leading',
|
| 361 |
+
'less',
|
| 362 |
+
'level',
|
| 363 |
+
'like_regex',
|
| 364 |
+
'limit',
|
| 365 |
+
'ln',
|
| 366 |
+
'local',
|
| 367 |
+
'localtime',
|
| 368 |
+
'localtimestamp',
|
| 369 |
+
'locator',
|
| 370 |
+
'map',
|
| 371 |
+
'match',
|
| 372 |
+
'member',
|
| 373 |
+
'method',
|
| 374 |
+
'minute',
|
| 375 |
+
'mod',
|
| 376 |
+
'modifies',
|
| 377 |
+
'modify',
|
| 378 |
+
'module',
|
| 379 |
+
'month',
|
| 380 |
+
'multiset',
|
| 381 |
+
'names',
|
| 382 |
+
'natural',
|
| 383 |
+
'nchar',
|
| 384 |
+
'nclob',
|
| 385 |
+
'new',
|
| 386 |
+
'next',
|
| 387 |
+
'no',
|
| 388 |
+
'none',
|
| 389 |
+
'normalize',
|
| 390 |
+
'numeric',
|
| 391 |
+
'object',
|
| 392 |
+
'occurrences_regex',
|
| 393 |
+
'old',
|
| 394 |
+
'only',
|
| 395 |
+
'operation',
|
| 396 |
+
'ordinality',
|
| 397 |
+
'out',
|
| 398 |
+
'output',
|
| 399 |
+
'overlay',
|
| 400 |
+
'pad',
|
| 401 |
+
'parameter',
|
| 402 |
+
'parameters',
|
| 403 |
+
'partial',
|
| 404 |
+
'partition',
|
| 405 |
+
'path',
|
| 406 |
+
'percent_rank',
|
| 407 |
+
'percentile_cont',
|
| 408 |
+
'percentile_disc',
|
| 409 |
+
'position_regex',
|
| 410 |
+
'postfix',
|
| 411 |
+
'prefix',
|
| 412 |
+
'preorder',
|
| 413 |
+
'prepare',
|
| 414 |
+
'preserve',
|
| 415 |
+
'prior',
|
| 416 |
+
'privileges',
|
| 417 |
+
'range',
|
| 418 |
+
'reads',
|
| 419 |
+
'real',
|
| 420 |
+
'recursive',
|
| 421 |
+
'ref',
|
| 422 |
+
'referencing',
|
| 423 |
+
'regr_avgx',
|
| 424 |
+
'regr_avgy',
|
| 425 |
+
'regr_count',
|
| 426 |
+
'regr_intercept',
|
| 427 |
+
'regr_r2',
|
| 428 |
+
'regr_slope',
|
| 429 |
+
'regr_sxx',
|
| 430 |
+
'regr_sxy',
|
| 431 |
+
'regr_syy',
|
| 432 |
+
'relative',
|
| 433 |
+
'release',
|
| 434 |
+
'result',
|
| 435 |
+
'returns',
|
| 436 |
+
'role',
|
| 437 |
+
'rollup',
|
| 438 |
+
'routine',
|
| 439 |
+
'row',
|
| 440 |
+
'rows',
|
| 441 |
+
'savepoint',
|
| 442 |
+
'scope',
|
| 443 |
+
'scroll',
|
| 444 |
+
'search',
|
| 445 |
+
'second',
|
| 446 |
+
'section',
|
| 447 |
+
'sensitive',
|
| 448 |
+
'sequence',
|
| 449 |
+
'session',
|
| 450 |
+
'sets',
|
| 451 |
+
'similar',
|
| 452 |
+
'size',
|
| 453 |
+
'smallint',
|
| 454 |
+
'space',
|
| 455 |
+
'specific',
|
| 456 |
+
'specifictype',
|
| 457 |
+
'sql',
|
| 458 |
+
'sqlexception',
|
| 459 |
+
'sqlstate',
|
| 460 |
+
'sqlwarning',
|
| 461 |
+
'start',
|
| 462 |
+
'state',
|
| 463 |
+
'statement',
|
| 464 |
+
'static',
|
| 465 |
+
'stddev_pop',
|
| 466 |
+
'stddev_samp',
|
| 467 |
+
'structure',
|
| 468 |
+
'submultiset',
|
| 469 |
+
'substring_regex',
|
| 470 |
+
'symmetric',
|
| 471 |
+
'system',
|
| 472 |
+
'temporary',
|
| 473 |
+
'terminate',
|
| 474 |
+
'than',
|
| 475 |
+
'time',
|
| 476 |
+
'timestamp',
|
| 477 |
+
'timezone_hour',
|
| 478 |
+
'timezone_minute',
|
| 479 |
+
'trailing',
|
| 480 |
+
'translate_regex',
|
| 481 |
+
'translation',
|
| 482 |
+
'treat',
|
| 483 |
+
'true',
|
| 484 |
+
'uescape',
|
| 485 |
+
'under',
|
| 486 |
+
'unknown',
|
| 487 |
+
'unnest',
|
| 488 |
+
'usage',
|
| 489 |
+
'using',
|
| 490 |
+
'value',
|
| 491 |
+
'var_pop',
|
| 492 |
+
'var_samp',
|
| 493 |
+
'varchar',
|
| 494 |
+
'variable',
|
| 495 |
+
'whenever',
|
| 496 |
+
'width_bucket',
|
| 497 |
+
'window',
|
| 498 |
+
'within',
|
| 499 |
+
'without',
|
| 500 |
+
'work',
|
| 501 |
+
'write',
|
| 502 |
+
'xmlagg',
|
| 503 |
+
'xmlattributes',
|
| 504 |
+
'xmlbinary',
|
| 505 |
+
'xmlcast',
|
| 506 |
+
'xmlcomment',
|
| 507 |
+
'xmlconcat',
|
| 508 |
+
'xmldocument',
|
| 509 |
+
'xmlelement',
|
| 510 |
+
'xmlexists',
|
| 511 |
+
'xmlforest',
|
| 512 |
+
'xmliterate',
|
| 513 |
+
'xmlnamespaces',
|
| 514 |
+
'xmlparse',
|
| 515 |
+
'xmlpi',
|
| 516 |
+
'xmlquery',
|
| 517 |
+
'xmlserialize',
|
| 518 |
+
'xmltable',
|
| 519 |
+
'xmltext',
|
| 520 |
+
'xmlvalidate',
|
| 521 |
+
'year',
|
| 522 |
+
'zone',
|
| 523 |
+
)
|
| 524 |
+
|
| 525 |
+
_KEYWORDS_ODBC = (
|
| 526 |
+
'absolute',
|
| 527 |
+
'action',
|
| 528 |
+
'ada',
|
| 529 |
+
'add',
|
| 530 |
+
'all',
|
| 531 |
+
'allocate',
|
| 532 |
+
'alter',
|
| 533 |
+
'and',
|
| 534 |
+
'any',
|
| 535 |
+
'are',
|
| 536 |
+
'as',
|
| 537 |
+
'asc',
|
| 538 |
+
'assertion',
|
| 539 |
+
'at',
|
| 540 |
+
'authorization',
|
| 541 |
+
'avg',
|
| 542 |
+
'begin',
|
| 543 |
+
'between',
|
| 544 |
+
'bit',
|
| 545 |
+
'bit_length',
|
| 546 |
+
'both',
|
| 547 |
+
'by',
|
| 548 |
+
'cascade',
|
| 549 |
+
'cascaded',
|
| 550 |
+
'case',
|
| 551 |
+
'cast',
|
| 552 |
+
'catalog',
|
| 553 |
+
'char',
|
| 554 |
+
'char_length',
|
| 555 |
+
'character',
|
| 556 |
+
'character_length',
|
| 557 |
+
'check',
|
| 558 |
+
'close',
|
| 559 |
+
'coalesce',
|
| 560 |
+
'collate',
|
| 561 |
+
'collation',
|
| 562 |
+
'column',
|
| 563 |
+
'commit',
|
| 564 |
+
'connect',
|
| 565 |
+
'connection',
|
| 566 |
+
'constraint',
|
| 567 |
+
'constraints',
|
| 568 |
+
'continue',
|
| 569 |
+
'convert',
|
| 570 |
+
'corresponding',
|
| 571 |
+
'count',
|
| 572 |
+
'create',
|
| 573 |
+
'cross',
|
| 574 |
+
'current',
|
| 575 |
+
'current_date',
|
| 576 |
+
'current_time',
|
| 577 |
+
'current_timestamp',
|
| 578 |
+
'current_user',
|
| 579 |
+
'cursor',
|
| 580 |
+
'date',
|
| 581 |
+
'day',
|
| 582 |
+
'deallocate',
|
| 583 |
+
'dec',
|
| 584 |
+
'decimal',
|
| 585 |
+
'declare',
|
| 586 |
+
'default',
|
| 587 |
+
'deferrable',
|
| 588 |
+
'deferred',
|
| 589 |
+
'delete',
|
| 590 |
+
'desc',
|
| 591 |
+
'describe',
|
| 592 |
+
'descriptor',
|
| 593 |
+
'diagnostics',
|
| 594 |
+
'disconnect',
|
| 595 |
+
'distinct',
|
| 596 |
+
'domain',
|
| 597 |
+
'double',
|
| 598 |
+
'drop',
|
| 599 |
+
'else',
|
| 600 |
+
'end',
|
| 601 |
+
'end-exec',
|
| 602 |
+
'escape',
|
| 603 |
+
'except',
|
| 604 |
+
'exception',
|
| 605 |
+
'exec',
|
| 606 |
+
'execute',
|
| 607 |
+
'exists',
|
| 608 |
+
'external',
|
| 609 |
+
'extract',
|
| 610 |
+
'false',
|
| 611 |
+
'fetch',
|
| 612 |
+
'first',
|
| 613 |
+
'float',
|
| 614 |
+
'for',
|
| 615 |
+
'foreign',
|
| 616 |
+
'fortran',
|
| 617 |
+
'found',
|
| 618 |
+
'from',
|
| 619 |
+
'full',
|
| 620 |
+
'get',
|
| 621 |
+
'global',
|
| 622 |
+
'go',
|
| 623 |
+
'goto',
|
| 624 |
+
'grant',
|
| 625 |
+
'group',
|
| 626 |
+
'having',
|
| 627 |
+
'hour',
|
| 628 |
+
'identity',
|
| 629 |
+
'immediate',
|
| 630 |
+
'in',
|
| 631 |
+
'include',
|
| 632 |
+
'index',
|
| 633 |
+
'indicator',
|
| 634 |
+
'initially',
|
| 635 |
+
'inner',
|
| 636 |
+
'input',
|
| 637 |
+
'insensitive',
|
| 638 |
+
'insert',
|
| 639 |
+
'int',
|
| 640 |
+
'integer',
|
| 641 |
+
'intersect',
|
| 642 |
+
'interval',
|
| 643 |
+
'into',
|
| 644 |
+
'is',
|
| 645 |
+
'isolation',
|
| 646 |
+
'join',
|
| 647 |
+
'key',
|
| 648 |
+
'language',
|
| 649 |
+
'last',
|
| 650 |
+
'leading',
|
| 651 |
+
'left',
|
| 652 |
+
'level',
|
| 653 |
+
'like',
|
| 654 |
+
'local',
|
| 655 |
+
'lower',
|
| 656 |
+
'match',
|
| 657 |
+
'max',
|
| 658 |
+
'min',
|
| 659 |
+
'minute',
|
| 660 |
+
'module',
|
| 661 |
+
'month',
|
| 662 |
+
'names',
|
| 663 |
+
'national',
|
| 664 |
+
'natural',
|
| 665 |
+
'nchar',
|
| 666 |
+
'next',
|
| 667 |
+
'no',
|
| 668 |
+
'none',
|
| 669 |
+
'not',
|
| 670 |
+
'null',
|
| 671 |
+
'nullif',
|
| 672 |
+
'numeric',
|
| 673 |
+
'octet_length',
|
| 674 |
+
'of',
|
| 675 |
+
'on',
|
| 676 |
+
'only',
|
| 677 |
+
'open',
|
| 678 |
+
'option',
|
| 679 |
+
'or',
|
| 680 |
+
'order',
|
| 681 |
+
'outer',
|
| 682 |
+
'output',
|
| 683 |
+
'overlaps',
|
| 684 |
+
'pad',
|
| 685 |
+
'partial',
|
| 686 |
+
'pascal',
|
| 687 |
+
'position',
|
| 688 |
+
'precision',
|
| 689 |
+
'prepare',
|
| 690 |
+
'preserve',
|
| 691 |
+
'primary',
|
| 692 |
+
'prior',
|
| 693 |
+
'privileges',
|
| 694 |
+
'procedure',
|
| 695 |
+
'public',
|
| 696 |
+
'read',
|
| 697 |
+
'real',
|
| 698 |
+
'references',
|
| 699 |
+
'relative',
|
| 700 |
+
'restrict',
|
| 701 |
+
'revoke',
|
| 702 |
+
'right',
|
| 703 |
+
'rollback',
|
| 704 |
+
'rows',
|
| 705 |
+
'schema',
|
| 706 |
+
'scroll',
|
| 707 |
+
'second',
|
| 708 |
+
'section',
|
| 709 |
+
'select',
|
| 710 |
+
'session',
|
| 711 |
+
'session_user',
|
| 712 |
+
'set',
|
| 713 |
+
'size',
|
| 714 |
+
'smallint',
|
| 715 |
+
'some',
|
| 716 |
+
'space',
|
| 717 |
+
'sql',
|
| 718 |
+
'sqlca',
|
| 719 |
+
'sqlcode',
|
| 720 |
+
'sqlerror',
|
| 721 |
+
'sqlstate',
|
| 722 |
+
'sqlwarning',
|
| 723 |
+
'substring',
|
| 724 |
+
'sum',
|
| 725 |
+
'system_user',
|
| 726 |
+
'table',
|
| 727 |
+
'temporary',
|
| 728 |
+
'then',
|
| 729 |
+
'time',
|
| 730 |
+
'timestamp',
|
| 731 |
+
'timezone_hour',
|
| 732 |
+
'timezone_minute',
|
| 733 |
+
'to',
|
| 734 |
+
'trailing',
|
| 735 |
+
'transaction',
|
| 736 |
+
'translate',
|
| 737 |
+
'translation',
|
| 738 |
+
'trim',
|
| 739 |
+
'true',
|
| 740 |
+
'union',
|
| 741 |
+
'unique',
|
| 742 |
+
'unknown',
|
| 743 |
+
'update',
|
| 744 |
+
'upper',
|
| 745 |
+
'usage',
|
| 746 |
+
'user',
|
| 747 |
+
'using',
|
| 748 |
+
'value',
|
| 749 |
+
'values',
|
| 750 |
+
'varchar',
|
| 751 |
+
'varying',
|
| 752 |
+
'view',
|
| 753 |
+
'when',
|
| 754 |
+
'whenever',
|
| 755 |
+
'where',
|
| 756 |
+
'with',
|
| 757 |
+
'work',
|
| 758 |
+
'write',
|
| 759 |
+
'year',
|
| 760 |
+
'zone',
|
| 761 |
+
)
|
| 762 |
+
|
| 763 |
+
# See https://msdn.microsoft.com/en-us/library/ms189822.aspx.
|
| 764 |
+
KEYWORDS = sorted(set(_KEYWORDS_FUTURE + _KEYWORDS_ODBC + _KEYWORDS_SERVER))
|
| 765 |
+
|
| 766 |
+
# See https://msdn.microsoft.com/en-us/library/ms187752.aspx.
|
| 767 |
+
TYPES = (
|
| 768 |
+
'bigint',
|
| 769 |
+
'binary',
|
| 770 |
+
'bit',
|
| 771 |
+
'char',
|
| 772 |
+
'cursor',
|
| 773 |
+
'date',
|
| 774 |
+
'datetime',
|
| 775 |
+
'datetime2',
|
| 776 |
+
'datetimeoffset',
|
| 777 |
+
'decimal',
|
| 778 |
+
'float',
|
| 779 |
+
'hierarchyid',
|
| 780 |
+
'image',
|
| 781 |
+
'int',
|
| 782 |
+
'money',
|
| 783 |
+
'nchar',
|
| 784 |
+
'ntext',
|
| 785 |
+
'numeric',
|
| 786 |
+
'nvarchar',
|
| 787 |
+
'real',
|
| 788 |
+
'smalldatetime',
|
| 789 |
+
'smallint',
|
| 790 |
+
'smallmoney',
|
| 791 |
+
'sql_variant',
|
| 792 |
+
'table',
|
| 793 |
+
'text',
|
| 794 |
+
'time',
|
| 795 |
+
'timestamp',
|
| 796 |
+
'tinyint',
|
| 797 |
+
'uniqueidentifier',
|
| 798 |
+
'varbinary',
|
| 799 |
+
'varchar',
|
| 800 |
+
'xml',
|
| 801 |
+
)
|
| 802 |
+
|
| 803 |
+
# See https://msdn.microsoft.com/en-us/library/ms174318.aspx.
|
| 804 |
+
FUNCTIONS = (
|
| 805 |
+
'$partition',
|
| 806 |
+
'abs',
|
| 807 |
+
'acos',
|
| 808 |
+
'app_name',
|
| 809 |
+
'applock_mode',
|
| 810 |
+
'applock_test',
|
| 811 |
+
'ascii',
|
| 812 |
+
'asin',
|
| 813 |
+
'assemblyproperty',
|
| 814 |
+
'atan',
|
| 815 |
+
'atn2',
|
| 816 |
+
'avg',
|
| 817 |
+
'binary_checksum',
|
| 818 |
+
'cast',
|
| 819 |
+
'ceiling',
|
| 820 |
+
'certencoded',
|
| 821 |
+
'certprivatekey',
|
| 822 |
+
'char',
|
| 823 |
+
'charindex',
|
| 824 |
+
'checksum',
|
| 825 |
+
'checksum_agg',
|
| 826 |
+
'choose',
|
| 827 |
+
'col_length',
|
| 828 |
+
'col_name',
|
| 829 |
+
'columnproperty',
|
| 830 |
+
'compress',
|
| 831 |
+
'concat',
|
| 832 |
+
'connectionproperty',
|
| 833 |
+
'context_info',
|
| 834 |
+
'convert',
|
| 835 |
+
'cos',
|
| 836 |
+
'cot',
|
| 837 |
+
'count',
|
| 838 |
+
'count_big',
|
| 839 |
+
'current_request_id',
|
| 840 |
+
'current_timestamp',
|
| 841 |
+
'current_transaction_id',
|
| 842 |
+
'current_user',
|
| 843 |
+
'cursor_status',
|
| 844 |
+
'database_principal_id',
|
| 845 |
+
'databasepropertyex',
|
| 846 |
+
'dateadd',
|
| 847 |
+
'datediff',
|
| 848 |
+
'datediff_big',
|
| 849 |
+
'datefromparts',
|
| 850 |
+
'datename',
|
| 851 |
+
'datepart',
|
| 852 |
+
'datetime2fromparts',
|
| 853 |
+
'datetimefromparts',
|
| 854 |
+
'datetimeoffsetfromparts',
|
| 855 |
+
'day',
|
| 856 |
+
'db_id',
|
| 857 |
+
'db_name',
|
| 858 |
+
'decompress',
|
| 859 |
+
'degrees',
|
| 860 |
+
'dense_rank',
|
| 861 |
+
'difference',
|
| 862 |
+
'eomonth',
|
| 863 |
+
'error_line',
|
| 864 |
+
'error_message',
|
| 865 |
+
'error_number',
|
| 866 |
+
'error_procedure',
|
| 867 |
+
'error_severity',
|
| 868 |
+
'error_state',
|
| 869 |
+
'exp',
|
| 870 |
+
'file_id',
|
| 871 |
+
'file_idex',
|
| 872 |
+
'file_name',
|
| 873 |
+
'filegroup_id',
|
| 874 |
+
'filegroup_name',
|
| 875 |
+
'filegroupproperty',
|
| 876 |
+
'fileproperty',
|
| 877 |
+
'floor',
|
| 878 |
+
'format',
|
| 879 |
+
'formatmessage',
|
| 880 |
+
'fulltextcatalogproperty',
|
| 881 |
+
'fulltextserviceproperty',
|
| 882 |
+
'get_filestream_transaction_context',
|
| 883 |
+
'getansinull',
|
| 884 |
+
'getdate',
|
| 885 |
+
'getutcdate',
|
| 886 |
+
'grouping',
|
| 887 |
+
'grouping_id',
|
| 888 |
+
'has_perms_by_name',
|
| 889 |
+
'host_id',
|
| 890 |
+
'host_name',
|
| 891 |
+
'iif',
|
| 892 |
+
'index_col',
|
| 893 |
+
'indexkey_property',
|
| 894 |
+
'indexproperty',
|
| 895 |
+
'is_member',
|
| 896 |
+
'is_rolemember',
|
| 897 |
+
'is_srvrolemember',
|
| 898 |
+
'isdate',
|
| 899 |
+
'isjson',
|
| 900 |
+
'isnull',
|
| 901 |
+
'isnumeric',
|
| 902 |
+
'json_modify',
|
| 903 |
+
'json_query',
|
| 904 |
+
'json_value',
|
| 905 |
+
'left',
|
| 906 |
+
'len',
|
| 907 |
+
'log',
|
| 908 |
+
'log10',
|
| 909 |
+
'lower',
|
| 910 |
+
'ltrim',
|
| 911 |
+
'max',
|
| 912 |
+
'min',
|
| 913 |
+
'min_active_rowversion',
|
| 914 |
+
'month',
|
| 915 |
+
'nchar',
|
| 916 |
+
'newid',
|
| 917 |
+
'newsequentialid',
|
| 918 |
+
'ntile',
|
| 919 |
+
'object_definition',
|
| 920 |
+
'object_id',
|
| 921 |
+
'object_name',
|
| 922 |
+
'object_schema_name',
|
| 923 |
+
'objectproperty',
|
| 924 |
+
'objectpropertyex',
|
| 925 |
+
'opendatasource',
|
| 926 |
+
'openjson',
|
| 927 |
+
'openquery',
|
| 928 |
+
'openrowset',
|
| 929 |
+
'openxml',
|
| 930 |
+
'original_db_name',
|
| 931 |
+
'original_login',
|
| 932 |
+
'parse',
|
| 933 |
+
'parsename',
|
| 934 |
+
'patindex',
|
| 935 |
+
'permissions',
|
| 936 |
+
'pi',
|
| 937 |
+
'power',
|
| 938 |
+
'pwdcompare',
|
| 939 |
+
'pwdencrypt',
|
| 940 |
+
'quotename',
|
| 941 |
+
'radians',
|
| 942 |
+
'rand',
|
| 943 |
+
'rank',
|
| 944 |
+
'replace',
|
| 945 |
+
'replicate',
|
| 946 |
+
'reverse',
|
| 947 |
+
'right',
|
| 948 |
+
'round',
|
| 949 |
+
'row_number',
|
| 950 |
+
'rowcount_big',
|
| 951 |
+
'rtrim',
|
| 952 |
+
'schema_id',
|
| 953 |
+
'schema_name',
|
| 954 |
+
'scope_identity',
|
| 955 |
+
'serverproperty',
|
| 956 |
+
'session_context',
|
| 957 |
+
'session_user',
|
| 958 |
+
'sign',
|
| 959 |
+
'sin',
|
| 960 |
+
'smalldatetimefromparts',
|
| 961 |
+
'soundex',
|
| 962 |
+
'sp_helplanguage',
|
| 963 |
+
'space',
|
| 964 |
+
'sqrt',
|
| 965 |
+
'square',
|
| 966 |
+
'stats_date',
|
| 967 |
+
'stdev',
|
| 968 |
+
'stdevp',
|
| 969 |
+
'str',
|
| 970 |
+
'string_escape',
|
| 971 |
+
'string_split',
|
| 972 |
+
'stuff',
|
| 973 |
+
'substring',
|
| 974 |
+
'sum',
|
| 975 |
+
'suser_id',
|
| 976 |
+
'suser_name',
|
| 977 |
+
'suser_sid',
|
| 978 |
+
'suser_sname',
|
| 979 |
+
'switchoffset',
|
| 980 |
+
'sysdatetime',
|
| 981 |
+
'sysdatetimeoffset',
|
| 982 |
+
'system_user',
|
| 983 |
+
'sysutcdatetime',
|
| 984 |
+
'tan',
|
| 985 |
+
'textptr',
|
| 986 |
+
'textvalid',
|
| 987 |
+
'timefromparts',
|
| 988 |
+
'todatetimeoffset',
|
| 989 |
+
'try_cast',
|
| 990 |
+
'try_convert',
|
| 991 |
+
'try_parse',
|
| 992 |
+
'type_id',
|
| 993 |
+
'type_name',
|
| 994 |
+
'typeproperty',
|
| 995 |
+
'unicode',
|
| 996 |
+
'upper',
|
| 997 |
+
'user_id',
|
| 998 |
+
'user_name',
|
| 999 |
+
'var',
|
| 1000 |
+
'varp',
|
| 1001 |
+
'xact_state',
|
| 1002 |
+
'year',
|
| 1003 |
+
)
|
evalkit_tf437/lib/python3.10/site-packages/pygments/lexers/ada.py
ADDED
|
@@ -0,0 +1,144 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pygments.lexers.ada
|
| 3 |
+
~~~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Lexers for Ada family languages.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import re
|
| 12 |
+
|
| 13 |
+
from pygments.lexer import RegexLexer, include, bygroups, words, using, this, \
|
| 14 |
+
default
|
| 15 |
+
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
|
| 16 |
+
Number, Punctuation
|
| 17 |
+
from pygments.lexers._ada_builtins import KEYWORD_LIST, BUILTIN_LIST
|
| 18 |
+
|
| 19 |
+
__all__ = ['AdaLexer']
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class AdaLexer(RegexLexer):
    """
    For Ada source code.

    Keyword and builtin-type word lists come from ``_ada_builtins``;
    matching is case-insensitive (``re.IGNORECASE``), as Ada requires.
    """

    name = 'Ada'
    aliases = ['ada', 'ada95', 'ada2005']
    filenames = ['*.adb', '*.ads', '*.ada']
    mimetypes = ['text/x-ada']
    url = 'https://www.adaic.org'
    version_added = '1.3'

    flags = re.MULTILINE | re.IGNORECASE

    tokens = {
        'root': [
            (r'[^\S\n]+', Text),
            (r'--.*?\n', Comment.Single),
            # NOTE: a second, identical whitespace rule used to follow the
            # comment rule; it was unreachable (the first copy always wins)
            # and has been removed.
            (r'function|procedure|entry', Keyword.Declaration, 'subprogram'),
            (r'(subtype|type)(\s+)(\w+)',
             bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
            (r'task|protected', Keyword.Declaration),
            (r'(subtype)(\s+)', bygroups(Keyword.Declaration, Text)),
            (r'(end)(\s+)', bygroups(Keyword.Reserved, Text), 'end'),
            (r'(pragma)(\s+)(\w+)', bygroups(Keyword.Reserved, Text,
                                             Comment.Preproc)),
            (r'(true|false|null)\b', Keyword.Constant),
            # builtin types
            (words(BUILTIN_LIST, suffix=r'\b'), Keyword.Type),
            # BUG FIX: '(\s+else)' previously lacked the trailing '?', so a
            # bare "or" could never match this word-operator rule.
            (r'(and(\s+then)?|in|mod|not|or(\s+else)?|rem)\b', Operator.Word),
            (r'generic|private', Keyword.Declaration),
            (r'package', Keyword.Declaration, 'package'),
            (r'array\b', Keyword.Reserved, 'array_def'),
            (r'(with|use)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
            (r'(\w+)(\s*)(:)(\s*)(constant)',
             bygroups(Name.Constant, Text, Punctuation, Text,
                      Keyword.Reserved)),
            # labels: <<name>> and "name : loop/begin/..." forms
            (r'<<\w+>>', Name.Label),
            (r'(\w+)(\s*)(:)(\s*)(declare|begin|loop|for|while)',
             bygroups(Name.Label, Text, Punctuation, Text, Keyword.Reserved)),
            # keywords
            (words(KEYWORD_LIST, prefix=r'\b', suffix=r'\b'),
             Keyword.Reserved),
            (r'"[^"]*"', String),
            include('attribute'),
            include('numbers'),
            (r"'[^']'", String.Character),
            (r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
            (r"(<>|=>|:=|@|[\[\]]|[()|:;,.'])", Punctuation),
            (r'[*<>+=/&-]', Operator),
            (r'\n+', Text),
        ],
        'numbers': [
            # based literals such as 16#FF# (possibly with a fraction part)
            (r'[0-9_]+#[0-9a-f_\.]+#', Number.Hex),
            (r'[0-9_]+\.[0-9_]*', Number.Float),
            (r'[0-9_]+', Number.Integer),
        ],
        'attribute': [
            # tick + identifier, e.g. Integer'Image
            (r"(')(\w+)", bygroups(Punctuation, Name.Attribute)),
        ],
        'subprogram': [
            (r'\(', Punctuation, ('#pop', 'formal_part')),
            (r';', Punctuation, '#pop'),
            (r'is\b', Keyword.Reserved, '#pop'),
            # operator functions are quoted, e.g. function "+"
            (r'"[^"]+"|\w+', Name.Function),
            include('root'),
        ],
        'end': [
            ('(if|case|record|loop|select)', Keyword.Reserved),
            (r'"[^"]+"|[\w.]+', Name.Function),
            (r'\s+', Text),
            (';', Punctuation, '#pop'),
        ],
        'type_def': [
            (r';', Punctuation, '#pop'),
            (r'\(', Punctuation, 'formal_part'),
            (r'\[', Punctuation, 'formal_part'),
            (r'with|and|use', Keyword.Reserved),
            (r'array\b', Keyword.Reserved, ('#pop', 'array_def')),
            # plain string, not a tuple: pushes a single state
            (r'record\b', Keyword.Reserved, 'record_def'),
            (r'(null record)(;)', bygroups(Keyword.Reserved, Punctuation),
             '#pop'),
            include('root'),
        ],
        'array_def': [
            (r';', Punctuation, '#pop'),
            (r'(\w+)(\s+)(range)', bygroups(Keyword.Type, Text,
                                            Keyword.Reserved)),
            include('root'),
        ],
        'record_def': [
            (r'end record', Keyword.Reserved, '#pop'),
            include('root'),
        ],
        'import': [
            # TODO: use Name.Namespace if appropriate.  This needs
            # work to distinguish imports from aspects.
            (r'[\w.]+', Name, '#pop'),
            default('#pop'),
        ],
        'formal_part': [
            (r'\)', Punctuation, '#pop'),
            (r'\]', Punctuation, '#pop'),
            (r'\w+', Name.Variable),
            (r',|:[^=]', Punctuation),
            (r'(in|not|null|out|access)\b', Keyword.Reserved),
            include('root'),
        ],
        'package': [
            ('body', Keyword.Declaration),
            (r'is\s+new|renames', Keyword.Reserved),
            ('is', Keyword.Reserved, '#pop'),
            (';', Punctuation, '#pop'),
            (r'\(', Punctuation, 'package_instantiation'),
            (r'([\w.]+)', Name.Class),
            include('root'),
        ],
        'package_instantiation': [
            (r'("[^"]+"|\w+)(\s+)(=>)', bygroups(Name.Variable, Text,
                                                 Punctuation)),
            (r'[\w.\'"]', Text),
            (r'\)', Punctuation, '#pop'),
            include('root'),
        ],
    }
|