Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +1 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/cffLib/CFF2ToCFF.py +187 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/cffLib/__init__.py +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/cffLib/__pycache__/CFF2ToCFF.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/cffLib/__pycache__/CFFToCFF2.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/cffLib/__pycache__/specializer.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/cffLib/specializer.py +847 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/cffLib/transforms.py +483 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/cu2qu/__main__.py +6 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/cu2qu/__pycache__/errors.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/cu2qu/cu2qu.py +534 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/cu2qu/errors.py +77 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/cu2qu/ufo.py +349 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/designspaceLib/__init__.py +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/designspaceLib/__pycache__/types.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/encodings/MacRoman.py +258 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/encodings/__pycache__/MacRoman.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/__pycache__/__main__.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/__pycache__/location.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/__pycache__/variableScalar.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/ast.py +2134 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/builder.py +1729 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/error.py +22 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/lexer.cpython-310-x86_64-linux-gnu.so +3 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/parser.py +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/__pycache__/layout.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/util.py +143 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/lazyTools.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/py23.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/xmlReader.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/bezierTools.c +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/bezierTools.py +1493 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/classifyTools.py +170 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/configTools.py +349 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/encodingTools.py +72 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/etree.py +479 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/filenames.py +245 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/intTools.py +25 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/lazyTools.py +42 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/psOperators.py +572 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/sstruct.py +231 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/testTools.py +229 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/transform.py +507 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/mtiLib/__init__.py +1402 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/otlLib/__pycache__/error.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/otlLib/error.py +11 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/otlLib/maxContextCalc.py +96 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/otlLib/optimize/__init__.py +53 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/otlLib/optimize/__main__.py +6 -0
- evalkit_tf437/lib/python3.10/site-packages/fontTools/otlLib/optimize/__pycache__/__main__.cpython-310.pyc +0 -0
.gitattributes
CHANGED
|
@@ -280,3 +280,4 @@ evalkit_tf437/lib/python3.10/site-packages/sklearn/neighbors/_ball_tree.cpython-
|
|
| 280 |
evalkit_tf437/lib/python3.10/site-packages/gradio/templates/node/build/client/_app/immutable/chunks/Index.DMrntMgy.js.br filter=lfs diff=lfs merge=lfs -text
|
| 281 |
evalkit_tf437/lib/python3.10/site-packages/PIL/_imagingmath.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 282 |
evalkit_tf437/lib/python3.10/site-packages/sklearn/metrics/_dist_metrics.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 280 |
evalkit_tf437/lib/python3.10/site-packages/gradio/templates/node/build/client/_app/immutable/chunks/Index.DMrntMgy.js.br filter=lfs diff=lfs merge=lfs -text
|
| 281 |
evalkit_tf437/lib/python3.10/site-packages/PIL/_imagingmath.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 282 |
evalkit_tf437/lib/python3.10/site-packages/sklearn/metrics/_dist_metrics.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 283 |
+
evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/lexer.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/cffLib/CFF2ToCFF.py
ADDED
|
@@ -0,0 +1,187 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""CFF2 to CFF converter."""
|
| 2 |
+
|
| 3 |
+
from fontTools.ttLib import TTFont, newTable
|
| 4 |
+
from fontTools.misc.cliTools import makeOutputFileName
|
| 5 |
+
from fontTools.cffLib import (
|
| 6 |
+
TopDictIndex,
|
| 7 |
+
buildOrder,
|
| 8 |
+
buildDefaults,
|
| 9 |
+
topDictOperators,
|
| 10 |
+
privateDictOperators,
|
| 11 |
+
)
|
| 12 |
+
from .width import optimizeWidths
|
| 13 |
+
from collections import defaultdict
|
| 14 |
+
import logging
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
__all__ = ["convertCFF2ToCFF", "main"]
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
log = logging.getLogger("fontTools.cffLib")
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def _convertCFF2ToCFF(cff, otFont):
    """Convert this CFF2 font object to CFF format, in place.

    The conversion cannot be reversed.  The CFF2 font cannot be variable
    (i.e. must not carry a VarStore).  (TODO Accept those and convert to
    the default instance?)

    This assumes a decompiled CFF table, i.e. the object has been filled
    via :meth:`decompile` and e.g. not loaded from XML.

    Args:
        cff: the CFF font set held by the "CFF2" table.
        otFont: the enclosing TTFont; its "hmtx" table supplies the glyph
            advance widths that CFF (unlike CFF2) encodes in charstrings.

    Raises:
        ValueError: if the font is a variable CFF2 font.
    """

    cff.major = 1

    # Re-wrap the top dicts; iterating here forces every entry to be
    # decompiled before we start mutating them.
    topDictData = TopDictIndex(None, isCFF2=True)
    for item in cff.topDictIndex:
        topDictData.append(item)
    cff.topDictIndex = topDictData
    topDict = topDictData[0]

    if hasattr(topDict, "VarStore"):
        raise ValueError("Variable CFF2 font cannot be converted to CFF format.")

    # Drop top-dict entries that have no meaning in CFF.  Iterate over a
    # snapshot of the keys: deleting from a dict while iterating its live
    # key view raises RuntimeError.
    opOrder = buildOrder(topDictOperators)
    topDict.order = opOrder
    for key in list(topDict.rawDict.keys()):
        if key not in opOrder:
            del topDict.rawDict[key]
            if hasattr(topDict, key):
                delattr(topDict, key)

    fdArray = topDict.FDArray
    charStrings = topDict.CharStrings

    # Normalize every Private dict: add CFF defaults for missing entries
    # and strip CFF2-only entries (again iterating over a key snapshot).
    defaults = buildDefaults(privateDictOperators)
    order = buildOrder(privateDictOperators)
    for fd in fdArray:
        fd.setCFF2(False)
        privateDict = fd.Private
        privateDict.order = order
        for key in order:
            if key not in privateDict.rawDict and key in defaults:
                privateDict.rawDict[key] = defaults[key]
        for key in list(privateDict.rawDict.keys()):
            if key not in order:
                del privateDict.rawDict[key]
                if hasattr(privateDict, key):
                    delattr(privateDict, key)

    # CFF charstrings/subrs are explicitly terminated; CFF2 ones are not.
    for cs in charStrings.values():
        cs.decompile()
        cs.program.append("endchar")
    for subrSets in [cff.GlobalSubrs] + [
        getattr(fd.Private, "Subrs", []) for fd in fdArray
    ]:
        for cs in subrSets:
            cs.program.append("return")

    # Add (optimal) width to CharStrings that need it.
    widths = defaultdict(list)
    metrics = otFont["hmtx"].metrics
    for glyphName in charStrings.keys():
        cs, fdIndex = charStrings.getItemAndSelector(glyphName)
        if fdIndex is None:
            fdIndex = 0
        widths[fdIndex].append(metrics[glyphName][0])
    for fdIndex, widthList in widths.items():
        bestDefault, bestNominal = optimizeWidths(widthList)
        private = fdArray[fdIndex].Private
        private.defaultWidthX = bestDefault
        private.nominalWidthX = bestNominal
    for glyphName in charStrings.keys():
        cs, fdIndex = charStrings.getItemAndSelector(glyphName)
        if fdIndex is None:
            fdIndex = 0
        private = fdArray[fdIndex].Private
        width = metrics[glyphName][0]
        if width != private.defaultWidthX:
            # Widths are stored relative to nominalWidthX in CFF.
            cs.program.insert(0, width - private.nominalWidthX)
| 102 |
+
|
| 103 |
+
def convertCFF2ToCFF(font, *, updatePostTable=True):
    """Convert the "CFF2" table of *font* into a "CFF " table, in place.

    The font must not be variable; see _convertCFF2ToCFF.  If
    *updatePostTable* is true, a version-2.0 ``post`` table is downgraded
    to version 3.0.
    """
    cff = font["CFF2"].cff
    _convertCFF2ToCFF(cff, font)
    del font["CFF2"]
    table = font["CFF "] = newTable("CFF ")
    table.cff = cff

    if updatePostTable and "post" in font:
        # The only 'post' version supported for fonts with a CFF table is
        # 3.0 (0x00030000), not 2.0 (0x00020000).
        post = font["post"]
        if post.formatType == 2.0:
            post.formatType = 3.0
+
|
| 117 |
+
def main(args=None):
    """Convert a CFF2 OTF font to a CFF OTF font.

    Parses command-line *args* (defaults to ``sys.argv[1:]``), loads the
    input font, converts its CFF2 table to CFF and saves the result.
    """
    if args is None:
        import sys

        args = sys.argv[1:]

    import argparse

    # NOTE: the original strings here described the opposite conversion
    # (CFF -> CFF2); they were copy-paste errors from CFFToCFF2 and have
    # been corrected to match what this module actually does.
    parser = argparse.ArgumentParser(
        "fonttools cffLib.CFF2ToCFF",
        description="Convert a non-variable CFF2 font to CFF.",
    )
    parser.add_argument(
        "input", metavar="INPUT.ttf", help="Input OTF file with CFF2 table."
    )
    parser.add_argument(
        "-o",
        "--output",
        metavar="OUTPUT.ttf",
        default=None,
        help="Output instance OTF file (default: INPUT-CFF.ttf).",
    )
    parser.add_argument(
        "--no-recalc-timestamp",
        dest="recalc_timestamp",
        action="store_false",
        help="Don't set the output font's timestamp to the current time.",
    )
    loggingGroup = parser.add_mutually_exclusive_group(required=False)
    loggingGroup.add_argument(
        "-v", "--verbose", action="store_true", help="Run more verbosely."
    )
    loggingGroup.add_argument(
        "-q", "--quiet", action="store_true", help="Turn verbosity off."
    )
    options = parser.parse_args(args)

    from fontTools import configLogger

    configLogger(
        level=("DEBUG" if options.verbose else "ERROR" if options.quiet else "INFO")
    )

    import os

    infile = options.input
    if not os.path.isfile(infile):
        parser.error("No such file '{}'".format(infile))

    # Default output name is derived from the input, with a "-CFF" suffix.
    outfile = (
        makeOutputFileName(infile, overWrite=True, suffix="-CFF")
        if not options.output
        else options.output
    )

    font = TTFont(infile, recalcTimestamp=options.recalc_timestamp, recalcBBoxes=False)

    convertCFF2ToCFF(font)

    log.info(
        "Saving %s",
        outfile,
    )
    font.save(outfile)
+
|
| 184 |
+
if __name__ == "__main__":
    # Allow running this module directly as a script; exit with main()'s
    # return code.
    import sys

    sys.exit(main(sys.argv[1:]))
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/cffLib/__init__.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/cffLib/__pycache__/CFF2ToCFF.cpython-310.pyc
ADDED
|
Binary file (4.56 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/cffLib/__pycache__/CFFToCFF2.cpython-310.pyc
ADDED
|
Binary file (5.84 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/cffLib/__pycache__/specializer.cpython-310.pyc
ADDED
|
Binary file (17 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/cffLib/specializer.py
ADDED
|
@@ -0,0 +1,847 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
|
| 3 |
+
"""T2CharString operator specializer and generalizer.
|
| 4 |
+
|
| 5 |
+
PostScript glyph drawing operations can be expressed in multiple different
|
| 6 |
+
ways. For example, as well as the ``lineto`` operator, there is also a
|
| 7 |
+
``hlineto`` operator which draws a horizontal line, removing the need to
|
| 8 |
+
specify a ``dx`` coordinate, and a ``vlineto`` operator which draws a
|
| 9 |
+
vertical line, removing the need to specify a ``dy`` coordinate. As well
|
| 10 |
+
as decompiling :class:`fontTools.misc.psCharStrings.T2CharString` objects
|
| 11 |
+
into lists of operations, this module allows for conversion between general
|
| 12 |
+
and specific forms of the operation.
|
| 13 |
+
|
| 14 |
+
"""
|
| 15 |
+
|
| 16 |
+
from fontTools.cffLib import maxStackLimit
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def stringToProgram(string):
    """Turn a whitespace-separated string (or an iterable of tokens) into a
    T2 program list, converting numeric tokens to int or float and leaving
    everything else (operator names) untouched."""
    tokens = string.split() if isinstance(string, str) else string
    program = []
    for tok in tokens:
        # Prefer int; fall back to float; keep the token as-is otherwise.
        for cast in (int, float):
            try:
                tok = cast(tok)
                break
            except ValueError:
                continue
        program.append(tok)
    return program
| 34 |
+
|
| 35 |
+
def programToString(program):
    """Render a T2 program list as a single space-separated string."""
    return " ".join(map(str, program))
+
|
| 39 |
+
def programToCommands(program, getNumRegions=None):
    """Takes a T2CharString program list and returns list of commands.
    Each command is a two-tuple of commandname,arg-list.  The commandname might
    be empty string if no commandname shall be emitted (used for glyph width,
    hintmask/cntrmask argument, as well as stray arguments at the end of the
    program (🤷).
    'getNumRegions' may be None, or a callable object. It must return the
    number of regions. 'getNumRegions' takes a single argument, vsindex. It
    returns the numRegions for the vsindex.
    The Charstring may or may not start with a width value. If the first
    non-blend operator has an odd number of arguments, then the first argument is
    a width, and is popped off. This is complicated with blend operators, as
    there may be more than one before the first hint or moveto operator, and each
    one reduces several arguments to just one list argument. We have to sum the
    number of arguments that are not part of the blend arguments, and all the
    'numBlends' values. We could instead have said that by definition, if there
    is a blend operator, there is no width value, since CFF2 Charstrings don't
    have width values. I discussed this with Behdad, and we are allowing for an
    initial width value in this case because developers may assemble a CFF2
    charstring from CFF Charstrings, which could have width values.
    """

    seenWidthOp = False
    vsIndex = 0
    lenBlendStack = 0
    lastBlendIndex = 0
    commands = []
    stack = []   # pending operands not yet attached to an operator
    it = iter(program)

    for token in it:
        # Numbers are operands; accumulate them until an operator arrives.
        if not isinstance(token, str):
            stack.append(token)
            continue

        if token == "blend":
            assert getNumRegions is not None
            numSourceFonts = 1 + getNumRegions(vsIndex)
            # replace the blend op args on the stack with a single list
            # containing all the blend op args.
            numBlends = stack[-1]
            numBlendArgs = numBlends * numSourceFonts + 1
            # replace first blend op by a list of the blend ops.
            stack[-numBlendArgs:] = [stack[-numBlendArgs:]]
            lenBlendStack += numBlends + len(stack) - 1
            lastBlendIndex = len(stack)
            # if a blend op exists, this is or will be a CFF2 charstring.
            continue

        elif token == "vsindex":
            vsIndex = stack[-1]
            assert type(vsIndex) is int

        elif (not seenWidthOp) and token in {
            "hstem",
            "hstemhm",
            "vstem",
            "vstemhm",
            "cntrmask",
            "hintmask",
            "hmoveto",
            "vmoveto",
            "rmoveto",
            "endchar",
        }:
            # First width-bearing operator: decide whether a leading width
            # value is present by arg-count parity.
            seenWidthOp = True
            parity = token in {"hmoveto", "vmoveto"}
            if lenBlendStack:
                # lenBlendStack has the number of args represented by the last blend
                # arg and all the preceding args. We need to now add the number of
                # args following the last blend arg.
                numArgs = lenBlendStack + len(stack[lastBlendIndex:])
            else:
                numArgs = len(stack)
            if numArgs and (numArgs % 2) ^ parity:
                width = stack.pop(0)
                commands.append(("", [width]))

        if token in {"hintmask", "cntrmask"}:
            if stack:
                commands.append(("", stack))
            commands.append((token, []))
            # The mask byte string follows the operator in the program.
            commands.append(("", [next(it)]))
        else:
            commands.append((token, stack))
        stack = []
    if stack:
        # Stray trailing operands with no operator; emit as data.
        commands.append(("", stack))
    return commands
| 129 |
+
|
| 130 |
+
def _flattenBlendArgs(args):
|
| 131 |
+
token_list = []
|
| 132 |
+
for arg in args:
|
| 133 |
+
if isinstance(arg, list):
|
| 134 |
+
token_list.extend(arg)
|
| 135 |
+
token_list.append("blend")
|
| 136 |
+
else:
|
| 137 |
+
token_list.append(arg)
|
| 138 |
+
return token_list
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def commandsToProgram(commands):
    """Takes a commands list as returned by programToCommands() and converts
    it back to a T2CharString program list."""
    program = []
    for op, args in commands:
        # Blend args appear as nested lists and must be re-flattened.
        contains_blend = any(isinstance(arg, list) for arg in args)
        program.extend(_flattenBlendArgs(args) if contains_blend else args)
        if op:
            program.append(op)
    return program
| 153 |
+
|
| 154 |
+
def _everyN(el, n):
|
| 155 |
+
"""Group the list el into groups of size n"""
|
| 156 |
+
if len(el) % n != 0:
|
| 157 |
+
raise ValueError(el)
|
| 158 |
+
for i in range(0, len(el), n):
|
| 159 |
+
yield el[i : i + n]
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
class _GeneralizerDecombinerCommandsMap(object):
    # Maps each specialized T2 operator name to a generator that rewrites it
    # as equivalent general commands ("rmoveto", "rlineto", "rrcurveto").
    # Each staticmethod receives the operator's argument list and yields
    # (opname, args) tuples; a ValueError signals an argument list whose
    # length is not valid for that operator.

    @staticmethod
    def rmoveto(args):
        if len(args) != 2:
            raise ValueError(args)
        yield ("rmoveto", args)

    @staticmethod
    def hmoveto(args):
        if len(args) != 1:
            raise ValueError(args)
        # Horizontal move: dy is implicitly 0.
        yield ("rmoveto", [args[0], 0])

    @staticmethod
    def vmoveto(args):
        if len(args) != 1:
            raise ValueError(args)
        # Vertical move: dx is implicitly 0.
        yield ("rmoveto", [0, args[0]])

    @staticmethod
    def rlineto(args):
        if not args:
            raise ValueError(args)
        for args in _everyN(args, 2):
            yield ("rlineto", args)

    @staticmethod
    def hlineto(args):
        if not args:
            raise ValueError(args)
        # Alternating horizontal/vertical segments, starting horizontal.
        it = iter(args)
        try:
            while True:
                yield ("rlineto", [next(it), 0])
                yield ("rlineto", [0, next(it)])
        except StopIteration:
            pass

    @staticmethod
    def vlineto(args):
        if not args:
            raise ValueError(args)
        # Alternating vertical/horizontal segments, starting vertical.
        it = iter(args)
        try:
            while True:
                yield ("rlineto", [0, next(it)])
                yield ("rlineto", [next(it), 0])
        except StopIteration:
            pass

    @staticmethod
    def rrcurveto(args):
        if not args:
            raise ValueError(args)
        for args in _everyN(args, 6):
            yield ("rrcurveto", args)

    @staticmethod
    def hhcurveto(args):
        if len(args) < 4 or len(args) % 4 > 1:
            raise ValueError(args)
        if len(args) % 2 == 1:
            # Odd count: the leading extra value is the first curve's dy1.
            yield ("rrcurveto", [args[1], args[0], args[2], args[3], args[4], 0])
            args = args[5:]
        for args in _everyN(args, 4):
            yield ("rrcurveto", [args[0], 0, args[1], args[2], args[3], 0])

    @staticmethod
    def vvcurveto(args):
        if len(args) < 4 or len(args) % 4 > 1:
            raise ValueError(args)
        if len(args) % 2 == 1:
            # Odd count: the leading extra value is the first curve's dx1.
            yield ("rrcurveto", [args[0], args[1], args[2], args[3], 0, args[4]])
            args = args[5:]
        for args in _everyN(args, 4):
            yield ("rrcurveto", [0, args[0], args[1], args[2], 0, args[3]])

    @staticmethod
    def hvcurveto(args):
        # Curves alternating between starting horizontal and starting
        # vertical, beginning horizontal.
        if len(args) < 4 or len(args) % 8 not in {0, 1, 4, 5}:
            raise ValueError(args)
        last_args = None
        if len(args) % 2 == 1:
            # Odd count: the final curve carries an extra last coordinate;
            # split off its five values for special handling below.
            lastStraight = len(args) % 8 == 5
            args, last_args = args[:-5], args[-5:]
        it = _everyN(args, 4)
        try:
            while True:
                args = next(it)
                yield ("rrcurveto", [args[0], 0, args[1], args[2], 0, args[3]])
                args = next(it)
                yield ("rrcurveto", [0, args[0], args[1], args[2], args[3], 0])
        except StopIteration:
            pass
        if last_args:
            args = last_args
            if lastStraight:
                yield ("rrcurveto", [args[0], 0, args[1], args[2], args[4], args[3]])
            else:
                yield ("rrcurveto", [0, args[0], args[1], args[2], args[3], args[4]])

    @staticmethod
    def vhcurveto(args):
        # Mirror image of hvcurveto: alternation begins vertical.
        if len(args) < 4 or len(args) % 8 not in {0, 1, 4, 5}:
            raise ValueError(args)
        last_args = None
        if len(args) % 2 == 1:
            lastStraight = len(args) % 8 == 5
            args, last_args = args[:-5], args[-5:]
        it = _everyN(args, 4)
        try:
            while True:
                args = next(it)
                yield ("rrcurveto", [0, args[0], args[1], args[2], args[3], 0])
                args = next(it)
                yield ("rrcurveto", [args[0], 0, args[1], args[2], 0, args[3]])
        except StopIteration:
            pass
        if last_args:
            args = last_args
            if lastStraight:
                yield ("rrcurveto", [0, args[0], args[1], args[2], args[3], args[4]])
            else:
                yield ("rrcurveto", [args[0], 0, args[1], args[2], args[4], args[3]])

    @staticmethod
    def rcurveline(args):
        # One or more curves followed by a single final line segment.
        if len(args) < 8 or len(args) % 6 != 2:
            raise ValueError(args)
        args, last_args = args[:-2], args[-2:]
        for args in _everyN(args, 6):
            yield ("rrcurveto", args)
        yield ("rlineto", last_args)

    @staticmethod
    def rlinecurve(args):
        # One or more line segments followed by a single final curve.
        if len(args) < 8 or len(args) % 2 != 0:
            raise ValueError(args)
        args, last_args = args[:-6], args[-6:]
        for args in _everyN(args, 2):
            yield ("rlineto", args)
        yield ("rrcurveto", last_args)
+
|
| 306 |
+
def _convertBlendOpToArgs(blendList):
    """Convert one "blend" op argument list into per-operand blend lists:
    one list per blended value, each holding [default, region deltas...,
    numBlends=1]."""
    # args is list of blend op args. Since we are supporting
    # recursive blend op calls, some of these args may also
    # be a list of blend op args, and need to be converted before
    # we convert the current list.
    if any([isinstance(arg, list) for arg in blendList]):
        args = [
            i
            for e in blendList
            for i in (_convertBlendOpToArgs(e) if isinstance(e, list) else [e])
        ]
    else:
        args = blendList

    # We now know that blendList contains a blend op argument list, even if
    # some of the args are lists that each contain a blend op argument list.
    # Convert from:
    # [default font arg sequence x0,...,xn] + [delta tuple for x0] + ... + [delta tuple for xn]
    # to:
    # [ [x0] + [delta tuple for x0],
    #   ...,
    #   [xn] + [delta tuple for xn] ]
    numBlends = args[-1]
    # Can't use args.pop() when the args are being used in a nested list
    # comprehension. See calling context
    args = args[:-1]

    numRegions = len(args) // numBlends - 1
    # Sanity-check that the arg count matches numBlends * (1 + numRegions).
    if not (numBlends * (numRegions + 1) == len(args)):
        raise ValueError(blendList)

    defaultArgs = [[arg] for arg in args[:numBlends]]
    deltaArgs = args[numBlends:]
    numDeltaValues = len(deltaArgs)
    # Split the flat delta sequence into one tuple-list per default value.
    deltaList = [
        deltaArgs[i : i + numRegions] for i in range(0, numDeltaValues, numRegions)
    ]
    blend_args = [a + b + [1] for a, b in zip(defaultArgs, deltaList)]
    return blend_args
| 346 |
+
|
| 347 |
+
def generalizeCommands(commands, ignoreErrors=False):
    """Rewrite specialized charstring commands into generic form.

    Each (op, args) pair is looked up in _GeneralizerDecombinerCommandsMap;
    matching operators (hlineto, vvcurveto, ...) are expanded into one or
    more generic rmoveto/rlineto/rrcurveto commands, one segment per command.
    Blend args (nested lists) are first expanded via _convertBlendOpToArgs.

    If ignoreErrors is True, malformed commands are emitted as data pairs
    ("", args), ("", [op]) instead of raising ValueError.
    """
    result = []
    mapping = _GeneralizerDecombinerCommandsMap
    for op, args in commands:
        # First, generalize any blend args in the arg list.
        if any([isinstance(arg, list) for arg in args]):
            try:
                args = [
                    n
                    for arg in args
                    for n in (
                        _convertBlendOpToArgs(arg) if isinstance(arg, list) else [arg]
                    )
                ]
            except ValueError:
                if ignoreErrors:
                    # Store op as data, such that consumers of commands do not have to
                    # deal with incorrect number of arguments.
                    result.append(("", args))
                    result.append(("", [op]))
                    # NOTE(review): execution falls through here (no `continue`),
                    # so the op is still processed below with the unconverted
                    # args — confirm whether that duplication is intended.
                else:
                    raise

        # Operators with no generalizer entry are passed through unchanged.
        func = getattr(mapping, op, None)
        if not func:
            result.append((op, args))
            continue
        try:
            for command in func(args):
                result.append(command)
        except ValueError:
            if ignoreErrors:
                # Store op as data, such that consumers of commands do not have to
                # deal with incorrect number of arguments.
                result.append(("", args))
                result.append(("", [op]))
            else:
                raise
    return result
|
| 386 |
+
|
| 387 |
+
|
| 388 |
+
def generalizeProgram(program, getNumRegions=None, **kwargs):
    """Round-trip a charstring program through generalized commands."""
    commands = programToCommands(program, getNumRegions)
    generalized = generalizeCommands(commands, **kwargs)
    return commandsToProgram(generalized)
|
| 392 |
+
|
| 393 |
+
|
| 394 |
+
def _categorizeVector(v):
|
| 395 |
+
"""
|
| 396 |
+
Takes X,Y vector v and returns one of r, h, v, or 0 depending on which
|
| 397 |
+
of X and/or Y are zero, plus tuple of nonzero ones. If both are zero,
|
| 398 |
+
it returns a single zero still.
|
| 399 |
+
|
| 400 |
+
>>> _categorizeVector((0,0))
|
| 401 |
+
('0', (0,))
|
| 402 |
+
>>> _categorizeVector((1,0))
|
| 403 |
+
('h', (1,))
|
| 404 |
+
>>> _categorizeVector((0,2))
|
| 405 |
+
('v', (2,))
|
| 406 |
+
>>> _categorizeVector((1,2))
|
| 407 |
+
('r', (1, 2))
|
| 408 |
+
"""
|
| 409 |
+
if not v[0]:
|
| 410 |
+
if not v[1]:
|
| 411 |
+
return "0", v[:1]
|
| 412 |
+
else:
|
| 413 |
+
return "v", v[1:]
|
| 414 |
+
else:
|
| 415 |
+
if not v[1]:
|
| 416 |
+
return "h", v[:1]
|
| 417 |
+
else:
|
| 418 |
+
return "r", v
|
| 419 |
+
|
| 420 |
+
|
| 421 |
+
def _mergeCategories(a, b):
|
| 422 |
+
if a == "0":
|
| 423 |
+
return b
|
| 424 |
+
if b == "0":
|
| 425 |
+
return a
|
| 426 |
+
if a == b:
|
| 427 |
+
return a
|
| 428 |
+
return None
|
| 429 |
+
|
| 430 |
+
|
| 431 |
+
def _negateCategory(a):
|
| 432 |
+
if a == "h":
|
| 433 |
+
return "v"
|
| 434 |
+
if a == "v":
|
| 435 |
+
return "h"
|
| 436 |
+
assert a in "0r"
|
| 437 |
+
return a
|
| 438 |
+
|
| 439 |
+
|
| 440 |
+
def _convertToBlendCmds(args):
    """Repack blendable args into combined blend argument lists.

    Scans `args`; runs of list-valued items (per-value blend tuples of the
    form (default, delta1..deltaN, 1)) are coalesced into a single blend
    argument list, bounded by the CFF2 stack limit.  Scalar args pass
    through unchanged.  Returns the new args list.
    """
    # return a list of blend commands, and
    # the remaining non-blended args, if any.
    num_args = len(args)
    stack_use = 0
    new_args = []
    i = 0
    while i < num_args:
        arg = args[i]
        if not isinstance(arg, list):
            new_args.append(arg)
            i += 1
            stack_use += 1
        else:
            prev_stack_use = stack_use
            # The arg is a tuple of blend values.
            # These are each (master 0,delta 1..delta n, 1)
            # Combine as many successive tuples as we can,
            # up to the max stack limit.
            num_sources = len(arg) - 1
            blendlist = [arg]
            i += 1
            stack_use += 1 + num_sources  # 1 for the num_blends arg
            # maxStackLimit is a module-level constant (the CFF2 stack limit).
            while (i < num_args) and isinstance(args[i], list):
                blendlist.append(args[i])
                i += 1
                stack_use += num_sources
                if stack_use + num_sources > maxStackLimit:
                    # if we are here, max stack is the CFF2 max stack.
                    # I use the CFF2 max stack limit here rather than
                    # the 'maxstack' chosen by the client, as the default
                    # maxstack may have been used unintentionally. For all
                    # the other operators, this just produces a little less
                    # optimization, but here it puts a hard (and low) limit
                    # on the number of source fonts that can be used.
                    break
            # blendList now contains as many single blend tuples as can be
            # combined without exceeding the CFF2 stack limit.
            num_blends = len(blendlist)
            # append the 'num_blends' default font values
            blend_args = []
            for arg in blendlist:
                blend_args.append(arg[0])
            for arg in blendlist:
                # Each per-value tuple must carry its trailing blend count of 1.
                assert arg[-1] == 1
                blend_args.extend(arg[1:-1])
            blend_args.append(num_blends)
            new_args.append(blend_args)
            # After a blend op executes, only the blended defaults remain on the stack.
            stack_use = prev_stack_use + num_blends

    return new_args
|
| 491 |
+
|
| 492 |
+
|
| 493 |
+
def _addArgs(a, b):
|
| 494 |
+
if isinstance(b, list):
|
| 495 |
+
if isinstance(a, list):
|
| 496 |
+
if len(a) != len(b) or a[-1] != b[-1]:
|
| 497 |
+
raise ValueError()
|
| 498 |
+
return [_addArgs(va, vb) for va, vb in zip(a[:-1], b[:-1])] + [a[-1]]
|
| 499 |
+
else:
|
| 500 |
+
a, b = b, a
|
| 501 |
+
if isinstance(a, list):
|
| 502 |
+
assert a[-1] == 1
|
| 503 |
+
return [_addArgs(a[0], b)] + a[1:]
|
| 504 |
+
return a + b
|
| 505 |
+
|
| 506 |
+
|
| 507 |
+
def specializeCommands(
    commands,
    ignoreErrors=False,
    generalizeFirst=True,
    preserveTopology=False,
    maxstack=48,
):
    """Optimize generalized charstring commands into specialized operators.

    Returns the (in-place modified) list of commands.  Set preserveTopology
    to keep point numbers stable; set generalizeFirst=False only if the
    input is already in generalized form.
    """
    # We perform several rounds of optimizations.  They are carefully ordered and are:
    #
    # 0. Generalize commands.
    #    This ensures that they are in our expected simple form, with each line/curve only
    #    having arguments for one segment, and using the generic form (rlineto/rrcurveto).
    #    If caller is sure the input is in this form, they can turn off generalization to
    #    save time.
    #
    # 1. Combine successive rmoveto operations.
    #
    # 2. Specialize rmoveto/rlineto/rrcurveto operators into horizontal/vertical variants.
    #    We specialize into some, made-up, variants as well, which simplifies following
    #    passes.
    #
    # 3. Merge or delete redundant operations, to the extent requested.
    #    OpenType spec declares point numbers in CFF undefined.  As such, we happily
    #    change topology.  If client relies on point numbers (in GPOS anchors, or for
    #    hinting purposes(what?)) they can turn this off.
    #
    # 4. Peephole optimization to revert back some of the h/v variants back into their
    #    original "relative" operator (rline/rrcurveto) if that saves a byte.
    #
    # 5. Combine adjacent operators when possible, minding not to go over max stack size.
    #
    # 6. Resolve any remaining made-up operators into real operators.
    #
    #    I have convinced myself that this produces optimal bytecode (except for, possibly
    #    one byte each time maxstack size prohibits combining.)  YMMV, but you'd be wrong. :-)
    #    A dynamic-programming approach can do the same but would be significantly slower.
    #
    # 7. For any args which are blend lists, convert them to a blend command.

    # 0. Generalize commands.
    if generalizeFirst:
        commands = generalizeCommands(commands, ignoreErrors=ignoreErrors)
    else:
        commands = list(commands)  # Make copy since we modify in-place later.

    # 1. Combine successive rmoveto operations.
    # Iterate backwards so deletions don't shift unvisited indices.
    for i in range(len(commands) - 1, 0, -1):
        if "rmoveto" == commands[i][0] == commands[i - 1][0]:
            v1, v2 = commands[i - 1][1], commands[i][1]
            commands[i - 1] = ("rmoveto", [v1[0] + v2[0], v1[1] + v2[1]])
            del commands[i]

    # 2. Specialize rmoveto/rlineto/rrcurveto operators into horizontal/vertical variants.
    #
    # We, in fact, specialize into more, made-up, variants that special-case when both
    # X and Y components are zero.  This simplifies the following optimization passes.
    # This case is rare, but OCD does not let me skip it.
    #
    # After this round, we will have four variants that use the following mnemonics:
    #
    #  - 'r' for relative,   ie. non-zero X and non-zero Y,
    #  - 'h' for horizontal, ie. non-zero X and zero Y,
    #  - 'v' for vertical,   ie. zero X and non-zero Y,
    #  - '0' for zeros,      ie. zero X and zero Y.
    #
    # The '0' pseudo-operators are not part of the spec, but help simplify the following
    # optimization rounds.  We resolve them at the end.  So, after this, we will have four
    # moveto and four lineto variants:
    #
    #  - 0moveto, 0lineto
    #  - hmoveto, hlineto
    #  - vmoveto, vlineto
    #  - rmoveto, rlineto
    #
    # and sixteen curveto variants.  For example, a '0hcurveto' operator means a curve
    # dx0,dy0,dx1,dy1,dx2,dy2,dx3,dy3 where dx0, dy0, and dy3 are zero but not dx3.
    # An 'rvcurveto' means dx3 is zero but not dx0,dy0,dy3.
    #
    # There are nine different variants of curves without the '0'.  Those nine map exactly
    # to the existing curve variants in the spec: rrcurveto, and the four variants hhcurveto,
    # vvcurveto, hvcurveto, and vhcurveto each cover two cases, one with an odd number of
    # arguments and one without.  Eg. an hhcurveto with an extra argument (odd number of
    # arguments) is in fact an rhcurveto.  The operators in the spec are designed such that
    # all four of rhcurveto, rvcurveto, hrcurveto, and vrcurveto are encodable for one curve.
    #
    # Of the curve types with '0', the 00curveto is equivalent to a lineto variant.  The rest
    # of the curve types with a 0 need to be encoded as a h or v variant.  Ie. a '0' can be
    # thought of a "don't care" and can be used as either an 'h' or a 'v'.  As such, we always
    # encode a number 0 as argument when we use a '0' variant.  Later on, we can just substitute
    # the '0' with either 'h' or 'v' and it works.
    #
    # When we get to curve splines however, things become more complicated...  XXX finish this.
    # There's one more complexity with splines.  If one side of the spline is not horizontal or
    # vertical (or zero), ie. if it's 'r', then it limits which spline types we can encode.
    # Only hhcurveto and vvcurveto operators can encode a spline starting with 'r', and
    # only hvcurveto and vhcurveto operators can encode a spline ending with 'r'.
    # This limits our merge opportunities later.
    #
    for i in range(len(commands)):
        op, args = commands[i]

        if op in {"rmoveto", "rlineto"}:
            c, args = _categorizeVector(args)
            commands[i] = c + op[1:], args
            continue

        if op == "rrcurveto":
            # Categorize the first and last delta vectors independently.
            c1, args1 = _categorizeVector(args[:2])
            c2, args2 = _categorizeVector(args[-2:])
            commands[i] = c1 + c2 + "curveto", args1 + args[2:4] + args2
            continue

    # 3. Merge or delete redundant operations, to the extent requested.
    #
    # TODO
    # A 0moveto that comes before all other path operations can be removed.
    # though I find conflicting evidence for this.
    #
    # TODO
    # "If hstem and vstem hints are both declared at the beginning of a
    # CharString, and this sequence is followed directly by the hintmask or
    # cntrmask operators, then the vstem hint operator (or, if applicable,
    # the vstemhm operator) need not be included."
    #
    # "The sequence and form of a CFF2 CharString program may be represented as:
    # {hs* vs* cm* hm* mt subpath}? {mt subpath}*"
    #
    # https://www.microsoft.com/typography/otspec/cff2charstr.htm#section3.1
    #
    # For Type2 CharStrings the sequence is:
    # w? {hs* vs* cm* hm* mt subpath}? {mt subpath}* endchar"

    # Some other redundancies change topology (point numbers).
    if not preserveTopology:
        for i in range(len(commands) - 1, -1, -1):
            op, args = commands[i]

            # A 00curveto is demoted to a (specialized) lineto.
            if op == "00curveto":
                assert len(args) == 4
                c, args = _categorizeVector(args[1:3])
                op = c + "lineto"
                commands[i] = op, args
                # and then...

            # A 0lineto can be deleted.
            if op == "0lineto":
                del commands[i]
                continue

            # Merge adjacent hlineto's and vlineto's.
            # In CFF2 charstrings from variable fonts, each
            # arg item may be a list of blendable values, one from
            # each source font.
            if i and op in {"hlineto", "vlineto"} and (op == commands[i - 1][0]):
                _, other_args = commands[i - 1]
                assert len(args) == 1 and len(other_args) == 1
                try:
                    new_args = [_addArgs(args[0], other_args[0])]
                except ValueError:
                    continue
                commands[i - 1] = (op, new_args)
                del commands[i]
                continue

    # 4. Peephole optimization to revert back some of the h/v variants back into their
    #    original "relative" operator (rline/rrcurveto) if that saves a byte.
    for i in range(1, len(commands) - 1):
        op, args = commands[i]
        prv, nxt = commands[i - 1][0], commands[i + 1][0]

        if op in {"0lineto", "hlineto", "vlineto"} and prv == nxt == "rlineto":
            assert len(args) == 1
            args = [0, args[0]] if op[0] == "v" else [args[0], 0]
            commands[i] = ("rlineto", args)
            continue

        if op[2:] == "curveto" and len(args) == 5 and prv == nxt == "rrcurveto":
            assert (op[0] == "r") ^ (op[1] == "r")
            # Pick where the implicit 0 must be re-inserted to make a full rrcurveto.
            if op[0] == "v":
                pos = 0
            elif op[0] != "r":
                pos = 1
            elif op[1] == "v":
                pos = 4
            else:
                pos = 5
            # Insert, while maintaining the type of args (can be tuple or list).
            args = args[:pos] + type(args)((0,)) + args[pos:]
            commands[i] = ("rrcurveto", args)
            continue

    # 5. Combine adjacent operators when possible, minding not to go over max stack size.
    for i in range(len(commands) - 1, 0, -1):
        op1, args1 = commands[i - 1]
        op2, args2 = commands[i]
        new_op = None

        # Merge logic...
        if {op1, op2} <= {"rlineto", "rrcurveto"}:
            if op1 == op2:
                new_op = op1
            else:
                if op2 == "rrcurveto" and len(args2) == 6:
                    new_op = "rlinecurve"
                elif len(args2) == 2:
                    new_op = "rcurveline"

        elif (op1, op2) in {("rlineto", "rlinecurve"), ("rrcurveto", "rcurveline")}:
            new_op = op2

        elif {op1, op2} == {"vlineto", "hlineto"}:
            new_op = op1

        elif "curveto" == op1[2:] == op2[2:]:
            d0, d1 = op1[:2]
            d2, d3 = op2[:2]

            if d1 == "r" or d2 == "r" or d0 == d3 == "r":
                continue

            d = _mergeCategories(d1, d2)
            if d is None:
                continue
            if d0 == "r":
                d = _mergeCategories(d, d3)
                if d is None:
                    continue
                new_op = "r" + d + "curveto"
            elif d3 == "r":
                d0 = _mergeCategories(d0, _negateCategory(d))
                if d0 is None:
                    continue
                new_op = d0 + "r" + "curveto"
            else:
                d0 = _mergeCategories(d0, d3)
                if d0 is None:
                    continue
                new_op = d0 + d + "curveto"

        # Make sure the stack depth does not exceed (maxstack - 1), so
        # that subroutinizer can insert subroutine calls at any point.
        if new_op and len(args1) + len(args2) < maxstack:
            commands[i - 1] = (new_op, args1 + args2)
            del commands[i]

    # 6. Resolve any remaining made-up operators into real operators.
    for i in range(len(commands)):
        op, args = commands[i]

        if op in {"0moveto", "0lineto"}:
            # '0' is a don't-care; encode it as the horizontal variant.
            commands[i] = "h" + op[1:], args
            continue

        if op[2:] == "curveto" and op[:2] not in {"rr", "hh", "vv", "vh", "hv"}:
            op0, op1 = op[:2]
            if (op0 == "r") ^ (op1 == "r"):
                assert len(args) % 2 == 1
            if op0 == "0":
                op0 = "h"
            if op1 == "0":
                op1 = "h"
            if op0 == "r":
                op0 = op1
            if op1 == "r":
                op1 = _negateCategory(op0)
            assert {op0, op1} <= {"h", "v"}, (op0, op1)

            if len(args) % 2:
                if op0 != op1:  # vhcurveto / hvcurveto
                    if (op0 == "h") ^ (len(args) % 8 == 1):
                        # Swap last two args order
                        args = args[:-2] + args[-1:] + args[-2:-1]
                else:  # hhcurveto / vvcurveto
                    if op0 == "h":  # hhcurveto
                        # Swap first two args order
                        args = args[1:2] + args[:1] + args[2:]

            commands[i] = op0 + op1 + "curveto", args
            continue

    # 7. For any series of args which are blend lists, convert the series to a single blend arg.
    for i in range(len(commands)):
        op, args = commands[i]
        if any(isinstance(arg, list) for arg in args):
            commands[i] = op, _convertToBlendCmds(args)

    return commands
|
| 795 |
+
|
| 796 |
+
|
| 797 |
+
def specializeProgram(program, getNumRegions=None, **kwargs):
    """Round-trip a charstring program through specialized commands."""
    commands = programToCommands(program, getNumRegions)
    specialized = specializeCommands(commands, **kwargs)
    return commandsToProgram(specialized)
|
| 801 |
+
|
| 802 |
+
|
| 803 |
+
if __name__ == "__main__":
    import sys

    # With no command-line arguments, run the module doctests and exit
    # with the number of failures as the status code.
    if len(sys.argv) == 1:
        import doctest

        sys.exit(doctest.testmod().failed)

    import argparse

    parser = argparse.ArgumentParser(
        "fonttools cffLib.specializer",
        description="CFF CharString generalizer/specializer",
    )
    parser.add_argument("program", metavar="command", nargs="*", help="Commands.")
    parser.add_argument(
        "--num-regions",
        metavar="NumRegions",
        nargs="*",
        default=None,
        # Fixed typo: "opertaions" -> "operations".
        help="Number of variable-font regions for blend operations.",
    )

    options = parser.parse_args(sys.argv[1:])

    # Map a variation-store index to its region count; index None uses entry 0.
    getNumRegions = (
        None
        if options.num_regions is None
        else lambda vsIndex: int(options.num_regions[0 if vsIndex is None else vsIndex])
    )

    program = stringToProgram(options.program)
    print("Program:")
    print(programToString(program))
    commands = programToCommands(program, getNumRegions)
    print("Commands:")
    print(commands)
    program2 = commandsToProgram(commands)
    print("Program from commands:")
    print(programToString(program2))
    # Sanity check: commands round-trip losslessly back to the input program.
    assert program == program2
    print("Generalized program:")
    print(programToString(generalizeProgram(program, getNumRegions)))
    print("Specialized program:")
    print(programToString(specializeProgram(program, getNumRegions)))
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/cffLib/transforms.py
ADDED
|
@@ -0,0 +1,483 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fontTools.misc.psCharStrings import (
|
| 2 |
+
SimpleT2Decompiler,
|
| 3 |
+
T2WidthExtractor,
|
| 4 |
+
calcSubrBias,
|
| 5 |
+
)
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def _uniq_sort(l):
|
| 9 |
+
return sorted(set(l))
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class StopHintCountEvent(Exception):
    """Control-flow exception used to abort charstring execution early
    once hint counting is no longer needed (raised by
    _DesubroutinizingT2Decompiler.stop_hint_count and caught in execute)."""

    pass
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class _DesubroutinizingT2Decompiler(SimpleT2Decompiler):
    """Decompiler that records subroutine-call expansions so each
    charstring can be rewritten with all callsubr/callgsubr inlined."""

    # Operator hooks that mark the point where no further hint-count
    # information can appear; each is temporarily rebound to
    # stop_hint_count during execute().
    stop_hintcount_ops = (
        "op_hintmask",
        "op_cntrmask",
        "op_rmoveto",
        "op_hmoveto",
        "op_vmoveto",
    )

    def __init__(self, localSubrs, globalSubrs, private=None):
        SimpleT2Decompiler.__init__(self, localSubrs, globalSubrs, private)

    def execute(self, charString):
        """Execute charString, collecting patches, then splice every
        recorded subroutine expansion into charString._desubroutinized."""
        self.need_hintcount = True  # until proven otherwise
        for op_name in self.stop_hintcount_ops:
            setattr(self, op_name, self.stop_hint_count)

        if hasattr(charString, "_desubroutinized"):
            # If a charstring has already been desubroutinized, we will still
            # need to execute it if we need to count hints in order to
            # compute the byte length for mask arguments, and haven't finished
            # counting hints pairs.
            if self.need_hintcount and self.callingStack:
                try:
                    SimpleT2Decompiler.execute(self, charString)
                except StopHintCountEvent:
                    del self.callingStack[-1]
            return

        charString._patches = []
        SimpleT2Decompiler.execute(self, charString)
        desubroutinized = charString.program[:]
        # Apply patches back-to-front so earlier indices stay valid.
        for idx, expansion in reversed(charString._patches):
            assert idx >= 2
            assert desubroutinized[idx - 1] in [
                "callsubr",
                "callgsubr",
            ], desubroutinized[idx - 1]
            assert type(desubroutinized[idx - 2]) == int
            if expansion[-1] == "return":
                expansion = expansion[:-1]
            # Replace the (subr-number, call operator) pair with the body.
            desubroutinized[idx - 2 : idx] = expansion
        if not self.private.in_cff2:
            if "endchar" in desubroutinized:
                # Cut off after first endchar
                desubroutinized = desubroutinized[
                    : desubroutinized.index("endchar") + 1
                ]

        charString._desubroutinized = desubroutinized
        del charString._patches

    def op_callsubr(self, index):
        # Resolve the local subr being called before the base class pops it.
        subr = self.localSubrs[self.operandStack[-1] + self.localBias]
        SimpleT2Decompiler.op_callsubr(self, index)
        self.processSubr(index, subr)

    def op_callgsubr(self, index):
        # Resolve the global subr being called before the base class pops it.
        subr = self.globalSubrs[self.operandStack[-1] + self.globalBias]
        SimpleT2Decompiler.op_callgsubr(self, index)
        self.processSubr(index, subr)

    def stop_hint_count(self, *args):
        """Disable further hint counting; abort execution of an
        already-desubroutinized charstring via StopHintCountEvent."""
        self.need_hintcount = False
        for op_name in self.stop_hintcount_ops:
            setattr(self, op_name, None)
        cs = self.callingStack[-1]
        if hasattr(cs, "_desubroutinized"):
            raise StopHintCountEvent()

    def op_hintmask(self, index):
        SimpleT2Decompiler.op_hintmask(self, index)
        if self.need_hintcount:
            self.stop_hint_count()

    def processSubr(self, index, subr):
        """Record the called subr's expansion as a patch on the caller
        (skipped if the caller is already desubroutinized)."""
        cs = self.callingStack[-1]
        if not hasattr(cs, "_desubroutinized"):
            cs._patches.append((index, subr._desubroutinized))
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def desubroutinize(cff):
    """Inline all local and global subroutine calls into every charstring
    of *cff*, then delete the now-unused subroutine indexes.

    Mutates *cff* in place.
    """

    def _drop_subrs(pd):
        # Remove the Subrs entry from a Private dict: both the decompiled
        # attribute and the raw dict key, whichever are present.
        if hasattr(pd, "Subrs"):
            del pd.Subrs
        if "Subrs" in pd.rawDict:
            del pd.rawDict["Subrs"]

    for fontName in cff.fontNames:
        font = cff[fontName]
        cs = font.CharStrings
        for c in cs.values():
            c.decompile()
            subrs = getattr(c.private, "Subrs", [])
            decompiler = _DesubroutinizingT2Decompiler(subrs, c.globalSubrs, c.private)
            decompiler.execute(c)
            c.program = c._desubroutinized
            del c._desubroutinized
        # Delete all the local subrs (CID fonts keep them per-FD).
        if hasattr(font, "FDArray"):
            for fd in font.FDArray:
                _drop_subrs(fd.Private)
        else:
            _drop_subrs(font.Private)
    # as well as the global subrs
    cff.GlobalSubrs.clear()
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
class _MarkingT2Decompiler(SimpleT2Decompiler):
    """Decompiler that marks which local/global subroutines are actually
    called, by collecting their (biased) indices in a ``_used`` set
    attached to each subr index."""

    def __init__(self, localSubrs, globalSubrs, private):
        SimpleT2Decompiler.__init__(self, localSubrs, globalSubrs, private)
        for subrs in [localSubrs, globalSubrs]:
            # Create the usage-tracking set once per subr index.
            if subrs and not hasattr(subrs, "_used"):
                subrs._used = set()

    def op_callsubr(self, index):
        # Record the unbiased local subr index before executing the call.
        self.localSubrs._used.add(self.operandStack[-1] + self.localBias)
        SimpleT2Decompiler.op_callsubr(self, index)

    def op_callgsubr(self, index):
        # Record the unbiased global subr index before executing the call.
        self.globalSubrs._used.add(self.operandStack[-1] + self.globalBias)
        SimpleT2Decompiler.op_callgsubr(self, index)
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
class _DehintingT2Decompiler(T2WidthExtractor):
|
| 143 |
+
class Hints(object):
|
| 144 |
+
def __init__(self):
|
| 145 |
+
# Whether calling this charstring produces any hint stems
|
| 146 |
+
# Note that if a charstring starts with hintmask, it will
|
| 147 |
+
# have has_hint set to True, because it *might* produce an
|
| 148 |
+
# implicit vstem if called under certain conditions.
|
| 149 |
+
self.has_hint = False
|
| 150 |
+
# Index to start at to drop all hints
|
| 151 |
+
self.last_hint = 0
|
| 152 |
+
# Index up to which we know more hints are possible.
|
| 153 |
+
# Only relevant if status is 0 or 1.
|
| 154 |
+
self.last_checked = 0
|
| 155 |
+
# The status means:
|
| 156 |
+
# 0: after dropping hints, this charstring is empty
|
| 157 |
+
# 1: after dropping hints, there may be more hints
|
| 158 |
+
# continuing after this, or there might be
|
| 159 |
+
# other things. Not clear yet.
|
| 160 |
+
# 2: no more hints possible after this charstring
|
| 161 |
+
self.status = 0
|
| 162 |
+
# Has hintmask instructions; not recursive
|
| 163 |
+
self.has_hintmask = False
|
| 164 |
+
# List of indices of calls to empty subroutines to remove.
|
| 165 |
+
self.deletions = []
|
| 166 |
+
|
| 167 |
+
pass
|
| 168 |
+
|
| 169 |
+
def __init__(
|
| 170 |
+
self, css, localSubrs, globalSubrs, nominalWidthX, defaultWidthX, private=None
|
| 171 |
+
):
|
| 172 |
+
self._css = css
|
| 173 |
+
T2WidthExtractor.__init__(
|
| 174 |
+
self, localSubrs, globalSubrs, nominalWidthX, defaultWidthX
|
| 175 |
+
)
|
| 176 |
+
self.private = private
|
| 177 |
+
|
| 178 |
+
def execute(self, charString):
|
| 179 |
+
old_hints = charString._hints if hasattr(charString, "_hints") else None
|
| 180 |
+
charString._hints = self.Hints()
|
| 181 |
+
|
| 182 |
+
T2WidthExtractor.execute(self, charString)
|
| 183 |
+
|
| 184 |
+
hints = charString._hints
|
| 185 |
+
|
| 186 |
+
if hints.has_hint or hints.has_hintmask:
|
| 187 |
+
self._css.add(charString)
|
| 188 |
+
|
| 189 |
+
if hints.status != 2:
|
| 190 |
+
# Check from last_check, make sure we didn't have any operators.
|
| 191 |
+
for i in range(hints.last_checked, len(charString.program) - 1):
|
| 192 |
+
if isinstance(charString.program[i], str):
|
| 193 |
+
hints.status = 2
|
| 194 |
+
break
|
| 195 |
+
else:
|
| 196 |
+
hints.status = 1 # There's *something* here
|
| 197 |
+
hints.last_checked = len(charString.program)
|
| 198 |
+
|
| 199 |
+
if old_hints:
|
| 200 |
+
assert hints.__dict__ == old_hints.__dict__
|
| 201 |
+
|
| 202 |
+
def op_callsubr(self, index):
|
| 203 |
+
subr = self.localSubrs[self.operandStack[-1] + self.localBias]
|
| 204 |
+
T2WidthExtractor.op_callsubr(self, index)
|
| 205 |
+
self.processSubr(index, subr)
|
| 206 |
+
|
| 207 |
+
def op_callgsubr(self, index):
|
| 208 |
+
subr = self.globalSubrs[self.operandStack[-1] + self.globalBias]
|
| 209 |
+
T2WidthExtractor.op_callgsubr(self, index)
|
| 210 |
+
self.processSubr(index, subr)
|
| 211 |
+
|
| 212 |
+
def op_hstem(self, index):
    # Record the stem-hint position after the parent consumes the operands.
    T2WidthExtractor.op_hstem(self, index)
    self.processHint(index)
|
| 215 |
+
|
| 216 |
+
def op_vstem(self, index):
    # Record the stem-hint position after the parent consumes the operands.
    T2WidthExtractor.op_vstem(self, index)
    self.processHint(index)
|
| 219 |
+
|
| 220 |
+
def op_hstemhm(self, index):
    # hstemhm behaves like hstem for dehinting purposes.
    T2WidthExtractor.op_hstemhm(self, index)
    self.processHint(index)
|
| 223 |
+
|
| 224 |
+
def op_vstemhm(self, index):
    # vstemhm behaves like vstem for dehinting purposes.
    T2WidthExtractor.op_vstemhm(self, index)
    self.processHint(index)
|
| 227 |
+
|
| 228 |
+
def op_hintmask(self, index):
    # Let the parent skip the mask bytes, then note the hintmask; the
    # parent's return value (new index) must be propagated.
    rv = T2WidthExtractor.op_hintmask(self, index)
    self.processHintmask(index)
    return rv
|
| 232 |
+
|
| 233 |
+
def op_cntrmask(self, index):
    # cntrmask is treated identically to hintmask for dehinting.
    rv = T2WidthExtractor.op_cntrmask(self, index)
    self.processHintmask(index)
    return rv
|
| 237 |
+
|
| 238 |
+
def processHintmask(self, index):
    """Update the current charstring's hint state for a hintmask at *index*.

    A hintmask preceded only by numbers (no operators) since the last
    check acts as an implicit vstemhm, so the numbers before it count as
    hints and must be dropped along with the mask.
    """
    cs = self.callingStack[-1]
    hints = cs._hints
    hints.has_hintmask = True
    if hints.status != 2:
        # Check from last_check, see if we may be an implicit vstem
        for i in range(hints.last_checked, index - 1):
            if isinstance(cs.program[i], str):
                hints.status = 2
                break
        else:
            # We are an implicit vstem
            hints.has_hint = True
            # +1 so the chop point lands just past the hintmask operator.
            hints.last_hint = index + 1
            hints.status = 0
    hints.last_checked = index + 1
|
| 254 |
+
|
| 255 |
+
def processHint(self, index):
    """Mark an explicit stem-hint operator at *index* in the current charstring."""
    cs = self.callingStack[-1]
    hints = cs._hints
    hints.has_hint = True
    hints.last_hint = index
    hints.last_checked = index
|
| 261 |
+
|
| 262 |
+
def processSubr(self, index, subr):
    """Merge the (already computed) hint analysis of a called subroutine
    into the calling charstring's state.

    *index* points just past the callsubr/callgsubr operator in the
    caller's program; *subr* is the callee whose ``_hints`` record was
    filled by a previous execute().
    """
    cs = self.callingStack[-1]
    hints = cs._hints
    subr_hints = subr._hints

    # Check from last_check, make sure we didn't have
    # any operators.
    if hints.status != 2:
        for i in range(hints.last_checked, index - 1):
            if isinstance(cs.program[i], str):
                hints.status = 2
                break
        hints.last_checked = index

    if hints.status != 2:
        if subr_hints.has_hint:
            hints.has_hint = True

            # Decide where to chop off from
            if subr_hints.status == 0:
                # Subr is hints-only: drop the call itself as well.
                hints.last_hint = index
            else:
                hints.last_hint = index - 2  # Leave the subr call in

    elif subr_hints.status == 0:
        # Subr became empty after dehinting; schedule the call (operand
        # + operator, two tokens) for removal.
        hints.deletions.append(index)

    hints.status = max(hints.status, subr_hints.status)
|
| 290 |
+
|
| 291 |
+
|
| 292 |
+
def _cs_subset_subroutines(charstring, subrs, gsubrs):
|
| 293 |
+
p = charstring.program
|
| 294 |
+
for i in range(1, len(p)):
|
| 295 |
+
if p[i] == "callsubr":
|
| 296 |
+
assert isinstance(p[i - 1], int)
|
| 297 |
+
p[i - 1] = subrs._used.index(p[i - 1] + subrs._old_bias) - subrs._new_bias
|
| 298 |
+
elif p[i] == "callgsubr":
|
| 299 |
+
assert isinstance(p[i - 1], int)
|
| 300 |
+
p[i - 1] = (
|
| 301 |
+
gsubrs._used.index(p[i - 1] + gsubrs._old_bias) - gsubrs._new_bias
|
| 302 |
+
)
|
| 303 |
+
|
| 304 |
+
|
| 305 |
+
def _cs_drop_hints(charstring):
    """Strip hint content from *charstring* using its ``_hints`` analysis.

    Performs, in order: removal of calls to now-empty subroutines, removal
    of everything up to the last hint-related operator, width re-insertion,
    and removal of all hintmask/cntrmask operations.  Consumes and deletes
    the ``_hints`` record.
    """
    hints = charstring._hints

    if hints.deletions:
        p = charstring.program
        # Delete back-to-front so earlier indices stay valid; each call is
        # two tokens (operand + operator).
        for idx in reversed(hints.deletions):
            del p[idx - 2 : idx]

    if hints.has_hint:
        assert not hints.deletions or hints.last_hint <= hints.deletions[0]
        charstring.program = charstring.program[hints.last_hint :]
        if not charstring.program:
            # TODO CFF2 no need for endchar.
            charstring.program.append("endchar")
        if hasattr(charstring, "width"):
            # Insert width back if needed
            if charstring.width != charstring.private.defaultWidthX:
                # For CFF2 charstrings, this should never happen
                assert (
                    charstring.private.defaultWidthX is not None
                ), "CFF2 CharStrings must not have an initial width value"
                charstring.program.insert(
                    0, charstring.width - charstring.private.nominalWidthX
                )

    if hints.has_hintmask:
        i = 0
        p = charstring.program
        while i < len(p):
            if p[i] in ["hintmask", "cntrmask"]:
                assert i + 1 <= len(p)
                # Operator plus its mask-byte token.
                del p[i : i + 2]
                continue
            i += 1

    assert len(charstring.program)

    del charstring._hints
|
| 343 |
+
|
| 344 |
+
|
| 345 |
+
def remove_hints(cff, *, removeUnusedSubrs: bool = True):
    """Remove all hinting from *cff* in place.

    Drops stem hints and hintmasks from every charstring (including
    subroutines) and clears font-wide hinting values from the Private
    dict(s).  When *removeUnusedSubrs* is true (default), subroutines that
    became unreachable are garbage-collected afterwards.
    """
    for fontname in cff.keys():
        font = cff[fontname]
        cs = font.CharStrings
        # This can be tricky, but doesn't have to. What we do is:
        #
        # - Run all used glyph charstrings and recurse into subroutines,
        # - For each charstring (including subroutines), if it has any
        #   of the hint stem operators, we mark it as such.
        #   Upon returning, for each charstring we note all the
        #   subroutine calls it makes that (recursively) contain a stem,
        # - Dropping hinting then consists of the following two ops:
        #   * Drop the piece of the program in each charstring before the
        #     last call to a stem op or a stem-calling subroutine,
        #   * Drop all hintmask operations.
        # - It's trickier... A hintmask right after hints and a few numbers
        #   will act as an implicit vstemhm. As such, we track whether
        #   we have seen any non-hint operators so far and do the right
        #   thing, recursively... Good luck understanding that :(
        css = set()
        for c in cs.values():
            c.decompile()
            subrs = getattr(c.private, "Subrs", [])
            decompiler = _DehintingT2Decompiler(
                css,
                subrs,
                c.globalSubrs,
                c.private.nominalWidthX,
                c.private.defaultWidthX,
                c.private,
            )
            decompiler.execute(c)
            c.width = decompiler.width
        for charstring in css:
            _cs_drop_hints(charstring)
        del css

        # Drop font-wide hinting values
        all_privs = []
        if hasattr(font, "FDArray"):
            all_privs.extend(fd.Private for fd in font.FDArray)
        else:
            all_privs.append(font.Private)
        for priv in all_privs:
            for k in [
                "BlueValues",
                "OtherBlues",
                "FamilyBlues",
                "FamilyOtherBlues",
                "BlueScale",
                "BlueShift",
                "BlueFuzz",
                "StemSnapH",
                "StemSnapV",
                "StdHW",
                "StdVW",
                "ForceBold",
                "LanguageGroup",
                "ExpansionFactor",
            ]:
                if hasattr(priv, k):
                    setattr(priv, k, None)
    if removeUnusedSubrs:
        remove_unused_subroutines(cff)
|
| 409 |
+
|
| 410 |
+
|
| 411 |
+
def _pd_delete_empty_subrs(private_dict):
|
| 412 |
+
if hasattr(private_dict, "Subrs") and not private_dict.Subrs:
|
| 413 |
+
if "Subrs" in private_dict.rawDict:
|
| 414 |
+
del private_dict.rawDict["Subrs"]
|
| 415 |
+
del private_dict.Subrs
|
| 416 |
+
|
| 417 |
+
|
| 418 |
+
def remove_unused_subroutines(cff):
    """Garbage-collect unused local/global subroutines in *cff*, in place.

    Marks reachable subroutines by re-executing every charstring, compacts
    each subroutine index to only the used entries, and renumbers all
    callsubr/callgsubr operands (with correct old/new bias) accordingly.
    """
    for fontname in cff.keys():
        font = cff[fontname]
        cs = font.CharStrings
        # Renumber subroutines to remove unused ones

        # Mark all used subroutines
        for c in cs.values():
            subrs = getattr(c.private, "Subrs", [])
            decompiler = _MarkingT2Decompiler(subrs, c.globalSubrs, c.private)
            decompiler.execute(c)

        all_subrs = [font.GlobalSubrs]
        if hasattr(font, "FDArray"):
            all_subrs.extend(
                fd.Private.Subrs
                for fd in font.FDArray
                if hasattr(fd.Private, "Subrs") and fd.Private.Subrs
            )
        elif hasattr(font.Private, "Subrs") and font.Private.Subrs:
            all_subrs.append(font.Private.Subrs)

        # NOTE(review): this rebinds the *loop leftover* `subrs` from the
        # marking loop above and is immediately overwritten by the next
        # loop — looks like dead code; confirm before removing.
        subrs = set(subrs)  # Remove duplicates

        # Prepare
        for subrs in all_subrs:
            if not hasattr(subrs, "_used"):
                subrs._used = set()
            subrs._used = _uniq_sort(subrs._used)
            subrs._old_bias = calcSubrBias(subrs)
            subrs._new_bias = calcSubrBias(subrs._used)

        # Renumber glyph charstrings
        for c in cs.values():
            subrs = getattr(c.private, "Subrs", None)
            _cs_subset_subroutines(c, subrs, font.GlobalSubrs)

        # Renumber subroutines themselves
        for subrs in all_subrs:
            if subrs == font.GlobalSubrs:
                if not hasattr(font, "FDArray") and hasattr(font.Private, "Subrs"):
                    local_subrs = font.Private.Subrs
                else:
                    local_subrs = None
            else:
                local_subrs = subrs

            subrs.items = [subrs.items[i] for i in subrs._used]
            if hasattr(subrs, "file"):
                del subrs.file
            if hasattr(subrs, "offsets"):
                del subrs.offsets

            for subr in subrs.items:
                _cs_subset_subroutines(subr, local_subrs, font.GlobalSubrs)

        # Delete local SubrsIndex if empty
        if hasattr(font, "FDArray"):
            for fd in font.FDArray:
                _pd_delete_empty_subrs(fd.Private)
        else:
            _pd_delete_empty_subrs(font.Private)

        # Cleanup
        for subrs in all_subrs:
            del subrs._used, subrs._old_bias, subrs._new_bias
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/cu2qu/__main__.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
from .cli import _main as main


if __name__ == "__main__":
    # Delegate to the cu2qu command-line interface and exit with its code.
    sys.exit(main())
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/cu2qu/__pycache__/errors.cpython-310.pyc
ADDED
|
Binary file (3.59 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/cu2qu/cu2qu.py
ADDED
|
@@ -0,0 +1,534 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# cython: language_level=3
|
| 2 |
+
# distutils: define_macros=CYTHON_TRACE_NOGIL=1
|
| 3 |
+
|
| 4 |
+
# Copyright 2015 Google Inc. All Rights Reserved.
|
| 5 |
+
#
|
| 6 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 7 |
+
# you may not use this file except in compliance with the License.
|
| 8 |
+
# You may obtain a copy of the License at
|
| 9 |
+
#
|
| 10 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 11 |
+
#
|
| 12 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 13 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 14 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 15 |
+
# See the License for the specific language governing permissions and
|
| 16 |
+
# limitations under the License.
|
| 17 |
+
|
| 18 |
+
try:
|
| 19 |
+
import cython
|
| 20 |
+
|
| 21 |
+
COMPILED = cython.compiled
|
| 22 |
+
except (AttributeError, ImportError):
|
| 23 |
+
# if cython not installed, use mock module with no-op decorators and types
|
| 24 |
+
from fontTools.misc import cython
|
| 25 |
+
|
| 26 |
+
COMPILED = False
|
| 27 |
+
|
| 28 |
+
import math
|
| 29 |
+
|
| 30 |
+
from .errors import Error as Cu2QuError, ApproxNotFoundError
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
__all__ = ["curve_to_quadratic", "curves_to_quadratic"]
|
| 34 |
+
|
| 35 |
+
MAX_N = 100
|
| 36 |
+
|
| 37 |
+
NAN = float("NaN")
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
@cython.cfunc
@cython.inline
@cython.returns(cython.double)
@cython.locals(v1=cython.complex, v2=cython.complex)
def dot(v1, v2):
    """Return the dot product of two vectors.

    Args:
        v1 (complex): First vector.
        v2 (complex): Second vector.

    Returns:
        double: Dot product.
    """
    # Re(v1 * conj(v2)) == v1.real*v2.real + v1.imag*v2.imag
    return (v1 * v2.conjugate()).real
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
@cython.cfunc
@cython.inline
@cython.locals(a=cython.complex, b=cython.complex, c=cython.complex, d=cython.complex)
@cython.locals(
    _1=cython.complex, _2=cython.complex, _3=cython.complex, _4=cython.complex
)
def calc_cubic_points(a, b, c, d):
    """Convert power-basis coefficients (a*t^3 + b*t^2 + c*t + d) back to
    the four Bezier control points of the cubic.  Inverse of
    calc_cubic_parameters()."""
    _1 = d
    _2 = (c / 3.0) + d
    _3 = (b + c) / 3.0 + _2
    _4 = a + d + c + b
    return _1, _2, _3, _4
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
@cython.cfunc
@cython.inline
@cython.locals(
    p0=cython.complex, p1=cython.complex, p2=cython.complex, p3=cython.complex
)
@cython.locals(a=cython.complex, b=cython.complex, c=cython.complex, d=cython.complex)
def calc_cubic_parameters(p0, p1, p2, p3):
    """Convert the four Bezier control points of a cubic to power-basis
    coefficients, so the curve is a*t^3 + b*t^2 + c*t + d.  Inverse of
    calc_cubic_points()."""
    c = (p1 - p0) * 3.0
    b = (p2 - p1) * 3.0 - c
    d = p0
    a = p3 - d - c - b
    return a, b, c, d
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
@cython.cfunc
@cython.inline
@cython.locals(
    p0=cython.complex, p1=cython.complex, p2=cython.complex, p3=cython.complex
)
def split_cubic_into_n_iter(p0, p1, p2, p3, n):
    """Split a cubic Bezier into n equal parts.

    Splits the curve into `n` equal parts by curve time.
    (t=0..1/n, t=1/n..2/n, ...)

    Args:
        p0 (complex): Start point of curve.
        p1 (complex): First handle of curve.
        p2 (complex): Second handle of curve.
        p3 (complex): End point of curve.

    Returns:
        An iterator yielding the control points (four complex values) of the
        subcurves.
    """
    # Hand-coded fast paths for the small n that dominate in practice.
    if n == 2:
        return iter(split_cubic_into_two(p0, p1, p2, p3))
    if n == 3:
        return iter(split_cubic_into_three(p0, p1, p2, p3))
    if n in (4, 6):
        # Halve first, then split each half into 2 (n=4) or 3 (n=6) pieces.
        left, right = split_cubic_into_two(p0, p1, p2, p3)
        splitter = split_cubic_into_two if n == 4 else split_cubic_into_three
        pieces = splitter(left[0], left[1], left[2], left[3]) + splitter(
            right[0], right[1], right[2], right[3]
        )
        return iter(pieces)

    # General case: lazily generate the n slices.
    return _split_cubic_into_n_gen(p0, p1, p2, p3, n)
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
@cython.locals(
    p0=cython.complex,
    p1=cython.complex,
    p2=cython.complex,
    p3=cython.complex,
    n=cython.int,
)
@cython.locals(a=cython.complex, b=cython.complex, c=cython.complex, d=cython.complex)
@cython.locals(
    dt=cython.double, delta_2=cython.double, delta_3=cython.double, i=cython.int
)
@cython.locals(
    a1=cython.complex, b1=cython.complex, c1=cython.complex, d1=cython.complex
)
def _split_cubic_into_n_gen(p0, p1, p2, p3, n):
    """Yield the control points of the n equal-time slices of a cubic.

    Works in the power basis: converts the Bezier to polynomial
    coefficients, re-parameterizes each slice [t1, t1 + dt] onto [0, 1],
    and converts back to control points.
    """
    a, b, c, d = calc_cubic_parameters(p0, p1, p2, p3)
    dt = 1 / n
    delta_2 = dt * dt
    delta_3 = dt * delta_2
    for i in range(n):
        t1 = i * dt
        t1_2 = t1 * t1
        # calc new a, b, c and d
        a1 = a * delta_3
        b1 = (3 * a * t1 + b) * delta_2
        c1 = (2 * b * t1 + c + 3 * a * t1_2) * dt
        d1 = a * t1 * t1_2 + b * t1_2 + c * t1 + d
        yield calc_cubic_points(a1, b1, c1, d1)
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
@cython.cfunc
@cython.inline
@cython.locals(
    p0=cython.complex, p1=cython.complex, p2=cython.complex, p3=cython.complex
)
@cython.locals(mid=cython.complex, deriv3=cython.complex)
def split_cubic_into_two(p0, p1, p2, p3):
    """Split a cubic Bezier into two equal parts.

    Splits the curve into two equal parts at t = 0.5

    Args:
        p0 (complex): Start point of curve.
        p1 (complex): First handle of curve.
        p2 (complex): Second handle of curve.
        p3 (complex): End point of curve.

    Returns:
        tuple: Two cubic Beziers (each expressed as a tuple of four complex
        values).
    """
    # mid is the curve point at t=0.5; deriv3 is 1/8 of the derivative there.
    mid = (p0 + 3 * (p1 + p2) + p3) * 0.125
    deriv3 = (p3 + p2 - p1 - p0) * 0.125
    return (
        (p0, (p0 + p1) * 0.5, mid - deriv3, mid),
        (mid, mid + deriv3, (p2 + p3) * 0.5, p3),
    )
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
@cython.cfunc
@cython.inline
@cython.locals(
    p0=cython.complex,
    p1=cython.complex,
    p2=cython.complex,
    p3=cython.complex,
)
@cython.locals(
    mid1=cython.complex,
    deriv1=cython.complex,
    mid2=cython.complex,
    deriv2=cython.complex,
)
def split_cubic_into_three(p0, p1, p2, p3):
    """Split a cubic Bezier into three equal parts.

    Splits the curve into three equal parts at t = 1/3 and t = 2/3

    Args:
        p0 (complex): Start point of curve.
        p1 (complex): First handle of curve.
        p2 (complex): Second handle of curve.
        p3 (complex): End point of curve.

    Returns:
        tuple: Three cubic Beziers (each expressed as a tuple of four complex
        values).
    """
    # mid1/mid2 are the curve points at t=1/3 and t=2/3; deriv1/deriv2 are
    # scaled derivatives there, used to place the inner handles.
    mid1 = (8 * p0 + 12 * p1 + 6 * p2 + p3) * (1 / 27)
    deriv1 = (p3 + 3 * p2 - 4 * p0) * (1 / 27)
    mid2 = (p0 + 6 * p1 + 12 * p2 + 8 * p3) * (1 / 27)
    deriv2 = (4 * p3 - 3 * p1 - p0) * (1 / 27)
    return (
        (p0, (2 * p0 + p1) / 3.0, mid1 - deriv1, mid1),
        (mid1, mid1 + deriv1, mid2 - deriv2, mid2),
        (mid2, mid2 + deriv2, (p2 + 2 * p3) / 3.0, p3),
    )
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
@cython.cfunc
@cython.inline
@cython.returns(cython.complex)
@cython.locals(
    t=cython.double,
    p0=cython.complex,
    p1=cython.complex,
    p2=cython.complex,
    p3=cython.complex,
)
@cython.locals(_p1=cython.complex, _p2=cython.complex)
def cubic_approx_control(t, p0, p1, p2, p3):
    """Approximate a cubic Bezier using a quadratic one.

    Args:
        t (double): Position of control point.
        p0 (complex): Start point of curve.
        p1 (complex): First handle of curve.
        p2 (complex): Second handle of curve.
        p3 (complex): End point of curve.

    Returns:
        complex: Location of candidate control point on quadratic curve.
    """
    # 1.5 scales each cubic handle to the quadratic control point that
    # reproduces the cubic's end tangent; the result interpolates between
    # the two candidates by t.
    _p1 = p0 + (p1 - p0) * 1.5
    _p2 = p3 + (p2 - p3) * 1.5
    return _p1 + (_p2 - _p1) * t
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
@cython.cfunc
@cython.inline
@cython.returns(cython.complex)
@cython.locals(a=cython.complex, b=cython.complex, c=cython.complex, d=cython.complex)
@cython.locals(ab=cython.complex, cd=cython.complex, p=cython.complex, h=cython.double)
def calc_intersect(a, b, c, d):
    """Calculate the intersection of two lines.

    Args:
        a (complex): Start point of first line.
        b (complex): End point of first line.
        c (complex): Start point of second line.
        d (complex): End point of second line.

    Returns:
        complex: Location of intersection if one present, ``complex(NaN,NaN)``
        if no intersection was found.
    """
    ab = b - a
    cd = d - c
    # p is ab rotated 90 degrees; dot(p, cd) == 0 iff the lines are parallel.
    p = ab * 1j
    try:
        h = dot(p, a - c) / dot(p, cd)
    except ZeroDivisionError:
        return complex(NAN, NAN)
    return c + cd * h
|
| 281 |
+
|
| 282 |
+
|
| 283 |
+
@cython.cfunc
@cython.returns(cython.int)
@cython.locals(
    tolerance=cython.double,
    p0=cython.complex,
    p1=cython.complex,
    p2=cython.complex,
    p3=cython.complex,
)
@cython.locals(mid=cython.complex, deriv3=cython.complex)
def cubic_farthest_fit_inside(p0, p1, p2, p3, tolerance):
    """Check if a cubic Bezier lies within a given distance of the origin.

    "Origin" means *the* origin (0,0), not the start of the curve. Note that no
    checks are made on the start and end positions of the curve; this function
    only checks the inside of the curve.

    Args:
        p0 (complex): Start point of curve.
        p1 (complex): First handle of curve.
        p2 (complex): Second handle of curve.
        p3 (complex): End point of curve.
        tolerance (double): Distance from origin.

    Returns:
        bool: True if the cubic Bezier ``p`` entirely lies within a distance
        ``tolerance`` of the origin, False otherwise.
    """
    # First check p2 then p1, as p2 has higher error early on.
    # (The control polygon bounds the curve, so handles inside => curve inside.)
    if abs(p2) <= tolerance and abs(p1) <= tolerance:
        return True

    # Split.
    mid = (p0 + 3 * (p1 + p2) + p3) * 0.125
    if abs(mid) > tolerance:
        return False
    deriv3 = (p3 + p2 - p1 - p0) * 0.125
    # Recurse on both halves; subdivision converges so this terminates.
    return cubic_farthest_fit_inside(
        p0, (p0 + p1) * 0.5, mid - deriv3, mid, tolerance
    ) and cubic_farthest_fit_inside(mid, mid + deriv3, (p2 + p3) * 0.5, p3, tolerance)
|
| 323 |
+
|
| 324 |
+
|
| 325 |
+
@cython.cfunc
@cython.inline
@cython.locals(tolerance=cython.double)
@cython.locals(
    q1=cython.complex,
    c0=cython.complex,
    c1=cython.complex,
    c2=cython.complex,
    c3=cython.complex,
)
def cubic_approx_quadratic(cubic, tolerance):
    """Approximate a cubic Bezier with a single quadratic within a given tolerance.

    Args:
        cubic (sequence): Four complex numbers representing control points of
            the cubic Bezier curve.
        tolerance (double): Permitted deviation from the original curve.

    Returns:
        Three complex numbers representing control points of the quadratic
        curve if it fits within the given tolerance, or ``None`` if no suitable
        curve could be calculated.
    """

    # Candidate quadratic control point: intersection of the cubic's end
    # tangents.  NaN imag means the tangents are parallel => no candidate.
    q1 = calc_intersect(cubic[0], cubic[1], cubic[2], cubic[3])
    if math.isnan(q1.imag):
        return None
    c0 = cubic[0]
    c3 = cubic[3]
    # Elevate the quadratic back to a cubic and compare handle deltas
    # against the original (endpoints match by construction).
    c1 = c0 + (q1 - c0) * (2 / 3)
    c2 = c3 + (q1 - c3) * (2 / 3)
    if not cubic_farthest_fit_inside(0, c1 - cubic[1], c2 - cubic[2], 0, tolerance):
        return None
    return c0, q1, c3
|
| 359 |
+
|
| 360 |
+
|
| 361 |
+
@cython.cfunc
@cython.locals(n=cython.int, tolerance=cython.double)
@cython.locals(i=cython.int)
@cython.locals(all_quadratic=cython.int)
@cython.locals(
    c0=cython.complex, c1=cython.complex, c2=cython.complex, c3=cython.complex
)
@cython.locals(
    q0=cython.complex,
    q1=cython.complex,
    next_q1=cython.complex,
    q2=cython.complex,
    d1=cython.complex,
)
def cubic_approx_spline(cubic, n, tolerance, all_quadratic):
    """Approximate a cubic Bezier curve with a spline of n quadratics.

    Args:
        cubic (sequence): Four complex numbers representing control points of
            the cubic Bezier curve.
        n (int): Number of quadratic Bezier curves in the spline.
        tolerance (double): Permitted deviation from the original curve.
        all_quadratic (bool): If False, a two-segment request returns the
            original cubic unchanged (the caller accepts cubic curves).

    Returns:
        A list of ``n+2`` complex numbers, representing control points of the
        quadratic spline if it fits within the given tolerance, or ``None`` if
        no suitable spline could be calculated.
    """

    if n == 1:
        return cubic_approx_quadratic(cubic, tolerance)
    # Idiom fix: compare truthiness, not "== False" (PEP 8 E712).
    if n == 2 and not all_quadratic:
        return cubic

    cubics = split_cubic_into_n_iter(cubic[0], cubic[1], cubic[2], cubic[3], n)

    # calculate the spline of quadratics and check errors at the same time.
    next_cubic = next(cubics)
    next_q1 = cubic_approx_control(
        0, next_cubic[0], next_cubic[1], next_cubic[2], next_cubic[3]
    )
    q2 = cubic[0]
    d1 = 0j
    spline = [cubic[0], next_q1]
    for i in range(1, n + 1):
        # Current cubic to convert
        c0, c1, c2, c3 = next_cubic

        # Current quadratic approximation of current cubic
        q0 = q2
        q1 = next_q1
        if i < n:
            next_cubic = next(cubics)
            next_q1 = cubic_approx_control(
                i / (n - 1), next_cubic[0], next_cubic[1], next_cubic[2], next_cubic[3]
            )
            spline.append(next_q1)
            # Implied on-curve point between consecutive off-curve points.
            q2 = (q1 + next_q1) * 0.5
        else:
            q2 = c3

        # End-point deltas
        d0 = d1
        d1 = q2 - c3

        # Reject if the join point drifts too far, or the error curve
        # (difference of elevated quadratic and original cubic) escapes
        # the tolerance disk.
        if abs(d1) > tolerance or not cubic_farthest_fit_inside(
            d0,
            q0 + (q1 - q0) * (2 / 3) - c1,
            q2 + (q1 - q2) * (2 / 3) - c2,
            d1,
            tolerance,
        ):
            return None
    spline.append(cubic[3])

    return spline
|
| 437 |
+
|
| 438 |
+
|
| 439 |
+
@cython.locals(max_err=cython.double)
@cython.locals(n=cython.int)
@cython.locals(all_quadratic=cython.int)
def curve_to_quadratic(curve, max_err, all_quadratic=True):
    """Approximate a cubic Bezier curve with a spline of n quadratics.

    Args:
        cubic (sequence): Four 2D tuples representing control points of
            the cubic Bezier curve.
        max_err (double): Permitted deviation from the original curve.
        all_quadratic (bool): If True (default) returned value is a
            quadratic spline. If False, it's either a single quadratic
            curve or a single cubic curve.

    Returns:
        If all_quadratic is True: A list of 2D tuples, representing
        control points of the quadratic spline if it fits within the
        given tolerance, or ``None`` if no suitable spline could be
        calculated.

        If all_quadratic is False: Either a quadratic curve (if length
        of output is 3), or a cubic curve (if length of output is 4).
    """

    # Work on complex points; try ever-finer splines until one fits.
    curve = [complex(*pt) for pt in curve]

    for n in range(1, MAX_N + 1):
        spline = cubic_approx_spline(curve, n, max_err, all_quadratic)
        if spline is None:
            continue
        # done. go home
        return [(pt.real, pt.imag) for pt in spline]

    raise ApproxNotFoundError(curve)
|
| 472 |
+
|
| 473 |
+
|
| 474 |
+
@cython.locals(l=cython.int, last_i=cython.int, i=cython.int)
@cython.locals(all_quadratic=cython.int)
def curves_to_quadratic(curves, max_errors, all_quadratic=True):
    """Return quadratic Bezier splines approximating the input cubic Beziers.

    Args:
        curves: A sequence of *n* curves, each curve being a sequence of four
            2D tuples.
        max_errors: A sequence of *n* floats representing the maximum permissible
            deviation from each of the cubic Bezier curves.
        all_quadratic (bool): If True (default) returned values are a
            quadratic spline. If False, they are either a single quadratic
            curve or a single cubic curve.

    Example::

        >>> curves_to_quadratic( [
        ...   [ (50,50), (100,100), (150,100), (200,50) ],
        ...   [ (75,50), (120,100), (150,75), (200,60) ]
        ... ], [1,1] )
        [[(50.0, 50.0), (75.0, 75.0), (125.0, 91.66666666666666), (175.0, 75.0), (200.0, 50.0)], [(75.0, 50.0), (97.5, 75.0), (135.41666666666666, 82.08333333333333), (175.0, 67.5), (200.0, 60.0)]]

    The returned splines have "implied oncurve points" suitable for use in
    TrueType ``glif`` outlines - i.e. in the first spline returned above,
    the first quadratic segment runs from (50,50) to
    ( (75 + 125)/2 , (120 + 91.666..)/2 ) = (100, 83.333...).

    Returns:
        If all_quadratic is True, a list of splines, each spline being a list
        of 2D tuples.

        If all_quadratic is False, a list of curves, each curve being a quadratic
        (length 3), or cubic (length 4).

    Raises:
        fontTools.cu2qu.Errors.ApproxNotFoundError: if no suitable approximation
        can be found for all curves with the given parameters.
    """

    # Work in the complex plane; one complex per 2D point.
    curves = [[complex(*p) for p in curve] for curve in curves]
    assert len(max_errors) == len(curves)

    l = len(curves)
    splines = [None] * l
    # Round-robin over the curves: `i` is the curve currently being fitted,
    # `last_i` is the curve that last forced `n` to increase.  All curves
    # must be approximated with the SAME number of segments `n` so the
    # results stay interpolation-compatible.
    last_i = i = 0
    n = 1
    while True:
        spline = cubic_approx_spline(curves[i], n, max_errors[i], all_quadratic)
        if spline is None:
            # Curve i does not fit with n segments: bump n and restart the
            # round from here; earlier results at smaller n are stale but
            # will be recomputed before we can complete a full lap.
            if n == MAX_N:
                break
            n += 1
            last_i = i
            continue
        splines[i] = spline
        i = (i + 1) % l
        if i == last_i:
            # A full lap succeeded at the current n: every curve now has a
            # compatible spline.
            # done. go home
            return [[(s.real, s.imag) for s in spline] for spline in splines]

    raise ApproxNotFoundError(curves)
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/cu2qu/errors.py
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2016 Google Inc. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
# Root of the cu2qu exception hierarchy; callers can catch this one type to
# handle any error raised by the package.
class Error(Exception):
    """Base Cu2Qu exception class for all other errors."""
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class ApproxNotFoundError(Error):
    """Raised when no quadratic approximation within tolerance exists.

    The offending curve (or list of curves) is kept on the ``curve``
    attribute for inspection by the caller.
    """

    def __init__(self, curve):
        self.curve = curve
        super().__init__("no approximation found: %s" % curve)
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class UnequalZipLengthsError(Error):
    """Raised by cu2qu's length-checking ``zip`` wrapper when its
    arguments do not all have the same length."""

    pass
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class IncompatibleGlyphsError(Error):
    """Base error for a set of glyphs whose outlines cannot be converted
    together (i.e. they are not interpolation-compatible).

    ``combined_name`` is a human-readable label for the group: the single
    shared name, or a ``{name1, name2, ...}`` set notation when the glyphs
    are named differently.
    """

    def __init__(self, glyphs):
        assert len(glyphs) > 1
        self.glyphs = glyphs
        names = {repr(g.name) for g in glyphs}
        if len(names) == 1:
            self.combined_name = names.pop()
        else:
            self.combined_name = "{%s}" % ", ".join(sorted(names))

    def __repr__(self):
        return "<%s %s>" % (type(self).__name__, self.combined_name)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class IncompatibleSegmentNumberError(IncompatibleGlyphsError):
    """Raised when same-named glyphs have different numbers of segments."""

    def __str__(self):
        return "Glyphs named %s have different number of segments" % (
            self.combined_name
        )
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class IncompatibleSegmentTypesError(IncompatibleGlyphsError):
    """Raised when same-named glyphs disagree on segment types.

    ``segments`` maps each offending segment index to the list of segment
    type tags found at that position across the glyphs.
    """

    def __init__(self, glyphs, segments):
        IncompatibleGlyphsError.__init__(self, glyphs)
        self.segments = segments

    def __str__(self):
        # Right-align indices to the width of the largest one so the
        # per-segment report lines up in columns.
        width = len(str(max(self.segments)))
        rows = [
            "%s: (%s)" % (str(idx).rjust(width), ", ".join(repr(t) for t in tags))
            for idx, tags in sorted(self.segments.items())
        ]
        return "Glyphs named %s have incompatible segment types:\n  %s" % (
            self.combined_name,
            "\n  ".join(rows),
        )
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
class IncompatibleFontsError(Error):
    """Raised when fonts contain glyphs that could not be converted together.

    ``glyph_errors`` maps each failing glyph name to the
    :class:`IncompatibleGlyphsError` describing why it failed.
    """

    def __init__(self, glyph_errors):
        self.glyph_errors = glyph_errors

    def __str__(self):
        names = ", ".join(repr(g) for g in sorted(self.glyph_errors))
        return "fonts contains incompatible glyphs: %s" % names
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/cu2qu/ufo.py
ADDED
|
@@ -0,0 +1,349 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2015 Google Inc. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
"""Converts cubic bezier curves to quadratic splines.
|
| 17 |
+
|
| 18 |
+
Conversion is performed such that the quadratic splines keep the same end-curve
|
| 19 |
+
tangents as the original cubics. The approach is iterative, increasing the
|
| 20 |
+
number of segments for a spline until the error gets below a bound.
|
| 21 |
+
|
| 22 |
+
Respective curves from multiple fonts will be converted at once to ensure that
|
| 23 |
+
the resulting splines are interpolation-compatible.
|
| 24 |
+
"""
|
| 25 |
+
|
| 26 |
+
import logging
|
| 27 |
+
from fontTools.pens.basePen import AbstractPen
|
| 28 |
+
from fontTools.pens.pointPen import PointToSegmentPen
|
| 29 |
+
from fontTools.pens.reverseContourPen import ReverseContourPen
|
| 30 |
+
|
| 31 |
+
from . import curves_to_quadratic
|
| 32 |
+
from .errors import (
|
| 33 |
+
UnequalZipLengthsError,
|
| 34 |
+
IncompatibleSegmentNumberError,
|
| 35 |
+
IncompatibleSegmentTypesError,
|
| 36 |
+
IncompatibleGlyphsError,
|
| 37 |
+
IncompatibleFontsError,
|
| 38 |
+
)
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
__all__ = ["fonts_to_quadratic", "font_to_quadratic"]
|
| 42 |
+
|
| 43 |
+
# The default approximation error below is a relative value (1/1000 of the EM square).
|
| 44 |
+
# Later on, we convert it to absolute font units by multiplying it by a font's UPEM
|
| 45 |
+
# (see fonts_to_quadratic).
|
| 46 |
+
DEFAULT_MAX_ERR = 0.001
|
| 47 |
+
CURVE_TYPE_LIB_KEY = "com.github.googlei18n.cu2qu.curve_type"
|
| 48 |
+
|
| 49 |
+
logger = logging.getLogger(__name__)
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
# Keep a handle on the builtin before shadowing it below.
_zip = zip


def zip(*args):
    """Length-checking replacement for the builtin ``zip``.

    Raises :class:`UnequalZipLengthsError` unless every argument has the
    same length, and always returns a list (python 2/3 compatibility).
    """
    lengths = {len(a) for a in args}
    if len(lengths) != 1:
        raise UnequalZipLengthsError(*args)
    return list(_zip(*args))
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class GetSegmentsPen(AbstractPen):
    """Pen to collect segments into lists of points for conversion.

    Curves always include their initial on-curve point, so some points are
    duplicated between segments.
    """

    def __init__(self):
        self.segments = []
        self._last_pt = None

    def _add_segment(self, tag, *args):
        # Remember the most recent on-curve point so that curve/qcurve
        # segments can be recorded with their starting point prepended.
        if tag in ("move", "line", "qcurve", "curve"):
            self._last_pt = args[-1]
        self.segments.append((tag, args))

    def moveTo(self, pt):
        self._add_segment("move", pt)

    def lineTo(self, pt):
        self._add_segment("line", pt)

    def qCurveTo(self, *points):
        self._add_segment("qcurve", self._last_pt, *points)

    def curveTo(self, *points):
        self._add_segment("curve", self._last_pt, *points)

    def closePath(self):
        self._add_segment("close")

    def endPath(self):
        self._add_segment("end")

    def addComponent(self, glyphName, transformation):
        # Components carry no outline of their own here; ignore them.
        pass
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def _get_segments(glyph):
    """Get a glyph's segments as extracted by GetSegmentsPen.

    Returns the pen's list of ``(tag, points)`` tuples.
    """

    pen = GetSegmentsPen()
    # glyph.draw(pen)
    # We can't simply draw the glyph with the pen, but we must initialize the
    # PointToSegmentPen explicitly with outputImpliedClosingLine=True.
    # By default PointToSegmentPen does not outputImpliedClosingLine -- unless
    # last and first point on closed contour are duplicated. Because we are
    # converting multiple glyphs at the same time, we want to make sure
    # this function returns the same number of segments, whether or not
    # the last and first point overlap.
    # https://github.com/googlefonts/fontmake/issues/572
    # https://github.com/fonttools/fonttools/pull/1720
    pointPen = PointToSegmentPen(pen, outputImpliedClosingLine=True)
    glyph.drawPoints(pointPen)
    return pen.segments
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def _set_segments(glyph, segments, reverse_direction):
    """Draw segments as extracted by GetSegmentsPen back to a glyph.

    Replaces the glyph's existing contours; optionally reverses contour
    direction by wrapping the glyph's pen in a ReverseContourPen.
    """

    glyph.clearContours()
    pen = glyph.getPen()
    if reverse_direction:
        pen = ReverseContourPen(pen)
    for tag, args in segments:
        if tag == "move":
            pen.moveTo(*args)
        elif tag == "line":
            pen.lineTo(*args)
        elif tag == "curve":
            # args[0] is the duplicated initial on-curve point recorded by
            # GetSegmentsPen; the pen's curveTo does not want it.
            pen.curveTo(*args[1:])
        elif tag == "qcurve":
            # Same: drop the prepended starting point.
            pen.qCurveTo(*args[1:])
        elif tag == "close":
            pen.closePath()
        elif tag == "end":
            pen.endPath()
        else:
            raise AssertionError('Unhandled segment type "%s"' % tag)
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def _segments_to_quadratic(segments, max_err, stats, all_quadratic=True):
    """Return quadratic approximations of cubic segments.

    Also tallies the resulting spline length into ``stats`` (keyed by the
    number of quadratic segments, as a string).
    """

    assert all(seg[0] == "curve" for seg in segments), "Non-cubic given to convert"

    new_points = curves_to_quadratic([seg[1] for seg in segments], max_err, all_quadratic)
    n = len(new_points[0])
    # All splines must come back with the same point count, otherwise the
    # glyphs would no longer be interpolation-compatible.
    assert all(len(pts) == n for pts in new_points[1:]), "Converted incompatibly"

    spline_length = str(n - 2)
    stats[spline_length] = stats.get(spline_length, 0) + 1

    # When mixed output is allowed, a 4-point result is a cubic that was
    # kept as-is; everything else is a quadratic spline.
    tag = "qcurve" if all_quadratic or n == 3 else "curve"
    return [(tag, pts) for pts in new_points]
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
def _glyphs_to_quadratic(glyphs, max_err, reverse_direction, stats, all_quadratic=True):
    """Do the actual conversion of a set of compatible glyphs, after arguments
    have been set up.

    Return True if the glyphs were modified, else return False.

    Raises IncompatibleSegmentNumberError or IncompatibleSegmentTypesError
    when the glyphs' outlines are not interpolation-compatible.
    """

    try:
        # Transpose: group the i-th segment of every glyph together.  The
        # length-checking zip() raises if segment counts differ.
        segments_by_location = zip(*[_get_segments(g) for g in glyphs])
    except UnequalZipLengthsError:
        raise IncompatibleSegmentNumberError(glyphs)
    if not any(segments_by_location):
        # No outlines at all; nothing to convert.
        return False

    # always modify input glyphs if reverse_direction is True
    glyphs_modified = reverse_direction

    new_segments_by_location = []
    incompatible = {}
    for i, segments in enumerate(segments_by_location):
        tag = segments[0][0]
        if not all(s[0] == tag for s in segments[1:]):
            # Mismatched segment types at this position: record it and keep
            # going, so every incompatibility is reported in one exception.
            incompatible[i] = [s[0] for s in segments]
        elif tag == "curve":
            new_segments = _segments_to_quadratic(
                segments, max_err, stats, all_quadratic
            )
            # In mixed mode a cubic may survive unchanged, in which case the
            # glyph need not be rewritten for this segment.
            if all_quadratic or new_segments != segments:
                glyphs_modified = True
            segments = new_segments
        new_segments_by_location.append(segments)

    if glyphs_modified:
        # Transpose back to per-glyph segment lists and redraw.
        new_segments_by_glyph = zip(*new_segments_by_location)
        for glyph, new_segments in zip(glyphs, new_segments_by_glyph):
            _set_segments(glyph, new_segments, reverse_direction)

    # Raised only after glyphs were (possibly) redrawn, so compatible
    # segments are still converted even when some positions conflict.
    if incompatible:
        raise IncompatibleSegmentTypesError(glyphs, segments=incompatible)
    return glyphs_modified
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
def glyphs_to_quadratic(
    glyphs, max_err=None, reverse_direction=False, stats=None, all_quadratic=True
):
    """Convert the curves of a set of compatible of glyphs to quadratic.

    All curves will be converted to quadratic at once, ensuring interpolation
    compatibility. If this is not required, calling glyphs_to_quadratic with one
    glyph at a time may yield slightly more optimized results.

    Return True if glyphs were modified, else return False.

    Raises IncompatibleGlyphsError if glyphs have non-interpolatable outlines.
    """
    stats = {} if stats is None else stats

    if not max_err:
        # assume 1000 is the default UPEM
        max_err = DEFAULT_MAX_ERR * 1000

    # Accept either one shared error bound or one per glyph.
    max_errors = (
        max_err if isinstance(max_err, (list, tuple)) else [max_err] * len(glyphs)
    )
    assert len(max_errors) == len(glyphs)

    return _glyphs_to_quadratic(
        glyphs, max_errors, reverse_direction, stats, all_quadratic
    )
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
def fonts_to_quadratic(
    fonts,
    max_err_em=None,
    max_err=None,
    reverse_direction=False,
    stats=None,
    dump_stats=False,
    remember_curve_type=True,
    all_quadratic=True,
):
    """Convert the curves of a collection of fonts to quadratic.

    All curves will be converted to quadratic at once, ensuring interpolation
    compatibility. If this is not required, calling fonts_to_quadratic with one
    font at a time may yield slightly more optimized results.

    Return the set of modified glyph names if any, else return an empty set.

    By default, cu2qu stores the curve type in the fonts' lib, under a private
    key "com.github.googlei18n.cu2qu.curve_type", and will not try to convert
    them again if the curve type is already set to "quadratic".
    Setting 'remember_curve_type' to False disables this optimization.

    Raises IncompatibleFontsError if same-named glyphs from different fonts
    have non-interpolatable outlines.
    """

    if remember_curve_type:
        # Skip work entirely when every font is already marked converted.
        curve_types = {f.lib.get(CURVE_TYPE_LIB_KEY, "cubic") for f in fonts}
        if len(curve_types) == 1:
            curve_type = next(iter(curve_types))
            if curve_type in ("quadratic", "mixed"):
                logger.info("Curves already converted to quadratic")
                # NOTE(review): returns False here although the docstring
                # promises a set -- callers appear to rely on truthiness only;
                # confirm before changing.
                return False
            elif curve_type == "cubic":
                pass  # keep converting
            else:
                raise NotImplementedError(curve_type)
        elif len(curve_types) > 1:
            # going to crash later if they do differ
            logger.warning("fonts may contain different curve types")

    if stats is None:
        stats = {}

    # max_err (font units) and max_err_em (fraction of UPEM) are mutually
    # exclusive; when neither is given, fall back to the relative default.
    if max_err_em and max_err:
        raise TypeError("Only one of max_err and max_err_em can be specified.")
    if not (max_err_em or max_err):
        max_err_em = DEFAULT_MAX_ERR

    if isinstance(max_err, (list, tuple)):
        assert len(max_err) == len(fonts)
        max_errors = max_err
    elif max_err:
        max_errors = [max_err] * len(fonts)

    if isinstance(max_err_em, (list, tuple)):
        assert len(fonts) == len(max_err_em)
        # Convert the relative error to absolute font units per font.
        max_errors = [f.info.unitsPerEm * e for f, e in zip(fonts, max_err_em)]
    elif max_err_em:
        max_errors = [f.info.unitsPerEm * max_err_em for f in fonts]

    modified = set()
    glyph_errors = {}
    # Union of glyph names across all fonts; a glyph may exist in only some.
    for name in set().union(*(f.keys() for f in fonts)):
        glyphs = []
        cur_max_errors = []
        for font, error in zip(fonts, max_errors):
            if name in font:
                glyphs.append(font[name])
                cur_max_errors.append(error)
        try:
            if _glyphs_to_quadratic(
                glyphs, cur_max_errors, reverse_direction, stats, all_quadratic
            ):
                modified.add(name)
        except IncompatibleGlyphsError as exc:
            # Collect per-glyph failures so all of them are reported at once.
            logger.error(exc)
            glyph_errors[name] = exc

    if glyph_errors:
        raise IncompatibleFontsError(glyph_errors)

    if modified and dump_stats:
        spline_lengths = sorted(stats.keys())
        logger.info(
            "New spline lengths: %s"
            % (", ".join("%s: %d" % (l, stats[l]) for l in spline_lengths))
        )

    if remember_curve_type:
        # Record the resulting curve type so future runs can short-circuit.
        for font in fonts:
            curve_type = font.lib.get(CURVE_TYPE_LIB_KEY, "cubic")
            new_curve_type = "quadratic" if all_quadratic else "mixed"
            if curve_type != new_curve_type:
                font.lib[CURVE_TYPE_LIB_KEY] = new_curve_type
    return modified
|
| 334 |
+
|
| 335 |
+
|
| 336 |
+
def glyph_to_quadratic(glyph, **kwargs):
    """Convenience wrapper around glyphs_to_quadratic, for just one glyph.
    Return True if the glyph was modified, else return False.

    Keyword arguments are forwarded unchanged to glyphs_to_quadratic.
    """

    return glyphs_to_quadratic([glyph], **kwargs)
|
| 342 |
+
|
| 343 |
+
|
| 344 |
+
def font_to_quadratic(font, **kwargs):
    """Convenience wrapper around fonts_to_quadratic, for just one font.
    Return the set of modified glyph names if any, else return empty set.

    Keyword arguments are forwarded unchanged to fonts_to_quadratic.
    """

    return fonts_to_quadratic([font], **kwargs)
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/designspaceLib/__init__.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/designspaceLib/__pycache__/types.cpython-310.pyc
ADDED
|
Binary file (3.73 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/encodings/MacRoman.py
ADDED
|
@@ -0,0 +1,258 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Table of 256 glyph/control names, one per list index (0x00-0xFF).
# Presumably these are the PostScript glyph names for the Mac Roman
# character set -- verify against the platform encoding tables before
# relying on specific slots.  Comments below give the index range of
# each row.
MacRoman = [
    # 0x00-0x07
    "NUL", "Eth", "eth", "Lslash", "lslash", "Scaron", "scaron", "Yacute",
    # 0x08-0x0F
    "yacute", "HT", "LF", "Thorn", "thorn", "CR", "Zcaron", "zcaron",
    # 0x10-0x17
    "DLE", "DC1", "DC2", "DC3", "DC4", "onehalf", "onequarter", "onesuperior",
    # 0x18-0x1F
    "threequarters", "threesuperior", "twosuperior", "brokenbar", "minus", "multiply", "RS", "US",
    # 0x20-0x27
    "space", "exclam", "quotedbl", "numbersign", "dollar", "percent", "ampersand", "quotesingle",
    # 0x28-0x2F
    "parenleft", "parenright", "asterisk", "plus", "comma", "hyphen", "period", "slash",
    # 0x30-0x37
    "zero", "one", "two", "three", "four", "five", "six", "seven",
    # 0x38-0x3F
    "eight", "nine", "colon", "semicolon", "less", "equal", "greater", "question",
    # 0x40-0x47
    "at", "A", "B", "C", "D", "E", "F", "G",
    # 0x48-0x4F
    "H", "I", "J", "K", "L", "M", "N", "O",
    # 0x50-0x57
    "P", "Q", "R", "S", "T", "U", "V", "W",
    # 0x58-0x5F
    "X", "Y", "Z", "bracketleft", "backslash", "bracketright", "asciicircum", "underscore",
    # 0x60-0x67
    "grave", "a", "b", "c", "d", "e", "f", "g",
    # 0x68-0x6F
    "h", "i", "j", "k", "l", "m", "n", "o",
    # 0x70-0x77
    "p", "q", "r", "s", "t", "u", "v", "w",
    # 0x78-0x7F
    "x", "y", "z", "braceleft", "bar", "braceright", "asciitilde", "DEL",
    # 0x80-0x87
    "Adieresis", "Aring", "Ccedilla", "Eacute", "Ntilde", "Odieresis", "Udieresis", "aacute",
    # 0x88-0x8F
    "agrave", "acircumflex", "adieresis", "atilde", "aring", "ccedilla", "eacute", "egrave",
    # 0x90-0x97
    "ecircumflex", "edieresis", "iacute", "igrave", "icircumflex", "idieresis", "ntilde", "oacute",
    # 0x98-0x9F
    "ograve", "ocircumflex", "odieresis", "otilde", "uacute", "ugrave", "ucircumflex", "udieresis",
    # 0xA0-0xA7
    "dagger", "degree", "cent", "sterling", "section", "bullet", "paragraph", "germandbls",
    # 0xA8-0xAF
    "registered", "copyright", "trademark", "acute", "dieresis", "notequal", "AE", "Oslash",
    # 0xB0-0xB7
    "infinity", "plusminus", "lessequal", "greaterequal", "yen", "mu", "partialdiff", "summation",
    # 0xB8-0xBF
    "product", "pi", "integral", "ordfeminine", "ordmasculine", "Omega", "ae", "oslash",
    # 0xC0-0xC7
    "questiondown", "exclamdown", "logicalnot", "radical", "florin", "approxequal", "Delta", "guillemotleft",
    # 0xC8-0xCF
    "guillemotright", "ellipsis", "nbspace", "Agrave", "Atilde", "Otilde", "OE", "oe",
    # 0xD0-0xD7
    "endash", "emdash", "quotedblleft", "quotedblright", "quoteleft", "quoteright", "divide", "lozenge",
    # 0xD8-0xDF
    "ydieresis", "Ydieresis", "fraction", "currency", "guilsinglleft", "guilsinglright", "fi", "fl",
    # 0xE0-0xE7
    "daggerdbl", "periodcentered", "quotesinglbase", "quotedblbase", "perthousand", "Acircumflex", "Ecircumflex", "Aacute",
    # 0xE8-0xEF
    "Edieresis", "Egrave", "Iacute", "Icircumflex", "Idieresis", "Igrave", "Oacute", "Ocircumflex",
    # 0xF0-0xF7
    "apple", "Ograve", "Uacute", "Ucircumflex", "Ugrave", "dotlessi", "circumflex", "tilde",
    # 0xF8-0xFF
    "macron", "breve", "dotaccent", "ring", "cedilla", "hungarumlaut", "ogonek", "caron",
]
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/encodings/__pycache__/MacRoman.cpython-310.pyc
ADDED
|
Binary file (2.22 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/__pycache__/__main__.cpython-310.pyc
ADDED
|
Binary file (2.17 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/__pycache__/location.cpython-310.pyc
ADDED
|
Binary file (672 Bytes). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/__pycache__/variableScalar.cpython-310.pyc
ADDED
|
Binary file (5.36 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/ast.py
ADDED
|
@@ -0,0 +1,2134 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fontTools.feaLib.error import FeatureLibError
|
| 2 |
+
from fontTools.feaLib.location import FeatureLibLocation
|
| 3 |
+
from fontTools.misc.encodingTools import getEncoding
|
| 4 |
+
from fontTools.misc.textTools import byteord, tobytes
|
| 5 |
+
from collections import OrderedDict
|
| 6 |
+
import itertools
|
| 7 |
+
|
| 8 |
+
SHIFT = " " * 4
|
| 9 |
+
|
| 10 |
+
__all__ = [
|
| 11 |
+
"Element",
|
| 12 |
+
"FeatureFile",
|
| 13 |
+
"Comment",
|
| 14 |
+
"GlyphName",
|
| 15 |
+
"GlyphClass",
|
| 16 |
+
"GlyphClassName",
|
| 17 |
+
"MarkClassName",
|
| 18 |
+
"AnonymousBlock",
|
| 19 |
+
"Block",
|
| 20 |
+
"FeatureBlock",
|
| 21 |
+
"NestedBlock",
|
| 22 |
+
"LookupBlock",
|
| 23 |
+
"GlyphClassDefinition",
|
| 24 |
+
"GlyphClassDefStatement",
|
| 25 |
+
"MarkClass",
|
| 26 |
+
"MarkClassDefinition",
|
| 27 |
+
"AlternateSubstStatement",
|
| 28 |
+
"Anchor",
|
| 29 |
+
"AnchorDefinition",
|
| 30 |
+
"AttachStatement",
|
| 31 |
+
"AxisValueLocationStatement",
|
| 32 |
+
"BaseAxis",
|
| 33 |
+
"CVParametersNameStatement",
|
| 34 |
+
"ChainContextPosStatement",
|
| 35 |
+
"ChainContextSubstStatement",
|
| 36 |
+
"CharacterStatement",
|
| 37 |
+
"ConditionsetStatement",
|
| 38 |
+
"CursivePosStatement",
|
| 39 |
+
"ElidedFallbackName",
|
| 40 |
+
"ElidedFallbackNameID",
|
| 41 |
+
"Expression",
|
| 42 |
+
"FeatureNameStatement",
|
| 43 |
+
"FeatureReferenceStatement",
|
| 44 |
+
"FontRevisionStatement",
|
| 45 |
+
"HheaField",
|
| 46 |
+
"IgnorePosStatement",
|
| 47 |
+
"IgnoreSubstStatement",
|
| 48 |
+
"IncludeStatement",
|
| 49 |
+
"LanguageStatement",
|
| 50 |
+
"LanguageSystemStatement",
|
| 51 |
+
"LigatureCaretByIndexStatement",
|
| 52 |
+
"LigatureCaretByPosStatement",
|
| 53 |
+
"LigatureSubstStatement",
|
| 54 |
+
"LookupFlagStatement",
|
| 55 |
+
"LookupReferenceStatement",
|
| 56 |
+
"MarkBasePosStatement",
|
| 57 |
+
"MarkLigPosStatement",
|
| 58 |
+
"MarkMarkPosStatement",
|
| 59 |
+
"MultipleSubstStatement",
|
| 60 |
+
"NameRecord",
|
| 61 |
+
"OS2Field",
|
| 62 |
+
"PairPosStatement",
|
| 63 |
+
"ReverseChainSingleSubstStatement",
|
| 64 |
+
"ScriptStatement",
|
| 65 |
+
"SinglePosStatement",
|
| 66 |
+
"SingleSubstStatement",
|
| 67 |
+
"SizeParameters",
|
| 68 |
+
"Statement",
|
| 69 |
+
"STATAxisValueStatement",
|
| 70 |
+
"STATDesignAxisStatement",
|
| 71 |
+
"STATNameStatement",
|
| 72 |
+
"SubtableStatement",
|
| 73 |
+
"TableBlock",
|
| 74 |
+
"ValueRecord",
|
| 75 |
+
"ValueRecordDefinition",
|
| 76 |
+
"VheaField",
|
| 77 |
+
]
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def deviceToString(device):
|
| 81 |
+
if device is None:
|
| 82 |
+
return "<device NULL>"
|
| 83 |
+
else:
|
| 84 |
+
return "<device %s>" % ", ".join("%d %d" % t for t in device)
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
fea_keywords = set(
|
| 88 |
+
[
|
| 89 |
+
"anchor",
|
| 90 |
+
"anchordef",
|
| 91 |
+
"anon",
|
| 92 |
+
"anonymous",
|
| 93 |
+
"by",
|
| 94 |
+
"contour",
|
| 95 |
+
"cursive",
|
| 96 |
+
"device",
|
| 97 |
+
"enum",
|
| 98 |
+
"enumerate",
|
| 99 |
+
"excludedflt",
|
| 100 |
+
"exclude_dflt",
|
| 101 |
+
"feature",
|
| 102 |
+
"from",
|
| 103 |
+
"ignore",
|
| 104 |
+
"ignorebaseglyphs",
|
| 105 |
+
"ignoreligatures",
|
| 106 |
+
"ignoremarks",
|
| 107 |
+
"include",
|
| 108 |
+
"includedflt",
|
| 109 |
+
"include_dflt",
|
| 110 |
+
"language",
|
| 111 |
+
"languagesystem",
|
| 112 |
+
"lookup",
|
| 113 |
+
"lookupflag",
|
| 114 |
+
"mark",
|
| 115 |
+
"markattachmenttype",
|
| 116 |
+
"markclass",
|
| 117 |
+
"nameid",
|
| 118 |
+
"null",
|
| 119 |
+
"parameters",
|
| 120 |
+
"pos",
|
| 121 |
+
"position",
|
| 122 |
+
"required",
|
| 123 |
+
"righttoleft",
|
| 124 |
+
"reversesub",
|
| 125 |
+
"rsub",
|
| 126 |
+
"script",
|
| 127 |
+
"sub",
|
| 128 |
+
"substitute",
|
| 129 |
+
"subtable",
|
| 130 |
+
"table",
|
| 131 |
+
"usemarkfilteringset",
|
| 132 |
+
"useextension",
|
| 133 |
+
"valuerecorddef",
|
| 134 |
+
"base",
|
| 135 |
+
"gdef",
|
| 136 |
+
"head",
|
| 137 |
+
"hhea",
|
| 138 |
+
"name",
|
| 139 |
+
"vhea",
|
| 140 |
+
"vmtx",
|
| 141 |
+
]
|
| 142 |
+
)
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def asFea(g):
|
| 146 |
+
if hasattr(g, "asFea"):
|
| 147 |
+
return g.asFea()
|
| 148 |
+
elif isinstance(g, tuple) and len(g) == 2:
|
| 149 |
+
return asFea(g[0]) + " - " + asFea(g[1]) # a range
|
| 150 |
+
elif g.lower() in fea_keywords:
|
| 151 |
+
return "\\" + g
|
| 152 |
+
else:
|
| 153 |
+
return g
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
class Element(object):
|
| 157 |
+
"""A base class representing "something" in a feature file."""
|
| 158 |
+
|
| 159 |
+
def __init__(self, location=None):
|
| 160 |
+
#: location of this element as a `FeatureLibLocation` object.
|
| 161 |
+
if location and not isinstance(location, FeatureLibLocation):
|
| 162 |
+
location = FeatureLibLocation(*location)
|
| 163 |
+
self.location = location
|
| 164 |
+
|
| 165 |
+
def build(self, builder):
|
| 166 |
+
pass
|
| 167 |
+
|
| 168 |
+
def asFea(self, indent=""):
|
| 169 |
+
"""Returns this element as a string of feature code. For block-type
|
| 170 |
+
elements (such as :class:`FeatureBlock`), the `indent` string is
|
| 171 |
+
added to the start of each line in the output."""
|
| 172 |
+
raise NotImplementedError
|
| 173 |
+
|
| 174 |
+
def __str__(self):
|
| 175 |
+
return self.asFea()
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
class Statement(Element):
|
| 179 |
+
pass
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
class Expression(Element):
|
| 183 |
+
pass
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
class Comment(Element):
|
| 187 |
+
"""A comment in a feature file."""
|
| 188 |
+
|
| 189 |
+
def __init__(self, text, location=None):
|
| 190 |
+
super(Comment, self).__init__(location)
|
| 191 |
+
#: Text of the comment
|
| 192 |
+
self.text = text
|
| 193 |
+
|
| 194 |
+
def asFea(self, indent=""):
|
| 195 |
+
return self.text
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
class NullGlyph(Expression):
|
| 199 |
+
"""The NULL glyph, used in glyph deletion substitutions."""
|
| 200 |
+
|
| 201 |
+
def __init__(self, location=None):
|
| 202 |
+
Expression.__init__(self, location)
|
| 203 |
+
#: The name itself as a string
|
| 204 |
+
|
| 205 |
+
def glyphSet(self):
|
| 206 |
+
"""The glyphs in this class as a tuple of :class:`GlyphName` objects."""
|
| 207 |
+
return ()
|
| 208 |
+
|
| 209 |
+
def asFea(self, indent=""):
|
| 210 |
+
return "NULL"
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
class GlyphName(Expression):
|
| 214 |
+
"""A single glyph name, such as ``cedilla``."""
|
| 215 |
+
|
| 216 |
+
def __init__(self, glyph, location=None):
|
| 217 |
+
Expression.__init__(self, location)
|
| 218 |
+
#: The name itself as a string
|
| 219 |
+
self.glyph = glyph
|
| 220 |
+
|
| 221 |
+
def glyphSet(self):
|
| 222 |
+
"""The glyphs in this class as a tuple of :class:`GlyphName` objects."""
|
| 223 |
+
return (self.glyph,)
|
| 224 |
+
|
| 225 |
+
def asFea(self, indent=""):
|
| 226 |
+
return asFea(self.glyph)
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
class GlyphClass(Expression):
|
| 230 |
+
"""A glyph class, such as ``[acute cedilla grave]``."""
|
| 231 |
+
|
| 232 |
+
def __init__(self, glyphs=None, location=None):
|
| 233 |
+
Expression.__init__(self, location)
|
| 234 |
+
#: The list of glyphs in this class, as :class:`GlyphName` objects.
|
| 235 |
+
self.glyphs = glyphs if glyphs is not None else []
|
| 236 |
+
self.original = []
|
| 237 |
+
self.curr = 0
|
| 238 |
+
|
| 239 |
+
def glyphSet(self):
|
| 240 |
+
"""The glyphs in this class as a tuple of :class:`GlyphName` objects."""
|
| 241 |
+
return tuple(self.glyphs)
|
| 242 |
+
|
| 243 |
+
def asFea(self, indent=""):
|
| 244 |
+
if len(self.original):
|
| 245 |
+
if self.curr < len(self.glyphs):
|
| 246 |
+
self.original.extend(self.glyphs[self.curr :])
|
| 247 |
+
self.curr = len(self.glyphs)
|
| 248 |
+
return "[" + " ".join(map(asFea, self.original)) + "]"
|
| 249 |
+
else:
|
| 250 |
+
return "[" + " ".join(map(asFea, self.glyphs)) + "]"
|
| 251 |
+
|
| 252 |
+
def extend(self, glyphs):
|
| 253 |
+
"""Add a list of :class:`GlyphName` objects to the class."""
|
| 254 |
+
self.glyphs.extend(glyphs)
|
| 255 |
+
|
| 256 |
+
def append(self, glyph):
|
| 257 |
+
"""Add a single :class:`GlyphName` object to the class."""
|
| 258 |
+
self.glyphs.append(glyph)
|
| 259 |
+
|
| 260 |
+
def add_range(self, start, end, glyphs):
|
| 261 |
+
"""Add a range (e.g. ``A-Z``) to the class. ``start`` and ``end``
|
| 262 |
+
are either :class:`GlyphName` objects or strings representing the
|
| 263 |
+
start and end glyphs in the class, and ``glyphs`` is the full list of
|
| 264 |
+
:class:`GlyphName` objects in the range."""
|
| 265 |
+
if self.curr < len(self.glyphs):
|
| 266 |
+
self.original.extend(self.glyphs[self.curr :])
|
| 267 |
+
self.original.append((start, end))
|
| 268 |
+
self.glyphs.extend(glyphs)
|
| 269 |
+
self.curr = len(self.glyphs)
|
| 270 |
+
|
| 271 |
+
def add_cid_range(self, start, end, glyphs):
|
| 272 |
+
"""Add a range to the class by glyph ID. ``start`` and ``end`` are the
|
| 273 |
+
initial and final IDs, and ``glyphs`` is the full list of
|
| 274 |
+
:class:`GlyphName` objects in the range."""
|
| 275 |
+
if self.curr < len(self.glyphs):
|
| 276 |
+
self.original.extend(self.glyphs[self.curr :])
|
| 277 |
+
self.original.append(("\\{}".format(start), "\\{}".format(end)))
|
| 278 |
+
self.glyphs.extend(glyphs)
|
| 279 |
+
self.curr = len(self.glyphs)
|
| 280 |
+
|
| 281 |
+
def add_class(self, gc):
|
| 282 |
+
"""Add glyphs from the given :class:`GlyphClassName` object to the
|
| 283 |
+
class."""
|
| 284 |
+
if self.curr < len(self.glyphs):
|
| 285 |
+
self.original.extend(self.glyphs[self.curr :])
|
| 286 |
+
self.original.append(gc)
|
| 287 |
+
self.glyphs.extend(gc.glyphSet())
|
| 288 |
+
self.curr = len(self.glyphs)
|
| 289 |
+
|
| 290 |
+
|
| 291 |
+
class GlyphClassName(Expression):
|
| 292 |
+
"""A glyph class name, such as ``@FRENCH_MARKS``. This must be instantiated
|
| 293 |
+
with a :class:`GlyphClassDefinition` object."""
|
| 294 |
+
|
| 295 |
+
def __init__(self, glyphclass, location=None):
|
| 296 |
+
Expression.__init__(self, location)
|
| 297 |
+
assert isinstance(glyphclass, GlyphClassDefinition)
|
| 298 |
+
self.glyphclass = glyphclass
|
| 299 |
+
|
| 300 |
+
def glyphSet(self):
|
| 301 |
+
"""The glyphs in this class as a tuple of :class:`GlyphName` objects."""
|
| 302 |
+
return tuple(self.glyphclass.glyphSet())
|
| 303 |
+
|
| 304 |
+
def asFea(self, indent=""):
|
| 305 |
+
return "@" + self.glyphclass.name
|
| 306 |
+
|
| 307 |
+
|
| 308 |
+
class MarkClassName(Expression):
|
| 309 |
+
"""A mark class name, such as ``@FRENCH_MARKS`` defined with ``markClass``.
|
| 310 |
+
This must be instantiated with a :class:`MarkClass` object."""
|
| 311 |
+
|
| 312 |
+
def __init__(self, markClass, location=None):
|
| 313 |
+
Expression.__init__(self, location)
|
| 314 |
+
assert isinstance(markClass, MarkClass)
|
| 315 |
+
self.markClass = markClass
|
| 316 |
+
|
| 317 |
+
def glyphSet(self):
|
| 318 |
+
"""The glyphs in this class as a tuple of :class:`GlyphName` objects."""
|
| 319 |
+
return self.markClass.glyphSet()
|
| 320 |
+
|
| 321 |
+
def asFea(self, indent=""):
|
| 322 |
+
return "@" + self.markClass.name
|
| 323 |
+
|
| 324 |
+
|
| 325 |
+
class AnonymousBlock(Statement):
|
| 326 |
+
"""An anonymous data block."""
|
| 327 |
+
|
| 328 |
+
def __init__(self, tag, content, location=None):
|
| 329 |
+
Statement.__init__(self, location)
|
| 330 |
+
self.tag = tag #: string containing the block's "tag"
|
| 331 |
+
self.content = content #: block data as string
|
| 332 |
+
|
| 333 |
+
def asFea(self, indent=""):
|
| 334 |
+
res = "anon {} {{\n".format(self.tag)
|
| 335 |
+
res += self.content
|
| 336 |
+
res += "}} {};\n\n".format(self.tag)
|
| 337 |
+
return res
|
| 338 |
+
|
| 339 |
+
|
| 340 |
+
class Block(Statement):
|
| 341 |
+
"""A block of statements: feature, lookup, etc."""
|
| 342 |
+
|
| 343 |
+
def __init__(self, location=None):
|
| 344 |
+
Statement.__init__(self, location)
|
| 345 |
+
self.statements = [] #: Statements contained in the block
|
| 346 |
+
|
| 347 |
+
def build(self, builder):
|
| 348 |
+
"""When handed a 'builder' object of comparable interface to
|
| 349 |
+
:class:`fontTools.feaLib.builder`, walks the statements in this
|
| 350 |
+
block, calling the builder callbacks."""
|
| 351 |
+
for s in self.statements:
|
| 352 |
+
s.build(builder)
|
| 353 |
+
|
| 354 |
+
def asFea(self, indent=""):
|
| 355 |
+
indent += SHIFT
|
| 356 |
+
return (
|
| 357 |
+
indent
|
| 358 |
+
+ ("\n" + indent).join([s.asFea(indent=indent) for s in self.statements])
|
| 359 |
+
+ "\n"
|
| 360 |
+
)
|
| 361 |
+
|
| 362 |
+
|
| 363 |
+
class FeatureFile(Block):
|
| 364 |
+
"""The top-level element of the syntax tree, containing the whole feature
|
| 365 |
+
file in its ``statements`` attribute."""
|
| 366 |
+
|
| 367 |
+
def __init__(self):
|
| 368 |
+
Block.__init__(self, location=None)
|
| 369 |
+
self.markClasses = {} # name --> ast.MarkClass
|
| 370 |
+
|
| 371 |
+
def asFea(self, indent=""):
|
| 372 |
+
return "\n".join(s.asFea(indent=indent) for s in self.statements)
|
| 373 |
+
|
| 374 |
+
|
| 375 |
+
class FeatureBlock(Block):
|
| 376 |
+
"""A named feature block."""
|
| 377 |
+
|
| 378 |
+
def __init__(self, name, use_extension=False, location=None):
|
| 379 |
+
Block.__init__(self, location)
|
| 380 |
+
self.name, self.use_extension = name, use_extension
|
| 381 |
+
|
| 382 |
+
def build(self, builder):
|
| 383 |
+
"""Call the ``start_feature`` callback on the builder object, visit
|
| 384 |
+
all the statements in this feature, and then call ``end_feature``."""
|
| 385 |
+
# TODO(sascha): Handle use_extension.
|
| 386 |
+
builder.start_feature(self.location, self.name)
|
| 387 |
+
# language exclude_dflt statements modify builder.features_
|
| 388 |
+
# limit them to this block with temporary builder.features_
|
| 389 |
+
features = builder.features_
|
| 390 |
+
builder.features_ = {}
|
| 391 |
+
Block.build(self, builder)
|
| 392 |
+
for key, value in builder.features_.items():
|
| 393 |
+
features.setdefault(key, []).extend(value)
|
| 394 |
+
builder.features_ = features
|
| 395 |
+
builder.end_feature()
|
| 396 |
+
|
| 397 |
+
def asFea(self, indent=""):
|
| 398 |
+
res = indent + "feature %s " % self.name.strip()
|
| 399 |
+
if self.use_extension:
|
| 400 |
+
res += "useExtension "
|
| 401 |
+
res += "{\n"
|
| 402 |
+
res += Block.asFea(self, indent=indent)
|
| 403 |
+
res += indent + "} %s;\n" % self.name.strip()
|
| 404 |
+
return res
|
| 405 |
+
|
| 406 |
+
|
| 407 |
+
class NestedBlock(Block):
|
| 408 |
+
"""A block inside another block, for example when found inside a
|
| 409 |
+
``cvParameters`` block."""
|
| 410 |
+
|
| 411 |
+
def __init__(self, tag, block_name, location=None):
|
| 412 |
+
Block.__init__(self, location)
|
| 413 |
+
self.tag = tag
|
| 414 |
+
self.block_name = block_name
|
| 415 |
+
|
| 416 |
+
def build(self, builder):
|
| 417 |
+
Block.build(self, builder)
|
| 418 |
+
if self.block_name == "ParamUILabelNameID":
|
| 419 |
+
builder.add_to_cv_num_named_params(self.tag)
|
| 420 |
+
|
| 421 |
+
def asFea(self, indent=""):
|
| 422 |
+
res = "{}{} {{\n".format(indent, self.block_name)
|
| 423 |
+
res += Block.asFea(self, indent=indent)
|
| 424 |
+
res += "{}}};\n".format(indent)
|
| 425 |
+
return res
|
| 426 |
+
|
| 427 |
+
|
| 428 |
+
class LookupBlock(Block):
|
| 429 |
+
"""A named lookup, containing ``statements``."""
|
| 430 |
+
|
| 431 |
+
def __init__(self, name, use_extension=False, location=None):
|
| 432 |
+
Block.__init__(self, location)
|
| 433 |
+
self.name, self.use_extension = name, use_extension
|
| 434 |
+
|
| 435 |
+
def build(self, builder):
|
| 436 |
+
# TODO(sascha): Handle use_extension.
|
| 437 |
+
builder.start_lookup_block(self.location, self.name)
|
| 438 |
+
Block.build(self, builder)
|
| 439 |
+
builder.end_lookup_block()
|
| 440 |
+
|
| 441 |
+
def asFea(self, indent=""):
|
| 442 |
+
res = "lookup {} ".format(self.name)
|
| 443 |
+
if self.use_extension:
|
| 444 |
+
res += "useExtension "
|
| 445 |
+
res += "{\n"
|
| 446 |
+
res += Block.asFea(self, indent=indent)
|
| 447 |
+
res += "{}}} {};\n".format(indent, self.name)
|
| 448 |
+
return res
|
| 449 |
+
|
| 450 |
+
|
| 451 |
+
class TableBlock(Block):
|
| 452 |
+
"""A ``table ... { }`` block."""
|
| 453 |
+
|
| 454 |
+
def __init__(self, name, location=None):
|
| 455 |
+
Block.__init__(self, location)
|
| 456 |
+
self.name = name
|
| 457 |
+
|
| 458 |
+
def asFea(self, indent=""):
|
| 459 |
+
res = "table {} {{\n".format(self.name.strip())
|
| 460 |
+
res += super(TableBlock, self).asFea(indent=indent)
|
| 461 |
+
res += "}} {};\n".format(self.name.strip())
|
| 462 |
+
return res
|
| 463 |
+
|
| 464 |
+
|
| 465 |
+
class GlyphClassDefinition(Statement):
|
| 466 |
+
"""Example: ``@UPPERCASE = [A-Z];``."""
|
| 467 |
+
|
| 468 |
+
def __init__(self, name, glyphs, location=None):
|
| 469 |
+
Statement.__init__(self, location)
|
| 470 |
+
self.name = name #: class name as a string, without initial ``@``
|
| 471 |
+
self.glyphs = glyphs #: a :class:`GlyphClass` object
|
| 472 |
+
|
| 473 |
+
def glyphSet(self):
|
| 474 |
+
"""The glyphs in this class as a tuple of :class:`GlyphName` objects."""
|
| 475 |
+
return tuple(self.glyphs.glyphSet())
|
| 476 |
+
|
| 477 |
+
def asFea(self, indent=""):
|
| 478 |
+
return "@" + self.name + " = " + self.glyphs.asFea() + ";"
|
| 479 |
+
|
| 480 |
+
|
| 481 |
+
class GlyphClassDefStatement(Statement):
|
| 482 |
+
"""Example: ``GlyphClassDef @UPPERCASE, [B], [C], [D];``. The parameters
|
| 483 |
+
must be either :class:`GlyphClass` or :class:`GlyphClassName` objects, or
|
| 484 |
+
``None``."""
|
| 485 |
+
|
| 486 |
+
def __init__(
|
| 487 |
+
self, baseGlyphs, markGlyphs, ligatureGlyphs, componentGlyphs, location=None
|
| 488 |
+
):
|
| 489 |
+
Statement.__init__(self, location)
|
| 490 |
+
self.baseGlyphs, self.markGlyphs = (baseGlyphs, markGlyphs)
|
| 491 |
+
self.ligatureGlyphs = ligatureGlyphs
|
| 492 |
+
self.componentGlyphs = componentGlyphs
|
| 493 |
+
|
| 494 |
+
def build(self, builder):
|
| 495 |
+
"""Calls the builder's ``add_glyphClassDef`` callback."""
|
| 496 |
+
base = self.baseGlyphs.glyphSet() if self.baseGlyphs else tuple()
|
| 497 |
+
liga = self.ligatureGlyphs.glyphSet() if self.ligatureGlyphs else tuple()
|
| 498 |
+
mark = self.markGlyphs.glyphSet() if self.markGlyphs else tuple()
|
| 499 |
+
comp = self.componentGlyphs.glyphSet() if self.componentGlyphs else tuple()
|
| 500 |
+
builder.add_glyphClassDef(self.location, base, liga, mark, comp)
|
| 501 |
+
|
| 502 |
+
def asFea(self, indent=""):
|
| 503 |
+
return "GlyphClassDef {}, {}, {}, {};".format(
|
| 504 |
+
self.baseGlyphs.asFea() if self.baseGlyphs else "",
|
| 505 |
+
self.ligatureGlyphs.asFea() if self.ligatureGlyphs else "",
|
| 506 |
+
self.markGlyphs.asFea() if self.markGlyphs else "",
|
| 507 |
+
self.componentGlyphs.asFea() if self.componentGlyphs else "",
|
| 508 |
+
)
|
| 509 |
+
|
| 510 |
+
|
| 511 |
+
class MarkClass(object):
    """One `or more` ``markClass`` statements for the same mark class.

    While glyph classes can be defined only once, the feature file format
    allows expanding mark classes with multiple definitions, each using
    different glyphs and anchors. The following are two ``MarkClassDefinitions``
    for the same ``MarkClass``::

        markClass [acute grave] <anchor 350 800> @FRENCH_ACCENTS;
        markClass [cedilla] <anchor 350 -200> @FRENCH_ACCENTS;

    The ``MarkClass`` object is therefore just a container for a list of
    :class:`MarkClassDefinition` statements.
    """

    def __init__(self, name):
        self.name = name  # mark class name, without the leading "@"
        self.definitions = []  # list of MarkClassDefinition statements
        self.glyphs = OrderedDict()  # glyph --> ast.MarkClassDefinitions

    def addDefinition(self, definition):
        """Add a :class:`MarkClassDefinition` statement to this mark class."""
        assert isinstance(definition, MarkClassDefinition)
        self.definitions.append(definition)
        # A glyph may belong to at most one definition of a mark class;
        # on conflict, report where the earlier definition was made.
        for glyph in definition.glyphSet():
            if glyph in self.glyphs:
                otherLoc = self.glyphs[glyph].location
                if otherLoc is None:
                    end = ""
                else:
                    end = f" at {otherLoc}"
                raise FeatureLibError(
                    "Glyph %s already defined%s" % (glyph, end), definition.location
                )
            self.glyphs[glyph] = definition

    def glyphSet(self):
        """The glyphs in this class as a tuple of :class:`GlyphName` objects."""
        # OrderedDict preserves insertion order across definitions.
        return tuple(self.glyphs.keys())

    def asFea(self, indent=""):
        # One markClass statement per definition, newline-separated.
        res = "\n".join(d.asFea() for d in self.definitions)
        return res
|
| 554 |
+
|
| 555 |
+
|
| 556 |
+
class MarkClassDefinition(Statement):
    """A single ``markClass`` statement.

    The ``markClass`` should be a :class:`MarkClass` object, the ``anchor``
    an :class:`Anchor` object, and the ``glyphs`` parameter should be a
    `glyph-containing object`_ .

    Example:

    .. code:: python

        mc = MarkClass("FRENCH_ACCENTS")
        mc.addDefinition( MarkClassDefinition(mc, Anchor(350, 800),
            GlyphClass([ GlyphName("acute"), GlyphName("grave") ])
        ) )
        mc.addDefinition( MarkClassDefinition(mc, Anchor(350, -200),
            GlyphClass([ GlyphName("cedilla") ])
        ) )

        mc.asFea()
        # markClass [acute grave] <anchor 350 800> @FRENCH_ACCENTS;
        # markClass [cedilla] <anchor 350 -200> @FRENCH_ACCENTS;

    """

    def __init__(self, markClass, anchor, glyphs, location=None):
        Statement.__init__(self, location)
        assert isinstance(markClass, MarkClass)
        assert isinstance(anchor, Anchor) and isinstance(glyphs, Expression)
        self.markClass = markClass
        self.anchor = anchor
        self.glyphs = glyphs

    def glyphSet(self):
        """The glyphs in this class as a tuple of :class:`GlyphName` objects."""
        return self.glyphs.glyphSet()

    def asFea(self, indent=""):
        glyphs = self.glyphs.asFea()
        anchor = self.anchor.asFea()
        return f"markClass {glyphs} {anchor} @{self.markClass.name};"
|
| 593 |
+
|
| 594 |
+
|
| 595 |
+
class AlternateSubstStatement(Statement):
    """A ``sub ... from ...`` statement.

    ``prefix``, ``glyph``, ``suffix`` and ``replacement`` should be lists of
    `glyph-containing objects`_. ``glyph`` should be a `one element list`."""

    def __init__(self, prefix, glyph, suffix, replacement, location=None):
        Statement.__init__(self, location)
        self.prefix, self.glyph, self.suffix = (prefix, glyph, suffix)
        self.replacement = replacement

    def build(self, builder):
        """Calls the builder's ``add_alternate_subst`` callback."""
        glyph = self.glyph.glyphSet()
        # Alternate substitution takes exactly one input glyph.
        assert len(glyph) == 1, glyph
        glyph = list(glyph)[0]
        prefix = [p.glyphSet() for p in self.prefix]
        suffix = [s.glyphSet() for s in self.suffix]
        replacement = self.replacement.glyphSet()
        builder.add_alternate_subst(self.location, prefix, glyph, suffix, replacement)

    def asFea(self, indent=""):
        res = "sub "
        # When context is present, the input glyph is marked with an
        # apostrophe; otherwise it is written plainly.
        if len(self.prefix) or len(self.suffix):
            if len(self.prefix):
                res += " ".join(map(asFea, self.prefix)) + " "
            res += asFea(self.glyph) + "'"  # even though we really only use 1
            if len(self.suffix):
                res += " " + " ".join(map(asFea, self.suffix))
        else:
            res += asFea(self.glyph)
        res += " from "
        res += asFea(self.replacement)
        res += ";"
        return res
|
| 630 |
+
|
| 631 |
+
|
| 632 |
+
class Anchor(Expression):
    """An ``Anchor`` element, used inside a ``pos`` rule.

    If a ``name`` is given, this will be used in preference to the coordinates.
    Other values should be integer.
    """

    def __init__(
        self,
        x,
        y,
        name=None,
        contourpoint=None,
        xDeviceTable=None,
        yDeviceTable=None,
        location=None,
    ):
        Expression.__init__(self, location)
        self.name = name  # named anchor; takes precedence over x/y when set
        self.x, self.y, self.contourpoint = x, y, contourpoint
        self.xDeviceTable, self.yDeviceTable = xDeviceTable, yDeviceTable

    def asFea(self, indent=""):
        # A named anchor is emitted by reference only.
        if self.name is not None:
            return "<anchor {}>".format(self.name)
        res = "<anchor {} {}".format(self.x, self.y)
        if self.contourpoint:
            res += " contourpoint {}".format(self.contourpoint)
        # Device tables are always written as an x/y pair when either is
        # present; NOTE(review): presumably deviceToString renders a missing
        # table as "<device NULL>" — confirm in its definition.
        if self.xDeviceTable or self.yDeviceTable:
            res += " "
            res += deviceToString(self.xDeviceTable)
            res += " "
            res += deviceToString(self.yDeviceTable)
        res += ">"
        return res
|
| 667 |
+
|
| 668 |
+
|
| 669 |
+
class AnchorDefinition(Statement):
    """A named anchor definition. (2.e.viii). ``name`` should be a string."""

    def __init__(self, name, x, y, contourpoint=None, location=None):
        Statement.__init__(self, location)
        self.name = name
        self.x = x
        self.y = y
        self.contourpoint = contourpoint

    def asFea(self, indent=""):
        # "anchorDef <x> <y> [contourpoint <n>] <name>;"
        parts = ["anchorDef {} {}".format(self.x, self.y)]
        if self.contourpoint:
            parts.append("contourpoint {}".format(self.contourpoint))
        parts.append("{};".format(self.name))
        return " ".join(parts)
|
| 682 |
+
|
| 683 |
+
|
| 684 |
+
class AttachStatement(Statement):
    """A ``GDEF`` table ``Attach`` statement."""

    def __init__(self, glyphs, contourPoints, location=None):
        Statement.__init__(self, location)
        self.glyphs = glyphs  # a glyph-containing object
        self.contourPoints = contourPoints  # a list of integer contour points

    def build(self, builder):
        """Calls the builder's ``add_attach_points`` callback."""
        builder.add_attach_points(
            self.location, self.glyphs.glyphSet(), self.contourPoints
        )

    def asFea(self, indent=""):
        points = " ".join(str(pt) for pt in self.contourPoints)
        return "Attach {} {};".format(self.glyphs.asFea(), points)
|
| 701 |
+
|
| 702 |
+
|
| 703 |
+
class ChainContextPosStatement(Statement):
    r"""A chained contextual positioning statement.

    ``prefix``, ``glyphs``, and ``suffix`` should be lists of
    `glyph-containing objects`_ .

    ``lookups`` should be a list of elements representing what lookups
    to apply at each glyph position. Each element should be a
    :class:`LookupBlock` to apply a single chaining lookup at the given
    position, a list of :class:`LookupBlock`\ s to apply multiple
    lookups, or ``None`` to apply no lookup. The length of the outer
    list should equal the length of ``glyphs``; the inner lists can be
    of variable length."""

    def __init__(self, prefix, glyphs, suffix, lookups, location=None):
        Statement.__init__(self, location)
        self.prefix, self.glyphs, self.suffix = prefix, glyphs, suffix
        self.lookups = list(lookups)
        # Normalize: a bare (non-iterable) lookup is wrapped in a
        # one-element list so each entry is either falsy or a list.
        for i, lookup in enumerate(lookups):
            if lookup:
                try:
                    (_ for _ in lookup)
                except TypeError:
                    self.lookups[i] = [lookup]

    def build(self, builder):
        """Calls the builder's ``add_chain_context_pos`` callback."""
        prefix = [p.glyphSet() for p in self.prefix]
        glyphs = [g.glyphSet() for g in self.glyphs]
        suffix = [s.glyphSet() for s in self.suffix]
        builder.add_chain_context_pos(
            self.location, prefix, glyphs, suffix, self.lookups
        )

    def asFea(self, indent=""):
        res = "pos "
        if (
            len(self.prefix)
            or len(self.suffix)
            or any([x is not None for x in self.lookups])
        ):
            # Contextual form: mark input glyphs with ' and attach lookups.
            if len(self.prefix):
                res += " ".join(g.asFea() for g in self.prefix) + " "
            for i, g in enumerate(self.glyphs):
                res += g.asFea() + "'"
                if self.lookups[i]:
                    for lu in self.lookups[i]:
                        res += " lookup " + lu.name
                if i < len(self.glyphs) - 1:
                    res += " "
            if len(self.suffix):
                res += " " + " ".join(map(asFea, self.suffix))
        else:
            # BUGFIX: was ``self.glyph``, an attribute this class never
            # defines (the constructor sets ``self.glyphs``), which raised
            # AttributeError for rules without context or lookups.
            res += " ".join(map(asFea, self.glyphs))
        res += ";"
        return res
|
| 759 |
+
|
| 760 |
+
|
| 761 |
+
class ChainContextSubstStatement(Statement):
    r"""A chained contextual substitution statement.

    ``prefix``, ``glyphs``, and ``suffix`` should be lists of
    `glyph-containing objects`_ .

    ``lookups`` should be a list of elements representing what lookups
    to apply at each glyph position. Each element should be a
    :class:`LookupBlock` to apply a single chaining lookup at the given
    position, a list of :class:`LookupBlock`\ s to apply multiple
    lookups, or ``None`` to apply no lookup. The length of the outer
    list should equal the length of ``glyphs``; the inner lists can be
    of variable length."""

    def __init__(self, prefix, glyphs, suffix, lookups, location=None):
        Statement.__init__(self, location)
        self.prefix, self.glyphs, self.suffix = prefix, glyphs, suffix
        self.lookups = list(lookups)
        # Normalize: a bare (non-iterable) lookup is wrapped in a
        # one-element list so each entry is either falsy or a list.
        for i, lookup in enumerate(lookups):
            if lookup:
                try:
                    (_ for _ in lookup)
                except TypeError:
                    self.lookups[i] = [lookup]

    def build(self, builder):
        """Calls the builder's ``add_chain_context_subst`` callback."""
        prefix = [p.glyphSet() for p in self.prefix]
        glyphs = [g.glyphSet() for g in self.glyphs]
        suffix = [s.glyphSet() for s in self.suffix]
        builder.add_chain_context_subst(
            self.location, prefix, glyphs, suffix, self.lookups
        )

    def asFea(self, indent=""):
        res = "sub "
        if (
            len(self.prefix)
            or len(self.suffix)
            or any([x is not None for x in self.lookups])
        ):
            # Contextual form: mark input glyphs with ' and attach lookups.
            if len(self.prefix):
                res += " ".join(g.asFea() for g in self.prefix) + " "
            for i, g in enumerate(self.glyphs):
                res += g.asFea() + "'"
                if self.lookups[i]:
                    for lu in self.lookups[i]:
                        res += " lookup " + lu.name
                if i < len(self.glyphs) - 1:
                    res += " "
            if len(self.suffix):
                res += " " + " ".join(map(asFea, self.suffix))
        else:
            # BUGFIX: was ``self.glyph``, an attribute this class never
            # defines (the constructor sets ``self.glyphs``), which raised
            # AttributeError for rules without context or lookups.
            res += " ".join(map(asFea, self.glyphs))
        res += ";"
        return res
|
| 817 |
+
|
| 818 |
+
|
| 819 |
+
class CursivePosStatement(Statement):
    """A cursive positioning statement. Entry and exit anchors can either
    be :class:`Anchor` objects or ``None``."""

    def __init__(self, glyphclass, entryAnchor, exitAnchor, location=None):
        Statement.__init__(self, location)
        self.glyphclass = glyphclass
        self.entryAnchor = entryAnchor
        self.exitAnchor = exitAnchor

    def build(self, builder):
        """Calls the builder object's ``add_cursive_pos`` callback."""
        builder.add_cursive_pos(
            self.location, self.glyphclass.glyphSet(), self.entryAnchor, self.exitAnchor
        )

    def asFea(self, indent=""):
        # A missing anchor is written as the explicit NULL anchor.
        def _anchor(anchor):
            return anchor.asFea() if anchor else "<anchor NULL>"

        return "pos cursive {} {} {};".format(
            self.glyphclass.asFea(),
            _anchor(self.entryAnchor),
            _anchor(self.exitAnchor),
        )
|
| 838 |
+
|
| 839 |
+
|
| 840 |
+
class FeatureReferenceStatement(Statement):
    """Example: ``feature salt;``"""

    def __init__(self, featureName, location=None):
        Statement.__init__(self, location)
        self.location = location
        self.featureName = featureName

    def build(self, builder):
        """Calls the builder object's ``add_feature_reference`` callback."""
        builder.add_feature_reference(self.location, self.featureName)

    def asFea(self, indent=""):
        return f"feature {self.featureName};"
|
| 853 |
+
|
| 854 |
+
|
| 855 |
+
class IgnorePosStatement(Statement):
    """An ``ignore pos`` statement, containing `one or more` contexts to ignore.

    ``chainContexts`` should be a list of ``(prefix, glyphs, suffix)`` tuples,
    with each of ``prefix``, ``glyphs`` and ``suffix`` being
    `glyph-containing objects`_ ."""

    def __init__(self, chainContexts, location=None):
        Statement.__init__(self, location)
        self.chainContexts = chainContexts

    def build(self, builder):
        """Calls the builder object's ``add_chain_context_pos`` callback on each
        rule context."""
        for prefix, glyphs, suffix in self.chainContexts:
            prefix = [p.glyphSet() for p in prefix]
            glyphs = [g.glyphSet() for g in glyphs]
            suffix = [s.glyphSet() for s in suffix]
            # An empty lookup list expresses the "ignore" semantics.
            builder.add_chain_context_pos(self.location, prefix, glyphs, suffix, [])

    def asFea(self, indent=""):
        contexts = []
        for prefix, glyphs, suffix in self.chainContexts:
            res = ""
            # Input glyphs are marked with an apostrophe only when there is
            # surrounding context; otherwise written plainly.
            if len(prefix) or len(suffix):
                if len(prefix):
                    res += " ".join(map(asFea, prefix)) + " "
                res += " ".join(g.asFea() + "'" for g in glyphs)
                if len(suffix):
                    res += " " + " ".join(map(asFea, suffix))
            else:
                res += " ".join(map(asFea, glyphs))
            contexts.append(res)
        return "ignore pos " + ", ".join(contexts) + ";"
|
| 889 |
+
|
| 890 |
+
|
| 891 |
+
class IgnoreSubstStatement(Statement):
    """An ``ignore sub`` statement, containing `one or more` contexts to ignore.

    ``chainContexts`` should be a list of ``(prefix, glyphs, suffix)`` tuples,
    with each of ``prefix``, ``glyphs`` and ``suffix`` being
    `glyph-containing objects`_ ."""

    def __init__(self, chainContexts, location=None):
        Statement.__init__(self, location)
        self.chainContexts = chainContexts

    def build(self, builder):
        """Calls the builder object's ``add_chain_context_subst`` callback on
        each rule context."""
        for prefix, glyphs, suffix in self.chainContexts:
            prefix = [p.glyphSet() for p in prefix]
            glyphs = [g.glyphSet() for g in glyphs]
            suffix = [s.glyphSet() for s in suffix]
            # An empty lookup list expresses the "ignore" semantics.
            builder.add_chain_context_subst(self.location, prefix, glyphs, suffix, [])

    def asFea(self, indent=""):
        contexts = []
        for prefix, glyphs, suffix in self.chainContexts:
            # Input glyphs are always marked with an apostrophe here.
            parts = []
            if prefix:
                parts.append(" ".join(map(asFea, prefix)) + " ")
            parts.append(" ".join(g.asFea() + "'" for g in glyphs))
            if suffix:
                parts.append(" " + " ".join(map(asFea, suffix)))
            contexts.append("".join(parts))
        return "ignore sub " + ", ".join(contexts) + ";"
|
| 922 |
+
|
| 923 |
+
|
| 924 |
+
class IncludeStatement(Statement):
    """An ``include()`` statement."""

    def __init__(self, filename, location=None):
        super(IncludeStatement, self).__init__(location)
        self.filename = filename  # name of the file to include

    def build(self):
        # TODO: consider lazy-loading the including parser/lexer?
        raise FeatureLibError(
            "Building an include statement is not implemented yet. "
            "Instead, use Parser(..., followIncludes=True) for building.",
            self.location,
        )

    def asFea(self, indent=""):
        return "{}include({});".format(indent, self.filename)
|
| 941 |
+
|
| 942 |
+
|
| 943 |
+
class LanguageStatement(Statement):
    """A ``language`` statement within a feature."""

    def __init__(self, language, include_default=True, required=False, location=None):
        Statement.__init__(self, location)
        assert len(language) == 4
        self.language = language  # a four-character language tag
        self.include_default = include_default  # if false, "exclude_dflt"
        self.required = required

    def build(self, builder):
        """Call the builder object's ``set_language`` callback."""
        builder.set_language(
            location=self.location,
            language=self.language,
            include_default=self.include_default,
            required=self.required,
        )

    def asFea(self, indent=""):
        pieces = ["language {}".format(self.language.strip())]
        if not self.include_default:
            pieces.append(" exclude_dflt")
        if self.required:
            pieces.append(" required")
        pieces.append(";")
        return "".join(pieces)
|
| 970 |
+
|
| 971 |
+
|
| 972 |
+
class LanguageSystemStatement(Statement):
    """A top-level ``languagesystem`` statement."""

    def __init__(self, script, language, location=None):
        Statement.__init__(self, location)
        self.script = script
        self.language = language

    def build(self, builder):
        """Calls the builder object's ``add_language_system`` callback."""
        builder.add_language_system(self.location, self.script, self.language)

    def asFea(self, indent=""):
        # The language tag may be space-padded to four characters; strip it.
        return f"languagesystem {self.script} {self.language.strip()};"
|
| 985 |
+
|
| 986 |
+
|
| 987 |
+
class FontRevisionStatement(Statement):
    """A ``head`` table ``FontRevision`` statement. ``revision`` should be a
    number, and will be formatted to three significant decimal places."""

    def __init__(self, revision, location=None):
        Statement.__init__(self, location)
        self.revision = revision

    def build(self, builder):
        """Calls the builder object's ``set_font_revision`` callback."""
        builder.set_font_revision(self.location, self.revision)

    def asFea(self, indent=""):
        return f"FontRevision {self.revision:.3f};"
|
| 1000 |
+
|
| 1001 |
+
|
| 1002 |
+
class LigatureCaretByIndexStatement(Statement):
    """A ``GDEF`` table ``LigatureCaretByIndex`` statement. ``glyphs`` should be
    a `glyph-containing object`_, and ``carets`` should be a list of integers."""

    def __init__(self, glyphs, carets, location=None):
        Statement.__init__(self, location)
        self.glyphs = glyphs
        self.carets = carets

    def build(self, builder):
        """Calls the builder object's ``add_ligatureCaretByIndex_`` callback."""
        builder.add_ligatureCaretByIndex_(
            self.location, self.glyphs.glyphSet(), set(self.carets)
        )

    def asFea(self, indent=""):
        carets = " ".join(str(caret) for caret in self.carets)
        return "LigatureCaretByIndex {} {};".format(self.glyphs.asFea(), carets)
|
| 1019 |
+
|
| 1020 |
+
|
| 1021 |
+
class LigatureCaretByPosStatement(Statement):
    """A ``GDEF`` table ``LigatureCaretByPos`` statement. ``glyphs`` should be
    a `glyph-containing object`_, and ``carets`` should be a list of integers."""

    def __init__(self, glyphs, carets, location=None):
        Statement.__init__(self, location)
        self.glyphs = glyphs
        self.carets = carets

    def build(self, builder):
        """Calls the builder object's ``add_ligatureCaretByPos_`` callback."""
        builder.add_ligatureCaretByPos_(
            self.location, self.glyphs.glyphSet(), set(self.carets)
        )

    def asFea(self, indent=""):
        carets = " ".join(str(caret) for caret in self.carets)
        return "LigatureCaretByPos {} {};".format(self.glyphs.asFea(), carets)
|
| 1038 |
+
|
| 1039 |
+
|
| 1040 |
+
class LigatureSubstStatement(Statement):
    """A chained contextual substitution statement.

    ``prefix``, ``glyphs``, and ``suffix`` should be lists of
    `glyph-containing objects`_; ``replacement`` should be a single
    `glyph-containing object`_.

    If ``forceChain`` is True, this is expressed as a chaining rule
    (e.g. ``sub f' i' by f_i``) even when no context is given."""

    def __init__(self, prefix, glyphs, suffix, replacement, forceChain, location=None):
        Statement.__init__(self, location)
        self.prefix, self.glyphs, self.suffix = (prefix, glyphs, suffix)
        self.replacement, self.forceChain = replacement, forceChain

    def build(self, builder):
        """Calls the builder object's ``add_ligature_subst`` callback."""
        prefix = [p.glyphSet() for p in self.prefix]
        glyphs = [g.glyphSet() for g in self.glyphs]
        suffix = [s.glyphSet() for s in self.suffix]
        builder.add_ligature_subst(
            self.location, prefix, glyphs, suffix, self.replacement, self.forceChain
        )

    def asFea(self, indent=""):
        res = "sub "
        # Input glyphs are marked with an apostrophe when context is present
        # or when a chaining rule has been forced.
        if len(self.prefix) or len(self.suffix) or self.forceChain:
            if len(self.prefix):
                res += " ".join(g.asFea() for g in self.prefix) + " "
            res += " ".join(g.asFea() + "'" for g in self.glyphs)
            if len(self.suffix):
                res += " " + " ".join(g.asFea() for g in self.suffix)
        else:
            res += " ".join(g.asFea() for g in self.glyphs)
        res += " by "
        res += asFea(self.replacement)
        res += ";"
        return res
|
| 1077 |
+
|
| 1078 |
+
|
| 1079 |
+
class LookupFlagStatement(Statement):
    """A ``lookupflag`` statement. The ``value`` should be an integer value
    representing the flags in use, but not including the ``markAttachment``
    class and ``markFilteringSet`` values, which must be specified as
    glyph-containing objects."""

    def __init__(
        self, value=0, markAttachment=None, markFilteringSet=None, location=None
    ):
        Statement.__init__(self, location)
        self.value = value
        self.markAttachment = markAttachment
        self.markFilteringSet = markFilteringSet

    def build(self, builder):
        """Calls the builder object's ``set_lookup_flag`` callback."""
        markAttach = (
            self.markAttachment.glyphSet() if self.markAttachment is not None else None
        )
        markFilter = (
            self.markFilteringSet.glyphSet()
            if self.markFilteringSet is not None
            else None
        )
        builder.set_lookup_flag(self.location, self.value, markAttach, markFilter)

    def asFea(self, indent=""):
        # Low four flag bits map to the named flags, in bit order.
        names = ["RightToLeft", "IgnoreBaseGlyphs", "IgnoreLigatures", "IgnoreMarks"]
        tokens = [name for bit, name in enumerate(names) if self.value & (1 << bit)]
        if self.markAttachment is not None:
            tokens.append("MarkAttachmentType {}".format(self.markAttachment.asFea()))
        if self.markFilteringSet is not None:
            tokens.append(
                "UseMarkFilteringSet {}".format(self.markFilteringSet.asFea())
            )
        # A lookupflag with nothing set is written explicitly as "0".
        return "lookupflag {};".format(" ".join(tokens) if tokens else "0")
|
| 1118 |
+
|
| 1119 |
+
|
| 1120 |
+
class LookupReferenceStatement(Statement):
    """Represents a ``lookup ...;`` statement to include a lookup in a feature.

    The ``lookup`` should be a :class:`LookupBlock` object."""

    def __init__(self, lookup, location=None):
        Statement.__init__(self, location)
        self.location = location
        self.lookup = lookup

    def build(self, builder):
        """Calls the builder object's ``add_lookup_call`` callback."""
        builder.add_lookup_call(self.lookup.name)

    def asFea(self, indent=""):
        return f"lookup {self.lookup.name};"
|
| 1135 |
+
|
| 1136 |
+
|
| 1137 |
+
class MarkBasePosStatement(Statement):
    """A mark-to-base positioning rule. The ``base`` should be a
    `glyph-containing object`_. The ``marks`` should be a list of
    (:class:`Anchor`, :class:`MarkClass`) tuples."""

    def __init__(self, base, marks, location=None):
        Statement.__init__(self, location)
        self.base = base
        self.marks = marks

    def build(self, builder):
        """Calls the builder object's ``add_mark_base_pos`` callback."""
        builder.add_mark_base_pos(self.location, self.base.glyphSet(), self.marks)

    def asFea(self, indent=""):
        # Each (anchor, mark class) pair goes on its own indented line.
        marks = "".join(
            "\n" + indent + SHIFT + "{} mark @{}".format(anchor.asFea(), markclass.name)
            for anchor, markclass in self.marks
        )
        return "pos base {}{};".format(self.base.asFea(), marks)
|
| 1156 |
+
|
| 1157 |
+
|
| 1158 |
+
class MarkLigPosStatement(Statement):
    """A mark-to-ligature positioning rule. The ``ligatures`` must be a
    `glyph-containing object`_. The ``marks`` should be a list of lists: each
    element in the top-level list represents a component glyph, and is made
    up of a list of (:class:`Anchor`, :class:`MarkClass`) tuples representing
    mark attachment points for that position.

    Example::

        m1 = MarkClass("TOP_MARKS")
        m2 = MarkClass("BOTTOM_MARKS")
        # ... add definitions to mark classes...

        glyph = GlyphName("lam_meem_jeem")
        marks = [
            [ (Anchor(625,1800), m1) ], # Attachments on 1st component (lam)
            [ (Anchor(376,-378), m2) ], # Attachments on 2nd component (meem)
            [ ] # No attachments on the jeem
        ]
        mlp = MarkLigPosStatement(glyph, marks)

        mlp.asFea()
        # pos ligature lam_meem_jeem <anchor 625 1800> mark @TOP_MARKS
        # ligComponent <anchor 376 -378> mark @BOTTOM_MARKS;

    """

    def __init__(self, ligatures, marks, location=None):
        Statement.__init__(self, location)
        self.ligatures, self.marks = ligatures, marks

    def build(self, builder):
        """Calls the builder object's ``add_mark_lig_pos`` callback."""
        builder.add_mark_lig_pos(self.location, self.ligatures.glyphSet(), self.marks)

    def asFea(self, indent=""):
        res = "pos ligature {}".format(self.ligatures.asFea())
        ligs = []
        for l in self.marks:
            temp = ""
            # A component with no attachments is written as "<anchor NULL>".
            if l is None or not len(l):
                temp = "\n" + indent + SHIFT * 2 + "<anchor NULL>"
            else:
                for a, m in l:
                    temp += (
                        "\n"
                        + indent
                        + SHIFT * 2
                        + "{} mark @{}".format(a.asFea(), m.name)
                    )
            ligs.append(temp)
        # Successive components are separated by "ligComponent" lines.
        res += ("\n" + indent + SHIFT + "ligComponent").join(ligs)
        res += ";"
        return res
|
| 1212 |
+
|
| 1213 |
+
|
| 1214 |
+
class MarkMarkPosStatement(Statement):
    """A mark-to-mark positioning rule. The ``baseMarks`` must be a
    `glyph-containing object`_. The ``marks`` should be a list of
    (:class:`Anchor`, :class:`MarkClass`) tuples."""

    def __init__(self, baseMarks, marks, location=None):
        Statement.__init__(self, location)
        self.baseMarks = baseMarks
        self.marks = marks

    def build(self, builder):
        """Calls the builder object's ``add_mark_mark_pos`` callback."""
        builder.add_mark_mark_pos(self.location, self.baseMarks.glyphSet(), self.marks)

    def asFea(self, indent=""):
        # Each (anchor, mark class) pair goes on its own indented line.
        marks = "".join(
            "\n" + indent + SHIFT + "{} mark @{}".format(anchor.asFea(), markclass.name)
            for anchor, markclass in self.marks
        )
        return "pos mark {}{};".format(self.baseMarks.asFea(), marks)
|
| 1233 |
+
|
| 1234 |
+
|
| 1235 |
+
class MultipleSubstStatement(Statement):
    """A multiple substitution statement.

    Args:
        prefix: a list of `glyph-containing objects`_.
        glyph: a single glyph-containing object.
        suffix: a list of glyph-containing objects.
        replacement: a list of glyph-containing objects.
        forceChain: If true, the statement is expressed as a chaining rule
            (e.g. ``sub f' i' by f_i``) even when no context is given.
    """

    def __init__(
        self, prefix, glyph, suffix, replacement, forceChain=False, location=None
    ):
        Statement.__init__(self, location)
        self.prefix, self.glyph, self.suffix = prefix, glyph, suffix
        self.replacement = replacement
        self.forceChain = forceChain

    def build(self, builder):
        """Calls the builder object's ``add_multiple_subst`` callback."""
        prefix = [p.glyphSet() for p in self.prefix]
        suffix = [s.glyphSet() for s in self.suffix]
        # The input may be a glyph-containing object or a bare glyph name;
        # normalize both to a sequence of glyph names.
        if hasattr(self.glyph, "glyphSet"):
            originals = self.glyph.glyphSet()
        else:
            originals = [self.glyph]
        count = len(originals)
        replaces = []
        for r in self.replacement:
            if hasattr(r, "glyphSet"):
                replace = r.glyphSet()
            else:
                replace = [r]
            # Broadcast a singleton replacement across all originals so the
            # zip below pairs every original with a full replacement tuple.
            if len(replace) == 1 and len(replace) != count:
                replace = replace * count
            replaces.append(replace)
        # Transpose: one tuple of replacement glyphs per original glyph.
        replaces = list(zip(*replaces))

        # Emit each original at most once, keeping first-seen order.
        seen_originals = set()
        for i, original in enumerate(originals):
            if original not in seen_originals:
                seen_originals.add(original)
                builder.add_multiple_subst(
                    self.location,
                    prefix,
                    original,
                    suffix,
                    # Empty replacement (deletion) is passed as an empty tuple.
                    replaces and replaces[i] or (),
                    self.forceChain,
                )

    def asFea(self, indent=""):
        res = "sub "
        if len(self.prefix) or len(self.suffix) or self.forceChain:
            # Contextual form: the target glyph is marked with an apostrophe.
            if len(self.prefix):
                res += " ".join(map(asFea, self.prefix)) + " "
            res += asFea(self.glyph) + "'"
            if len(self.suffix):
                res += " " + " ".join(map(asFea, self.suffix))
        else:
            res += asFea(self.glyph)
        # An empty replacement list renders as the NULL glyph (deletion).
        replacement = self.replacement or [NullGlyph()]
        res += " by "
        res += " ".join(map(asFea, replacement))
        res += ";"
        return res
|
| 1303 |
+
|
| 1304 |
+
|
| 1305 |
+
class PairPosStatement(Statement):
    """A pair positioning statement.

    ``glyphs1`` and ``glyphs2`` should be `glyph-containing objects`_.
    ``valuerecord1`` should be a :class:`ValueRecord` object;
    ``valuerecord2`` should be either a :class:`ValueRecord` object or ``None``.
    If ``enumerated`` is true, then this is expressed as an
    `enumerated pair <https://adobe-type-tools.github.io/afdko/OpenTypeFeatureFileSpecification.html#6.b.ii>`_.
    """

    def __init__(
        self,
        glyphs1,
        valuerecord1,
        glyphs2,
        valuerecord2,
        enumerated=False,
        location=None,
    ):
        Statement.__init__(self, location)
        self.enumerated = enumerated
        self.glyphs1, self.valuerecord1 = glyphs1, valuerecord1
        self.glyphs2, self.valuerecord2 = glyphs2, valuerecord2

    def build(self, builder):
        """Calls a callback on the builder object:

        * If the rule is enumerated, calls ``add_specific_pair_pos`` on each
          combination of first and second glyphs.
        * If the glyphs are both single :class:`GlyphName` objects, calls
          ``add_specific_pair_pos``.
        * Else, calls ``add_class_pair_pos``.
        """
        if self.enumerated:
            # Expand the class pair into every specific glyph pair.
            g = [self.glyphs1.glyphSet(), self.glyphs2.glyphSet()]
            seen_pair = False
            for glyph1, glyph2 in itertools.product(*g):
                seen_pair = True
                builder.add_specific_pair_pos(
                    self.location, glyph1, self.valuerecord1, glyph2, self.valuerecord2
                )
            # product() yields nothing when either class is empty, which
            # would otherwise silently drop the rule — report it instead.
            if not seen_pair:
                raise FeatureLibError(
                    "Empty glyph class in positioning rule", self.location
                )
            return

        is_specific = isinstance(self.glyphs1, GlyphName) and isinstance(
            self.glyphs2, GlyphName
        )
        if is_specific:
            builder.add_specific_pair_pos(
                self.location,
                self.glyphs1.glyph,
                self.valuerecord1,
                self.glyphs2.glyph,
                self.valuerecord2,
            )
        else:
            builder.add_class_pair_pos(
                self.location,
                self.glyphs1.glyphSet(),
                self.valuerecord1,
                self.glyphs2.glyphSet(),
                self.valuerecord2,
            )

    def asFea(self, indent=""):
        res = "enum " if self.enumerated else ""
        if self.valuerecord2:
            # Format A: one value record per glyph/class.
            res += "pos {} {} {} {};".format(
                self.glyphs1.asFea(),
                self.valuerecord1.asFea(),
                self.glyphs2.asFea(),
                self.valuerecord2.asFea(),
            )
        else:
            # Format B: a single value record after both glyphs/classes.
            res += "pos {} {} {};".format(
                self.glyphs1.asFea(), self.glyphs2.asFea(), self.valuerecord1.asFea()
            )
        return res
|
| 1386 |
+
|
| 1387 |
+
|
| 1388 |
+
class ReverseChainSingleSubstStatement(Statement):
    """A reverse chaining substitution statement. You don't see those every day.

    Note the unusual argument order: ``suffix`` comes `before` ``glyphs``.
    ``old_prefix``, ``old_suffix``, ``glyphs`` and ``replacements`` should be
    lists of `glyph-containing objects`_. ``glyphs`` and ``replacements``
    should be one-item lists.
    """

    def __init__(self, old_prefix, old_suffix, glyphs, replacements, location=None):
        Statement.__init__(self, location)
        self.old_prefix = old_prefix
        self.old_suffix = old_suffix
        self.glyphs = glyphs
        self.replacements = replacements

    def build(self, builder):
        """Calls the builder object's ``add_reverse_chain_single_subst`` callback."""
        context_before = [p.glyphSet() for p in self.old_prefix]
        context_after = [s.glyphSet() for s in self.old_suffix]
        sources = self.glyphs[0].glyphSet()
        targets = self.replacements[0].glyphSet()
        if len(targets) == 1:
            # A single replacement glyph applies to every source glyph.
            targets = targets * len(sources)
        builder.add_reverse_chain_single_subst(
            self.location, context_before, context_after, dict(zip(sources, targets))
        )

    def asFea(self, indent=""):
        parts = ["rsub "]
        if self.old_prefix or self.old_suffix:
            # Contextual form: mark the substituted glyphs with apostrophes.
            if self.old_prefix:
                parts.append(" ".join(asFea(g) for g in self.old_prefix) + " ")
            parts.append(" ".join(asFea(g) + "'" for g in self.glyphs))
            if self.old_suffix:
                parts.append(" " + " ".join(asFea(g) for g in self.old_suffix))
        else:
            parts.append(" ".join(map(asFea, self.glyphs)))
        parts.append(" by {};".format(" ".join(asFea(g) for g in self.replacements)))
        return "".join(parts)
|
| 1426 |
+
|
| 1427 |
+
|
| 1428 |
+
class SingleSubstStatement(Statement):
    """A single substitution statement.

    Note the unusual argument order: ``prefix`` and suffix come `after`
    the replacement ``glyphs``. ``prefix``, ``suffix``, ``glyphs`` and
    ``replace`` should be lists of `glyph-containing objects`_. ``glyphs``
    and ``replace`` should be one-item lists.
    """

    def __init__(self, glyphs, replace, prefix, suffix, forceChain, location=None):
        Statement.__init__(self, location)
        self.prefix = prefix
        self.suffix = suffix
        self.forceChain = forceChain
        self.glyphs = glyphs
        self.replacements = replace

    def build(self, builder):
        """Calls the builder object's ``add_single_subst`` callback."""
        context_before = [p.glyphSet() for p in self.prefix]
        context_after = [s.glyphSet() for s in self.suffix]
        sources = self.glyphs[0].glyphSet()
        targets = self.replacements[0].glyphSet()
        if len(targets) == 1:
            # One replacement glyph maps onto every source glyph.
            targets = targets * len(sources)
        builder.add_single_subst(
            self.location,
            context_before,
            context_after,
            # Order matters for round-tripping, hence OrderedDict.
            OrderedDict(zip(sources, targets)),
            self.forceChain,
        )

    def asFea(self, indent=""):
        out = ["sub "]
        if self.prefix or self.suffix or self.forceChain:
            # Contextual form: the substituted glyphs carry apostrophes.
            if self.prefix:
                out.append(" ".join(asFea(g) for g in self.prefix) + " ")
            out.append(" ".join(asFea(g) + "'" for g in self.glyphs))
            if self.suffix:
                out.append(" " + " ".join(asFea(g) for g in self.suffix))
        else:
            out.append(" ".join(asFea(g) for g in self.glyphs))
        out.append(" by {};".format(" ".join(asFea(g) for g in self.replacements)))
        return "".join(out)
|
| 1472 |
+
|
| 1473 |
+
|
| 1474 |
+
class ScriptStatement(Statement):
    """A ``script`` statement."""

    def __init__(self, script, location=None):
        Statement.__init__(self, location)
        # The script code, stored verbatim; padding is stripped on output.
        self.script = script

    def build(self, builder):
        """Calls the builder's ``set_script`` callback."""
        builder.set_script(self.location, self.script)

    def asFea(self, indent=""):
        return f"script {self.script.strip()};"
|
| 1487 |
+
|
| 1488 |
+
|
| 1489 |
+
class SinglePosStatement(Statement):
    """A single position statement. ``prefix`` and ``suffix`` should be
    lists of `glyph-containing objects`_.

    ``pos`` should be a one-element list containing a (`glyph-containing object`_,
    :class:`ValueRecord`) tuple."""

    def __init__(self, pos, prefix, suffix, forceChain, location=None):
        Statement.__init__(self, location)
        self.pos, self.prefix, self.suffix = pos, prefix, suffix
        self.forceChain = forceChain

    def build(self, builder):
        """Calls the builder object's ``add_single_pos`` callback."""
        prefix = [p.glyphSet() for p in self.prefix]
        suffix = [s.glyphSet() for s in self.suffix]
        # Expand each (glyph-container, ValueRecord) pair to concrete glyphs.
        pos = [(g.glyphSet(), value) for g, value in self.pos]
        builder.add_single_pos(self.location, prefix, suffix, pos, self.forceChain)

    def asFea(self, indent=""):
        res = "pos "
        if len(self.prefix) or len(self.suffix) or self.forceChain:
            # Contextual form: positioned glyphs are marked with apostrophes,
            # each followed by its value record when one is present.
            if len(self.prefix):
                res += " ".join(map(asFea, self.prefix)) + " "
            res += " ".join(
                [
                    asFea(x[0]) + "'" + ((" " + x[1].asFea()) if x[1] else "")
                    for x in self.pos
                ]
            )
            if len(self.suffix):
                res += " " + " ".join(map(asFea, self.suffix))
        else:
            # Non-contextual form: glyph followed by its value record.
            res += " ".join(
                [asFea(x[0]) + " " + (x[1].asFea() if x[1] else "") for x in self.pos]
            )
        res += ";"
        return res
|
| 1527 |
+
|
| 1528 |
+
|
| 1529 |
+
class SubtableStatement(Statement):
    """Represents a subtable break."""

    def __init__(self, location=None):
        Statement.__init__(self, location)

    def build(self, builder):
        """Calls the builder objects's ``add_subtable_break`` callback."""
        builder.add_subtable_break(self.location)

    def asFea(self, indent=""):
        # A subtable break has no parameters; it always renders the same.
        return "subtable;"
|
| 1541 |
+
|
| 1542 |
+
|
| 1543 |
+
class ValueRecord(Expression):
    """Represents a value record.

    Holds the four adjustment numbers (placement and advance on both axes)
    plus their optional device tables. ``vertical`` selects which advance a
    bare number stands for in the short FEA form; it does not participate
    in equality or hashing.
    """

    def __init__(
        self,
        xPlacement=None,
        yPlacement=None,
        xAdvance=None,
        yAdvance=None,
        xPlaDevice=None,
        yPlaDevice=None,
        xAdvDevice=None,
        yAdvDevice=None,
        vertical=False,
        location=None,
    ):
        Expression.__init__(self, location)
        self.xPlacement, self.yPlacement = (xPlacement, yPlacement)
        self.xAdvance, self.yAdvance = (xAdvance, yAdvance)
        self.xPlaDevice, self.yPlaDevice = (xPlaDevice, yPlaDevice)
        self.xAdvDevice, self.yAdvDevice = (xAdvDevice, yAdvDevice)
        self.vertical = vertical

    def __eq__(self, other):
        # BUGFIX: yPlaDevice and yAdvDevice were previously omitted from the
        # comparison although __hash__ mixes them in, so two records that
        # differed only in those fields compared equal yet hashed differently,
        # violating the __eq__/__hash__ contract.
        if not isinstance(other, ValueRecord):
            return NotImplemented
        return (
            self.xPlacement == other.xPlacement
            and self.yPlacement == other.yPlacement
            and self.xAdvance == other.xAdvance
            and self.yAdvance == other.yAdvance
            and self.xPlaDevice == other.xPlaDevice
            and self.yPlaDevice == other.yPlaDevice
            and self.xAdvDevice == other.xAdvDevice
            and self.yAdvDevice == other.yAdvDevice
        )

    def __ne__(self, other):
        result = self.__eq__(other)
        # Propagate NotImplemented so Python can try the reflected operation.
        return result if result is NotImplemented else not result

    def __hash__(self):
        return (
            hash(self.xPlacement)
            ^ hash(self.yPlacement)
            ^ hash(self.xAdvance)
            ^ hash(self.yAdvance)
            ^ hash(self.xPlaDevice)
            ^ hash(self.yPlaDevice)
            ^ hash(self.xAdvDevice)
            ^ hash(self.yAdvDevice)
        )

    def asFea(self, indent=""):
        if not self:
            # No field set at all: the explicit NULL record.
            return "<NULL>"

        x, y = self.xPlacement, self.yPlacement
        xAdvance, yAdvance = self.xAdvance, self.yAdvance
        xPlaDevice, yPlaDevice = self.xPlaDevice, self.yPlaDevice
        xAdvDevice, yAdvDevice = self.xAdvDevice, self.yAdvDevice
        vertical = self.vertical

        # Try format A (a single bare number), if possible.
        if x is None and y is None:
            if xAdvance is None and vertical:
                return str(yAdvance)
            elif yAdvance is None and not vertical:
                return str(xAdvance)

        # Make any remaining None value 0 to avoid generating invalid records.
        x = x or 0
        y = y or 0
        xAdvance = xAdvance or 0
        yAdvance = yAdvance or 0

        # Try format B (four numbers, no device tables), if possible.
        if (
            xPlaDevice is None
            and yPlaDevice is None
            and xAdvDevice is None
            and yAdvDevice is None
        ):
            return "<%s %s %s %s>" % (x, y, xAdvance, yAdvance)

        # Last resort is format C (four numbers plus four device tables).
        return "<%s %s %s %s %s %s %s %s>" % (
            x,
            y,
            xAdvance,
            yAdvance,
            deviceToString(xPlaDevice),
            deviceToString(yPlaDevice),
            deviceToString(xAdvDevice),
            deviceToString(yAdvDevice),
        )

    def __bool__(self):
        # True if any of the eight value/device fields is set; ``vertical``
        # alone does not make the record non-empty.
        return any(
            getattr(self, v) is not None
            for v in [
                "xPlacement",
                "yPlacement",
                "xAdvance",
                "yAdvance",
                "xPlaDevice",
                "yPlaDevice",
                "xAdvDevice",
                "yAdvDevice",
            ]
        )

    __nonzero__ = __bool__
|
| 1651 |
+
|
| 1652 |
+
|
| 1653 |
+
class ValueRecordDefinition(Statement):
    """Represents a named value record definition."""

    def __init__(self, name, value, location=None):
        Statement.__init__(self, location)
        # Value record name as string
        self.name = name
        # :class:`ValueRecord` object
        self.value = value

    def asFea(self, indent=""):
        return f"valueRecordDef {self.value.asFea()} {self.name};"
|
| 1663 |
+
|
| 1664 |
+
|
| 1665 |
+
def simplify_name_attributes(pid, eid, lid):
    """Render a (platform, encoding, language) ID triple in its shortest form.

    The Windows / Unicode-BMP / 1033 triple is the implicit default and is
    omitted entirely; the Macintosh 1/0/0 triple abbreviates to "1"; any
    other combination is spelled out in full.
    """
    triple = (pid, eid, lid)
    if triple == (3, 1, 1033):
        return ""
    if triple == (1, 0, 0):
        return "1"
    return "{} {} {}".format(pid, eid, lid)
|
| 1672 |
+
|
| 1673 |
+
|
| 1674 |
+
class NameRecord(Statement):
    """Represents a name record. (`Section 9.e. <https://adobe-type-tools.github.io/afdko/OpenTypeFeatureFileSpecification.html#9.e>`_)"""

    def __init__(self, nameID, platformID, platEncID, langID, string, location=None):
        Statement.__init__(self, location)
        self.nameID = nameID  #: Name ID as integer (e.g. 9 for designer's name)
        self.platformID = platformID  #: Platform ID as integer
        self.platEncID = platEncID  #: Platform encoding ID as integer
        self.langID = langID  #: Language ID as integer
        self.string = string  #: Name record value

    def build(self, builder):
        """Calls the builder object's ``add_name_record`` callback."""
        builder.add_name_record(
            self.location,
            self.nameID,
            self.platformID,
            self.platEncID,
            self.langID,
            self.string,
        )

    def asFea(self, indent=""):
        def escape(c, escape_pattern):
            # Printable ASCII passes through unchanged; everything else is
            # written as a backslash escape.
            # Also escape U+0022 QUOTATION MARK and U+005C REVERSE SOLIDUS
            if c >= 0x20 and c <= 0x7E and c not in (0x22, 0x5C):
                return chr(c)
            else:
                return escape_pattern % c

        encoding = getEncoding(self.platformID, self.platEncID, self.langID)
        if encoding is None:
            raise FeatureLibError("Unsupported encoding", self.location)
        s = tobytes(self.string, encoding=encoding)
        if encoding == "utf_16_be":
            # UTF-16BE: combine byte pairs into code units and escape each
            # as a four-hex-digit sequence.
            escaped_string = "".join(
                [
                    escape(byteord(s[i]) * 256 + byteord(s[i + 1]), r"\%04x")
                    for i in range(0, len(s), 2)
                ]
            )
        else:
            # Single-byte encodings escape each byte as two hex digits.
            escaped_string = "".join([escape(byteord(b), r"\%02x") for b in s])
        plat = simplify_name_attributes(self.platformID, self.platEncID, self.langID)
        if plat != "":
            plat += " "
        return 'nameid {} {}"{}";'.format(self.nameID, plat, escaped_string)
|
| 1721 |
+
|
| 1722 |
+
|
| 1723 |
+
class FeatureNameStatement(NameRecord):
    """Represents a ``sizemenuname`` or ``name`` statement."""

    def build(self, builder):
        """Calls the builder object's ``add_featureName`` callback."""
        NameRecord.build(self, builder)
        builder.add_featureName(self.nameID)

    def asFea(self, indent=""):
        # The ``size`` feature spells this keyword differently.
        tag = "sizemenuname" if self.nameID == "size" else "name"
        plat = simplify_name_attributes(self.platformID, self.platEncID, self.langID)
        if plat:
            plat += " "
        return '{} {}"{}";'.format(tag, plat, self.string)
|
| 1740 |
+
|
| 1741 |
+
|
| 1742 |
+
class STATNameStatement(NameRecord):
    """Represents a STAT table ``name`` statement."""

    def asFea(self, indent=""):
        plat = simplify_name_attributes(self.platformID, self.platEncID, self.langID)
        if plat:
            plat += " "
        return f'name {plat}"{self.string}";'
|
| 1750 |
+
|
| 1751 |
+
|
| 1752 |
+
class SizeParameters(Statement):
    """A ``parameters`` statement."""

    def __init__(self, DesignSize, SubfamilyID, RangeStart, RangeEnd, location=None):
        Statement.__init__(self, location)
        self.DesignSize = DesignSize
        self.SubfamilyID = SubfamilyID
        self.RangeStart = RangeStart
        self.RangeEnd = RangeEnd

    def build(self, builder):
        """Calls the builder object's ``set_size_parameters`` callback."""
        builder.set_size_parameters(
            self.location,
            self.DesignSize,
            self.SubfamilyID,
            self.RangeStart,
            self.RangeEnd,
        )

    def asFea(self, indent=""):
        res = f"parameters {self.DesignSize:.1f} {self.SubfamilyID}"
        if self.RangeStart != 0 or self.RangeEnd != 0:
            # Range bounds are scaled by 10 and truncated to int on output.
            res += f" {int(self.RangeStart * 10)} {int(self.RangeEnd * 10)}"
        return res + ";"
|
| 1777 |
+
|
| 1778 |
+
|
| 1779 |
+
class CVParametersNameStatement(NameRecord):
    """Represent a name statement inside a ``cvParameters`` block."""

    def __init__(
        self, nameID, platformID, platEncID, langID, string, block_name, location=None
    ):
        NameRecord.__init__(
            self, nameID, platformID, platEncID, langID, string, location=location
        )
        # Which cvParameters sub-block this name belongs to
        # (e.g. "ParamUILabelNameID").
        self.block_name = block_name

    def build(self, builder):
        """Calls the builder object's ``add_cv_parameter`` callback."""
        item = ""
        if self.block_name == "ParamUILabelNameID":
            # Multiple UI labels per feature are disambiguated with a suffix
            # counter maintained by the builder.
            item = "_{}".format(builder.cv_num_named_params_.get(self.nameID, 0))
        builder.add_cv_parameter(self.nameID)
        # NOTE: nameID is rewritten in place from the feature tag to a
        # (tag, block-name) tuple before delegating — build() is therefore
        # not safe to call twice on the same statement.
        self.nameID = (self.nameID, self.block_name + item)
        NameRecord.build(self, builder)

    def asFea(self, indent=""):
        plat = simplify_name_attributes(self.platformID, self.platEncID, self.langID)
        if plat != "":
            plat += " "
        return 'name {}"{}";'.format(plat, self.string)
|
| 1804 |
+
|
| 1805 |
+
|
| 1806 |
+
class CharacterStatement(Statement):
    """
    Statement used in cvParameters blocks of Character Variant features (cvXX).
    The Unicode value may be written with either decimal or hexadecimal
    notation. The value must be preceded by '0x' if it is a hexadecimal value.
    The largest Unicode value allowed is 0xFFFFFF.
    """

    def __init__(self, character, tag, location=None):
        Statement.__init__(self, location)
        self.character, self.tag = character, tag

    def build(self, builder):
        """Calls the builder object's ``add_cv_character`` callback."""
        builder.add_cv_character(self.character, self.tag)

    def asFea(self, indent=""):
        # Always emitted in hexadecimal with the "0x" prefix.
        return f"Character {self.character:#x};"
|
| 1825 |
+
|
| 1826 |
+
|
| 1827 |
+
class BaseAxis(Statement):
    """An axis definition, being either a ``VertAxis.BaseTagList/BaseScriptList``
    pair or a ``HorizAxis.BaseTagList/BaseScriptList`` pair."""

    def __init__(self, bases, scripts, vertical, location=None):
        Statement.__init__(self, location)
        # A list of baseline tag names as strings
        self.bases = bases
        # A list of script record tuplets
        # (script tag, default baseline tag, base coordinate)
        self.scripts = scripts
        # Boolean; VertAxis if True, HorizAxis if False
        self.vertical = vertical

    def build(self, builder):
        """Calls the builder object's ``set_base_axis`` callback."""
        builder.set_base_axis(self.bases, self.scripts, self.vertical)

    def asFea(self, indent=""):
        axis = "Vert" if self.vertical else "Horiz"
        records = []
        for rec in self.scripts:
            coords = " ".join(map(str, rec[2]))
            records.append("{} {} {}".format(rec[0], rec[1], coords))
        return "{}Axis.BaseTagList {};\n{}{}Axis.BaseScriptList {};".format(
            axis, " ".join(self.bases), indent, axis, ", ".join(records)
        )
|
| 1850 |
+
|
| 1851 |
+
|
| 1852 |
+
class OS2Field(Statement):
    """An entry in the ``OS/2`` table. Most ``values`` should be numbers or
    strings, apart from when the key is ``UnicodeRange``, ``CodePageRange``
    or ``Panose``, in which case it should be an array of integers."""

    def __init__(self, key, value, location=None):
        Statement.__init__(self, location)
        # Lowercased OS/2 keyword (see the lookup tables in asFea).
        self.key = key
        self.value = value

    def build(self, builder):
        """Calls the builder object's ``add_os2_field`` callback."""
        builder.add_os2_field(self.key, self.value)

    def asFea(self, indent=""):
        def intarr2str(x):
            # Render an integer array as space-separated decimals.
            return " ".join(map(str, x))

        numbers = (
            "FSType",
            "TypoAscender",
            "TypoDescender",
            "TypoLineGap",
            "winAscent",
            "winDescent",
            "XHeight",
            "CapHeight",
            "WeightClass",
            "WidthClass",
            "LowerOpSize",
            "UpperOpSize",
        )
        ranges = ("UnicodeRange", "CodePageRange")
        # Map each lowercased key to (canonical spelling, value formatter).
        keywords = dict([(x.lower(), [x, str]) for x in numbers])
        keywords.update([(x.lower(), [x, intarr2str]) for x in ranges])
        keywords["panose"] = ["Panose", intarr2str]
        keywords["vendor"] = ["Vendor", lambda y: '"{}"'.format(y)]
        if self.key in keywords:
            return "{} {};".format(
                keywords[self.key][0], keywords[self.key][1](self.value)
            )
        # NOTE(review): an unknown key is silently dropped from the output;
        # the author's own comment agrees this should raise instead.
        return ""  # should raise exception
|
| 1894 |
+
|
| 1895 |
+
|
| 1896 |
+
class HheaField(Statement):
    """An entry in the ``hhea`` table."""

    def __init__(self, key, value, location=None):
        Statement.__init__(self, location)
        self.key, self.value = key, value

    def build(self, builder):
        """Calls the builder object's ``add_hhea_field`` callback."""
        builder.add_hhea_field(self.key, self.value)

    def asFea(self, indent=""):
        # Map the lowercased key back to its canonical FEA capitalisation.
        fields = ("CaretOffset", "Ascender", "Descender", "LineGap")
        canonical = {name.lower(): name for name in fields}
        return "{} {};".format(canonical[self.key], self.value)
|
| 1912 |
+
|
| 1913 |
+
|
| 1914 |
+
class VheaField(Statement):
    """An entry in the ``vhea`` table."""

    def __init__(self, key, value, location=None):
        Statement.__init__(self, location)
        self.key, self.value = key, value

    def build(self, builder):
        """Calls the builder object's ``add_vhea_field`` callback."""
        builder.add_vhea_field(self.key, self.value)

    def asFea(self, indent=""):
        # Map the lowercased key back to its canonical FEA capitalisation.
        fields = ("VertTypoAscender", "VertTypoDescender", "VertTypoLineGap")
        canonical = {name.lower(): name for name in fields}
        return "{} {};".format(canonical[self.key], self.value)
|
| 1930 |
+
|
| 1931 |
+
|
| 1932 |
+
class STATDesignAxisStatement(Statement):
    """A STAT table Design Axis

    Args:
        tag (str): a 4 letter axis tag
        axisOrder (int): an int
        names (list): a list of :class:`STATNameStatement` objects
    """

    def __init__(self, tag, axisOrder, names, location=None):
        Statement.__init__(self, location)
        self.tag = tag
        self.axisOrder = axisOrder
        self.names = names
        self.location = location

    def build(self, builder):
        builder.addDesignAxis(self, self.location)

    def asFea(self, indent=""):
        indent += SHIFT
        body = ("\n" + indent).join(n.asFea(indent=indent) for n in self.names)
        return f"DesignAxis {self.tag} {self.axisOrder} {{ \n" + body + "\n};"
|
| 1957 |
+
|
| 1958 |
+
|
| 1959 |
+
class ElidedFallbackName(Statement):
    """STAT table ElidedFallbackName

    Args:
        names: a list of :class:`STATNameStatement` objects
    """

    def __init__(self, names, location=None):
        Statement.__init__(self, location)
        self.names = names
        self.location = location

    def build(self, builder):
        builder.setElidedFallbackName(self.names, self.location)

    def asFea(self, indent=""):
        indent += SHIFT
        body = ("\n" + indent).join(n.asFea(indent=indent) for n in self.names)
        return "ElidedFallbackName { \n" + body + "\n};"
|
| 1980 |
+
|
| 1981 |
+
|
| 1982 |
+
class ElidedFallbackNameID(Statement):
    """STAT table ElidedFallbackNameID

    Args:
        value: an int pointing to an existing name table name ID
    """

    def __init__(self, value, location=None):
        Statement.__init__(self, location)
        self.value = value
        self.location = location

    def build(self, builder):
        builder.setElidedFallbackName(self.value, self.location)

    def asFea(self, indent=""):
        return "ElidedFallbackNameID {};".format(self.value)
|
| 1999 |
+
|
| 2000 |
+
|
| 2001 |
+
class STATAxisValueStatement(Statement):
    """A STAT table Axis Value Record

    Args:
        names (list): a list of :class:`STATNameStatement` objects
        locations (list): a list of :class:`AxisValueLocationStatement` objects
        flags (int): an int
    """

    def __init__(self, names, locations, flags, location=None):
        Statement.__init__(self, location)
        self.names = names
        self.locations = locations
        self.flags = flags

    def build(self, builder):
        builder.addAxisValueRecord(self, self.location)

    def asFea(self, indent=""):
        res = "AxisValue {\n"
        # Location statements already terminate themselves with ";\n".
        res += "".join(loc.asFea() for loc in self.locations)
        res += "".join(name.asFea() + "\n" for name in self.names)
        if self.flags:
            # Bit 0 and bit 1 are the only flags rendered by name.
            known = ("OlderSiblingFontAttribute", "ElidableAxisValueName")
            set_names = [n for bit, n in enumerate(known) if self.flags & (1 << bit)]
            res += f"flag {' '.join(set_names)};\n"
        res += "};"
        return res
|
| 2039 |
+
|
| 2040 |
+
|
| 2041 |
+
class AxisValueLocationStatement(Statement):
    """
    A STAT table Axis Value Location

    Args:
        tag (str): a 4 letter axis tag
        values (list): a list of ints and/or floats
    """

    def __init__(self, tag, values, location=None):
        Statement.__init__(self, location)
        self.tag = tag
        self.values = values

    def asFea(self, res=""):
        # NOTE: the parameter is an accumulator string, not an indent.
        coords = " ".join(str(v) for v in self.values)
        return res + f"location {self.tag} {coords};\n"
|
| 2059 |
+
|
| 2060 |
+
|
| 2061 |
+
class ConditionsetStatement(Statement):
|
| 2062 |
+
"""
|
| 2063 |
+
A variable layout conditionset
|
| 2064 |
+
|
| 2065 |
+
Args:
|
| 2066 |
+
name (str): the name of this conditionset
|
| 2067 |
+
conditions (dict): a dictionary mapping axis tags to a
|
| 2068 |
+
tuple of (min,max) userspace coordinates.
|
| 2069 |
+
"""
|
| 2070 |
+
|
| 2071 |
+
def __init__(self, name, conditions, location=None):
|
| 2072 |
+
Statement.__init__(self, location)
|
| 2073 |
+
self.name = name
|
| 2074 |
+
self.conditions = conditions
|
| 2075 |
+
|
| 2076 |
+
def build(self, builder):
|
| 2077 |
+
builder.add_conditionset(self.location, self.name, self.conditions)
|
| 2078 |
+
|
| 2079 |
+
def asFea(self, res="", indent=""):
|
| 2080 |
+
res += indent + f"conditionset {self.name} " + "{\n"
|
| 2081 |
+
for tag, (minvalue, maxvalue) in self.conditions.items():
|
| 2082 |
+
res += indent + SHIFT + f"{tag} {minvalue} {maxvalue};\n"
|
| 2083 |
+
res += indent + "}" + f" {self.name};\n"
|
| 2084 |
+
return res
|
| 2085 |
+
|
| 2086 |
+
|
| 2087 |
+
class VariationBlock(Block):
|
| 2088 |
+
"""A variation feature block, applicable in a given set of conditions."""
|
| 2089 |
+
|
| 2090 |
+
def __init__(self, name, conditionset, use_extension=False, location=None):
|
| 2091 |
+
Block.__init__(self, location)
|
| 2092 |
+
self.name, self.conditionset, self.use_extension = (
|
| 2093 |
+
name,
|
| 2094 |
+
conditionset,
|
| 2095 |
+
use_extension,
|
| 2096 |
+
)
|
| 2097 |
+
|
| 2098 |
+
def build(self, builder):
|
| 2099 |
+
"""Call the ``start_feature`` callback on the builder object, visit
|
| 2100 |
+
all the statements in this feature, and then call ``end_feature``."""
|
| 2101 |
+
builder.start_feature(self.location, self.name)
|
| 2102 |
+
if (
|
| 2103 |
+
self.conditionset != "NULL"
|
| 2104 |
+
and self.conditionset not in builder.conditionsets_
|
| 2105 |
+
):
|
| 2106 |
+
raise FeatureLibError(
|
| 2107 |
+
f"variation block used undefined conditionset {self.conditionset}",
|
| 2108 |
+
self.location,
|
| 2109 |
+
)
|
| 2110 |
+
|
| 2111 |
+
# language exclude_dflt statements modify builder.features_
|
| 2112 |
+
# limit them to this block with temporary builder.features_
|
| 2113 |
+
features = builder.features_
|
| 2114 |
+
builder.features_ = {}
|
| 2115 |
+
Block.build(self, builder)
|
| 2116 |
+
for key, value in builder.features_.items():
|
| 2117 |
+
items = builder.feature_variations_.setdefault(key, {}).setdefault(
|
| 2118 |
+
self.conditionset, []
|
| 2119 |
+
)
|
| 2120 |
+
items.extend(value)
|
| 2121 |
+
if key not in features:
|
| 2122 |
+
features[key] = [] # Ensure we make a feature record
|
| 2123 |
+
builder.features_ = features
|
| 2124 |
+
builder.end_feature()
|
| 2125 |
+
|
| 2126 |
+
def asFea(self, indent=""):
|
| 2127 |
+
res = indent + "variation %s " % self.name.strip()
|
| 2128 |
+
res += self.conditionset + " "
|
| 2129 |
+
if self.use_extension:
|
| 2130 |
+
res += "useExtension "
|
| 2131 |
+
res += "{\n"
|
| 2132 |
+
res += Block.asFea(self, indent=indent)
|
| 2133 |
+
res += indent + "} %s;\n" % self.name.strip()
|
| 2134 |
+
return res
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/builder.py
ADDED
|
@@ -0,0 +1,1729 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fontTools.misc import sstruct
|
| 2 |
+
from fontTools.misc.textTools import Tag, tostr, binary2num, safeEval
|
| 3 |
+
from fontTools.feaLib.error import FeatureLibError
|
| 4 |
+
from fontTools.feaLib.lookupDebugInfo import (
|
| 5 |
+
LookupDebugInfo,
|
| 6 |
+
LOOKUP_DEBUG_INFO_KEY,
|
| 7 |
+
LOOKUP_DEBUG_ENV_VAR,
|
| 8 |
+
)
|
| 9 |
+
from fontTools.feaLib.parser import Parser
|
| 10 |
+
from fontTools.feaLib.ast import FeatureFile
|
| 11 |
+
from fontTools.feaLib.variableScalar import VariableScalar
|
| 12 |
+
from fontTools.otlLib import builder as otl
|
| 13 |
+
from fontTools.otlLib.maxContextCalc import maxCtxFont
|
| 14 |
+
from fontTools.ttLib import newTable, getTableModule
|
| 15 |
+
from fontTools.ttLib.tables import otBase, otTables
|
| 16 |
+
from fontTools.otlLib.builder import (
|
| 17 |
+
AlternateSubstBuilder,
|
| 18 |
+
ChainContextPosBuilder,
|
| 19 |
+
ChainContextSubstBuilder,
|
| 20 |
+
LigatureSubstBuilder,
|
| 21 |
+
MultipleSubstBuilder,
|
| 22 |
+
CursivePosBuilder,
|
| 23 |
+
MarkBasePosBuilder,
|
| 24 |
+
MarkLigPosBuilder,
|
| 25 |
+
MarkMarkPosBuilder,
|
| 26 |
+
ReverseChainSingleSubstBuilder,
|
| 27 |
+
SingleSubstBuilder,
|
| 28 |
+
ClassPairPosSubtableBuilder,
|
| 29 |
+
PairPosBuilder,
|
| 30 |
+
SinglePosBuilder,
|
| 31 |
+
ChainContextualRule,
|
| 32 |
+
)
|
| 33 |
+
from fontTools.otlLib.error import OpenTypeLibError
|
| 34 |
+
from fontTools.varLib.varStore import OnlineVarStoreBuilder
|
| 35 |
+
from fontTools.varLib.builder import buildVarDevTable
|
| 36 |
+
from fontTools.varLib.featureVars import addFeatureVariationsRaw
|
| 37 |
+
from fontTools.varLib.models import normalizeValue, piecewiseLinearMap
|
| 38 |
+
from collections import defaultdict
|
| 39 |
+
import copy
|
| 40 |
+
import itertools
|
| 41 |
+
from io import StringIO
|
| 42 |
+
import logging
|
| 43 |
+
import warnings
|
| 44 |
+
import os
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
log = logging.getLogger(__name__)
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def addOpenTypeFeatures(font, featurefile, tables=None, debug=False):
|
| 51 |
+
"""Add features from a file to a font. Note that this replaces any features
|
| 52 |
+
currently present.
|
| 53 |
+
|
| 54 |
+
Args:
|
| 55 |
+
font (feaLib.ttLib.TTFont): The font object.
|
| 56 |
+
featurefile: Either a path or file object (in which case we
|
| 57 |
+
parse it into an AST), or a pre-parsed AST instance.
|
| 58 |
+
tables: If passed, restrict the set of affected tables to those in the
|
| 59 |
+
list.
|
| 60 |
+
debug: Whether to add source debugging information to the font in the
|
| 61 |
+
``Debg`` table
|
| 62 |
+
|
| 63 |
+
"""
|
| 64 |
+
builder = Builder(font, featurefile)
|
| 65 |
+
builder.build(tables=tables, debug=debug)
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def addOpenTypeFeaturesFromString(
|
| 69 |
+
font, features, filename=None, tables=None, debug=False
|
| 70 |
+
):
|
| 71 |
+
"""Add features from a string to a font. Note that this replaces any
|
| 72 |
+
features currently present.
|
| 73 |
+
|
| 74 |
+
Args:
|
| 75 |
+
font (feaLib.ttLib.TTFont): The font object.
|
| 76 |
+
features: A string containing feature code.
|
| 77 |
+
filename: The directory containing ``filename`` is used as the root of
|
| 78 |
+
relative ``include()`` paths; if ``None`` is provided, the current
|
| 79 |
+
directory is assumed.
|
| 80 |
+
tables: If passed, restrict the set of affected tables to those in the
|
| 81 |
+
list.
|
| 82 |
+
debug: Whether to add source debugging information to the font in the
|
| 83 |
+
``Debg`` table
|
| 84 |
+
|
| 85 |
+
"""
|
| 86 |
+
|
| 87 |
+
featurefile = StringIO(tostr(features))
|
| 88 |
+
if filename:
|
| 89 |
+
featurefile.name = filename
|
| 90 |
+
addOpenTypeFeatures(font, featurefile, tables=tables, debug=debug)
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
class Builder(object):
|
| 94 |
+
supportedTables = frozenset(
|
| 95 |
+
Tag(tag)
|
| 96 |
+
for tag in [
|
| 97 |
+
"BASE",
|
| 98 |
+
"GDEF",
|
| 99 |
+
"GPOS",
|
| 100 |
+
"GSUB",
|
| 101 |
+
"OS/2",
|
| 102 |
+
"head",
|
| 103 |
+
"hhea",
|
| 104 |
+
"name",
|
| 105 |
+
"vhea",
|
| 106 |
+
"STAT",
|
| 107 |
+
]
|
| 108 |
+
)
|
| 109 |
+
|
| 110 |
+
def __init__(self, font, featurefile):
|
| 111 |
+
self.font = font
|
| 112 |
+
# 'featurefile' can be either a path or file object (in which case we
|
| 113 |
+
# parse it into an AST), or a pre-parsed AST instance
|
| 114 |
+
if isinstance(featurefile, FeatureFile):
|
| 115 |
+
self.parseTree, self.file = featurefile, None
|
| 116 |
+
else:
|
| 117 |
+
self.parseTree, self.file = None, featurefile
|
| 118 |
+
self.glyphMap = font.getReverseGlyphMap()
|
| 119 |
+
self.varstorebuilder = None
|
| 120 |
+
if "fvar" in font:
|
| 121 |
+
self.axes = font["fvar"].axes
|
| 122 |
+
self.varstorebuilder = OnlineVarStoreBuilder(
|
| 123 |
+
[ax.axisTag for ax in self.axes]
|
| 124 |
+
)
|
| 125 |
+
self.default_language_systems_ = set()
|
| 126 |
+
self.script_ = None
|
| 127 |
+
self.lookupflag_ = 0
|
| 128 |
+
self.lookupflag_markFilterSet_ = None
|
| 129 |
+
self.language_systems = set()
|
| 130 |
+
self.seen_non_DFLT_script_ = False
|
| 131 |
+
self.named_lookups_ = {}
|
| 132 |
+
self.cur_lookup_ = None
|
| 133 |
+
self.cur_lookup_name_ = None
|
| 134 |
+
self.cur_feature_name_ = None
|
| 135 |
+
self.lookups_ = []
|
| 136 |
+
self.lookup_locations = {"GSUB": {}, "GPOS": {}}
|
| 137 |
+
self.features_ = {} # ('latn', 'DEU ', 'smcp') --> [LookupBuilder*]
|
| 138 |
+
self.required_features_ = {} # ('latn', 'DEU ') --> 'scmp'
|
| 139 |
+
self.feature_variations_ = {}
|
| 140 |
+
# for feature 'aalt'
|
| 141 |
+
self.aalt_features_ = [] # [(location, featureName)*], for 'aalt'
|
| 142 |
+
self.aalt_location_ = None
|
| 143 |
+
self.aalt_alternates_ = {}
|
| 144 |
+
# for 'featureNames'
|
| 145 |
+
self.featureNames_ = set()
|
| 146 |
+
self.featureNames_ids_ = {}
|
| 147 |
+
# for 'cvParameters'
|
| 148 |
+
self.cv_parameters_ = set()
|
| 149 |
+
self.cv_parameters_ids_ = {}
|
| 150 |
+
self.cv_num_named_params_ = {}
|
| 151 |
+
self.cv_characters_ = defaultdict(list)
|
| 152 |
+
# for feature 'size'
|
| 153 |
+
self.size_parameters_ = None
|
| 154 |
+
# for table 'head'
|
| 155 |
+
self.fontRevision_ = None # 2.71
|
| 156 |
+
# for table 'name'
|
| 157 |
+
self.names_ = []
|
| 158 |
+
# for table 'BASE'
|
| 159 |
+
self.base_horiz_axis_ = None
|
| 160 |
+
self.base_vert_axis_ = None
|
| 161 |
+
# for table 'GDEF'
|
| 162 |
+
self.attachPoints_ = {} # "a" --> {3, 7}
|
| 163 |
+
self.ligCaretCoords_ = {} # "f_f_i" --> {300, 600}
|
| 164 |
+
self.ligCaretPoints_ = {} # "f_f_i" --> {3, 7}
|
| 165 |
+
self.glyphClassDefs_ = {} # "fi" --> (2, (file, line, column))
|
| 166 |
+
self.markAttach_ = {} # "acute" --> (4, (file, line, column))
|
| 167 |
+
self.markAttachClassID_ = {} # frozenset({"acute", "grave"}) --> 4
|
| 168 |
+
self.markFilterSets_ = {} # frozenset({"acute", "grave"}) --> 4
|
| 169 |
+
# for table 'OS/2'
|
| 170 |
+
self.os2_ = {}
|
| 171 |
+
# for table 'hhea'
|
| 172 |
+
self.hhea_ = {}
|
| 173 |
+
# for table 'vhea'
|
| 174 |
+
self.vhea_ = {}
|
| 175 |
+
# for table 'STAT'
|
| 176 |
+
self.stat_ = {}
|
| 177 |
+
# for conditionsets
|
| 178 |
+
self.conditionsets_ = {}
|
| 179 |
+
# We will often use exactly the same locations (i.e. the font's masters)
|
| 180 |
+
# for a large number of variable scalars. Instead of creating a model
|
| 181 |
+
# for each, let's share the models.
|
| 182 |
+
self.model_cache = {}
|
| 183 |
+
|
| 184 |
+
def build(self, tables=None, debug=False):
|
| 185 |
+
if self.parseTree is None:
|
| 186 |
+
self.parseTree = Parser(self.file, self.glyphMap).parse()
|
| 187 |
+
self.parseTree.build(self)
|
| 188 |
+
# by default, build all the supported tables
|
| 189 |
+
if tables is None:
|
| 190 |
+
tables = self.supportedTables
|
| 191 |
+
else:
|
| 192 |
+
tables = frozenset(tables)
|
| 193 |
+
unsupported = tables - self.supportedTables
|
| 194 |
+
if unsupported:
|
| 195 |
+
unsupported_string = ", ".join(sorted(unsupported))
|
| 196 |
+
raise NotImplementedError(
|
| 197 |
+
"The following tables were requested but are unsupported: "
|
| 198 |
+
f"{unsupported_string}."
|
| 199 |
+
)
|
| 200 |
+
if "GSUB" in tables:
|
| 201 |
+
self.build_feature_aalt_()
|
| 202 |
+
if "head" in tables:
|
| 203 |
+
self.build_head()
|
| 204 |
+
if "hhea" in tables:
|
| 205 |
+
self.build_hhea()
|
| 206 |
+
if "vhea" in tables:
|
| 207 |
+
self.build_vhea()
|
| 208 |
+
if "name" in tables:
|
| 209 |
+
self.build_name()
|
| 210 |
+
if "OS/2" in tables:
|
| 211 |
+
self.build_OS_2()
|
| 212 |
+
if "STAT" in tables:
|
| 213 |
+
self.build_STAT()
|
| 214 |
+
for tag in ("GPOS", "GSUB"):
|
| 215 |
+
if tag not in tables:
|
| 216 |
+
continue
|
| 217 |
+
table = self.makeTable(tag)
|
| 218 |
+
if self.feature_variations_:
|
| 219 |
+
self.makeFeatureVariations(table, tag)
|
| 220 |
+
if (
|
| 221 |
+
table.ScriptList.ScriptCount > 0
|
| 222 |
+
or table.FeatureList.FeatureCount > 0
|
| 223 |
+
or table.LookupList.LookupCount > 0
|
| 224 |
+
):
|
| 225 |
+
fontTable = self.font[tag] = newTable(tag)
|
| 226 |
+
fontTable.table = table
|
| 227 |
+
elif tag in self.font:
|
| 228 |
+
del self.font[tag]
|
| 229 |
+
if any(tag in self.font for tag in ("GPOS", "GSUB")) and "OS/2" in self.font:
|
| 230 |
+
self.font["OS/2"].usMaxContext = maxCtxFont(self.font)
|
| 231 |
+
if "GDEF" in tables:
|
| 232 |
+
gdef = self.buildGDEF()
|
| 233 |
+
if gdef:
|
| 234 |
+
self.font["GDEF"] = gdef
|
| 235 |
+
elif "GDEF" in self.font:
|
| 236 |
+
del self.font["GDEF"]
|
| 237 |
+
if "BASE" in tables:
|
| 238 |
+
base = self.buildBASE()
|
| 239 |
+
if base:
|
| 240 |
+
self.font["BASE"] = base
|
| 241 |
+
elif "BASE" in self.font:
|
| 242 |
+
del self.font["BASE"]
|
| 243 |
+
if debug or os.environ.get(LOOKUP_DEBUG_ENV_VAR):
|
| 244 |
+
self.buildDebg()
|
| 245 |
+
|
| 246 |
+
def get_chained_lookup_(self, location, builder_class):
|
| 247 |
+
result = builder_class(self.font, location)
|
| 248 |
+
result.lookupflag = self.lookupflag_
|
| 249 |
+
result.markFilterSet = self.lookupflag_markFilterSet_
|
| 250 |
+
self.lookups_.append(result)
|
| 251 |
+
return result
|
| 252 |
+
|
| 253 |
+
def add_lookup_to_feature_(self, lookup, feature_name):
|
| 254 |
+
for script, lang in self.language_systems:
|
| 255 |
+
key = (script, lang, feature_name)
|
| 256 |
+
self.features_.setdefault(key, []).append(lookup)
|
| 257 |
+
|
| 258 |
+
def get_lookup_(self, location, builder_class):
|
| 259 |
+
if (
|
| 260 |
+
self.cur_lookup_
|
| 261 |
+
and type(self.cur_lookup_) == builder_class
|
| 262 |
+
and self.cur_lookup_.lookupflag == self.lookupflag_
|
| 263 |
+
and self.cur_lookup_.markFilterSet == self.lookupflag_markFilterSet_
|
| 264 |
+
):
|
| 265 |
+
return self.cur_lookup_
|
| 266 |
+
if self.cur_lookup_name_ and self.cur_lookup_:
|
| 267 |
+
raise FeatureLibError(
|
| 268 |
+
"Within a named lookup block, all rules must be of "
|
| 269 |
+
"the same lookup type and flag",
|
| 270 |
+
location,
|
| 271 |
+
)
|
| 272 |
+
self.cur_lookup_ = builder_class(self.font, location)
|
| 273 |
+
self.cur_lookup_.lookupflag = self.lookupflag_
|
| 274 |
+
self.cur_lookup_.markFilterSet = self.lookupflag_markFilterSet_
|
| 275 |
+
self.lookups_.append(self.cur_lookup_)
|
| 276 |
+
if self.cur_lookup_name_:
|
| 277 |
+
# We are starting a lookup rule inside a named lookup block.
|
| 278 |
+
self.named_lookups_[self.cur_lookup_name_] = self.cur_lookup_
|
| 279 |
+
if self.cur_feature_name_:
|
| 280 |
+
# We are starting a lookup rule inside a feature. This includes
|
| 281 |
+
# lookup rules inside named lookups inside features.
|
| 282 |
+
self.add_lookup_to_feature_(self.cur_lookup_, self.cur_feature_name_)
|
| 283 |
+
return self.cur_lookup_
|
| 284 |
+
|
| 285 |
+
def build_feature_aalt_(self):
|
| 286 |
+
if not self.aalt_features_ and not self.aalt_alternates_:
|
| 287 |
+
return
|
| 288 |
+
# > alternate glyphs will be sorted in the order that the source features
|
| 289 |
+
# > are named in the aalt definition, not the order of the feature definitions
|
| 290 |
+
# > in the file. Alternates defined explicitly ... will precede all others.
|
| 291 |
+
# https://github.com/fonttools/fonttools/issues/836
|
| 292 |
+
alternates = {g: list(a) for g, a in self.aalt_alternates_.items()}
|
| 293 |
+
for location, name in self.aalt_features_ + [(None, "aalt")]:
|
| 294 |
+
feature = [
|
| 295 |
+
(script, lang, feature, lookups)
|
| 296 |
+
for (script, lang, feature), lookups in self.features_.items()
|
| 297 |
+
if feature == name
|
| 298 |
+
]
|
| 299 |
+
# "aalt" does not have to specify its own lookups, but it might.
|
| 300 |
+
if not feature and name != "aalt":
|
| 301 |
+
warnings.warn("%s: Feature %s has not been defined" % (location, name))
|
| 302 |
+
continue
|
| 303 |
+
for script, lang, feature, lookups in feature:
|
| 304 |
+
for lookuplist in lookups:
|
| 305 |
+
if not isinstance(lookuplist, list):
|
| 306 |
+
lookuplist = [lookuplist]
|
| 307 |
+
for lookup in lookuplist:
|
| 308 |
+
for glyph, alts in lookup.getAlternateGlyphs().items():
|
| 309 |
+
alts_for_glyph = alternates.setdefault(glyph, [])
|
| 310 |
+
alts_for_glyph.extend(
|
| 311 |
+
g for g in alts if g not in alts_for_glyph
|
| 312 |
+
)
|
| 313 |
+
single = {
|
| 314 |
+
glyph: repl[0] for glyph, repl in alternates.items() if len(repl) == 1
|
| 315 |
+
}
|
| 316 |
+
multi = {glyph: repl for glyph, repl in alternates.items() if len(repl) > 1}
|
| 317 |
+
if not single and not multi:
|
| 318 |
+
return
|
| 319 |
+
self.features_ = {
|
| 320 |
+
(script, lang, feature): lookups
|
| 321 |
+
for (script, lang, feature), lookups in self.features_.items()
|
| 322 |
+
if feature != "aalt"
|
| 323 |
+
}
|
| 324 |
+
old_lookups = self.lookups_
|
| 325 |
+
self.lookups_ = []
|
| 326 |
+
self.start_feature(self.aalt_location_, "aalt")
|
| 327 |
+
if single:
|
| 328 |
+
single_lookup = self.get_lookup_(location, SingleSubstBuilder)
|
| 329 |
+
single_lookup.mapping = single
|
| 330 |
+
if multi:
|
| 331 |
+
multi_lookup = self.get_lookup_(location, AlternateSubstBuilder)
|
| 332 |
+
multi_lookup.alternates = multi
|
| 333 |
+
self.end_feature()
|
| 334 |
+
self.lookups_.extend(old_lookups)
|
| 335 |
+
|
| 336 |
+
def build_head(self):
|
| 337 |
+
if not self.fontRevision_:
|
| 338 |
+
return
|
| 339 |
+
table = self.font.get("head")
|
| 340 |
+
if not table: # this only happens for unit tests
|
| 341 |
+
table = self.font["head"] = newTable("head")
|
| 342 |
+
table.decompile(b"\0" * 54, self.font)
|
| 343 |
+
table.tableVersion = 1.0
|
| 344 |
+
table.created = table.modified = 3406620153 # 2011-12-13 11:22:33
|
| 345 |
+
table.fontRevision = self.fontRevision_
|
| 346 |
+
|
| 347 |
+
def build_hhea(self):
|
| 348 |
+
if not self.hhea_:
|
| 349 |
+
return
|
| 350 |
+
table = self.font.get("hhea")
|
| 351 |
+
if not table: # this only happens for unit tests
|
| 352 |
+
table = self.font["hhea"] = newTable("hhea")
|
| 353 |
+
table.decompile(b"\0" * 36, self.font)
|
| 354 |
+
table.tableVersion = 0x00010000
|
| 355 |
+
if "caretoffset" in self.hhea_:
|
| 356 |
+
table.caretOffset = self.hhea_["caretoffset"]
|
| 357 |
+
if "ascender" in self.hhea_:
|
| 358 |
+
table.ascent = self.hhea_["ascender"]
|
| 359 |
+
if "descender" in self.hhea_:
|
| 360 |
+
table.descent = self.hhea_["descender"]
|
| 361 |
+
if "linegap" in self.hhea_:
|
| 362 |
+
table.lineGap = self.hhea_["linegap"]
|
| 363 |
+
|
| 364 |
+
def build_vhea(self):
|
| 365 |
+
if not self.vhea_:
|
| 366 |
+
return
|
| 367 |
+
table = self.font.get("vhea")
|
| 368 |
+
if not table: # this only happens for unit tests
|
| 369 |
+
table = self.font["vhea"] = newTable("vhea")
|
| 370 |
+
table.decompile(b"\0" * 36, self.font)
|
| 371 |
+
table.tableVersion = 0x00011000
|
| 372 |
+
if "verttypoascender" in self.vhea_:
|
| 373 |
+
table.ascent = self.vhea_["verttypoascender"]
|
| 374 |
+
if "verttypodescender" in self.vhea_:
|
| 375 |
+
table.descent = self.vhea_["verttypodescender"]
|
| 376 |
+
if "verttypolinegap" in self.vhea_:
|
| 377 |
+
table.lineGap = self.vhea_["verttypolinegap"]
|
| 378 |
+
|
| 379 |
+
def get_user_name_id(self, table):
|
| 380 |
+
# Try to find first unused font-specific name id
|
| 381 |
+
nameIDs = [name.nameID for name in table.names]
|
| 382 |
+
for user_name_id in range(256, 32767):
|
| 383 |
+
if user_name_id not in nameIDs:
|
| 384 |
+
return user_name_id
|
| 385 |
+
|
| 386 |
+
def buildFeatureParams(self, tag):
|
| 387 |
+
params = None
|
| 388 |
+
if tag == "size":
|
| 389 |
+
params = otTables.FeatureParamsSize()
|
| 390 |
+
(
|
| 391 |
+
params.DesignSize,
|
| 392 |
+
params.SubfamilyID,
|
| 393 |
+
params.RangeStart,
|
| 394 |
+
params.RangeEnd,
|
| 395 |
+
) = self.size_parameters_
|
| 396 |
+
if tag in self.featureNames_ids_:
|
| 397 |
+
params.SubfamilyNameID = self.featureNames_ids_[tag]
|
| 398 |
+
else:
|
| 399 |
+
params.SubfamilyNameID = 0
|
| 400 |
+
elif tag in self.featureNames_:
|
| 401 |
+
if not self.featureNames_ids_:
|
| 402 |
+
# name table wasn't selected among the tables to build; skip
|
| 403 |
+
pass
|
| 404 |
+
else:
|
| 405 |
+
assert tag in self.featureNames_ids_
|
| 406 |
+
params = otTables.FeatureParamsStylisticSet()
|
| 407 |
+
params.Version = 0
|
| 408 |
+
params.UINameID = self.featureNames_ids_[tag]
|
| 409 |
+
elif tag in self.cv_parameters_:
|
| 410 |
+
params = otTables.FeatureParamsCharacterVariants()
|
| 411 |
+
params.Format = 0
|
| 412 |
+
params.FeatUILabelNameID = self.cv_parameters_ids_.get(
|
| 413 |
+
(tag, "FeatUILabelNameID"), 0
|
| 414 |
+
)
|
| 415 |
+
params.FeatUITooltipTextNameID = self.cv_parameters_ids_.get(
|
| 416 |
+
(tag, "FeatUITooltipTextNameID"), 0
|
| 417 |
+
)
|
| 418 |
+
params.SampleTextNameID = self.cv_parameters_ids_.get(
|
| 419 |
+
(tag, "SampleTextNameID"), 0
|
| 420 |
+
)
|
| 421 |
+
params.NumNamedParameters = self.cv_num_named_params_.get(tag, 0)
|
| 422 |
+
params.FirstParamUILabelNameID = self.cv_parameters_ids_.get(
|
| 423 |
+
(tag, "ParamUILabelNameID_0"), 0
|
| 424 |
+
)
|
| 425 |
+
params.CharCount = len(self.cv_characters_[tag])
|
| 426 |
+
params.Character = self.cv_characters_[tag]
|
| 427 |
+
return params
|
| 428 |
+
|
| 429 |
+
def build_name(self):
    """Populate the ``name`` table from the collected name records."""
    if not self.names_:
        return
    table = self.font.get("name")
    if not table:  # this only happens for unit tests
        table = self.font["name"] = newTable("name")
        table.names = []
    for nameID, platformID, platEncID, langID, string in self.names_:
        # Feature-name records carry a tag instead of a numeric nameID:
        # featureNames blocks use 'feature tag', cvParameters blocks use
        # ('feature tag', 'block name'). Allocate a user name ID lazily.
        if not isinstance(nameID, int):
            tag = nameID
            if tag in self.featureNames_:
                if tag not in self.featureNames_ids_:
                    self.featureNames_ids_[tag] = self.get_user_name_id(table)
                    assert self.featureNames_ids_[tag] is not None
                nameID = self.featureNames_ids_[tag]
            elif tag[0] in self.cv_parameters_:
                if tag not in self.cv_parameters_ids_:
                    self.cv_parameters_ids_[tag] = self.get_user_name_id(table)
                    assert self.cv_parameters_ids_[tag] is not None
                nameID = self.cv_parameters_ids_[tag]
        table.setName(string, nameID, platformID, platEncID, langID)
    table.names.sort()
|
| 454 |
+
|
| 455 |
+
def build_OS_2(self):
    """Populate the ``OS/2`` table from the collected ``table OS/2``
    statements, raising the table version when newer fields are used."""
    if not self.os2_:
        return
    table = self.font.get("OS/2")
    if not table:  # this only happens for unit tests
        table = self.font["OS/2"] = newTable("OS/2")
        data = b"\0" * sstruct.calcsize(getTableModule("OS/2").OS2_format_0)
        table.decompile(data, self.font)
    version = 0
    # Scalar statements: (statement key, OS/2 attribute, minimum table
    # version required by that attribute).
    scalar_fields = (
        ("fstype", "fsType", 0),
        ("typoascender", "sTypoAscender", 0),
        ("typodescender", "sTypoDescender", 0),
        ("typolinegap", "sTypoLineGap", 0),
        ("winascent", "usWinAscent", 0),
        ("windescent", "usWinDescent", 0),
        ("weightclass", "usWeightClass", 0),
        ("widthclass", "usWidthClass", 0),
        ("xheight", "sxHeight", 2),
        ("capheight", "sCapHeight", 2),
        ("loweropsize", "usLowerOpticalPointSize", 5),
        ("upperopsize", "usUpperOpticalPointSize", 5),
    )
    for key, attr, min_version in scalar_fields:
        if key in self.os2_:
            setattr(table, attr, self.os2_[key])
            version = max(version, min_version)
    if "panose" in self.os2_:
        panose = getTableModule("OS/2").Panose()
        (
            panose.bFamilyType,
            panose.bSerifStyle,
            panose.bWeight,
            panose.bProportion,
            panose.bContrast,
            panose.bStrokeVariation,
            panose.bArmStyle,
            panose.bLetterForm,
            panose.bMidline,
            panose.bXHeight,
        ) = self.os2_["panose"]
        table.panose = panose
    if "vendor" in self.os2_:
        table.achVendID = safeEval("'''" + self.os2_["vendor"] + "'''")
    if "unicoderange" in self.os2_:
        table.setUnicodeRanges(self.os2_["unicoderange"])
    if "codepagerange" in self.os2_:
        pages = self.build_codepages_(self.os2_["codepagerange"])
        table.ulCodePageRange1, table.ulCodePageRange2 = pages
        version = max(version, 1)

    def ensure_attrs(table, attrs):
        # this only happens for unit tests
        for attr in attrs:
            if not hasattr(table, attr):
                setattr(table, attr, 0)

    table.version = max(version, table.version)
    if version >= 1:
        ensure_attrs(table, ("ulCodePageRange1", "ulCodePageRange2"))
    if version >= 2:
        ensure_attrs(
            table,
            (
                "sxHeight",
                "sCapHeight",
                "usDefaultChar",
                "usBreakChar",
                "usMaxContext",
            ),
        )
    if version >= 5:
        ensure_attrs(table, ("usLowerOpticalPointSize", "usUpperOpticalPointSize"))
|
| 538 |
+
|
| 539 |
+
def setElidedFallbackName(self, value, location):
    """Record the STAT elided fallback name.

    ``ElidedFallbackName`` and ``ElidedFallbackNameID`` fill the same
    slot, so a second occurrence of either is an error.
    """
    for token in ("ElidedFallbackName", "ElidedFallbackNameID"):
        if token in self.stat_:
            raise FeatureLibError(
                f"{token} is already set.",
                location,
            )
    if isinstance(value, int):
        key = "ElidedFallbackNameID"
    elif isinstance(value, list):
        key = "ElidedFallbackName"
    else:
        raise AssertionError(value)
    self.stat_[key] = value
|
| 554 |
+
|
| 555 |
+
def addDesignAxis(self, designAxis, location):
    """Register a STAT DesignAxis, rejecting duplicate tags and
    duplicate axis ordering numbers."""
    axes = self.stat_.setdefault("DesignAxes", [])
    if any(existing.tag == designAxis.tag for existing in axes):
        raise FeatureLibError(
            f'DesignAxis already defined for tag "{designAxis.tag}".',
            location,
        )
    if any(existing.axisOrder == designAxis.axisOrder for existing in axes):
        raise FeatureLibError(
            f"DesignAxis already defined for axis number {designAxis.axisOrder}.",
            location,
        )
    axes.append(designAxis)
|
| 569 |
+
|
| 570 |
+
def addAxisValueRecord(self, axisValueRecord, location):
    """Register a STAT AxisValue record, rejecting exact duplicates
    (same names, same locations, same flags)."""
    records = self.stat_.setdefault("AxisValueRecords", [])
    new_names = {n.asFea() for n in axisValueRecord.names}
    new_locations = {n.asFea() for n in axisValueRecord.locations}
    for existing in records:
        if (
            {n.asFea() for n in existing.names} == new_names
            and {n.asFea() for n in existing.locations} == new_locations
            and existing.flags == axisValueRecord.flags
        ):
            raise FeatureLibError(
                "An AxisValueRecord with these values is already defined.",
                location,
            )
    records.append(axisValueRecord)
|
| 587 |
+
|
| 588 |
+
def build_STAT(self):
    """Build the STAT table from collected ``table STAT`` statements.

    Converts the raw AxisValue records into the dict format expected by
    :func:`otl.buildStatTable`, sorting single-location records into
    per-axis format 1/2/3 values and multi-location records into
    format 4 entries.
    """
    if not self.stat_:
        return

    axes = self.stat_.get("DesignAxes")
    if not axes:
        raise FeatureLibError("DesignAxes not defined", None)
    axisValueRecords = self.stat_.get("AxisValueRecords")
    axisValues = {}
    format4_locations = []
    for tag in axes:
        axisValues[tag.tag] = []
    if axisValueRecords is not None:
        for avr in axisValueRecords:
            valuesDict = {}
            if avr.flags > 0:
                valuesDict["flags"] = avr.flags
            if len(avr.locations) == 1:
                # A single axis location: the number of values picks the
                # AxisValue format.
                location = avr.locations[0]
                values = location.values
                if len(values) == 1:  # format1
                    valuesDict.update({"value": values[0], "name": avr.names})
                if len(values) == 2:  # format3
                    valuesDict.update(
                        {
                            "value": values[0],
                            "linkedValue": values[1],
                            "name": avr.names,
                        }
                    )
                if len(values) == 3:  # format2
                    nominal, minVal, maxVal = values
                    valuesDict.update(
                        {
                            "nominalValue": nominal,
                            "rangeMinValue": minVal,
                            "rangeMaxValue": maxVal,
                            "name": avr.names,
                        }
                    )
                axisValues[location.tag].append(valuesDict)
            else:
                # Several axis locations in one record: format 4.
                valuesDict.update(
                    {
                        "location": {i.tag: i.values[0] for i in avr.locations},
                        "name": avr.names,
                    }
                )
                format4_locations.append(valuesDict)

    designAxes = [
        {
            "ordering": a.axisOrder,
            "tag": a.tag,
            "name": a.names,
            "values": axisValues[a.tag],
        }
        for a in axes
    ]

    nameTable = self.font.get("name")
    if not nameTable:  # this only happens for unit tests
        nameTable = self.font["name"] = newTable("name")
        nameTable.names = []

    if "ElidedFallbackNameID" in self.stat_:
        nameID = self.stat_["ElidedFallbackNameID"]
        name = nameTable.getDebugName(nameID)
        if not name:
            raise FeatureLibError(
                f"ElidedFallbackNameID {nameID} points "
                "to a nameID that does not exist in the "
                '"name" table',
                None,
            )
    elif "ElidedFallbackName" in self.stat_:
        nameID = self.stat_["ElidedFallbackName"]
    # NOTE(review): if neither ElidedFallback statement was recorded,
    # nameID is unbound at the call below; presumably the parser
    # guarantees one of them is present before build_STAT runs — confirm.

    otl.buildStatTable(
        self.font,
        designAxes,
        locations=format4_locations,
        elidedFallbackName=nameID,
    )
|
| 672 |
+
|
| 673 |
+
def build_codepages_(self, pages):
    """Convert Windows codepage numbers to OS/2 ``ulCodePageRange`` values.

    Args:
        pages: iterable of Windows codepage numbers (e.g. 1252, 932).

    Returns:
        A two-element list ``[ulCodePageRange1, ulCodePageRange2]``.

    Codepages that have no assigned bit are silently ignored, matching
    the original behavior. The original implementation built two 32-char
    "0"/"1" strings and parsed them back with ``binary2num``; setting the
    bits directly on two integers is simpler and avoids the round-trip.
    """
    # Bit assignments per the OpenType OS/2 ulCodePageRange definition.
    pages2bits = {
        1252: 0,
        1250: 1,
        1251: 2,
        1253: 3,
        1254: 4,
        1255: 5,
        1256: 6,
        1257: 7,
        1258: 8,
        874: 16,
        932: 17,
        936: 18,
        949: 19,
        950: 20,
        1361: 21,
        869: 48,
        866: 49,
        865: 50,
        864: 51,
        863: 52,
        862: 53,
        861: 54,
        860: 55,
        857: 56,
        855: 57,
        852: 58,
        775: 59,
        737: 60,
        708: 61,
        850: 62,
        437: 63,
    }
    range1 = 0
    range2 = 0
    for page in pages:
        bit = pages2bits.get(page)
        if bit is None:
            continue  # unrecognized codepage: ignore, as before
        if bit < 32:
            range1 |= 1 << bit
        else:
            range2 |= 1 << (bit - 32)
    return [range1, range2]
|
| 717 |
+
|
| 718 |
+
def buildBASE(self):
    """Assemble the BASE table, or return None when no baseline data
    was collected."""
    if not self.base_horiz_axis_ and not self.base_vert_axis_:
        return None
    contents = otTables.BASE()
    contents.Version = 0x00010000
    contents.HorizAxis = self.buildBASEAxis(self.base_horiz_axis_)
    contents.VertAxis = self.buildBASEAxis(self.base_vert_axis_)

    wrapper = newTable("BASE")
    wrapper.table = contents
    return wrapper
|
| 729 |
+
|
| 730 |
+
def buildBASEAxis(self, axis):
    """Build an ``otTables.Axis`` from a (baseline tags, script records)
    pair; a falsy *axis* yields None."""
    if not axis:
        return
    bases, scripts = axis
    result = otTables.Axis()
    result.BaseTagList = otTables.BaseTagList()
    result.BaseTagList.BaselineTag = bases
    result.BaseTagList.BaseTagCount = len(bases)
    result.BaseScriptList = otTables.BaseScriptList()
    result.BaseScriptList.BaseScriptRecord = []
    result.BaseScriptList.BaseScriptCount = len(scripts)
    # Each script entry is indexed: [0] script tag, [1] default baseline
    # tag, [2] the per-baseline coordinates.
    for script in sorted(scripts):
        record = otTables.BaseScriptRecord()
        record.BaseScriptTag = script[0]
        record.BaseScript = otTables.BaseScript()
        record.BaseScript.BaseLangSysCount = 0
        values = otTables.BaseValues()
        record.BaseScript.BaseValues = values
        values.DefaultIndex = bases.index(script[1])
        values.BaseCoord = []
        values.BaseCoordCount = len(script[2])
        for coordinate in script[2]:
            coord = otTables.BaseCoord()
            coord.Format = 1
            coord.Coordinate = coordinate
            values.BaseCoord.append(coord)
        result.BaseScriptList.BaseScriptRecord.append(record)
    return result
|
| 757 |
+
|
| 758 |
+
def buildGDEF(self):
    """Assemble the GDEF table.

    Returns a ``newTable("GDEF")`` object, or None when every subtable
    would be empty and no variation store exists.
    """
    gdef = otTables.GDEF()
    gdef.GlyphClassDef = self.buildGDEFGlyphClassDef_()
    gdef.AttachList = otl.buildAttachList(self.attachPoints_, self.glyphMap)
    gdef.LigCaretList = otl.buildLigCaretList(
        self.ligCaretCoords_, self.ligCaretPoints_, self.glyphMap
    )
    gdef.MarkAttachClassDef = self.buildGDEFMarkAttachClassDef_()
    gdef.MarkGlyphSetsDef = self.buildGDEFMarkGlyphSetsDef_()
    # MarkGlyphSetsDef requires GDEF 1.2; a VarStore bumps it to 1.3 below.
    gdef.Version = 0x00010002 if gdef.MarkGlyphSetsDef else 0x00010000
    if self.varstorebuilder:
        store = self.varstorebuilder.finish()
        if store:
            gdef.Version = 0x00010003
            gdef.VarStore = store
            # Optimizing the store reassigns variation indices, so every
            # device table that references them must be remapped — both
            # in GDEF and in an already-built GPOS.
            varidx_map = store.optimize()

            gdef.remap_device_varidxes(varidx_map)
            if "GPOS" in self.font:
                self.font["GPOS"].table.remap_device_varidxes(varidx_map)
        self.model_cache.clear()
    if any(
        (
            gdef.GlyphClassDef,
            gdef.AttachList,
            gdef.LigCaretList,
            gdef.MarkAttachClassDef,
            gdef.MarkGlyphSetsDef,
        )
    ) or hasattr(gdef, "VarStore"):
        result = newTable("GDEF")
        result.table = gdef
        return result
    else:
        return None
|
| 793 |
+
|
| 794 |
+
def buildGDEFGlyphClassDef_(self):
    """Build the GDEF GlyphClassDef subtable, or None when empty.

    Explicit ``GlyphClassDef`` statements win outright; otherwise the
    classes are inferred from the lookups, with every mark-class member
    forced to glyph class 3 (mark).
    """
    if self.glyphClassDefs_:
        classes = {glyph: cls for glyph, (cls, _) in self.glyphClassDefs_.items()}
    else:
        classes = {}
        for lookup in self.lookups_:
            classes.update(lookup.inferGlyphClasses())
        for markClass in self.parseTree.markClasses.values():
            for definition in markClass.definitions:
                for glyph in definition.glyphSet():
                    classes[glyph] = 3
    if not classes:
        return None
    result = otTables.GlyphClassDef()
    result.classDefs = classes
    return result
|
| 811 |
+
|
| 812 |
+
def buildGDEFMarkAttachClassDef_(self):
    """Build the GDEF MarkAttachClassDef subtable, or None when no
    MarkAttachmentType classes were assigned."""
    class_defs = {glyph: cls for glyph, (cls, _) in self.markAttach_.items()}
    if not class_defs:
        return None
    subtable = otTables.MarkAttachClassDef()
    subtable.classDefs = class_defs
    return subtable
|
| 819 |
+
|
| 820 |
+
def buildGDEFMarkGlyphSetsDef_(self):
    """Build the GDEF MarkGlyphSets subtable.

    ``markFilterSets_`` maps a glyph set to its filter-set index; the
    sets are emitted in index order. The previous version unpacked the
    index into an unused loop variable and appended in a manual loop;
    a comprehension expresses the same thing directly.
    """
    sets = [
        glyphs
        for glyphs, _ in sorted(self.markFilterSets_.items(), key=lambda item: item[1])
    ]
    return otl.buildMarkGlyphSetsDef(sets, self.glyphMap)
|
| 827 |
+
|
| 828 |
+
def buildDebg(self):
    """Stash lookup debug info in the ``Debg`` table, creating the
    table on first use."""
    debg = self.font.get("Debg")
    if debg is None:
        debg = self.font["Debg"] = newTable("Debg")
        debg.data = {}
    debg.data[LOOKUP_DEBUG_INFO_KEY] = self.lookup_locations
|
| 833 |
+
|
| 834 |
+
def buildLookups_(self, tag):
    """Build the otTables lookups for the GSUB or GPOS table *tag*,
    assigning each contributing builder its lookup index.

    Builders targeting the other table keep ``lookup_index = None``,
    which callers (e.g. ``makeTable``) use to skip them.
    """
    assert tag in ("GPOS", "GSUB"), tag
    # Reset first so indices from a previous table build don't leak.
    for lookup in self.lookups_:
        lookup.lookup_index = None
    lookups = []
    for lookup in self.lookups_:
        if lookup.table != tag:
            continue
        lookup.lookup_index = len(lookups)
        self.lookup_locations[tag][str(lookup.lookup_index)] = LookupDebugInfo(
            location=str(lookup.location),
            name=self.get_lookup_name_(lookup),
            feature=None,
        )
        lookups.append(lookup)
    otLookups = []
    for l in lookups:
        try:
            otLookups.append(l.build())
        except OpenTypeLibError as e:
            raise FeatureLibError(str(e), e.location) from e
        except Exception as e:
            # Re-raise any other failure with the feature-file location
            # of the lookup that failed to build.
            location = self.lookup_locations[tag][str(l.lookup_index)].location
            raise FeatureLibError(str(e), location) from e
    return otLookups
|
| 859 |
+
|
| 860 |
+
def makeTable(self, tag):
    """Build a complete GSUB or GPOS table (named by *tag*) from the
    collected features, lookups and language systems."""
    table = getattr(otTables, tag, None)()
    table.Version = 0x00010000
    table.ScriptList = otTables.ScriptList()
    table.ScriptList.ScriptRecord = []
    table.FeatureList = otTables.FeatureList()
    table.FeatureList.FeatureRecord = []
    table.LookupList = otTables.LookupList()
    table.LookupList.Lookup = self.buildLookups_(tag)

    # Build a table for mapping (tag, lookup_indices) to feature_index.
    # For example, ('liga', (2,3,7)) --> 23.
    feature_indices = {}
    required_feature_indices = {}  # ('latn', 'DEU') --> 23
    scripts = {}  # 'latn' --> {'DEU': [23, 24]} for feature #23,24
    # Sort the feature table by feature tag:
    # https://github.com/fonttools/fonttools/issues/568
    sortFeatureTag = lambda f: (f[0][2], f[0][1], f[0][0], f[1])
    for key, lookups in sorted(self.features_.items(), key=sortFeatureTag):
        script, lang, feature_tag = key
        # l.lookup_index will be None when a lookup is not needed
        # for the table under construction. For example, substitution
        # rules will have no lookup_index while building GPOS tables.
        # We also deduplicate lookup indices, as they only get applied once
        # within a given feature:
        # https://github.com/fonttools/fonttools/issues/2946
        lookup_indices = tuple(
            dict.fromkeys(
                l.lookup_index for l in lookups if l.lookup_index is not None
            )
        )

        # 'size' in GPOS and features with variations must be emitted
        # even when they contribute no lookups here.
        size_feature = tag == "GPOS" and feature_tag == "size"
        force_feature = self.any_feature_variations(feature_tag, tag)
        if len(lookup_indices) == 0 and not size_feature and not force_feature:
            continue

        for ix in lookup_indices:
            try:
                self.lookup_locations[tag][str(ix)] = self.lookup_locations[tag][
                    str(ix)
                ]._replace(feature=key)
            except KeyError:
                warnings.warn(
                    "feaLib.Builder subclass needs upgrading to "
                    "stash debug information. See fonttools#2065."
                )

        # Identical (tag, lookups) pairs share one FeatureRecord.
        feature_key = (feature_tag, lookup_indices)
        feature_index = feature_indices.get(feature_key)
        if feature_index is None:
            feature_index = len(table.FeatureList.FeatureRecord)
            frec = otTables.FeatureRecord()
            frec.FeatureTag = feature_tag
            frec.Feature = otTables.Feature()
            frec.Feature.FeatureParams = self.buildFeatureParams(feature_tag)
            frec.Feature.LookupListIndex = list(lookup_indices)
            frec.Feature.LookupCount = len(lookup_indices)
            table.FeatureList.FeatureRecord.append(frec)
            feature_indices[feature_key] = feature_index
        scripts.setdefault(script, {}).setdefault(lang, []).append(feature_index)
        if self.required_features_.get((script, lang)) == feature_tag:
            required_feature_indices[(script, lang)] = feature_index

    # Build ScriptList.
    for script, lang_features in sorted(scripts.items()):
        srec = otTables.ScriptRecord()
        srec.ScriptTag = script
        srec.Script = otTables.Script()
        srec.Script.DefaultLangSys = None
        srec.Script.LangSysRecord = []
        for lang, feature_indices in sorted(lang_features.items()):
            langrec = otTables.LangSysRecord()
            langrec.LangSys = otTables.LangSys()
            langrec.LangSys.LookupOrder = None

            # The required feature is stored in ReqFeatureIndex, not in
            # the FeatureIndex list; 0xFFFF means "none".
            req_feature_index = required_feature_indices.get((script, lang))
            if req_feature_index is None:
                langrec.LangSys.ReqFeatureIndex = 0xFFFF
            else:
                langrec.LangSys.ReqFeatureIndex = req_feature_index

            langrec.LangSys.FeatureIndex = [
                i for i in feature_indices if i != req_feature_index
            ]
            langrec.LangSys.FeatureCount = len(langrec.LangSys.FeatureIndex)

            if lang == "dflt":
                srec.Script.DefaultLangSys = langrec.LangSys
            else:
                langrec.LangSysTag = lang
                srec.Script.LangSysRecord.append(langrec)
        srec.Script.LangSysCount = len(srec.Script.LangSysRecord)
        table.ScriptList.ScriptRecord.append(srec)

    table.ScriptList.ScriptCount = len(table.ScriptList.ScriptRecord)
    table.FeatureList.FeatureCount = len(table.FeatureList.FeatureRecord)
    table.LookupList.LookupCount = len(table.LookupList.Lookup)
    return table
|
| 959 |
+
|
| 960 |
+
def makeFeatureVariations(self, table, table_tag):
    """Add FeatureVariations to *table* from the collected conditionset
    variations, grouped by feature tag."""
    feature_vars = {}
    has_any_variations = False
    # Sort out which lookups to build, gather their indices
    for (_, _, feature_tag), variations in self.feature_variations_.items():
        feature_vars[feature_tag] = []
        for conditionset, builders in variations.items():
            raw_conditionset = self.conditionsets_[conditionset]
            indices = []
            for builder in builders:
                if builder.table != table_tag:
                    continue
                assert builder.lookup_index is not None
                indices.append(builder.lookup_index)
                has_any_variations = True
            feature_vars[feature_tag].append((raw_conditionset, indices))

    if not has_any_variations:
        return
    for feature_tag, conditions_and_lookups in feature_vars.items():
        addFeatureVariationsRaw(
            self.font, table, conditions_and_lookups, feature_tag
        )
|
| 982 |
+
|
| 983 |
+
def any_feature_variations(self, feature_tag, table_tag):
    """Return True if any conditionset variation of *feature_tag*
    contributes a lookup to the *table_tag* table."""
    return any(
        feature == feature_tag and builder.table == table_tag
        for (_, _, feature), variations in self.feature_variations_.items()
        for builders in variations.values()
        for builder in builders
    )
|
| 991 |
+
|
| 992 |
+
def get_lookup_name_(self, lookup):
    """Return the name under which *lookup* was registered, or None.

    If the same builder is registered under several names, the name
    registered last wins — matching the original dict-inversion
    behavior.
    """
    found = None
    for name, builder in self.named_lookups_.items():
        if builder == lookup:
            found = name
    return found
|
| 997 |
+
|
| 998 |
+
def add_language_system(self, location, script, language):
    """Register a ``languagesystem`` statement.

    Enforces the ordering rules of the OpenType Feature File
    Specification, section 4.b.i: ``DFLT dflt`` must come first, and all
    ``DFLT`` entries must precede entries for other scripts.
    """
    if script == "DFLT" and language == "dflt" and self.default_language_systems_:
        raise FeatureLibError(
            'If "languagesystem DFLT dflt" is present, it must be '
            "the first of the languagesystem statements",
            location,
        )
    if script != "DFLT":
        self.seen_non_DFLT_script_ = True
    elif self.seen_non_DFLT_script_:
        raise FeatureLibError(
            'languagesystems using the "DFLT" script tag must '
            "precede all other languagesystems",
            location,
        )
    if (script, language) in self.default_language_systems_:
        raise FeatureLibError(
            '"languagesystem %s %s" has already been specified'
            % (script.strip(), language.strip()),
            location,
        )
    self.default_language_systems_.add((script, language))
|
| 1022 |
+
|
| 1023 |
+
def get_default_language_systems_(self):
    """Return the language systems in effect outside any explicit
    script/language statement.

    Per the OpenType Feature File specification (4.b.i), a file without
    any ``languagesystem`` statement behaves exactly as if it began with
    ``languagesystem DFLT dflt;``.
    """
    return frozenset(self.default_language_systems_ or {("DFLT", "dflt")})
|
| 1033 |
+
|
| 1034 |
+
def start_feature(self, location, name):
    """Reset per-feature state at the start of a ``feature`` block."""
    self.language_systems = self.get_default_language_systems_()
    self.script_ = "DFLT"
    self.cur_lookup_ = None
    self.cur_feature_name_ = name
    self.lookupflag_ = 0
    self.lookupflag_markFilterSet_ = None
    # Remember where 'aalt' started; its rules are resolved after parsing.
    if name == "aalt":
        self.aalt_location_ = location
|
| 1043 |
+
|
| 1044 |
+
def end_feature(self):
    """Clear per-feature state when a ``feature`` block closes."""
    assert self.cur_feature_name_ is not None
    self.cur_feature_name_ = None
    self.language_systems = None
    self.cur_lookup_ = None
    self.lookupflag_ = 0
    self.lookupflag_markFilterSet_ = None
|
| 1051 |
+
|
| 1052 |
+
def start_lookup_block(self, location, name):
    """Open a named ``lookup`` block, rejecting redefinitions and
    lookup blocks nested inside the 'aalt' feature."""
    if name in self.named_lookups_:
        raise FeatureLibError(
            'Lookup "%s" has already been defined' % name, location
        )
    if self.cur_feature_name_ == "aalt":
        raise FeatureLibError(
            "Lookup blocks cannot be placed inside 'aalt' features; "
            "move it out, and then refer to it with a lookup statement",
            location,
        )
    self.cur_lookup_name_ = name
    self.named_lookups_[name] = None
    self.cur_lookup_ = None
    # A standalone lookup block resets the flags; one inside a feature
    # keeps the feature's current lookup flags.
    if self.cur_feature_name_ is None:
        self.lookupflag_ = 0
        self.lookupflag_markFilterSet_ = None
|
| 1069 |
+
|
| 1070 |
+
def end_lookup_block(self):
    """Close a named ``lookup`` block, clearing per-lookup state."""
    assert self.cur_lookup_name_ is not None
    self.cur_lookup_name_ = None
    self.cur_lookup_ = None
    # Flags reset only for standalone blocks, mirroring start_lookup_block.
    if self.cur_feature_name_ is None:
        self.lookupflag_ = 0
        self.lookupflag_markFilterSet_ = None
|
| 1077 |
+
|
| 1078 |
+
def add_lookup_call(self, lookup_name):
    """Handle a ``lookup <name>;`` reference statement."""
    assert lookup_name in self.named_lookups_, lookup_name
    self.cur_lookup_ = None
    lookup = self.named_lookups_[lookup_name]
    # A named lookup that is still empty contributes nothing.
    if lookup is not None:
        self.add_lookup_to_feature_(lookup, self.cur_feature_name_)
|
| 1084 |
+
|
| 1085 |
+
def set_font_revision(self, location, revision):
    """Record the ``FontRevision`` value destined for the head table."""
    self.fontRevision_ = revision
|
| 1087 |
+
|
| 1088 |
+
def set_language(self, location, language, include_default, required):
    """Handle a ``language`` statement inside a feature block.

    Starts the new (script, language, feature) rule list — optionally
    seeded with a copy of the default-language rules — and narrows the
    active language systems to the single new pair.
    """
    assert len(language) == 4
    if self.cur_feature_name_ in ("aalt", "size"):
        raise FeatureLibError(
            "Language statements are not allowed "
            'within "feature %s"' % self.cur_feature_name_,
            location,
        )
    if self.cur_feature_name_ is None:
        raise FeatureLibError(
            "Language statements are not allowed "
            "within standalone lookup blocks",
            location,
        )
    self.cur_lookup_ = None

    key = (self.script_, language, self.cur_feature_name_)
    dflt_lookups = self.features_.get((key[0], "dflt", key[2]))
    if (language == "dflt" or include_default) and dflt_lookups:
        self.features_[key] = dflt_lookups[:]
    else:
        self.features_[key] = []
    self.language_systems = frozenset([(self.script_, language)])

    if required:
        key = (self.script_, language)
        if key in self.required_features_:
            raise FeatureLibError(
                "Language %s (script %s) has already "
                "specified feature %s as its required feature"
                % (
                    language.strip(),
                    self.script_.strip(),
                    self.required_features_[key].strip(),
                ),
                location,
            )
        self.required_features_[key] = self.cur_feature_name_
|
| 1126 |
+
|
| 1127 |
+
def getMarkAttachClass_(self, location, glyphs):
    """Return (allocating on first use) the MarkAttachmentType class ID
    for *glyphs*; a glyph may belong to at most one class."""
    glyphs = frozenset(glyphs)
    id_ = self.markAttachClassID_.get(glyphs)
    if id_ is not None:
        return id_
    # Class IDs start at 1: 0 means "no class" in the lookup flag byte.
    id_ = len(self.markAttachClassID_) + 1
    self.markAttachClassID_[glyphs] = id_
    for glyph in glyphs:
        if glyph in self.markAttach_:
            _, loc = self.markAttach_[glyph]
            raise FeatureLibError(
                "Glyph %s already has been assigned "
                "a MarkAttachmentType at %s" % (glyph, loc),
                location,
            )
        self.markAttach_[glyph] = (id_, location)
    return id_
|
| 1144 |
+
|
| 1145 |
+
def getMarkFilterSet_(self, location, glyphs):
    """Return (allocating on first use) the mark filtering set index
    for *glyphs*; indices start at 0."""
    glyphs = frozenset(glyphs)
    # setdefault returns the existing index, or inserts the next free one.
    return self.markFilterSets_.setdefault(glyphs, len(self.markFilterSets_))
|
| 1153 |
+
|
| 1154 |
+
def set_lookup_flag(self, location, value, markAttach, markFilter):
    """Handle a ``lookupflag`` statement.

    The low byte comes from *value*; a MarkAttachmentType class goes
    into bits 8-15; UseMarkFilteringSet sets bit 0x10 and records the
    filter set index alongside the flag.
    """
    flags = value & 0xFF
    if markAttach:
        flags |= self.getMarkAttachClass_(location, markAttach) << 8
    if markFilter:
        flags |= 0x10
        self.lookupflag_markFilterSet_ = self.getMarkFilterSet_(location, markFilter)
    else:
        self.lookupflag_markFilterSet_ = None
    self.lookupflag_ = flags
|
| 1166 |
+
|
| 1167 |
+
def set_script(self, location, script):
    """Handle a ``script`` statement inside a feature block."""
    if self.cur_feature_name_ in ("aalt", "size"):
        raise FeatureLibError(
            "Script statements are not allowed "
            'within "feature %s"' % self.cur_feature_name_,
            location,
        )
    if self.cur_feature_name_ is None:
        raise FeatureLibError(
            "Script statements are not allowed " "within standalone lookup blocks",
            location,
        )
    if self.language_systems == {(script, "dflt")}:
        # Nothing to do.
        return
    self.cur_lookup_ = None
    self.script_ = script
    self.lookupflag_ = 0
    self.lookupflag_markFilterSet_ = None
    # A script statement implicitly selects the default language.
    self.set_language(location, "dflt", include_default=True, required=False)
|
| 1187 |
+
|
| 1188 |
+
def find_lookup_builders_(self, lookups):
    """Helper for building chain contextual substitutions.

    Given a list of lookup-name lists, finds the LookupBuilder for each
    name. A None entry in the input maps to a None in the result.
    """
    return [
        (
            None
            if lookuplist is None
            else [self.named_lookups_.get(l.name) for l in lookuplist]
        )
        for lookuplist in lookups
    ]
|
| 1203 |
+
|
| 1204 |
+
def add_attach_points(self, location, glyphs, contourPoints):
    """Record GDEF attachment points for every glyph in *glyphs*."""
    for glyph in glyphs:
        points = self.attachPoints_.setdefault(glyph, set())
        points.update(contourPoints)
|
| 1207 |
+
|
| 1208 |
+
def add_feature_reference(self, location, featureName):
|
| 1209 |
+
if self.cur_feature_name_ != "aalt":
|
| 1210 |
+
raise FeatureLibError(
|
| 1211 |
+
'Feature references are only allowed inside "feature aalt"', location
|
| 1212 |
+
)
|
| 1213 |
+
self.aalt_features_.append((location, featureName))
|
| 1214 |
+
|
| 1215 |
+
def add_featureName(self, tag):
|
| 1216 |
+
self.featureNames_.add(tag)
|
| 1217 |
+
|
| 1218 |
+
def add_cv_parameter(self, tag):
|
| 1219 |
+
self.cv_parameters_.add(tag)
|
| 1220 |
+
|
| 1221 |
+
def add_to_cv_num_named_params(self, tag):
|
| 1222 |
+
"""Adds new items to ``self.cv_num_named_params_``
|
| 1223 |
+
or increments the count of existing items."""
|
| 1224 |
+
if tag in self.cv_num_named_params_:
|
| 1225 |
+
self.cv_num_named_params_[tag] += 1
|
| 1226 |
+
else:
|
| 1227 |
+
self.cv_num_named_params_[tag] = 1
|
| 1228 |
+
|
| 1229 |
+
def add_cv_character(self, character, tag):
|
| 1230 |
+
self.cv_characters_[tag].append(character)
|
| 1231 |
+
|
| 1232 |
+
def set_base_axis(self, bases, scripts, vertical):
|
| 1233 |
+
if vertical:
|
| 1234 |
+
self.base_vert_axis_ = (bases, scripts)
|
| 1235 |
+
else:
|
| 1236 |
+
self.base_horiz_axis_ = (bases, scripts)
|
| 1237 |
+
|
| 1238 |
+
def set_size_parameters(
|
| 1239 |
+
self, location, DesignSize, SubfamilyID, RangeStart, RangeEnd
|
| 1240 |
+
):
|
| 1241 |
+
if self.cur_feature_name_ != "size":
|
| 1242 |
+
raise FeatureLibError(
|
| 1243 |
+
"Parameters statements are not allowed "
|
| 1244 |
+
'within "feature %s"' % self.cur_feature_name_,
|
| 1245 |
+
location,
|
| 1246 |
+
)
|
| 1247 |
+
self.size_parameters_ = [DesignSize, SubfamilyID, RangeStart, RangeEnd]
|
| 1248 |
+
for script, lang in self.language_systems:
|
| 1249 |
+
key = (script, lang, self.cur_feature_name_)
|
| 1250 |
+
self.features_.setdefault(key, [])
|
| 1251 |
+
|
| 1252 |
+
# GSUB rules
|
| 1253 |
+
|
| 1254 |
+
# GSUB 1
|
| 1255 |
+
def add_single_subst(self, location, prefix, suffix, mapping, forceChain):
|
| 1256 |
+
if self.cur_feature_name_ == "aalt":
|
| 1257 |
+
for from_glyph, to_glyph in mapping.items():
|
| 1258 |
+
alts = self.aalt_alternates_.setdefault(from_glyph, [])
|
| 1259 |
+
if to_glyph not in alts:
|
| 1260 |
+
alts.append(to_glyph)
|
| 1261 |
+
return
|
| 1262 |
+
if prefix or suffix or forceChain:
|
| 1263 |
+
self.add_single_subst_chained_(location, prefix, suffix, mapping)
|
| 1264 |
+
return
|
| 1265 |
+
lookup = self.get_lookup_(location, SingleSubstBuilder)
|
| 1266 |
+
for from_glyph, to_glyph in mapping.items():
|
| 1267 |
+
if from_glyph in lookup.mapping:
|
| 1268 |
+
if to_glyph == lookup.mapping[from_glyph]:
|
| 1269 |
+
log.info(
|
| 1270 |
+
"Removing duplicate single substitution from glyph"
|
| 1271 |
+
' "%s" to "%s" at %s',
|
| 1272 |
+
from_glyph,
|
| 1273 |
+
to_glyph,
|
| 1274 |
+
location,
|
| 1275 |
+
)
|
| 1276 |
+
else:
|
| 1277 |
+
raise FeatureLibError(
|
| 1278 |
+
'Already defined rule for replacing glyph "%s" by "%s"'
|
| 1279 |
+
% (from_glyph, lookup.mapping[from_glyph]),
|
| 1280 |
+
location,
|
| 1281 |
+
)
|
| 1282 |
+
lookup.mapping[from_glyph] = to_glyph
|
| 1283 |
+
|
| 1284 |
+
# GSUB 2
|
| 1285 |
+
def add_multiple_subst(
|
| 1286 |
+
self, location, prefix, glyph, suffix, replacements, forceChain=False
|
| 1287 |
+
):
|
| 1288 |
+
if prefix or suffix or forceChain:
|
| 1289 |
+
self.add_multi_subst_chained_(location, prefix, glyph, suffix, replacements)
|
| 1290 |
+
return
|
| 1291 |
+
lookup = self.get_lookup_(location, MultipleSubstBuilder)
|
| 1292 |
+
if glyph in lookup.mapping:
|
| 1293 |
+
if replacements == lookup.mapping[glyph]:
|
| 1294 |
+
log.info(
|
| 1295 |
+
"Removing duplicate multiple substitution from glyph"
|
| 1296 |
+
' "%s" to %s%s',
|
| 1297 |
+
glyph,
|
| 1298 |
+
replacements,
|
| 1299 |
+
f" at {location}" if location else "",
|
| 1300 |
+
)
|
| 1301 |
+
else:
|
| 1302 |
+
raise FeatureLibError(
|
| 1303 |
+
'Already defined substitution for glyph "%s"' % glyph, location
|
| 1304 |
+
)
|
| 1305 |
+
lookup.mapping[glyph] = replacements
|
| 1306 |
+
|
| 1307 |
+
# GSUB 3
|
| 1308 |
+
def add_alternate_subst(self, location, prefix, glyph, suffix, replacement):
|
| 1309 |
+
if self.cur_feature_name_ == "aalt":
|
| 1310 |
+
alts = self.aalt_alternates_.setdefault(glyph, [])
|
| 1311 |
+
alts.extend(g for g in replacement if g not in alts)
|
| 1312 |
+
return
|
| 1313 |
+
if prefix or suffix:
|
| 1314 |
+
chain = self.get_lookup_(location, ChainContextSubstBuilder)
|
| 1315 |
+
lookup = self.get_chained_lookup_(location, AlternateSubstBuilder)
|
| 1316 |
+
chain.rules.append(ChainContextualRule(prefix, [{glyph}], suffix, [lookup]))
|
| 1317 |
+
else:
|
| 1318 |
+
lookup = self.get_lookup_(location, AlternateSubstBuilder)
|
| 1319 |
+
if glyph in lookup.alternates:
|
| 1320 |
+
raise FeatureLibError(
|
| 1321 |
+
'Already defined alternates for glyph "%s"' % glyph, location
|
| 1322 |
+
)
|
| 1323 |
+
# We allow empty replacement glyphs here.
|
| 1324 |
+
lookup.alternates[glyph] = replacement
|
| 1325 |
+
|
| 1326 |
+
# GSUB 4
|
| 1327 |
+
def add_ligature_subst(
|
| 1328 |
+
self, location, prefix, glyphs, suffix, replacement, forceChain
|
| 1329 |
+
):
|
| 1330 |
+
if prefix or suffix or forceChain:
|
| 1331 |
+
chain = self.get_lookup_(location, ChainContextSubstBuilder)
|
| 1332 |
+
lookup = self.get_chained_lookup_(location, LigatureSubstBuilder)
|
| 1333 |
+
chain.rules.append(ChainContextualRule(prefix, glyphs, suffix, [lookup]))
|
| 1334 |
+
else:
|
| 1335 |
+
lookup = self.get_lookup_(location, LigatureSubstBuilder)
|
| 1336 |
+
|
| 1337 |
+
if not all(glyphs):
|
| 1338 |
+
raise FeatureLibError("Empty glyph class in substitution", location)
|
| 1339 |
+
|
| 1340 |
+
# OpenType feature file syntax, section 5.d, "Ligature substitution":
|
| 1341 |
+
# "Since the OpenType specification does not allow ligature
|
| 1342 |
+
# substitutions to be specified on target sequences that contain
|
| 1343 |
+
# glyph classes, the implementation software will enumerate
|
| 1344 |
+
# all specific glyph sequences if glyph classes are detected"
|
| 1345 |
+
for g in itertools.product(*glyphs):
|
| 1346 |
+
lookup.ligatures[g] = replacement
|
| 1347 |
+
|
| 1348 |
+
# GSUB 5/6
|
| 1349 |
+
def add_chain_context_subst(self, location, prefix, glyphs, suffix, lookups):
|
| 1350 |
+
if not all(glyphs) or not all(prefix) or not all(suffix):
|
| 1351 |
+
raise FeatureLibError(
|
| 1352 |
+
"Empty glyph class in contextual substitution", location
|
| 1353 |
+
)
|
| 1354 |
+
lookup = self.get_lookup_(location, ChainContextSubstBuilder)
|
| 1355 |
+
lookup.rules.append(
|
| 1356 |
+
ChainContextualRule(
|
| 1357 |
+
prefix, glyphs, suffix, self.find_lookup_builders_(lookups)
|
| 1358 |
+
)
|
| 1359 |
+
)
|
| 1360 |
+
|
| 1361 |
+
def add_single_subst_chained_(self, location, prefix, suffix, mapping):
|
| 1362 |
+
if not mapping or not all(prefix) or not all(suffix):
|
| 1363 |
+
raise FeatureLibError(
|
| 1364 |
+
"Empty glyph class in contextual substitution", location
|
| 1365 |
+
)
|
| 1366 |
+
# https://github.com/fonttools/fonttools/issues/512
|
| 1367 |
+
# https://github.com/fonttools/fonttools/issues/2150
|
| 1368 |
+
chain = self.get_lookup_(location, ChainContextSubstBuilder)
|
| 1369 |
+
sub = chain.find_chainable_subst(mapping, SingleSubstBuilder)
|
| 1370 |
+
if sub is None:
|
| 1371 |
+
sub = self.get_chained_lookup_(location, SingleSubstBuilder)
|
| 1372 |
+
sub.mapping.update(mapping)
|
| 1373 |
+
chain.rules.append(
|
| 1374 |
+
ChainContextualRule(prefix, [list(mapping.keys())], suffix, [sub])
|
| 1375 |
+
)
|
| 1376 |
+
|
| 1377 |
+
def add_multi_subst_chained_(self, location, prefix, glyph, suffix, replacements):
|
| 1378 |
+
if not all(prefix) or not all(suffix):
|
| 1379 |
+
raise FeatureLibError(
|
| 1380 |
+
"Empty glyph class in contextual substitution", location
|
| 1381 |
+
)
|
| 1382 |
+
# https://github.com/fonttools/fonttools/issues/3551
|
| 1383 |
+
chain = self.get_lookup_(location, ChainContextSubstBuilder)
|
| 1384 |
+
sub = chain.find_chainable_subst({glyph: replacements}, MultipleSubstBuilder)
|
| 1385 |
+
if sub is None:
|
| 1386 |
+
sub = self.get_chained_lookup_(location, MultipleSubstBuilder)
|
| 1387 |
+
sub.mapping[glyph] = replacements
|
| 1388 |
+
chain.rules.append(ChainContextualRule(prefix, [{glyph}], suffix, [sub]))
|
| 1389 |
+
|
| 1390 |
+
# GSUB 8
|
| 1391 |
+
def add_reverse_chain_single_subst(self, location, old_prefix, old_suffix, mapping):
|
| 1392 |
+
if not mapping:
|
| 1393 |
+
raise FeatureLibError("Empty glyph class in substitution", location)
|
| 1394 |
+
lookup = self.get_lookup_(location, ReverseChainSingleSubstBuilder)
|
| 1395 |
+
lookup.rules.append((old_prefix, old_suffix, mapping))
|
| 1396 |
+
|
| 1397 |
+
# GPOS rules
|
| 1398 |
+
|
| 1399 |
+
# GPOS 1
|
| 1400 |
+
def add_single_pos(self, location, prefix, suffix, pos, forceChain):
|
| 1401 |
+
if prefix or suffix or forceChain:
|
| 1402 |
+
self.add_single_pos_chained_(location, prefix, suffix, pos)
|
| 1403 |
+
else:
|
| 1404 |
+
lookup = self.get_lookup_(location, SinglePosBuilder)
|
| 1405 |
+
for glyphs, value in pos:
|
| 1406 |
+
if not glyphs:
|
| 1407 |
+
raise FeatureLibError(
|
| 1408 |
+
"Empty glyph class in positioning rule", location
|
| 1409 |
+
)
|
| 1410 |
+
otValueRecord = self.makeOpenTypeValueRecord(
|
| 1411 |
+
location, value, pairPosContext=False
|
| 1412 |
+
)
|
| 1413 |
+
for glyph in glyphs:
|
| 1414 |
+
try:
|
| 1415 |
+
lookup.add_pos(location, glyph, otValueRecord)
|
| 1416 |
+
except OpenTypeLibError as e:
|
| 1417 |
+
raise FeatureLibError(str(e), e.location) from e
|
| 1418 |
+
|
| 1419 |
+
# GPOS 2
|
| 1420 |
+
def add_class_pair_pos(self, location, glyphclass1, value1, glyphclass2, value2):
|
| 1421 |
+
if not glyphclass1 or not glyphclass2:
|
| 1422 |
+
raise FeatureLibError("Empty glyph class in positioning rule", location)
|
| 1423 |
+
lookup = self.get_lookup_(location, PairPosBuilder)
|
| 1424 |
+
v1 = self.makeOpenTypeValueRecord(location, value1, pairPosContext=True)
|
| 1425 |
+
v2 = self.makeOpenTypeValueRecord(location, value2, pairPosContext=True)
|
| 1426 |
+
lookup.addClassPair(location, glyphclass1, v1, glyphclass2, v2)
|
| 1427 |
+
|
| 1428 |
+
def add_specific_pair_pos(self, location, glyph1, value1, glyph2, value2):
|
| 1429 |
+
if not glyph1 or not glyph2:
|
| 1430 |
+
raise FeatureLibError("Empty glyph class in positioning rule", location)
|
| 1431 |
+
lookup = self.get_lookup_(location, PairPosBuilder)
|
| 1432 |
+
v1 = self.makeOpenTypeValueRecord(location, value1, pairPosContext=True)
|
| 1433 |
+
v2 = self.makeOpenTypeValueRecord(location, value2, pairPosContext=True)
|
| 1434 |
+
lookup.addGlyphPair(location, glyph1, v1, glyph2, v2)
|
| 1435 |
+
|
| 1436 |
+
# GPOS 3
|
| 1437 |
+
def add_cursive_pos(self, location, glyphclass, entryAnchor, exitAnchor):
|
| 1438 |
+
if not glyphclass:
|
| 1439 |
+
raise FeatureLibError("Empty glyph class in positioning rule", location)
|
| 1440 |
+
lookup = self.get_lookup_(location, CursivePosBuilder)
|
| 1441 |
+
lookup.add_attachment(
|
| 1442 |
+
location,
|
| 1443 |
+
glyphclass,
|
| 1444 |
+
self.makeOpenTypeAnchor(location, entryAnchor),
|
| 1445 |
+
self.makeOpenTypeAnchor(location, exitAnchor),
|
| 1446 |
+
)
|
| 1447 |
+
|
| 1448 |
+
# GPOS 4
|
| 1449 |
+
def add_mark_base_pos(self, location, bases, marks):
|
| 1450 |
+
builder = self.get_lookup_(location, MarkBasePosBuilder)
|
| 1451 |
+
self.add_marks_(location, builder, marks)
|
| 1452 |
+
if not bases:
|
| 1453 |
+
raise FeatureLibError("Empty glyph class in positioning rule", location)
|
| 1454 |
+
for baseAnchor, markClass in marks:
|
| 1455 |
+
otBaseAnchor = self.makeOpenTypeAnchor(location, baseAnchor)
|
| 1456 |
+
for base in bases:
|
| 1457 |
+
builder.bases.setdefault(base, {})[markClass.name] = otBaseAnchor
|
| 1458 |
+
|
| 1459 |
+
# GPOS 5
|
| 1460 |
+
def add_mark_lig_pos(self, location, ligatures, components):
|
| 1461 |
+
builder = self.get_lookup_(location, MarkLigPosBuilder)
|
| 1462 |
+
componentAnchors = []
|
| 1463 |
+
if not ligatures:
|
| 1464 |
+
raise FeatureLibError("Empty glyph class in positioning rule", location)
|
| 1465 |
+
for marks in components:
|
| 1466 |
+
anchors = {}
|
| 1467 |
+
self.add_marks_(location, builder, marks)
|
| 1468 |
+
for ligAnchor, markClass in marks:
|
| 1469 |
+
anchors[markClass.name] = self.makeOpenTypeAnchor(location, ligAnchor)
|
| 1470 |
+
componentAnchors.append(anchors)
|
| 1471 |
+
for glyph in ligatures:
|
| 1472 |
+
builder.ligatures[glyph] = componentAnchors
|
| 1473 |
+
|
| 1474 |
+
# GPOS 6
|
| 1475 |
+
def add_mark_mark_pos(self, location, baseMarks, marks):
|
| 1476 |
+
builder = self.get_lookup_(location, MarkMarkPosBuilder)
|
| 1477 |
+
self.add_marks_(location, builder, marks)
|
| 1478 |
+
if not baseMarks:
|
| 1479 |
+
raise FeatureLibError("Empty glyph class in positioning rule", location)
|
| 1480 |
+
for baseAnchor, markClass in marks:
|
| 1481 |
+
otBaseAnchor = self.makeOpenTypeAnchor(location, baseAnchor)
|
| 1482 |
+
for baseMark in baseMarks:
|
| 1483 |
+
builder.baseMarks.setdefault(baseMark, {})[
|
| 1484 |
+
markClass.name
|
| 1485 |
+
] = otBaseAnchor
|
| 1486 |
+
|
| 1487 |
+
# GPOS 7/8
|
| 1488 |
+
def add_chain_context_pos(self, location, prefix, glyphs, suffix, lookups):
|
| 1489 |
+
if not all(glyphs) or not all(prefix) or not all(suffix):
|
| 1490 |
+
raise FeatureLibError(
|
| 1491 |
+
"Empty glyph class in contextual positioning rule", location
|
| 1492 |
+
)
|
| 1493 |
+
lookup = self.get_lookup_(location, ChainContextPosBuilder)
|
| 1494 |
+
lookup.rules.append(
|
| 1495 |
+
ChainContextualRule(
|
| 1496 |
+
prefix, glyphs, suffix, self.find_lookup_builders_(lookups)
|
| 1497 |
+
)
|
| 1498 |
+
)
|
| 1499 |
+
|
| 1500 |
+
def add_single_pos_chained_(self, location, prefix, suffix, pos):
|
| 1501 |
+
if not pos or not all(prefix) or not all(suffix):
|
| 1502 |
+
raise FeatureLibError(
|
| 1503 |
+
"Empty glyph class in contextual positioning rule", location
|
| 1504 |
+
)
|
| 1505 |
+
# https://github.com/fonttools/fonttools/issues/514
|
| 1506 |
+
chain = self.get_lookup_(location, ChainContextPosBuilder)
|
| 1507 |
+
targets = []
|
| 1508 |
+
for _, _, _, lookups in chain.rules:
|
| 1509 |
+
targets.extend(lookups)
|
| 1510 |
+
subs = []
|
| 1511 |
+
for glyphs, value in pos:
|
| 1512 |
+
if value is None:
|
| 1513 |
+
subs.append(None)
|
| 1514 |
+
continue
|
| 1515 |
+
otValue = self.makeOpenTypeValueRecord(
|
| 1516 |
+
location, value, pairPosContext=False
|
| 1517 |
+
)
|
| 1518 |
+
sub = chain.find_chainable_single_pos(targets, glyphs, otValue)
|
| 1519 |
+
if sub is None:
|
| 1520 |
+
sub = self.get_chained_lookup_(location, SinglePosBuilder)
|
| 1521 |
+
targets.append(sub)
|
| 1522 |
+
for glyph in glyphs:
|
| 1523 |
+
sub.add_pos(location, glyph, otValue)
|
| 1524 |
+
subs.append(sub)
|
| 1525 |
+
assert len(pos) == len(subs), (pos, subs)
|
| 1526 |
+
chain.rules.append(
|
| 1527 |
+
ChainContextualRule(prefix, [g for g, v in pos], suffix, subs)
|
| 1528 |
+
)
|
| 1529 |
+
|
| 1530 |
+
def add_marks_(self, location, lookupBuilder, marks):
|
| 1531 |
+
"""Helper for add_mark_{base,liga,mark}_pos."""
|
| 1532 |
+
for _, markClass in marks:
|
| 1533 |
+
for markClassDef in markClass.definitions:
|
| 1534 |
+
for mark in markClassDef.glyphs.glyphSet():
|
| 1535 |
+
if mark not in lookupBuilder.marks:
|
| 1536 |
+
otMarkAnchor = self.makeOpenTypeAnchor(
|
| 1537 |
+
location, copy.deepcopy(markClassDef.anchor)
|
| 1538 |
+
)
|
| 1539 |
+
lookupBuilder.marks[mark] = (markClass.name, otMarkAnchor)
|
| 1540 |
+
else:
|
| 1541 |
+
existingMarkClass = lookupBuilder.marks[mark][0]
|
| 1542 |
+
if markClass.name != existingMarkClass:
|
| 1543 |
+
raise FeatureLibError(
|
| 1544 |
+
"Glyph %s cannot be in both @%s and @%s"
|
| 1545 |
+
% (mark, existingMarkClass, markClass.name),
|
| 1546 |
+
location,
|
| 1547 |
+
)
|
| 1548 |
+
|
| 1549 |
+
def add_subtable_break(self, location):
|
| 1550 |
+
self.cur_lookup_.add_subtable_break(location)
|
| 1551 |
+
|
| 1552 |
+
def setGlyphClass_(self, location, glyph, glyphClass):
|
| 1553 |
+
oldClass, oldLocation = self.glyphClassDefs_.get(glyph, (None, None))
|
| 1554 |
+
if oldClass and oldClass != glyphClass:
|
| 1555 |
+
raise FeatureLibError(
|
| 1556 |
+
"Glyph %s was assigned to a different class at %s"
|
| 1557 |
+
% (glyph, oldLocation),
|
| 1558 |
+
location,
|
| 1559 |
+
)
|
| 1560 |
+
self.glyphClassDefs_[glyph] = (glyphClass, location)
|
| 1561 |
+
|
| 1562 |
+
def add_glyphClassDef(
|
| 1563 |
+
self, location, baseGlyphs, ligatureGlyphs, markGlyphs, componentGlyphs
|
| 1564 |
+
):
|
| 1565 |
+
for glyph in baseGlyphs:
|
| 1566 |
+
self.setGlyphClass_(location, glyph, 1)
|
| 1567 |
+
for glyph in ligatureGlyphs:
|
| 1568 |
+
self.setGlyphClass_(location, glyph, 2)
|
| 1569 |
+
for glyph in markGlyphs:
|
| 1570 |
+
self.setGlyphClass_(location, glyph, 3)
|
| 1571 |
+
for glyph in componentGlyphs:
|
| 1572 |
+
self.setGlyphClass_(location, glyph, 4)
|
| 1573 |
+
|
| 1574 |
+
def add_ligatureCaretByIndex_(self, location, glyphs, carets):
|
| 1575 |
+
for glyph in glyphs:
|
| 1576 |
+
if glyph not in self.ligCaretPoints_:
|
| 1577 |
+
self.ligCaretPoints_[glyph] = carets
|
| 1578 |
+
|
| 1579 |
+
def makeLigCaret(self, location, caret):
|
| 1580 |
+
if not isinstance(caret, VariableScalar):
|
| 1581 |
+
return caret
|
| 1582 |
+
default, device = self.makeVariablePos(location, caret)
|
| 1583 |
+
if device is not None:
|
| 1584 |
+
return (default, device)
|
| 1585 |
+
return default
|
| 1586 |
+
|
| 1587 |
+
def add_ligatureCaretByPos_(self, location, glyphs, carets):
|
| 1588 |
+
carets = [self.makeLigCaret(location, caret) for caret in carets]
|
| 1589 |
+
for glyph in glyphs:
|
| 1590 |
+
if glyph not in self.ligCaretCoords_:
|
| 1591 |
+
self.ligCaretCoords_[glyph] = carets
|
| 1592 |
+
|
| 1593 |
+
def add_name_record(self, location, nameID, platformID, platEncID, langID, string):
|
| 1594 |
+
self.names_.append([nameID, platformID, platEncID, langID, string])
|
| 1595 |
+
|
| 1596 |
+
def add_os2_field(self, key, value):
|
| 1597 |
+
self.os2_[key] = value
|
| 1598 |
+
|
| 1599 |
+
def add_hhea_field(self, key, value):
|
| 1600 |
+
self.hhea_[key] = value
|
| 1601 |
+
|
| 1602 |
+
def add_vhea_field(self, key, value):
|
| 1603 |
+
self.vhea_[key] = value
|
| 1604 |
+
|
| 1605 |
+
def add_conditionset(self, location, key, value):
|
| 1606 |
+
if "fvar" not in self.font:
|
| 1607 |
+
raise FeatureLibError(
|
| 1608 |
+
"Cannot add feature variations to a font without an 'fvar' table",
|
| 1609 |
+
location,
|
| 1610 |
+
)
|
| 1611 |
+
|
| 1612 |
+
# Normalize
|
| 1613 |
+
axisMap = {
|
| 1614 |
+
axis.axisTag: (axis.minValue, axis.defaultValue, axis.maxValue)
|
| 1615 |
+
for axis in self.axes
|
| 1616 |
+
}
|
| 1617 |
+
|
| 1618 |
+
value = {
|
| 1619 |
+
tag: (
|
| 1620 |
+
normalizeValue(bottom, axisMap[tag]),
|
| 1621 |
+
normalizeValue(top, axisMap[tag]),
|
| 1622 |
+
)
|
| 1623 |
+
for tag, (bottom, top) in value.items()
|
| 1624 |
+
}
|
| 1625 |
+
|
| 1626 |
+
# NOTE: This might result in rounding errors (off-by-ones) compared to
|
| 1627 |
+
# rules in Designspace files, since we're working with what's in the
|
| 1628 |
+
# `avar` table rather than the original values.
|
| 1629 |
+
if "avar" in self.font:
|
| 1630 |
+
mapping = self.font["avar"].segments
|
| 1631 |
+
value = {
|
| 1632 |
+
axis: tuple(
|
| 1633 |
+
piecewiseLinearMap(v, mapping[axis]) if axis in mapping else v
|
| 1634 |
+
for v in condition_range
|
| 1635 |
+
)
|
| 1636 |
+
for axis, condition_range in value.items()
|
| 1637 |
+
}
|
| 1638 |
+
|
| 1639 |
+
self.conditionsets_[key] = value
|
| 1640 |
+
|
| 1641 |
+
def makeVariablePos(self, location, varscalar):
|
| 1642 |
+
if not self.varstorebuilder:
|
| 1643 |
+
raise FeatureLibError(
|
| 1644 |
+
"Can't define a variable scalar in a non-variable font", location
|
| 1645 |
+
)
|
| 1646 |
+
|
| 1647 |
+
varscalar.axes = self.axes
|
| 1648 |
+
if not varscalar.does_vary:
|
| 1649 |
+
return varscalar.default, None
|
| 1650 |
+
|
| 1651 |
+
default, index = varscalar.add_to_variation_store(
|
| 1652 |
+
self.varstorebuilder, self.model_cache, self.font.get("avar")
|
| 1653 |
+
)
|
| 1654 |
+
|
| 1655 |
+
device = None
|
| 1656 |
+
if index is not None and index != 0xFFFFFFFF:
|
| 1657 |
+
device = buildVarDevTable(index)
|
| 1658 |
+
|
| 1659 |
+
return default, device
|
| 1660 |
+
|
| 1661 |
+
def makeOpenTypeAnchor(self, location, anchor):
|
| 1662 |
+
"""ast.Anchor --> otTables.Anchor"""
|
| 1663 |
+
if anchor is None:
|
| 1664 |
+
return None
|
| 1665 |
+
variable = False
|
| 1666 |
+
deviceX, deviceY = None, None
|
| 1667 |
+
if anchor.xDeviceTable is not None:
|
| 1668 |
+
deviceX = otl.buildDevice(dict(anchor.xDeviceTable))
|
| 1669 |
+
if anchor.yDeviceTable is not None:
|
| 1670 |
+
deviceY = otl.buildDevice(dict(anchor.yDeviceTable))
|
| 1671 |
+
for dim in ("x", "y"):
|
| 1672 |
+
varscalar = getattr(anchor, dim)
|
| 1673 |
+
if not isinstance(varscalar, VariableScalar):
|
| 1674 |
+
continue
|
| 1675 |
+
if getattr(anchor, dim + "DeviceTable") is not None:
|
| 1676 |
+
raise FeatureLibError(
|
| 1677 |
+
"Can't define a device coordinate and variable scalar", location
|
| 1678 |
+
)
|
| 1679 |
+
default, device = self.makeVariablePos(location, varscalar)
|
| 1680 |
+
setattr(anchor, dim, default)
|
| 1681 |
+
if device is not None:
|
| 1682 |
+
if dim == "x":
|
| 1683 |
+
deviceX = device
|
| 1684 |
+
else:
|
| 1685 |
+
deviceY = device
|
| 1686 |
+
variable = True
|
| 1687 |
+
|
| 1688 |
+
otlanchor = otl.buildAnchor(
|
| 1689 |
+
anchor.x, anchor.y, anchor.contourpoint, deviceX, deviceY
|
| 1690 |
+
)
|
| 1691 |
+
if variable:
|
| 1692 |
+
otlanchor.Format = 3
|
| 1693 |
+
return otlanchor
|
| 1694 |
+
|
| 1695 |
+
_VALUEREC_ATTRS = {
|
| 1696 |
+
name[0].lower() + name[1:]: (name, isDevice)
|
| 1697 |
+
for _, name, isDevice, _ in otBase.valueRecordFormat
|
| 1698 |
+
if not name.startswith("Reserved")
|
| 1699 |
+
}
|
| 1700 |
+
|
| 1701 |
+
def makeOpenTypeValueRecord(self, location, v, pairPosContext):
|
| 1702 |
+
"""ast.ValueRecord --> otBase.ValueRecord"""
|
| 1703 |
+
if not v:
|
| 1704 |
+
return None
|
| 1705 |
+
|
| 1706 |
+
vr = {}
|
| 1707 |
+
for astName, (otName, isDevice) in self._VALUEREC_ATTRS.items():
|
| 1708 |
+
val = getattr(v, astName, None)
|
| 1709 |
+
if not val:
|
| 1710 |
+
continue
|
| 1711 |
+
if isDevice:
|
| 1712 |
+
vr[otName] = otl.buildDevice(dict(val))
|
| 1713 |
+
elif isinstance(val, VariableScalar):
|
| 1714 |
+
otDeviceName = otName[0:4] + "Device"
|
| 1715 |
+
feaDeviceName = otDeviceName[0].lower() + otDeviceName[1:]
|
| 1716 |
+
if getattr(v, feaDeviceName):
|
| 1717 |
+
raise FeatureLibError(
|
| 1718 |
+
"Can't define a device coordinate and variable scalar", location
|
| 1719 |
+
)
|
| 1720 |
+
vr[otName], device = self.makeVariablePos(location, val)
|
| 1721 |
+
if device is not None:
|
| 1722 |
+
vr[otDeviceName] = device
|
| 1723 |
+
else:
|
| 1724 |
+
vr[otName] = val
|
| 1725 |
+
|
| 1726 |
+
if pairPosContext and not vr:
|
| 1727 |
+
vr = {"YAdvance": 0} if v.vertical else {"XAdvance": 0}
|
| 1728 |
+
valRec = otl.buildValue(vr)
|
| 1729 |
+
return valRec
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/error.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
class FeatureLibError(Exception):
|
| 2 |
+
def __init__(self, message, location):
|
| 3 |
+
Exception.__init__(self, message)
|
| 4 |
+
self.location = location
|
| 5 |
+
|
| 6 |
+
def __str__(self):
|
| 7 |
+
message = Exception.__str__(self)
|
| 8 |
+
if self.location:
|
| 9 |
+
return f"{self.location}: {message}"
|
| 10 |
+
else:
|
| 11 |
+
return message
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class IncludedFeaNotFound(FeatureLibError):
|
| 15 |
+
def __str__(self):
|
| 16 |
+
assert self.location is not None
|
| 17 |
+
|
| 18 |
+
message = (
|
| 19 |
+
"The following feature file should be included but cannot be found: "
|
| 20 |
+
f"{Exception.__str__(self)}"
|
| 21 |
+
)
|
| 22 |
+
return f"{self.location}: {message}"
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/lexer.cpython-310-x86_64-linux-gnu.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:825459b9ea2323c58f4e686eff3a29748cd8202747b28cb8bab4c464e6a25baa
|
| 3 |
+
size 1346664
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/feaLib/parser.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/__pycache__/layout.cpython-310.pyc
ADDED
|
Binary file (11.7 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/merge/util.py
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2013 Google, Inc. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Google Author(s): Behdad Esfahbod, Roozbeh Pournader
|
| 4 |
+
|
| 5 |
+
from fontTools.misc.timeTools import timestampNow
|
| 6 |
+
from fontTools.ttLib.tables.DefaultTable import DefaultTable
|
| 7 |
+
from functools import reduce
|
| 8 |
+
import operator
|
| 9 |
+
import logging
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
log = logging.getLogger("fontTools.merge")
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
# General utility functions for merging values from different fonts
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def equal(lst):
|
| 19 |
+
lst = list(lst)
|
| 20 |
+
t = iter(lst)
|
| 21 |
+
first = next(t)
|
| 22 |
+
assert all(item == first for item in t), "Expected all items to be equal: %s" % lst
|
| 23 |
+
return first
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def first(lst):
|
| 27 |
+
return next(iter(lst))
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def recalculate(lst):
|
| 31 |
+
return NotImplemented
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def current_time(lst):
|
| 35 |
+
return timestampNow()
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def bitwise_and(lst):
|
| 39 |
+
return reduce(operator.and_, lst)
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def bitwise_or(lst):
|
| 43 |
+
return reduce(operator.or_, lst)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def avg_int(lst):
|
| 47 |
+
lst = list(lst)
|
| 48 |
+
return sum(lst) // len(lst)
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def onlyExisting(func):
|
| 52 |
+
"""Returns a filter func that when called with a list,
|
| 53 |
+
only calls func on the non-NotImplemented items of the list,
|
| 54 |
+
and only so if there's at least one item remaining.
|
| 55 |
+
Otherwise returns NotImplemented."""
|
| 56 |
+
|
| 57 |
+
def wrapper(lst):
|
| 58 |
+
items = [item for item in lst if item is not NotImplemented]
|
| 59 |
+
return func(items) if items else NotImplemented
|
| 60 |
+
|
| 61 |
+
return wrapper
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def sumLists(lst):
|
| 65 |
+
l = []
|
| 66 |
+
for item in lst:
|
| 67 |
+
l.extend(item)
|
| 68 |
+
return l
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def sumDicts(lst):
|
| 72 |
+
d = {}
|
| 73 |
+
for item in lst:
|
| 74 |
+
d.update(item)
|
| 75 |
+
return d
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def mergeBits(bitmap):
|
| 79 |
+
def wrapper(lst):
|
| 80 |
+
lst = list(lst)
|
| 81 |
+
returnValue = 0
|
| 82 |
+
for bitNumber in range(bitmap["size"]):
|
| 83 |
+
try:
|
| 84 |
+
mergeLogic = bitmap[bitNumber]
|
| 85 |
+
except KeyError:
|
| 86 |
+
try:
|
| 87 |
+
mergeLogic = bitmap["*"]
|
| 88 |
+
except KeyError:
|
| 89 |
+
raise Exception("Don't know how to merge bit %s" % bitNumber)
|
| 90 |
+
shiftedBit = 1 << bitNumber
|
| 91 |
+
mergedValue = mergeLogic(bool(item & shiftedBit) for item in lst)
|
| 92 |
+
returnValue |= mergedValue << bitNumber
|
| 93 |
+
return returnValue
|
| 94 |
+
|
| 95 |
+
return wrapper
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
class AttendanceRecordingIdentityDict(object):
|
| 99 |
+
"""A dictionary-like object that records indices of items actually accessed
|
| 100 |
+
from a list."""
|
| 101 |
+
|
| 102 |
+
def __init__(self, lst):
|
| 103 |
+
self.l = lst
|
| 104 |
+
self.d = {id(v): i for i, v in enumerate(lst)}
|
| 105 |
+
self.s = set()
|
| 106 |
+
|
| 107 |
+
def __getitem__(self, v):
|
| 108 |
+
self.s.add(self.d[id(v)])
|
| 109 |
+
return v
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
class GregariousIdentityDict(object):
|
| 113 |
+
"""A dictionary-like object that welcomes guests without reservations and
|
| 114 |
+
adds them to the end of the guest list."""
|
| 115 |
+
|
| 116 |
+
def __init__(self, lst):
|
| 117 |
+
self.l = lst
|
| 118 |
+
self.s = set(id(v) for v in lst)
|
| 119 |
+
|
| 120 |
+
def __getitem__(self, v):
|
| 121 |
+
if id(v) not in self.s:
|
| 122 |
+
self.s.add(id(v))
|
| 123 |
+
self.l.append(v)
|
| 124 |
+
return v
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
class NonhashableDict(object):
|
| 128 |
+
"""A dictionary-like object mapping objects to values."""
|
| 129 |
+
|
| 130 |
+
def __init__(self, keys, values=None):
|
| 131 |
+
if values is None:
|
| 132 |
+
self.d = {id(v): i for i, v in enumerate(keys)}
|
| 133 |
+
else:
|
| 134 |
+
self.d = {id(k): v for k, v in zip(keys, values)}
|
| 135 |
+
|
| 136 |
+
def __getitem__(self, k):
|
| 137 |
+
return self.d[id(k)]
|
| 138 |
+
|
| 139 |
+
def __setitem__(self, k, v):
|
| 140 |
+
self.d[id(k)] = v
|
| 141 |
+
|
| 142 |
+
def __delitem__(self, k):
|
| 143 |
+
del self.d[id(k)]
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/lazyTools.cpython-310.pyc
ADDED
|
Binary file (1.72 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/py23.cpython-310.pyc
ADDED
|
Binary file (2.35 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/__pycache__/xmlReader.cpython-310.pyc
ADDED
|
Binary file (4.94 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/bezierTools.c
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/bezierTools.py
ADDED
|
@@ -0,0 +1,1493 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""fontTools.misc.bezierTools.py -- tools for working with Bezier path segments.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from fontTools.misc.arrayTools import calcBounds, sectRect, rectArea
|
| 6 |
+
from fontTools.misc.transform import Identity
|
| 7 |
+
import math
|
| 8 |
+
from collections import namedtuple
|
| 9 |
+
|
| 10 |
+
try:
|
| 11 |
+
import cython
|
| 12 |
+
|
| 13 |
+
COMPILED = cython.compiled
|
| 14 |
+
except (AttributeError, ImportError):
|
| 15 |
+
# if cython not installed, use mock module with no-op decorators and types
|
| 16 |
+
from fontTools.misc import cython
|
| 17 |
+
|
| 18 |
+
COMPILED = False
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
EPSILON = 1e-9
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
Intersection = namedtuple("Intersection", ["pt", "t1", "t2"])
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
__all__ = [
|
| 28 |
+
"approximateCubicArcLength",
|
| 29 |
+
"approximateCubicArcLengthC",
|
| 30 |
+
"approximateQuadraticArcLength",
|
| 31 |
+
"approximateQuadraticArcLengthC",
|
| 32 |
+
"calcCubicArcLength",
|
| 33 |
+
"calcCubicArcLengthC",
|
| 34 |
+
"calcQuadraticArcLength",
|
| 35 |
+
"calcQuadraticArcLengthC",
|
| 36 |
+
"calcCubicBounds",
|
| 37 |
+
"calcQuadraticBounds",
|
| 38 |
+
"splitLine",
|
| 39 |
+
"splitQuadratic",
|
| 40 |
+
"splitCubic",
|
| 41 |
+
"splitQuadraticAtT",
|
| 42 |
+
"splitCubicAtT",
|
| 43 |
+
"splitCubicAtTC",
|
| 44 |
+
"splitCubicIntoTwoAtTC",
|
| 45 |
+
"solveQuadratic",
|
| 46 |
+
"solveCubic",
|
| 47 |
+
"quadraticPointAtT",
|
| 48 |
+
"cubicPointAtT",
|
| 49 |
+
"cubicPointAtTC",
|
| 50 |
+
"linePointAtT",
|
| 51 |
+
"segmentPointAtT",
|
| 52 |
+
"lineLineIntersections",
|
| 53 |
+
"curveLineIntersections",
|
| 54 |
+
"curveCurveIntersections",
|
| 55 |
+
"segmentSegmentIntersections",
|
| 56 |
+
]
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def calcCubicArcLength(pt1, pt2, pt3, pt4, tolerance=0.005):
|
| 60 |
+
"""Calculates the arc length for a cubic Bezier segment.
|
| 61 |
+
|
| 62 |
+
Whereas :func:`approximateCubicArcLength` approximates the length, this
|
| 63 |
+
function calculates it by "measuring", recursively dividing the curve
|
| 64 |
+
until the divided segments are shorter than ``tolerance``.
|
| 65 |
+
|
| 66 |
+
Args:
|
| 67 |
+
pt1,pt2,pt3,pt4: Control points of the Bezier as 2D tuples.
|
| 68 |
+
tolerance: Controls the precision of the calcuation.
|
| 69 |
+
|
| 70 |
+
Returns:
|
| 71 |
+
Arc length value.
|
| 72 |
+
"""
|
| 73 |
+
return calcCubicArcLengthC(
|
| 74 |
+
complex(*pt1), complex(*pt2), complex(*pt3), complex(*pt4), tolerance
|
| 75 |
+
)
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def _split_cubic_into_two(p0, p1, p2, p3):
|
| 79 |
+
mid = (p0 + 3 * (p1 + p2) + p3) * 0.125
|
| 80 |
+
deriv3 = (p3 + p2 - p1 - p0) * 0.125
|
| 81 |
+
return (
|
| 82 |
+
(p0, (p0 + p1) * 0.5, mid - deriv3, mid),
|
| 83 |
+
(mid, mid + deriv3, (p2 + p3) * 0.5, p3),
|
| 84 |
+
)
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
@cython.returns(cython.double)
|
| 88 |
+
@cython.locals(
|
| 89 |
+
p0=cython.complex,
|
| 90 |
+
p1=cython.complex,
|
| 91 |
+
p2=cython.complex,
|
| 92 |
+
p3=cython.complex,
|
| 93 |
+
)
|
| 94 |
+
@cython.locals(mult=cython.double, arch=cython.double, box=cython.double)
|
| 95 |
+
def _calcCubicArcLengthCRecurse(mult, p0, p1, p2, p3):
|
| 96 |
+
arch = abs(p0 - p3)
|
| 97 |
+
box = abs(p0 - p1) + abs(p1 - p2) + abs(p2 - p3)
|
| 98 |
+
if arch * mult + EPSILON >= box:
|
| 99 |
+
return (arch + box) * 0.5
|
| 100 |
+
else:
|
| 101 |
+
one, two = _split_cubic_into_two(p0, p1, p2, p3)
|
| 102 |
+
return _calcCubicArcLengthCRecurse(mult, *one) + _calcCubicArcLengthCRecurse(
|
| 103 |
+
mult, *two
|
| 104 |
+
)
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
@cython.returns(cython.double)
|
| 108 |
+
@cython.locals(
|
| 109 |
+
pt1=cython.complex,
|
| 110 |
+
pt2=cython.complex,
|
| 111 |
+
pt3=cython.complex,
|
| 112 |
+
pt4=cython.complex,
|
| 113 |
+
)
|
| 114 |
+
@cython.locals(
|
| 115 |
+
tolerance=cython.double,
|
| 116 |
+
mult=cython.double,
|
| 117 |
+
)
|
| 118 |
+
def calcCubicArcLengthC(pt1, pt2, pt3, pt4, tolerance=0.005):
|
| 119 |
+
"""Calculates the arc length for a cubic Bezier segment.
|
| 120 |
+
|
| 121 |
+
Args:
|
| 122 |
+
pt1,pt2,pt3,pt4: Control points of the Bezier as complex numbers.
|
| 123 |
+
tolerance: Controls the precision of the calcuation.
|
| 124 |
+
|
| 125 |
+
Returns:
|
| 126 |
+
Arc length value.
|
| 127 |
+
"""
|
| 128 |
+
mult = 1.0 + 1.5 * tolerance # The 1.5 is a empirical hack; no math
|
| 129 |
+
return _calcCubicArcLengthCRecurse(mult, pt1, pt2, pt3, pt4)
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
epsilonDigits = 6
|
| 133 |
+
epsilon = 1e-10
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
@cython.cfunc
|
| 137 |
+
@cython.inline
|
| 138 |
+
@cython.returns(cython.double)
|
| 139 |
+
@cython.locals(v1=cython.complex, v2=cython.complex)
|
| 140 |
+
def _dot(v1, v2):
|
| 141 |
+
return (v1 * v2.conjugate()).real
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
@cython.cfunc
|
| 145 |
+
@cython.inline
|
| 146 |
+
@cython.returns(cython.double)
|
| 147 |
+
@cython.locals(x=cython.double)
|
| 148 |
+
def _intSecAtan(x):
|
| 149 |
+
# In : sympy.integrate(sp.sec(sp.atan(x)))
|
| 150 |
+
# Out: x*sqrt(x**2 + 1)/2 + asinh(x)/2
|
| 151 |
+
return x * math.sqrt(x**2 + 1) / 2 + math.asinh(x) / 2
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
def calcQuadraticArcLength(pt1, pt2, pt3):
|
| 155 |
+
"""Calculates the arc length for a quadratic Bezier segment.
|
| 156 |
+
|
| 157 |
+
Args:
|
| 158 |
+
pt1: Start point of the Bezier as 2D tuple.
|
| 159 |
+
pt2: Handle point of the Bezier as 2D tuple.
|
| 160 |
+
pt3: End point of the Bezier as 2D tuple.
|
| 161 |
+
|
| 162 |
+
Returns:
|
| 163 |
+
Arc length value.
|
| 164 |
+
|
| 165 |
+
Example::
|
| 166 |
+
|
| 167 |
+
>>> calcQuadraticArcLength((0, 0), (0, 0), (0, 0)) # empty segment
|
| 168 |
+
0.0
|
| 169 |
+
>>> calcQuadraticArcLength((0, 0), (50, 0), (80, 0)) # collinear points
|
| 170 |
+
80.0
|
| 171 |
+
>>> calcQuadraticArcLength((0, 0), (0, 50), (0, 80)) # collinear points vertical
|
| 172 |
+
80.0
|
| 173 |
+
>>> calcQuadraticArcLength((0, 0), (50, 20), (100, 40)) # collinear points
|
| 174 |
+
107.70329614269008
|
| 175 |
+
>>> calcQuadraticArcLength((0, 0), (0, 100), (100, 0))
|
| 176 |
+
154.02976155645263
|
| 177 |
+
>>> calcQuadraticArcLength((0, 0), (0, 50), (100, 0))
|
| 178 |
+
120.21581243984076
|
| 179 |
+
>>> calcQuadraticArcLength((0, 0), (50, -10), (80, 50))
|
| 180 |
+
102.53273816445825
|
| 181 |
+
>>> calcQuadraticArcLength((0, 0), (40, 0), (-40, 0)) # collinear points, control point outside
|
| 182 |
+
66.66666666666667
|
| 183 |
+
>>> calcQuadraticArcLength((0, 0), (40, 0), (0, 0)) # collinear points, looping back
|
| 184 |
+
40.0
|
| 185 |
+
"""
|
| 186 |
+
return calcQuadraticArcLengthC(complex(*pt1), complex(*pt2), complex(*pt3))
|
| 187 |
+
|
| 188 |
+
|
| 189 |
+
@cython.returns(cython.double)
|
| 190 |
+
@cython.locals(
|
| 191 |
+
pt1=cython.complex,
|
| 192 |
+
pt2=cython.complex,
|
| 193 |
+
pt3=cython.complex,
|
| 194 |
+
d0=cython.complex,
|
| 195 |
+
d1=cython.complex,
|
| 196 |
+
d=cython.complex,
|
| 197 |
+
n=cython.complex,
|
| 198 |
+
)
|
| 199 |
+
@cython.locals(
|
| 200 |
+
scale=cython.double,
|
| 201 |
+
origDist=cython.double,
|
| 202 |
+
a=cython.double,
|
| 203 |
+
b=cython.double,
|
| 204 |
+
x0=cython.double,
|
| 205 |
+
x1=cython.double,
|
| 206 |
+
Len=cython.double,
|
| 207 |
+
)
|
| 208 |
+
def calcQuadraticArcLengthC(pt1, pt2, pt3):
|
| 209 |
+
"""Calculates the arc length for a quadratic Bezier segment.
|
| 210 |
+
|
| 211 |
+
Args:
|
| 212 |
+
pt1: Start point of the Bezier as a complex number.
|
| 213 |
+
pt2: Handle point of the Bezier as a complex number.
|
| 214 |
+
pt3: End point of the Bezier as a complex number.
|
| 215 |
+
|
| 216 |
+
Returns:
|
| 217 |
+
Arc length value.
|
| 218 |
+
"""
|
| 219 |
+
# Analytical solution to the length of a quadratic bezier.
|
| 220 |
+
# Documentation: https://github.com/fonttools/fonttools/issues/3055
|
| 221 |
+
d0 = pt2 - pt1
|
| 222 |
+
d1 = pt3 - pt2
|
| 223 |
+
d = d1 - d0
|
| 224 |
+
n = d * 1j
|
| 225 |
+
scale = abs(n)
|
| 226 |
+
if scale == 0.0:
|
| 227 |
+
return abs(pt3 - pt1)
|
| 228 |
+
origDist = _dot(n, d0)
|
| 229 |
+
if abs(origDist) < epsilon:
|
| 230 |
+
if _dot(d0, d1) >= 0:
|
| 231 |
+
return abs(pt3 - pt1)
|
| 232 |
+
a, b = abs(d0), abs(d1)
|
| 233 |
+
return (a * a + b * b) / (a + b)
|
| 234 |
+
x0 = _dot(d, d0) / origDist
|
| 235 |
+
x1 = _dot(d, d1) / origDist
|
| 236 |
+
Len = abs(2 * (_intSecAtan(x1) - _intSecAtan(x0)) * origDist / (scale * (x1 - x0)))
|
| 237 |
+
return Len
|
| 238 |
+
|
| 239 |
+
|
| 240 |
+
def approximateQuadraticArcLength(pt1, pt2, pt3):
|
| 241 |
+
"""Calculates the arc length for a quadratic Bezier segment.
|
| 242 |
+
|
| 243 |
+
Uses Gauss-Legendre quadrature for a branch-free approximation.
|
| 244 |
+
See :func:`calcQuadraticArcLength` for a slower but more accurate result.
|
| 245 |
+
|
| 246 |
+
Args:
|
| 247 |
+
pt1: Start point of the Bezier as 2D tuple.
|
| 248 |
+
pt2: Handle point of the Bezier as 2D tuple.
|
| 249 |
+
pt3: End point of the Bezier as 2D tuple.
|
| 250 |
+
|
| 251 |
+
Returns:
|
| 252 |
+
Approximate arc length value.
|
| 253 |
+
"""
|
| 254 |
+
return approximateQuadraticArcLengthC(complex(*pt1), complex(*pt2), complex(*pt3))
|
| 255 |
+
|
| 256 |
+
|
| 257 |
+
@cython.returns(cython.double)
|
| 258 |
+
@cython.locals(
|
| 259 |
+
pt1=cython.complex,
|
| 260 |
+
pt2=cython.complex,
|
| 261 |
+
pt3=cython.complex,
|
| 262 |
+
)
|
| 263 |
+
@cython.locals(
|
| 264 |
+
v0=cython.double,
|
| 265 |
+
v1=cython.double,
|
| 266 |
+
v2=cython.double,
|
| 267 |
+
)
|
| 268 |
+
def approximateQuadraticArcLengthC(pt1, pt2, pt3):
|
| 269 |
+
"""Calculates the arc length for a quadratic Bezier segment.
|
| 270 |
+
|
| 271 |
+
Uses Gauss-Legendre quadrature for a branch-free approximation.
|
| 272 |
+
See :func:`calcQuadraticArcLength` for a slower but more accurate result.
|
| 273 |
+
|
| 274 |
+
Args:
|
| 275 |
+
pt1: Start point of the Bezier as a complex number.
|
| 276 |
+
pt2: Handle point of the Bezier as a complex number.
|
| 277 |
+
pt3: End point of the Bezier as a complex number.
|
| 278 |
+
|
| 279 |
+
Returns:
|
| 280 |
+
Approximate arc length value.
|
| 281 |
+
"""
|
| 282 |
+
# This, essentially, approximates the length-of-derivative function
|
| 283 |
+
# to be integrated with the best-matching fifth-degree polynomial
|
| 284 |
+
# approximation of it.
|
| 285 |
+
#
|
| 286 |
+
# https://en.wikipedia.org/wiki/Gaussian_quadrature#Gauss.E2.80.93Legendre_quadrature
|
| 287 |
+
|
| 288 |
+
# abs(BezierCurveC[2].diff(t).subs({t:T})) for T in sorted(.5, .5±sqrt(3/5)/2),
|
| 289 |
+
# weighted 5/18, 8/18, 5/18 respectively.
|
| 290 |
+
v0 = abs(
|
| 291 |
+
-0.492943519233745 * pt1 + 0.430331482911935 * pt2 + 0.0626120363218102 * pt3
|
| 292 |
+
)
|
| 293 |
+
v1 = abs(pt3 - pt1) * 0.4444444444444444
|
| 294 |
+
v2 = abs(
|
| 295 |
+
-0.0626120363218102 * pt1 - 0.430331482911935 * pt2 + 0.492943519233745 * pt3
|
| 296 |
+
)
|
| 297 |
+
|
| 298 |
+
return v0 + v1 + v2
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
def calcQuadraticBounds(pt1, pt2, pt3):
|
| 302 |
+
"""Calculates the bounding rectangle for a quadratic Bezier segment.
|
| 303 |
+
|
| 304 |
+
Args:
|
| 305 |
+
pt1: Start point of the Bezier as a 2D tuple.
|
| 306 |
+
pt2: Handle point of the Bezier as a 2D tuple.
|
| 307 |
+
pt3: End point of the Bezier as a 2D tuple.
|
| 308 |
+
|
| 309 |
+
Returns:
|
| 310 |
+
A four-item tuple representing the bounding rectangle ``(xMin, yMin, xMax, yMax)``.
|
| 311 |
+
|
| 312 |
+
Example::
|
| 313 |
+
|
| 314 |
+
>>> calcQuadraticBounds((0, 0), (50, 100), (100, 0))
|
| 315 |
+
(0, 0, 100, 50.0)
|
| 316 |
+
>>> calcQuadraticBounds((0, 0), (100, 0), (100, 100))
|
| 317 |
+
(0.0, 0.0, 100, 100)
|
| 318 |
+
"""
|
| 319 |
+
(ax, ay), (bx, by), (cx, cy) = calcQuadraticParameters(pt1, pt2, pt3)
|
| 320 |
+
ax2 = ax * 2.0
|
| 321 |
+
ay2 = ay * 2.0
|
| 322 |
+
roots = []
|
| 323 |
+
if ax2 != 0:
|
| 324 |
+
roots.append(-bx / ax2)
|
| 325 |
+
if ay2 != 0:
|
| 326 |
+
roots.append(-by / ay2)
|
| 327 |
+
points = [
|
| 328 |
+
(ax * t * t + bx * t + cx, ay * t * t + by * t + cy)
|
| 329 |
+
for t in roots
|
| 330 |
+
if 0 <= t < 1
|
| 331 |
+
] + [pt1, pt3]
|
| 332 |
+
return calcBounds(points)
|
| 333 |
+
|
| 334 |
+
|
| 335 |
+
def approximateCubicArcLength(pt1, pt2, pt3, pt4):
|
| 336 |
+
"""Approximates the arc length for a cubic Bezier segment.
|
| 337 |
+
|
| 338 |
+
Uses Gauss-Lobatto quadrature with n=5 points to approximate arc length.
|
| 339 |
+
See :func:`calcCubicArcLength` for a slower but more accurate result.
|
| 340 |
+
|
| 341 |
+
Args:
|
| 342 |
+
pt1,pt2,pt3,pt4: Control points of the Bezier as 2D tuples.
|
| 343 |
+
|
| 344 |
+
Returns:
|
| 345 |
+
Arc length value.
|
| 346 |
+
|
| 347 |
+
Example::
|
| 348 |
+
|
| 349 |
+
>>> approximateCubicArcLength((0, 0), (25, 100), (75, 100), (100, 0))
|
| 350 |
+
190.04332968932817
|
| 351 |
+
>>> approximateCubicArcLength((0, 0), (50, 0), (100, 50), (100, 100))
|
| 352 |
+
154.8852074945903
|
| 353 |
+
>>> approximateCubicArcLength((0, 0), (50, 0), (100, 0), (150, 0)) # line; exact result should be 150.
|
| 354 |
+
149.99999999999991
|
| 355 |
+
>>> approximateCubicArcLength((0, 0), (50, 0), (100, 0), (-50, 0)) # cusp; exact result should be 150.
|
| 356 |
+
136.9267662156362
|
| 357 |
+
>>> approximateCubicArcLength((0, 0), (50, 0), (100, -50), (-50, 0)) # cusp
|
| 358 |
+
154.80848416537057
|
| 359 |
+
"""
|
| 360 |
+
return approximateCubicArcLengthC(
|
| 361 |
+
complex(*pt1), complex(*pt2), complex(*pt3), complex(*pt4)
|
| 362 |
+
)
|
| 363 |
+
|
| 364 |
+
|
| 365 |
+
@cython.returns(cython.double)
|
| 366 |
+
@cython.locals(
|
| 367 |
+
pt1=cython.complex,
|
| 368 |
+
pt2=cython.complex,
|
| 369 |
+
pt3=cython.complex,
|
| 370 |
+
pt4=cython.complex,
|
| 371 |
+
)
|
| 372 |
+
@cython.locals(
|
| 373 |
+
v0=cython.double,
|
| 374 |
+
v1=cython.double,
|
| 375 |
+
v2=cython.double,
|
| 376 |
+
v3=cython.double,
|
| 377 |
+
v4=cython.double,
|
| 378 |
+
)
|
| 379 |
+
def approximateCubicArcLengthC(pt1, pt2, pt3, pt4):
|
| 380 |
+
"""Approximates the arc length for a cubic Bezier segment.
|
| 381 |
+
|
| 382 |
+
Args:
|
| 383 |
+
pt1,pt2,pt3,pt4: Control points of the Bezier as complex numbers.
|
| 384 |
+
|
| 385 |
+
Returns:
|
| 386 |
+
Arc length value.
|
| 387 |
+
"""
|
| 388 |
+
# This, essentially, approximates the length-of-derivative function
|
| 389 |
+
# to be integrated with the best-matching seventh-degree polynomial
|
| 390 |
+
# approximation of it.
|
| 391 |
+
#
|
| 392 |
+
# https://en.wikipedia.org/wiki/Gaussian_quadrature#Gauss.E2.80.93Lobatto_rules
|
| 393 |
+
|
| 394 |
+
# abs(BezierCurveC[3].diff(t).subs({t:T})) for T in sorted(0, .5±(3/7)**.5/2, .5, 1),
|
| 395 |
+
# weighted 1/20, 49/180, 32/90, 49/180, 1/20 respectively.
|
| 396 |
+
v0 = abs(pt2 - pt1) * 0.15
|
| 397 |
+
v1 = abs(
|
| 398 |
+
-0.558983582205757 * pt1
|
| 399 |
+
+ 0.325650248872424 * pt2
|
| 400 |
+
+ 0.208983582205757 * pt3
|
| 401 |
+
+ 0.024349751127576 * pt4
|
| 402 |
+
)
|
| 403 |
+
v2 = abs(pt4 - pt1 + pt3 - pt2) * 0.26666666666666666
|
| 404 |
+
v3 = abs(
|
| 405 |
+
-0.024349751127576 * pt1
|
| 406 |
+
- 0.208983582205757 * pt2
|
| 407 |
+
- 0.325650248872424 * pt3
|
| 408 |
+
+ 0.558983582205757 * pt4
|
| 409 |
+
)
|
| 410 |
+
v4 = abs(pt4 - pt3) * 0.15
|
| 411 |
+
|
| 412 |
+
return v0 + v1 + v2 + v3 + v4
|
| 413 |
+
|
| 414 |
+
|
| 415 |
+
def calcCubicBounds(pt1, pt2, pt3, pt4):
|
| 416 |
+
"""Calculates the bounding rectangle for a quadratic Bezier segment.
|
| 417 |
+
|
| 418 |
+
Args:
|
| 419 |
+
pt1,pt2,pt3,pt4: Control points of the Bezier as 2D tuples.
|
| 420 |
+
|
| 421 |
+
Returns:
|
| 422 |
+
A four-item tuple representing the bounding rectangle ``(xMin, yMin, xMax, yMax)``.
|
| 423 |
+
|
| 424 |
+
Example::
|
| 425 |
+
|
| 426 |
+
>>> calcCubicBounds((0, 0), (25, 100), (75, 100), (100, 0))
|
| 427 |
+
(0, 0, 100, 75.0)
|
| 428 |
+
>>> calcCubicBounds((0, 0), (50, 0), (100, 50), (100, 100))
|
| 429 |
+
(0.0, 0.0, 100, 100)
|
| 430 |
+
>>> print("%f %f %f %f" % calcCubicBounds((50, 0), (0, 100), (100, 100), (50, 0)))
|
| 431 |
+
35.566243 0.000000 64.433757 75.000000
|
| 432 |
+
"""
|
| 433 |
+
(ax, ay), (bx, by), (cx, cy), (dx, dy) = calcCubicParameters(pt1, pt2, pt3, pt4)
|
| 434 |
+
# calc first derivative
|
| 435 |
+
ax3 = ax * 3.0
|
| 436 |
+
ay3 = ay * 3.0
|
| 437 |
+
bx2 = bx * 2.0
|
| 438 |
+
by2 = by * 2.0
|
| 439 |
+
xRoots = [t for t in solveQuadratic(ax3, bx2, cx) if 0 <= t < 1]
|
| 440 |
+
yRoots = [t for t in solveQuadratic(ay3, by2, cy) if 0 <= t < 1]
|
| 441 |
+
roots = xRoots + yRoots
|
| 442 |
+
|
| 443 |
+
points = [
|
| 444 |
+
(
|
| 445 |
+
ax * t * t * t + bx * t * t + cx * t + dx,
|
| 446 |
+
ay * t * t * t + by * t * t + cy * t + dy,
|
| 447 |
+
)
|
| 448 |
+
for t in roots
|
| 449 |
+
] + [pt1, pt4]
|
| 450 |
+
return calcBounds(points)
|
| 451 |
+
|
| 452 |
+
|
| 453 |
+
def splitLine(pt1, pt2, where, isHorizontal):
|
| 454 |
+
"""Split a line at a given coordinate.
|
| 455 |
+
|
| 456 |
+
Args:
|
| 457 |
+
pt1: Start point of line as 2D tuple.
|
| 458 |
+
pt2: End point of line as 2D tuple.
|
| 459 |
+
where: Position at which to split the line.
|
| 460 |
+
isHorizontal: Direction of the ray splitting the line. If true,
|
| 461 |
+
``where`` is interpreted as a Y coordinate; if false, then
|
| 462 |
+
``where`` is interpreted as an X coordinate.
|
| 463 |
+
|
| 464 |
+
Returns:
|
| 465 |
+
A list of two line segments (each line segment being two 2D tuples)
|
| 466 |
+
if the line was successfully split, or a list containing the original
|
| 467 |
+
line.
|
| 468 |
+
|
| 469 |
+
Example::
|
| 470 |
+
|
| 471 |
+
>>> printSegments(splitLine((0, 0), (100, 100), 50, True))
|
| 472 |
+
((0, 0), (50, 50))
|
| 473 |
+
((50, 50), (100, 100))
|
| 474 |
+
>>> printSegments(splitLine((0, 0), (100, 100), 100, True))
|
| 475 |
+
((0, 0), (100, 100))
|
| 476 |
+
>>> printSegments(splitLine((0, 0), (100, 100), 0, True))
|
| 477 |
+
((0, 0), (0, 0))
|
| 478 |
+
((0, 0), (100, 100))
|
| 479 |
+
>>> printSegments(splitLine((0, 0), (100, 100), 0, False))
|
| 480 |
+
((0, 0), (0, 0))
|
| 481 |
+
((0, 0), (100, 100))
|
| 482 |
+
>>> printSegments(splitLine((100, 0), (0, 0), 50, False))
|
| 483 |
+
((100, 0), (50, 0))
|
| 484 |
+
((50, 0), (0, 0))
|
| 485 |
+
>>> printSegments(splitLine((0, 100), (0, 0), 50, True))
|
| 486 |
+
((0, 100), (0, 50))
|
| 487 |
+
((0, 50), (0, 0))
|
| 488 |
+
"""
|
| 489 |
+
pt1x, pt1y = pt1
|
| 490 |
+
pt2x, pt2y = pt2
|
| 491 |
+
|
| 492 |
+
ax = pt2x - pt1x
|
| 493 |
+
ay = pt2y - pt1y
|
| 494 |
+
|
| 495 |
+
bx = pt1x
|
| 496 |
+
by = pt1y
|
| 497 |
+
|
| 498 |
+
a = (ax, ay)[isHorizontal]
|
| 499 |
+
|
| 500 |
+
if a == 0:
|
| 501 |
+
return [(pt1, pt2)]
|
| 502 |
+
t = (where - (bx, by)[isHorizontal]) / a
|
| 503 |
+
if 0 <= t < 1:
|
| 504 |
+
midPt = ax * t + bx, ay * t + by
|
| 505 |
+
return [(pt1, midPt), (midPt, pt2)]
|
| 506 |
+
else:
|
| 507 |
+
return [(pt1, pt2)]
|
| 508 |
+
|
| 509 |
+
|
| 510 |
+
def splitQuadratic(pt1, pt2, pt3, where, isHorizontal):
|
| 511 |
+
"""Split a quadratic Bezier curve at a given coordinate.
|
| 512 |
+
|
| 513 |
+
Args:
|
| 514 |
+
pt1,pt2,pt3: Control points of the Bezier as 2D tuples.
|
| 515 |
+
where: Position at which to split the curve.
|
| 516 |
+
isHorizontal: Direction of the ray splitting the curve. If true,
|
| 517 |
+
``where`` is interpreted as a Y coordinate; if false, then
|
| 518 |
+
``where`` is interpreted as an X coordinate.
|
| 519 |
+
|
| 520 |
+
Returns:
|
| 521 |
+
A list of two curve segments (each curve segment being three 2D tuples)
|
| 522 |
+
if the curve was successfully split, or a list containing the original
|
| 523 |
+
curve.
|
| 524 |
+
|
| 525 |
+
Example::
|
| 526 |
+
|
| 527 |
+
>>> printSegments(splitQuadratic((0, 0), (50, 100), (100, 0), 150, False))
|
| 528 |
+
((0, 0), (50, 100), (100, 0))
|
| 529 |
+
>>> printSegments(splitQuadratic((0, 0), (50, 100), (100, 0), 50, False))
|
| 530 |
+
((0, 0), (25, 50), (50, 50))
|
| 531 |
+
((50, 50), (75, 50), (100, 0))
|
| 532 |
+
>>> printSegments(splitQuadratic((0, 0), (50, 100), (100, 0), 25, False))
|
| 533 |
+
((0, 0), (12.5, 25), (25, 37.5))
|
| 534 |
+
((25, 37.5), (62.5, 75), (100, 0))
|
| 535 |
+
>>> printSegments(splitQuadratic((0, 0), (50, 100), (100, 0), 25, True))
|
| 536 |
+
((0, 0), (7.32233, 14.6447), (14.6447, 25))
|
| 537 |
+
((14.6447, 25), (50, 75), (85.3553, 25))
|
| 538 |
+
((85.3553, 25), (92.6777, 14.6447), (100, -7.10543e-15))
|
| 539 |
+
>>> # XXX I'm not at all sure if the following behavior is desirable:
|
| 540 |
+
>>> printSegments(splitQuadratic((0, 0), (50, 100), (100, 0), 50, True))
|
| 541 |
+
((0, 0), (25, 50), (50, 50))
|
| 542 |
+
((50, 50), (50, 50), (50, 50))
|
| 543 |
+
((50, 50), (75, 50), (100, 0))
|
| 544 |
+
"""
|
| 545 |
+
a, b, c = calcQuadraticParameters(pt1, pt2, pt3)
|
| 546 |
+
solutions = solveQuadratic(
|
| 547 |
+
a[isHorizontal], b[isHorizontal], c[isHorizontal] - where
|
| 548 |
+
)
|
| 549 |
+
solutions = sorted(t for t in solutions if 0 <= t < 1)
|
| 550 |
+
if not solutions:
|
| 551 |
+
return [(pt1, pt2, pt3)]
|
| 552 |
+
return _splitQuadraticAtT(a, b, c, *solutions)
|
| 553 |
+
|
| 554 |
+
|
| 555 |
+
def splitCubic(pt1, pt2, pt3, pt4, where, isHorizontal):
|
| 556 |
+
"""Split a cubic Bezier curve at a given coordinate.
|
| 557 |
+
|
| 558 |
+
Args:
|
| 559 |
+
pt1,pt2,pt3,pt4: Control points of the Bezier as 2D tuples.
|
| 560 |
+
where: Position at which to split the curve.
|
| 561 |
+
isHorizontal: Direction of the ray splitting the curve. If true,
|
| 562 |
+
``where`` is interpreted as a Y coordinate; if false, then
|
| 563 |
+
``where`` is interpreted as an X coordinate.
|
| 564 |
+
|
| 565 |
+
Returns:
|
| 566 |
+
A list of two curve segments (each curve segment being four 2D tuples)
|
| 567 |
+
if the curve was successfully split, or a list containing the original
|
| 568 |
+
curve.
|
| 569 |
+
|
| 570 |
+
Example::
|
| 571 |
+
|
| 572 |
+
>>> printSegments(splitCubic((0, 0), (25, 100), (75, 100), (100, 0), 150, False))
|
| 573 |
+
((0, 0), (25, 100), (75, 100), (100, 0))
|
| 574 |
+
>>> printSegments(splitCubic((0, 0), (25, 100), (75, 100), (100, 0), 50, False))
|
| 575 |
+
((0, 0), (12.5, 50), (31.25, 75), (50, 75))
|
| 576 |
+
((50, 75), (68.75, 75), (87.5, 50), (100, 0))
|
| 577 |
+
>>> printSegments(splitCubic((0, 0), (25, 100), (75, 100), (100, 0), 25, True))
|
| 578 |
+
((0, 0), (2.29379, 9.17517), (4.79804, 17.5085), (7.47414, 25))
|
| 579 |
+
((7.47414, 25), (31.2886, 91.6667), (68.7114, 91.6667), (92.5259, 25))
|
| 580 |
+
((92.5259, 25), (95.202, 17.5085), (97.7062, 9.17517), (100, 1.77636e-15))
|
| 581 |
+
"""
|
| 582 |
+
a, b, c, d = calcCubicParameters(pt1, pt2, pt3, pt4)
|
| 583 |
+
solutions = solveCubic(
|
| 584 |
+
a[isHorizontal], b[isHorizontal], c[isHorizontal], d[isHorizontal] - where
|
| 585 |
+
)
|
| 586 |
+
solutions = sorted(t for t in solutions if 0 <= t < 1)
|
| 587 |
+
if not solutions:
|
| 588 |
+
return [(pt1, pt2, pt3, pt4)]
|
| 589 |
+
return _splitCubicAtT(a, b, c, d, *solutions)
|
| 590 |
+
|
| 591 |
+
|
| 592 |
+
def splitQuadraticAtT(pt1, pt2, pt3, *ts):
    """Split a quadratic Bezier curve at one or more values of t.

    Args:
        pt1,pt2,pt3: Control points of the Bezier as 2D tuples.
        *ts: Positions at which to split the curve.

    Returns:
        A list of curve segments (each curve segment being three 2D tuples).

    Examples::

        >>> printSegments(splitQuadraticAtT((0, 0), (50, 100), (100, 0), 0.5))
        ((0, 0), (25, 50), (50, 50))
        ((50, 50), (75, 50), (100, 0))
        >>> printSegments(splitQuadraticAtT((0, 0), (50, 100), (100, 0), 0.5, 0.75))
        ((0, 0), (25, 50), (50, 50))
        ((50, 50), (62.5, 50), (75, 37.5))
        ((75, 37.5), (87.5, 25), (100, 0))
    """
    # Convert to polynomial form once, then split in parameter space.
    return _splitQuadraticAtT(*calcQuadraticParameters(pt1, pt2, pt3), *ts)
|
| 614 |
+
|
| 615 |
+
|
| 616 |
+
def splitCubicAtT(pt1, pt2, pt3, pt4, *ts):
    """Split a cubic Bezier curve at one or more values of t.

    Args:
        pt1,pt2,pt3,pt4: Control points of the Bezier as 2D tuples.
        *ts: Positions at which to split the curve.

    Returns:
        A list of curve segments (each curve segment being four 2D tuples).

    Examples::

        >>> printSegments(splitCubicAtT((0, 0), (25, 100), (75, 100), (100, 0), 0.5))
        ((0, 0), (12.5, 50), (31.25, 75), (50, 75))
        ((50, 75), (68.75, 75), (87.5, 50), (100, 0))
        >>> printSegments(splitCubicAtT((0, 0), (25, 100), (75, 100), (100, 0), 0.5, 0.75))
        ((0, 0), (12.5, 50), (31.25, 75), (50, 75))
        ((50, 75), (59.375, 75), (68.75, 68.75), (77.3438, 56.25))
        ((77.3438, 56.25), (85.9375, 43.75), (93.75, 25), (100, 0))
    """
    # Convert to polynomial form once, then split in parameter space.
    return _splitCubicAtT(*calcCubicParameters(pt1, pt2, pt3, pt4), *ts)
|
| 638 |
+
|
| 639 |
+
|
| 640 |
+
@cython.locals(
    pt1=cython.complex,
    pt2=cython.complex,
    pt3=cython.complex,
    pt4=cython.complex,
    a=cython.complex,
    b=cython.complex,
    c=cython.complex,
    d=cython.complex,
)
def splitCubicAtTC(pt1, pt2, pt3, pt4, *ts):
    """Split a cubic Bezier curve at one or more values of t.

    Complex-number variant of :func:`splitCubicAtT`; each 2D point is
    represented as a single complex number (x + y*1j).

    Args:
        pt1,pt2,pt3,pt4: Control points of the Bezier as complex numbers.
        *ts: Positions at which to split the curve.

    Yields:
        Curve segments (each curve segment being four complex numbers).
    """
    a, b, c, d = calcCubicParametersC(pt1, pt2, pt3, pt4)
    yield from _splitCubicAtTC(a, b, c, d, *ts)
|
| 662 |
+
|
| 663 |
+
|
| 664 |
+
@cython.returns(cython.complex)
@cython.locals(
    t=cython.double,
    pt1=cython.complex,
    pt2=cython.complex,
    pt3=cython.complex,
    pt4=cython.complex,
    pointAtT=cython.complex,
    off1=cython.complex,
    off2=cython.complex,
)
@cython.locals(
    t2=cython.double, _1_t=cython.double, _1_t_2=cython.double, _2_t_1_t=cython.double
)
def splitCubicIntoTwoAtTC(pt1, pt2, pt3, pt4, t):
    """Split a cubic Bezier curve at t.

    Args:
        pt1,pt2,pt3,pt4: Control points of the Bezier as complex numbers.
        t: Position at which to split the curve.

    Returns:
        A tuple of two curve segments (each curve segment being four complex numbers).
    """
    # Powers of t and (1 - t) reused by the Bernstein evaluations below.
    t2 = t * t
    _1_t = 1 - t
    _1_t_2 = _1_t * _1_t
    _2_t_1_t = 2 * t * _1_t
    # On-curve point at parameter t (cubic Bernstein evaluation); this is
    # the shared endpoint of the two output halves.
    pointAtT = (
        _1_t_2 * _1_t * pt1 + 3 * (_1_t_2 * t * pt2 + _1_t * t2 * pt3) + t2 * t * pt4
    )
    # Inner off-curve handles of the two halves (second-level de Casteljau
    # points of the original control polygon).
    off1 = _1_t_2 * pt1 + _2_t_1_t * pt2 + t2 * pt3
    off2 = _1_t_2 * pt2 + _2_t_1_t * pt3 + t2 * pt4

    # Outer off-curve handles: first-level de Casteljau interpolations.
    # Note: pt2/pt3 are deliberately rebound here.
    pt2 = pt1 + (pt2 - pt1) * t
    pt3 = pt4 + (pt3 - pt4) * _1_t

    return ((pt1, pt2, off1, pointAtT), (pointAtT, off2, pt3, pt4))
|
| 702 |
+
|
| 703 |
+
|
| 704 |
+
def _splitQuadraticAtT(a, b, c, *ts):
    # Add the curve endpoints so each adjacent pair of t values delimits
    # one output segment.
    boundaries = [0.0] + list(ts) + [1.0]
    ax, ay = a
    bx, by = b
    cx, cy = c
    segments = []
    for t1, t2 in zip(boundaries[:-1], boundaries[1:]):
        delta = t2 - t1
        # Re-parameterize the quadratic polynomial over [t1, t2].
        delta_2 = delta * delta
        t1_2 = t1 * t1
        a1 = (ax * delta_2, ay * delta_2)
        b1 = ((2 * ax * t1 + bx) * delta, (2 * ay * t1 + by) * delta)
        c1 = (ax * t1_2 + bx * t1 + cx, ay * t1_2 + by * t1 + cy)
        segments.append(calcQuadraticPoints(a1, b1, c1))
    return segments
|
| 729 |
+
|
| 730 |
+
|
| 731 |
+
def _splitCubicAtT(a, b, c, d, *ts):
    # Add the curve endpoints so each adjacent pair of t values delimits
    # one output segment.
    boundaries = [0.0] + list(ts) + [1.0]
    ax, ay = a
    bx, by = b
    cx, cy = c
    dx, dy = d
    segments = []
    for t1, t2 in zip(boundaries[:-1], boundaries[1:]):
        delta = t2 - t1
        delta_2 = delta * delta
        delta_3 = delta * delta_2
        t1_2 = t1 * t1
        t1_3 = t1 * t1_2
        # Coefficients of the sub-curve re-parameterized over [t1, t2].
        a1 = (ax * delta_3, ay * delta_3)
        b1 = ((3 * ax * t1 + bx) * delta_2, (3 * ay * t1 + by) * delta_2)
        c1 = (
            (2 * bx * t1 + cx + 3 * ax * t1_2) * delta,
            (2 * by * t1 + cy + 3 * ay * t1_2) * delta,
        )
        d1 = (
            ax * t1_3 + bx * t1_2 + cx * t1 + dx,
            ay * t1_3 + by * t1_2 + cy * t1 + dy,
        )
        segments.append(calcCubicPoints(a1, b1, c1, d1))
    return segments
|
| 764 |
+
|
| 765 |
+
|
| 766 |
+
@cython.locals(
    a=cython.complex,
    b=cython.complex,
    c=cython.complex,
    d=cython.complex,
    t1=cython.double,
    t2=cython.double,
    delta=cython.double,
    delta_2=cython.double,
    delta_3=cython.double,
    a1=cython.complex,
    b1=cython.complex,
    c1=cython.complex,
    d1=cython.complex,
)
def _splitCubicAtTC(a, b, c, d, *ts):
    # Complex-arithmetic twin of _splitCubicAtT: re-parameterize the cubic
    # polynomial over each consecutive [t1, t2] interval and yield the
    # control points of the corresponding sub-curve.
    ts = list(ts)
    ts.insert(0, 0.0)
    ts.append(1.0)
    for i in range(len(ts) - 1):
        t1 = ts[i]
        t2 = ts[i + 1]
        delta = t2 - t1

        delta_2 = delta * delta
        delta_3 = delta * delta_2
        t1_2 = t1 * t1
        t1_3 = t1 * t1_2

        # calc new a, b, c and d
        a1 = a * delta_3
        b1 = (3 * a * t1 + b) * delta_2
        c1 = (2 * b * t1 + c + 3 * a * t1_2) * delta
        d1 = a * t1_3 + b * t1_2 + c * t1 + d
        pt1, pt2, pt3, pt4 = calcCubicPointsC(a1, b1, c1, d1)
        yield (pt1, pt2, pt3, pt4)
|
| 802 |
+
|
| 803 |
+
|
| 804 |
+
#
|
| 805 |
+
# Equation solvers.
|
| 806 |
+
#
|
| 807 |
+
|
| 808 |
+
from math import sqrt, acos, cos, pi
|
| 809 |
+
|
| 810 |
+
|
| 811 |
+
def solveQuadratic(a, b, c, sqrt=sqrt):
    """Solve a quadratic equation.

    Solves *a*x*x + b*x + c = 0* where a, b and c are real.

    Args:
        a: coefficient of *x²*
        b: coefficient of *x*
        c: constant term

    Returns:
        A list of roots. Note that the returned list is neither guaranteed to
        be sorted nor to contain unique values!
    """
    if abs(a) < epsilon:
        if abs(b) < epsilon:
            # Degenerate: no variable terms remain, so no valid solution.
            return []
        # Linear equation b*x + c = 0 has exactly one root.
        return [-c / b]
    # True quadratic: apply the quadratic formula.
    discriminant = b * b - 4.0 * a * c
    if discriminant < 0.0:
        # Complex conjugate roots; ignored here.
        return []
    root = sqrt(discriminant)
    return [(-b + root) / 2.0 / a, (-b - root) / 2.0 / a]
|
| 842 |
+
|
| 843 |
+
|
| 844 |
+
def solveCubic(a, b, c, d):
    """Solve a cubic equation.

    Solves *a*x*x*x + b*x*x + c*x + d = 0* where a, b, c and d are real.

    Args:
        a: coefficient of *x³*
        b: coefficient of *x²*
        c: coefficient of *x*
        d: constant term

    Returns:
        A list of roots. Note that the returned list is neither guaranteed to
        be sorted nor to contain unique values!

    Examples::

        >>> solveCubic(1, 1, -6, 0)
        [-3.0, -0.0, 2.0]
        >>> solveCubic(-10.0, -9.0, 48.0, -29.0)
        [-2.9, 1.0, 1.0]
        >>> solveCubic(-9.875, -9.0, 47.625, -28.75)
        [-2.911392, 1.0, 1.0]
        >>> solveCubic(1.0, -4.5, 6.75, -3.375)
        [1.5, 1.5, 1.5]
        >>> solveCubic(-12.0, 18.0, -9.0, 1.50023651123)
        [0.5, 0.5, 0.5]
        >>> solveCubic(
        ...     9.0, 0.0, 0.0, -7.62939453125e-05
        ... ) == [-0.0, -0.0, -0.0]
        True
    """
    #
    # adapted from:
    # CUBIC.C - Solve a cubic polynomial
    # public domain by Ross Cottrell
    # found at: http://www.strangecreations.com/library/snippets/Cubic.C
    #
    if abs(a) < epsilon:
        # don't just test for zero; for very small values of 'a' solveCubic()
        # returns unreliable results, so we fall back to quad.
        return solveQuadratic(b, c, d)
    a = float(a)
    # Normalize to the monic cubic x^3 + a1*x^2 + a2*x + a3 = 0.
    a1 = b / a
    a2 = c / a
    a3 = d / a

    # Standard substitution quantities (Cardano / trigonometric method).
    Q = (a1 * a1 - 3.0 * a2) / 9.0
    R = (2.0 * a1 * a1 * a1 - 9.0 * a1 * a2 + 27.0 * a3) / 54.0

    R2 = R * R
    Q3 = Q * Q * Q
    # Snap tiny magnitudes to zero to stabilize the discriminant test.
    R2 = 0 if R2 < epsilon else R2
    Q3 = 0 if abs(Q3) < epsilon else Q3

    # Sign of R2 - Q3 decides between one and three real roots.
    R2_Q3 = R2 - Q3

    if R2 == 0.0 and Q3 == 0.0:
        # Triple real root.
        x = round(-a1 / 3.0, epsilonDigits)
        return [x, x, x]
    elif R2_Q3 <= epsilon * 0.5:
        # The epsilon * .5 above ensures that Q3 is not zero.
        # Three real roots, found trigonometrically; the min/max clamp keeps
        # acos in domain despite rounding noise.
        theta = acos(max(min(R / sqrt(Q3), 1.0), -1.0))
        rQ2 = -2.0 * sqrt(Q)
        a1_3 = a1 / 3.0
        x0 = rQ2 * cos(theta / 3.0) - a1_3
        x1 = rQ2 * cos((theta + 2.0 * pi) / 3.0) - a1_3
        x2 = rQ2 * cos((theta + 4.0 * pi) / 3.0) - a1_3
        x0, x1, x2 = sorted([x0, x1, x2])
        # Merge roots that are close-enough
        if x1 - x0 < epsilon and x2 - x1 < epsilon:
            x0 = x1 = x2 = round((x0 + x1 + x2) / 3.0, epsilonDigits)
        elif x1 - x0 < epsilon:
            x0 = x1 = round((x0 + x1) / 2.0, epsilonDigits)
            x2 = round(x2, epsilonDigits)
        elif x2 - x1 < epsilon:
            x0 = round(x0, epsilonDigits)
            x1 = x2 = round((x1 + x2) / 2.0, epsilonDigits)
        else:
            x0 = round(x0, epsilonDigits)
            x1 = round(x1, epsilonDigits)
            x2 = round(x2, epsilonDigits)
        return [x0, x1, x2]
    else:
        # Single real root via Cardano's formula.
        x = pow(sqrt(R2_Q3) + abs(R), 1 / 3.0)
        x = x + Q / x
        if R >= 0.0:
            x = -x
        x = round(x - a1 / 3.0, epsilonDigits)
        return [x]
|
| 934 |
+
|
| 935 |
+
|
| 936 |
+
#
|
| 937 |
+
# Conversion routines for points to parameters and vice versa
|
| 938 |
+
#
|
| 939 |
+
|
| 940 |
+
|
| 941 |
+
def calcQuadraticParameters(pt1, pt2, pt3):
    # Convert quadratic Bezier control points to polynomial coefficients:
    # p(t) = a*t**2 + b*t + c, where c is the start point.
    cx, cy = pt1
    bx = 2.0 * (pt2[0] - cx)
    by = 2.0 * (pt2[1] - cy)
    ax = pt3[0] - cx - bx
    ay = pt3[1] - cy - by
    return (ax, ay), (bx, by), (cx, cy)
|
| 950 |
+
|
| 951 |
+
|
| 952 |
+
def calcCubicParameters(pt1, pt2, pt3, pt4):
    # Convert cubic Bezier control points to polynomial coefficients:
    # p(t) = a*t**3 + b*t**2 + c*t + d, where d is the start point.
    dx, dy = pt1
    cx = 3.0 * (pt2[0] - dx)
    cy = 3.0 * (pt2[1] - dy)
    bx = 3.0 * (pt3[0] - pt2[0]) - cx
    by = 3.0 * (pt3[1] - pt2[1]) - cy
    ax = pt4[0] - dx - cx - bx
    ay = pt4[1] - dy - cy - by
    return (ax, ay), (bx, by), (cx, cy), (dx, dy)
|
| 964 |
+
|
| 965 |
+
|
| 966 |
+
@cython.cfunc
@cython.inline
@cython.locals(
    pt1=cython.complex,
    pt2=cython.complex,
    pt3=cython.complex,
    pt4=cython.complex,
    a=cython.complex,
    b=cython.complex,
    c=cython.complex,
)
def calcCubicParametersC(pt1, pt2, pt3, pt4):
    # Complex-number variant of calcCubicParameters:
    # p(t) = a*t**3 + b*t**2 + c*t + d, with d == pt1.
    c = (pt2 - pt1) * 3.0
    b = (pt3 - pt2) * 3.0 - c
    a = pt4 - pt1 - c - b
    return (a, b, c, pt1)
|
| 982 |
+
|
| 983 |
+
|
| 984 |
+
def calcQuadraticPoints(a, b, c):
    # Inverse of calcQuadraticParameters: recover the three Bezier control
    # points from the polynomial coefficients.
    ax, ay = a
    bx, by = b
    cx, cy = c
    start = (cx, cy)
    offcurve = (bx * 0.5 + cx, by * 0.5 + cy)
    end = (ax + bx + cx, ay + by + cy)
    return start, offcurve, end
|
| 995 |
+
|
| 996 |
+
|
| 997 |
+
def calcCubicPoints(a, b, c, d):
    # Inverse of calcCubicParameters: recover the four Bezier control
    # points from the polynomial coefficients.
    ax, ay = a
    bx, by = b
    cx, cy = c
    dx, dy = d
    p1 = (dx, dy)
    p2 = (cx / 3.0 + dx, cy / 3.0 + dy)
    p3 = ((bx + cx) / 3.0 + p2[0], (by + cy) / 3.0 + p2[1])
    p4 = (ax + dx + cx + bx, ay + dy + cy + by)
    return p1, p2, p3, p4
|
| 1011 |
+
|
| 1012 |
+
|
| 1013 |
+
@cython.cfunc
@cython.inline
@cython.locals(
    a=cython.complex,
    b=cython.complex,
    c=cython.complex,
    d=cython.complex,
    p2=cython.complex,
    p3=cython.complex,
    p4=cython.complex,
)
def calcCubicPointsC(a, b, c, d):
    # Inverse of calcCubicParametersC: polynomial coefficients back to the
    # four Bezier control points (the first control point is d).
    p2 = c * (1 / 3) + d
    p3 = (b + c) * (1 / 3) + p2
    p4 = a + b + c + d
    return (d, p2, p3, p4)
|
| 1029 |
+
|
| 1030 |
+
|
| 1031 |
+
#
|
| 1032 |
+
# Point at time
|
| 1033 |
+
#
|
| 1034 |
+
|
| 1035 |
+
|
| 1036 |
+
def linePointAtT(pt1, pt2, t):
    """Finds the point at time `t` on a line.

    Args:
        pt1, pt2: Coordinates of the line as 2D tuples.
        t: The time along the line.

    Returns:
        A 2D tuple with the coordinates of the point.
    """
    # Plain linear interpolation between the two endpoints.
    s = 1 - t
    return (pt1[0] * s + pt2[0] * t, pt1[1] * s + pt2[1] * t)
|
| 1047 |
+
|
| 1048 |
+
|
| 1049 |
+
def quadraticPointAtT(pt1, pt2, pt3, t):
    """Finds the point at time `t` on a quadratic curve.

    Args:
        pt1, pt2, pt3: Coordinates of the curve as 2D tuples.
        t: The time along the curve.

    Returns:
        A 2D tuple with the coordinates of the point.
    """
    # Quadratic Bernstein weights.
    w0 = (1 - t) * (1 - t)
    w1 = 2 * (1 - t) * t
    w2 = t * t
    return (
        w0 * pt1[0] + w1 * pt2[0] + w2 * pt3[0],
        w0 * pt1[1] + w1 * pt2[1] + w2 * pt3[1],
    )
|
| 1062 |
+
|
| 1063 |
+
|
| 1064 |
+
def cubicPointAtT(pt1, pt2, pt3, pt4, t):
    """Finds the point at time `t` on a cubic curve.

    Args:
        pt1, pt2, pt3, pt4: Coordinates of the curve as 2D tuples.
        t: The time along the curve.

    Returns:
        A 2D tuple with the coordinates of the point.
    """
    # Cubic Bernstein evaluation with shared sub-expressions.
    t2 = t * t
    mt = 1 - t
    mt2 = mt * mt
    x = mt2 * mt * pt1[0] + 3 * (mt2 * t * pt2[0] + mt * t2 * pt3[0]) + t2 * t * pt4[0]
    y = mt2 * mt * pt1[1] + 3 * (mt2 * t * pt2[1] + mt * t2 * pt3[1]) + t2 * t * pt4[1]
    return (x, y)
|
| 1088 |
+
|
| 1089 |
+
|
| 1090 |
+
@cython.returns(cython.complex)
@cython.locals(
    t=cython.double,
    pt1=cython.complex,
    pt2=cython.complex,
    pt3=cython.complex,
    pt4=cython.complex,
)
@cython.locals(t2=cython.double, _1_t=cython.double, _1_t_2=cython.double)
def cubicPointAtTC(pt1, pt2, pt3, pt4, t):
    """Finds the point at time `t` on a cubic curve.

    Complex-number variant of :func:`cubicPointAtT`.

    Args:
        pt1, pt2, pt3, pt4: Coordinates of the curve as complex numbers.
        t: The time along the curve.

    Returns:
        A complex number with the coordinates of the point.
    """
    # Cubic Bernstein evaluation.
    t2 = t * t
    _1_t = 1 - t
    _1_t_2 = _1_t * _1_t
    return _1_t_2 * _1_t * pt1 + 3 * (_1_t_2 * t * pt2 + _1_t * t2 * pt3) + t2 * t * pt4
|
| 1113 |
+
|
| 1114 |
+
|
| 1115 |
+
def segmentPointAtT(seg, t):
    # Dispatch on the number of control points: 2 = line, 3 = quadratic,
    # 4 = cubic.
    n = len(seg)
    if n == 2:
        return linePointAtT(*seg, t)
    if n == 3:
        return quadraticPointAtT(*seg, t)
    if n == 4:
        return cubicPointAtT(*seg, t)
    raise ValueError("Unknown curve degree")
|
| 1123 |
+
|
| 1124 |
+
|
| 1125 |
+
#
|
| 1126 |
+
# Intersection finders
|
| 1127 |
+
#
|
| 1128 |
+
|
| 1129 |
+
|
| 1130 |
+
def _line_t_of_pt(s, e, pt):
    # Back-project `pt` onto the line through s-e and return its time value.
    sx, sy = s
    ex, ey = e
    px, py = pt
    dx = ex - sx
    dy = ey - sy
    if abs(dx) < epsilon and abs(dy) < epsilon:
        # Degenerate segment: the "line" is a single point.
        return -1
    # Use whichever axis has the larger extent, for numerical stability.
    if abs(dx) > abs(dy):
        return (px - sx) / dx
    return (py - sy) / dy
|
| 1142 |
+
|
| 1143 |
+
|
| 1144 |
+
def _both_points_are_on_same_side_of_origin(a, b, origin):
|
| 1145 |
+
xDiff = (a[0] - origin[0]) * (b[0] - origin[0])
|
| 1146 |
+
yDiff = (a[1] - origin[1]) * (b[1] - origin[1])
|
| 1147 |
+
return not (xDiff <= 0.0 and yDiff <= 0.0)
|
| 1148 |
+
|
| 1149 |
+
|
| 1150 |
+
def lineLineIntersections(s1, e1, s2, e2):
    """Finds intersections between two line segments.

    Args:
        s1, e1: Coordinates of the first line as 2D tuples.
        s2, e2: Coordinates of the second line as 2D tuples.

    Returns:
        A list of ``Intersection`` objects, each object having ``pt``, ``t1``
        and ``t2`` attributes containing the intersection point, time on first
        segment and time on second segment respectively.

    Examples::

        >>> a = lineLineIntersections( (310,389), (453, 222), (289, 251), (447, 367))
        >>> len(a)
        1
        >>> intersection = a[0]
        >>> intersection.pt
        (374.44882952482897, 313.73458370177315)
        >>> (intersection.t1, intersection.t2)
        (0.45069111555824465, 0.5408153767394238)
    """
    s1x, s1y = s1
    e1x, e1y = e1
    s2x, s2y = s2
    e2x, e2y = e2
    # Degenerate and parallel configurations are rejected up front so the
    # slope computations below can never divide by zero.
    if (
        math.isclose(s2x, e2x) and math.isclose(s1x, e1x) and not math.isclose(s1x, s2x)
    ):  # Parallel vertical
        return []
    if (
        math.isclose(s2y, e2y) and math.isclose(s1y, e1y) and not math.isclose(s1y, s2y)
    ):  # Parallel horizontal
        return []
    if math.isclose(s2x, e2x) and math.isclose(s2y, e2y):  # Line segment is tiny
        return []
    if math.isclose(s1x, e1x) and math.isclose(s1y, e1y):  # Line segment is tiny
        return []
    if math.isclose(e1x, s1x):
        # First segment is (near-)vertical: intersect segment 2 with x == s1x.
        x = s1x
        slope34 = (e2y - s2y) / (e2x - s2x)
        y = slope34 * (x - s2x) + s2y
        pt = (x, y)
        return [
            Intersection(
                pt=pt, t1=_line_t_of_pt(s1, e1, pt), t2=_line_t_of_pt(s2, e2, pt)
            )
        ]
    if math.isclose(s2x, e2x):
        # Second segment is (near-)vertical: intersect segment 1 with x == s2x.
        x = s2x
        slope12 = (e1y - s1y) / (e1x - s1x)
        y = slope12 * (x - s1x) + s1y
        pt = (x, y)
        return [
            Intersection(
                pt=pt, t1=_line_t_of_pt(s1, e1, pt), t2=_line_t_of_pt(s2, e2, pt)
            )
        ]

    # General case: equate the two slope-intercept forms and solve for x.
    slope12 = (e1y - s1y) / (e1x - s1x)
    slope34 = (e2y - s2y) / (e2x - s2x)
    if math.isclose(slope12, slope34):
        return []
    x = (slope12 * s1x - s1y - slope34 * s2x + s2y) / (slope12 - slope34)
    y = slope12 * (x - s1x) + s1y
    pt = (x, y)
    # Report the intersection only if it lies within both segments' bounds.
    if _both_points_are_on_same_side_of_origin(
        pt, e1, s1
    ) and _both_points_are_on_same_side_of_origin(pt, s2, e2):
        return [
            Intersection(
                pt=pt, t1=_line_t_of_pt(s1, e1, pt), t2=_line_t_of_pt(s2, e2, pt)
            )
        ]
    return []
|
| 1226 |
+
|
| 1227 |
+
|
| 1228 |
+
def _alignment_transformation(segment):
    # Returns a transformation which aligns a segment horizontally at the
    # origin. Apply this transformation to curves and root-find to find
    # intersections with the segment.
    first, last = segment[0], segment[-1]
    angle = math.atan2(last[1] - first[1], last[0] - first[0])
    return Identity.rotate(-angle).translate(-first[0], -first[1])
|
| 1236 |
+
|
| 1237 |
+
|
| 1238 |
+
def _curve_line_intersections_t(curve, line):
    # Align the line with the x axis; the curve's intersections with the
    # line are then the roots of the aligned curve's y polynomial.
    aligned = _alignment_transformation(line).transformPoints(curve)
    if len(curve) == 3:
        a, b, c = calcQuadraticParameters(*aligned)
        roots = solveQuadratic(a[1], b[1], c[1])
    elif len(curve) == 4:
        a, b, c, d = calcCubicParameters(*aligned)
        roots = solveCubic(a[1], b[1], c[1], d[1])
    else:
        raise ValueError("Unknown curve degree")
    # Keep only roots that fall on the curve segment itself.
    return sorted(t for t in roots if 0.0 <= t <= 1)
|
| 1249 |
+
|
| 1250 |
+
|
| 1251 |
+
def curveLineIntersections(curve, line):
    """Finds intersections between a curve and a line.

    Args:
        curve: List of coordinates of the curve segment as 2D tuples.
        line: List of coordinates of the line segment as 2D tuples.

    Returns:
        A list of ``Intersection`` objects, each object having ``pt``, ``t1``
        and ``t2`` attributes containing the intersection point, time on first
        segment and time on second segment respectively.

    Examples::
        >>> curve = [ (100, 240), (30, 60), (210, 230), (160, 30) ]
        >>> line = [ (25, 260), (230, 20) ]
        >>> intersections = curveLineIntersections(curve, line)
        >>> len(intersections)
        3
        >>> intersections[0].pt
        (84.9000930760723, 189.87306176459828)
    """
    pointFinder = {3: quadraticPointAtT, 4: cubicPointAtT}.get(len(curve))
    if pointFinder is None:
        raise ValueError("Unknown curve degree")
    result = []
    for t in _curve_line_intersections_t(curve, line):
        pt = pointFinder(*curve, t)
        # Back-project the point onto the line, to avoid problems with
        # numerical accuracy in the case of vertical and horizontal lines
        line_t = _line_t_of_pt(*line, pt)
        pt = linePointAtT(*line, line_t)
        result.append(Intersection(pt=pt, t1=t, t2=line_t))
    return result
|
| 1287 |
+
|
| 1288 |
+
|
| 1289 |
+
def _curve_bounds(c):
    # Bounding rectangle of a quadratic or cubic segment.
    n = len(c)
    if n == 3:
        return calcQuadraticBounds(*c)
    if n == 4:
        return calcCubicBounds(*c)
    raise ValueError("Unknown curve degree")
|
| 1295 |
+
|
| 1296 |
+
|
| 1297 |
+
def _split_segment_at_t(c, t):
    # Split any segment (line, quadratic or cubic) in two at parameter t.
    n = len(c)
    if n == 2:
        # A line: split at the interpolated point.
        s, e = c
        mid = linePointAtT(s, e, t)
        return [(s, mid), (mid, e)]
    if n == 3:
        return splitQuadraticAtT(*c, t)
    if n == 4:
        return splitCubicAtT(*c, t)
    raise ValueError("Unknown curve degree")
|
| 1307 |
+
|
| 1308 |
+
|
| 1309 |
+
def _curve_curve_intersections_t(
    curve1, curve2, precision=1e-3, range1=None, range2=None
):
    # Recursive bounding-box subdivision: halve both curves until their
    # bounding boxes are smaller than `precision`, tracking the t ranges
    # each sub-curve covers so the final midpoints refer to the original
    # curves' parameter spaces.
    bounds1 = _curve_bounds(curve1)
    bounds2 = _curve_bounds(curve2)

    if not range1:
        range1 = (0.0, 1.0)
    if not range2:
        range2 = (0.0, 1.0)

    # If bounds don't intersect, go home
    intersects, _ = sectRect(bounds1, bounds2)
    if not intersects:
        return []

    def midpoint(r):
        return 0.5 * (r[0] + r[1])

    # If they do overlap but they're tiny, approximate
    if rectArea(bounds1) < precision and rectArea(bounds2) < precision:
        return [(midpoint(range1), midpoint(range2))]

    c11, c12 = _split_segment_at_t(curve1, 0.5)
    c11_range = (range1[0], midpoint(range1))
    c12_range = (midpoint(range1), range1[1])

    c21, c22 = _split_segment_at_t(curve2, 0.5)
    c21_range = (range2[0], midpoint(range2))
    c22_range = (midpoint(range2), range2[1])

    # Recurse into all four pairings of the two halves.
    found = []
    found.extend(
        _curve_curve_intersections_t(
            c11, c21, precision, range1=c11_range, range2=c21_range
        )
    )
    found.extend(
        _curve_curve_intersections_t(
            c12, c21, precision, range1=c12_range, range2=c21_range
        )
    )
    found.extend(
        _curve_curve_intersections_t(
            c11, c22, precision, range1=c11_range, range2=c22_range
        )
    )
    found.extend(
        _curve_curve_intersections_t(
            c12, c22, precision, range1=c12_range, range2=c22_range
        )
    )

    # De-duplicate results that fall within the same precision-sized cell.
    unique_key = lambda ts: (int(ts[0] / precision), int(ts[1] / precision))
    seen = set()
    unique_values = []

    for ts in found:
        key = unique_key(ts)
        if key in seen:
            continue
        seen.add(key)
        unique_values.append(ts)

    return unique_values
|
| 1374 |
+
|
| 1375 |
+
|
| 1376 |
+
def _is_linelike(segment):
    # A segment is "line-like" when, after aligning its endpoints with the
    # x axis, every control point lies (numerically) on that axis.
    aligned = _alignment_transformation(segment).transformPoints(segment)
    return all(math.isclose(p[1], 0.0) for p in aligned)
|
| 1379 |
+
|
| 1380 |
+
|
| 1381 |
+
def curveCurveIntersections(curve1, curve2):
    """Finds intersections between a curve and a curve.

    Args:
        curve1: List of coordinates of the first curve segment as 2D tuples.
        curve2: List of coordinates of the second curve segment as 2D tuples.

    Returns:
        A list of ``Intersection`` objects, each object having ``pt``, ``t1``
        and ``t2`` attributes containing the intersection point, time on first
        segment and time on second segment respectively.

    Examples::
        >>> curve1 = [ (10,100), (90,30), (40,140), (220,220) ]
        >>> curve2 = [ (5,150), (180,20), (80,250), (210,190) ]
        >>> intersections = curveCurveIntersections(curve1, curve2)
        >>> len(intersections)
        3
        >>> intersections[0].pt
        (81.7831487395506, 109.88904552375288)
    """
    # Degenerate curves with collinear control points are delegated to the
    # more robust line-based intersection routines.
    if _is_linelike(curve1):
        line1 = curve1[0], curve1[-1]
        if _is_linelike(curve2):
            line2 = curve2[0], curve2[-1]
            return lineLineIntersections(*line1, *line2)
        else:
            # NOTE(review): here t1 is the time on curve2 and t2 the time on
            # line1 (arguments are passed in swapped order) — confirm callers
            # expect this before relying on t1/t2.
            return curveLineIntersections(curve2, line1)
    elif _is_linelike(curve2):
        line2 = curve2[0], curve2[-1]
        return curveLineIntersections(curve1, line2)

    # General case: recursive subdivision to find (t1, t2) pairs.
    intersection_ts = _curve_curve_intersections_t(curve1, curve2)
    return [
        Intersection(pt=segmentPointAtT(curve1, ts[0]), t1=ts[0], t2=ts[1])
        for ts in intersection_ts
    ]
|
| 1418 |
+
|
| 1419 |
+
|
| 1420 |
+
def segmentSegmentIntersections(seg1, seg2):
    """Finds intersections between two segments.

    Args:
        seg1: List of coordinates of the first segment as 2D tuples.
        seg2: List of coordinates of the second segment as 2D tuples.

    Returns:
        A list of ``Intersection`` objects, each object having ``pt``, ``t1``
        and ``t2`` attributes containing the intersection point, time on first
        segment and time on second segment respectively.

    Examples::
        >>> curve1 = [ (10,100), (90,30), (40,140), (220,220) ]
        >>> curve2 = [ (5,150), (180,20), (80,250), (210,190) ]
        >>> intersections = segmentSegmentIntersections(curve1, curve2)
        >>> len(intersections)
        3
        >>> intersections[0].pt
        (81.7831487395506, 109.88904552375288)
        >>> curve3 = [ (100, 240), (30, 60), (210, 230), (160, 30) ]
        >>> line = [ (25, 260), (230, 20) ]
        >>> intersections = segmentSegmentIntersections(curve3, line)
        >>> len(intersections)
        3
        >>> intersections[0].pt
        (84.9000930760723, 189.87306176459828)

    """
    # Arrange by degree: make seg1 the higher-degree segment.
    swapped = len(seg2) > len(seg1)
    if swapped:
        seg1, seg2 = seg2, seg1
    if len(seg1) > 2 and len(seg2) > 2:
        intersections = curveCurveIntersections(seg1, seg2)
    elif len(seg1) > 2:
        intersections = curveLineIntersections(seg1, seg2)
    elif len(seg1) == 2 and len(seg2) == 2:
        intersections = lineLineIntersections(*seg1, *seg2)
    else:
        raise ValueError("Couldn't work out which intersection function to use")
    if swapped:
        # Swap the t values back to match the caller's argument order.
        return [Intersection(pt=i.pt, t1=i.t2, t2=i.t1) for i in intersections]
    return intersections
|
| 1466 |
+
|
| 1467 |
+
|
| 1468 |
+
def _segmentrepr(obj):
|
| 1469 |
+
"""
|
| 1470 |
+
>>> _segmentrepr([1, [2, 3], [], [[2, [3, 4], [0.1, 2.2]]]])
|
| 1471 |
+
'(1, (2, 3), (), ((2, (3, 4), (0.1, 2.2))))'
|
| 1472 |
+
"""
|
| 1473 |
+
try:
|
| 1474 |
+
it = iter(obj)
|
| 1475 |
+
except TypeError:
|
| 1476 |
+
return "%g" % obj
|
| 1477 |
+
else:
|
| 1478 |
+
return "(%s)" % ", ".join(_segmentrepr(x) for x in it)
|
| 1479 |
+
|
| 1480 |
+
|
| 1481 |
+
def printSegments(segments):
    """Helper for the doctests, displaying each segment in a list of
    segments on a single line as a tuple.
    """
    # One line of output per segment, formatted by _segmentrepr.
    for seg in segments:
        line = _segmentrepr(seg)
        print(line)
|
| 1487 |
+
|
| 1488 |
+
|
| 1489 |
+
if __name__ == "__main__":
    import sys
    import doctest

    # Run this module's doctests; the process exit code is the number of
    # failing doctest examples (0 on success).
    sys.exit(doctest.testmod().failed)
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/classifyTools.py
ADDED
|
@@ -0,0 +1,170 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
""" fontTools.misc.classifyTools.py -- tools for classifying things.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
class Classifier(object):
    """
    Main Classifier object, used to classify things into similar sets.

    Feed sets in via :meth:`add`/:meth:`update`; the classifier incrementally
    refines a partition of everything seen so far, such that each produced
    class is either a subset of, or disjoint from, every input set.
    """

    def __init__(self, sort=True):
        # sort=True: classes come back sorted by decreasing size, then by
        # natural sort order; sort=False: discovery order.
        self._things = set()  # set of all things known so far
        self._sets = []  # list of class sets produced so far
        self._mapping = {}  # map from things to their class set
        self._dirty = False  # True when _sets needs pruning/sorting
        self._sort = sort

    def add(self, set_of_things):
        """
        Add a set to the classifier. Any iterable is accepted.
        """
        if not set_of_things:
            return

        self._dirty = True

        things, sets, mapping = self._things, self._sets, self._mapping

        s = set(set_of_things)
        intersection = s.intersection(things)  # existing things
        s.difference_update(intersection)  # new things
        difference = s
        del s

        # Add new class for new things
        if difference:
            things.update(difference)
            sets.append(difference)
            for thing in difference:
                mapping[thing] = difference
        del difference

        # Split every existing class that partially overlaps the new set:
        # its members that belong to the new set move into a fresh class.
        # Mutation order matters here: old_class is shrunk in place so
        # existing mapping entries for the remaining members stay valid.
        while intersection:
            # Take one item and process the old class it belongs to
            old_class = mapping[next(iter(intersection))]
            old_class_intersection = old_class.intersection(intersection)

            # Update old class to remove items from new set
            old_class.difference_update(old_class_intersection)

            # Remove processed items from todo list
            intersection.difference_update(old_class_intersection)

            # Add new class for the intersection with old class
            sets.append(old_class_intersection)
            for thing in old_class_intersection:
                mapping[thing] = old_class_intersection
            del old_class_intersection

    def update(self, list_of_sets):
        """
        Add a list of sets to the classifier. Any iterable of iterables is accepted.
        """
        for s in list_of_sets:
            self.add(s)

    def _process(self):
        # Deferred cleanup: drop classes emptied by later splits and,
        # when requested, sort the survivors. Cheap no-op when clean.
        if not self._dirty:
            return

        # Do any deferred processing
        sets = self._sets
        self._sets = [s for s in sets if s]

        if self._sort:
            self._sets = sorted(self._sets, key=lambda s: (-len(s), sorted(s)))

        self._dirty = False

    # Output methods

    def getThings(self):
        """Returns the set of all things known so far.

        The return value belongs to the Classifier object and should NOT
        be modified while the classifier is still in use.
        """
        self._process()
        return self._things

    def getMapping(self):
        """Returns the mapping from things to their class set.

        The return value belongs to the Classifier object and should NOT
        be modified while the classifier is still in use.
        """
        self._process()
        return self._mapping

    def getClasses(self):
        """Returns the list of class sets.

        The return value belongs to the Classifier object and should NOT
        be modified while the classifier is still in use.
        """
        self._process()
        return self._sets
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
def classify(list_of_sets, sort=True):
    """
    Takes a iterable of iterables (list of sets from here on; but any
    iterable works.), and returns the smallest list of sets such that
    each set, is either a subset, or is disjoint from, each of the input
    sets.

    In other words, this function classifies all the things present in
    any of the input sets, into similar classes, based on which sets
    things are a member of.

    If sort=True, return class sets are sorted by decreasing size and
    their natural sort order within each class size. Otherwise, class
    sets are returned in the order that they were identified, which is
    generally not significant.

    >>> classify([]) == ([], {})
    True
    >>> classify([[]]) == ([], {})
    True
    >>> classify([[], []]) == ([], {})
    True
    >>> classify([[1]]) == ([{1}], {1: {1}})
    True
    >>> classify([[1,2]]) == ([{1, 2}], {1: {1, 2}, 2: {1, 2}})
    True
    >>> classify([[1],[2]]) == ([{1}, {2}], {1: {1}, 2: {2}})
    True
    >>> classify([[1,2],[2]]) == ([{1}, {2}], {1: {1}, 2: {2}})
    True
    >>> classify([[1,2],[2,4]]) == ([{1}, {2}, {4}], {1: {1}, 2: {2}, 4: {4}})
    True
    >>> classify([[1,2],[2,4,5]]) == (
    ...     [{4, 5}, {1}, {2}], {1: {1}, 2: {2}, 4: {4, 5}, 5: {4, 5}})
    True
    >>> classify([[1,2],[2,4,5]], sort=False) == (
    ...     [{1}, {4, 5}, {2}], {1: {1}, 2: {2}, 4: {4, 5}, 5: {4, 5}})
    True
    >>> classify([[1,2,9],[2,4,5]], sort=False) == (
    ...     [{1, 9}, {4, 5}, {2}], {1: {1, 9}, 2: {2}, 4: {4, 5}, 5: {4, 5},
    ...         9: {1, 9}})
    True
    >>> classify([[1,2,9,15],[2,4,5]], sort=False) == (
    ...     [{1, 9, 15}, {4, 5}, {2}], {1: {1, 9, 15}, 2: {2}, 4: {4, 5},
    ...         5: {4, 5}, 9: {1, 9, 15}, 15: {1, 9, 15}})
    True
    >>> classes, mapping = classify([[1,2,9,15],[2,4,5],[15,5]], sort=False)
    >>> set([frozenset(c) for c in classes]) == set(
    ...     [frozenset(s) for s in ({1, 9}, {4}, {2}, {5}, {15})])
    True
    >>> mapping == {1: {1, 9}, 2: {2}, 4: {4}, 5: {5}, 9: {1, 9}, 15: {15}}
    True
    """
    # Feed every input set through a fresh Classifier instance and hand
    # back its computed classes and thing->class mapping.
    c = Classifier(sort=sort)
    for input_set in list_of_sets:
        c.add(input_set)
    return c.getClasses(), c.getMapping()
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
if __name__ == "__main__":
    import sys, doctest

    # Run this module's doctests (ELLIPSIS enabled); the process exit code
    # is the number of failing doctest examples (0 on success).
    sys.exit(doctest.testmod(optionflags=doctest.ELLIPSIS).failed)
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/configTools.py
ADDED
|
@@ -0,0 +1,349 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Code of the config system; not related to fontTools or fonts in particular.
|
| 3 |
+
|
| 4 |
+
The options that are specific to fontTools are in :mod:`fontTools.config`.
|
| 5 |
+
|
| 6 |
+
To create your own config system, you need to create an instance of
|
| 7 |
+
:class:`Options`, and a subclass of :class:`AbstractConfig` with its
|
| 8 |
+
``options`` class variable set to your instance of Options.
|
| 9 |
+
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
from __future__ import annotations
|
| 13 |
+
|
| 14 |
+
import logging
from dataclasses import dataclass
from typing import (
    Any,
    Callable,
    ClassVar,
    Dict,
    Iterable,
    Iterator,
    Mapping,
    MutableMapping,
    Optional,
    Set,
    Union,
)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
log = logging.getLogger(__name__)
|
| 31 |
+
|
| 32 |
+
__all__ = [
|
| 33 |
+
"AbstractConfig",
|
| 34 |
+
"ConfigAlreadyRegisteredError",
|
| 35 |
+
"ConfigError",
|
| 36 |
+
"ConfigUnknownOptionError",
|
| 37 |
+
"ConfigValueParsingError",
|
| 38 |
+
"ConfigValueValidationError",
|
| 39 |
+
"Option",
|
| 40 |
+
"Options",
|
| 41 |
+
]
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
class ConfigError(Exception):
    """Base exception for the config module."""


class ConfigAlreadyRegisteredError(ConfigError):
    """Raised when a module tries to register a configuration option that
    already exists.

    Should not be raised too much really, only when developing new fontTools
    modules.
    """

    def __init__(self, name):
        super().__init__(f"Config option {name} is already registered.")


class ConfigValueParsingError(ConfigError):
    """Raised when a configuration value cannot be parsed."""

    def __init__(self, name, value):
        super().__init__(
            f"Config option {name}: value cannot be parsed (given {repr(value)})"
        )


class ConfigValueValidationError(ConfigError):
    """Raised when a configuration value cannot be validated."""

    def __init__(self, name, value):
        super().__init__(
            f"Config option {name}: value is invalid (given {repr(value)})"
        )


class ConfigUnknownOptionError(ConfigError):
    """Raised when a configuration option is unknown."""

    def __init__(self, option_or_name):
        # Fix: dropped the stray trailing ">" that previously leaked into
        # the message (leftover from a "<Option ...>"-style repr).
        name = (
            f"'{option_or_name.name}' (id={id(option_or_name)})"
            if isinstance(option_or_name, Option)
            else f"'{option_or_name}'"
        )
        super().__init__(f"Config option {name} is unknown")
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
# eq=False because Options are unique, not fungible objects
@dataclass(frozen=True, eq=False)
class Option:
    """Describes one configuration option: its identity, default, and how
    string input is parsed and values validated."""

    name: str
    """Unique name identifying the option (e.g. package.module:MY_OPTION)."""
    help: str
    """Help text for this option."""
    default: Any
    """Default value for this option."""
    parse: Callable[[str], Any]
    """Turn input (e.g. string) into proper type. Only when reading from file."""
    validate: Optional[Callable[[Any], bool]] = None
    """Return true if the given value is an acceptable value."""

    @staticmethod
    def parse_optional_bool(v: str) -> Optional[bool]:
        """Parse "0"/"no"/"false" as False, "1"/"yes"/"true" as True and
        "auto"/"none" as None (all case-insensitive).

        Raises:
            ValueError: if *v* matches none of the accepted spellings.
        """
        s = str(v).lower()
        if s in {"0", "no", "false"}:
            return False
        if s in {"1", "yes", "true"}:
            return True
        if s in {"auto", "none"}:
            return None
        # Fix: this message was a plain string missing the f-prefix, so the
        # offending value was never interpolated into the error text.
        raise ValueError(f"invalid optional bool: {v!r}")

    @staticmethod
    def validate_optional_bool(v: Any) -> bool:
        """Return True if *v* is None or a bool (the valid values for an
        optional-bool option)."""
        return v is None or isinstance(v, bool)
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
class Options(Mapping):
    """Registry of available options for a given config system.

    Define new options using the :meth:`register()` method.

    Access existing options using the Mapping interface.
    """

    # Backing store for registered options, keyed by option name.
    # Name-mangled (double underscore) so subclasses cannot collide.
    __options: Dict[str, Option]

    def __init__(self, other: Optional["Options"] = None) -> None:
        self.__options = {}
        if other is not None:
            # Copy-construct: re-register every option from the other registry.
            for option in other.values():
                self.register_option(option)

    def register(
        self,
        name: str,
        help: str,
        default: Any,
        parse: Callable[[str], Any],
        validate: Optional[Callable[[Any], bool]] = None,
    ) -> Option:
        """Create and register a new option."""
        return self.register_option(Option(name, help, default, parse, validate))

    def register_option(self, option: Option) -> Option:
        """Register a new option.

        Raises:
            ConfigAlreadyRegisteredError: if an option with the same name
                already exists in this registry.
        """
        name = option.name
        if name in self.__options:
            raise ConfigAlreadyRegisteredError(name)
        self.__options[name] = option
        return option

    def is_registered(self, option: Option) -> bool:
        """Return True if the same option object is already registered."""
        # Identity check (is), not equality: Option uses eq=False on purpose.
        return self.__options.get(option.name) is option

    def __getitem__(self, key: str) -> Option:
        return self.__options.__getitem__(key)

    def __iter__(self) -> Iterator[str]:
        return self.__options.__iter__()

    def __len__(self) -> int:
        return self.__options.__len__()

    def __repr__(self) -> str:
        # Compact multi-line listing; only each option's default is shown.
        return (
            f"{self.__class__.__name__}({{\n"
            + "".join(
                f"    {k!r}: Option(default={v.default!r}, ...),\n"
                for k, v in self.__options.items()
            )
            + "})"
        )
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
# Module-private sentinel: lets AbstractConfig.get() distinguish "caller gave
# no default" from an explicit default of None.
_USE_GLOBAL_DEFAULT = object()
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
class AbstractConfig(MutableMapping):
    """
    Create a set of config values, optionally pre-filled with values from
    the given dictionary or pre-existing config object.

    The class implements the MutableMapping protocol keyed by option name (`str`).
    For convenience its methods accept either Option or str as the key parameter.

    .. seealso:: :meth:`set()`

    This config class is abstract because it needs its ``options`` class
    var to be set to an instance of :class:`Options` before it can be
    instantiated and used.

    .. code:: python

        class MyConfig(AbstractConfig):
            options = Options()

        MyConfig.register_option( "test:option_name", "This is an option", 0, int, lambda v: isinstance(v, int))

        cfg = MyConfig({"test:option_name": 10})

    """

    # Subclasses must assign an Options registry here before instantiation.
    options: ClassVar[Options]

    @classmethod
    def register_option(
        cls,
        name: str,
        help: str,
        default: Any,
        parse: Callable[[str], Any],
        validate: Optional[Callable[[Any], bool]] = None,
    ) -> Option:
        """Register an available option in this config system."""
        return cls.options.register(
            name, help=help, default=default, parse=parse, validate=validate
        )

    # Only explicitly-set values live here; unset options fall back to
    # their Option.default in get().
    _values: Dict[str, Any]

    def __init__(
        self,
        values: Union[AbstractConfig, Dict[Union[Option, str], Any]] = {},
        parse_values: bool = False,
        skip_unknown: bool = False,
    ):
        # NOTE: the mutable default `{}` is safe here because it is only
        # iterated below, never mutated or stored on the instance.
        self._values = {}
        values_dict = values._values if isinstance(values, AbstractConfig) else values
        for name, value in values_dict.items():
            self.set(name, value, parse_values, skip_unknown)

    def _resolve_option(self, option_or_name: Union[Option, str]) -> Option:
        # Normalize an Option-or-name key to the registered Option object;
        # raises ConfigUnknownOptionError for unregistered options and
        # TypeError for any other key type.
        if isinstance(option_or_name, Option):
            option = option_or_name
            if not self.options.is_registered(option):
                raise ConfigUnknownOptionError(option)
            return option
        elif isinstance(option_or_name, str):
            name = option_or_name
            try:
                return self.options[name]
            except KeyError:
                raise ConfigUnknownOptionError(name)
        else:
            raise TypeError(
                "expected Option or str, found "
                f"{type(option_or_name).__name__}: {option_or_name!r}"
            )

    def set(
        self,
        option_or_name: Union[Option, str],
        value: Any,
        parse_values: bool = False,
        skip_unknown: bool = False,
    ):
        """Set the value of an option.

        Args:
            * `option_or_name`: an `Option` object or its name (`str`).
            * `value`: the value to be assigned to given option.
            * `parse_values`: parse the configuration value from a string into
                its proper type, as per its `Option` object. The default
                behavior is to raise `ConfigValueValidationError` when the value
                is not of the right type. Useful when reading options from a
                file type that doesn't support as many types as Python.
            * `skip_unknown`: skip unknown configuration options. The default
                behaviour is to raise `ConfigUnknownOptionError`. Useful when
                reading options from a configuration file that has extra entries
                (e.g. for a later version of fontTools)
        """
        try:
            option = self._resolve_option(option_or_name)
        except ConfigUnknownOptionError as e:
            if skip_unknown:
                log.debug(str(e))
                return
            raise

        # Can be useful if the values come from a source that doesn't have
        # strict typing (.ini file? Terminal input?)
        if parse_values:
            try:
                value = option.parse(value)
            except Exception as e:
                raise ConfigValueParsingError(option.name, value) from e

        # Validation runs on the (possibly parsed) value, if a validator exists.
        if option.validate is not None and not option.validate(value):
            raise ConfigValueValidationError(option.name, value)

        self._values[option.name] = value

    def get(
        self, option_or_name: Union[Option, str], default: Any = _USE_GLOBAL_DEFAULT
    ) -> Any:
        """
        Get the value of an option. The value which is returned is the first
        provided among:

        1. a user-provided value in the options's ``self._values`` dict
        2. a caller-provided default value to this method call
        3. the global default for the option provided in ``fontTools.config``

        This is to provide the ability to migrate progressively from config
        options passed as arguments to fontTools APIs to config options read
        from the current TTFont, e.g.

        .. code:: python

            def fontToolsAPI(font, some_option):
                value = font.cfg.get("someLib.module:SOME_OPTION", some_option)
                # use value

        That way, the function will work the same for users of the API that
        still pass the option to the function call, but will favour the new
        config mechanism if the given font specifies a value for that option.
        """
        option = self._resolve_option(option_or_name)
        if option.name in self._values:
            return self._values[option.name]
        # _USE_GLOBAL_DEFAULT sentinel distinguishes "no default passed"
        # from an explicit default of None.
        if default is not _USE_GLOBAL_DEFAULT:
            return default
        return option.default

    def copy(self):
        # Shallow copy: a new config of the same class holding the same
        # explicitly-set values.
        return self.__class__(self._values)

    def __getitem__(self, option_or_name: Union[Option, str]) -> Any:
        return self.get(option_or_name)

    def __setitem__(self, option_or_name: Union[Option, str], value: Any) -> None:
        return self.set(option_or_name, value)

    def __delitem__(self, option_or_name: Union[Option, str]) -> None:
        option = self._resolve_option(option_or_name)
        del self._values[option.name]

    def __iter__(self) -> Iterator[str]:
        # Iterates only over explicitly-set option names, not all
        # registered options.
        return self._values.__iter__()

    def __len__(self) -> int:
        return len(self._values)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({repr(self._values)})"
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/encodingTools.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""fontTools.misc.encodingTools.py -- tools for working with OpenType encodings.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import fontTools.encodings.codecs
|
| 5 |
+
|
| 6 |
+
# Map keyed by platformID, then platEncID, then possibly langID
_encodingMap = {
    0: {  # Unicode
        0: "utf_16_be",
        1: "utf_16_be",
        2: "utf_16_be",
        3: "utf_16_be",
        4: "utf_16_be",
        5: "utf_16_be",
        6: "utf_16_be",
    },
    1: {  # Macintosh
        # See
        # https://github.com/fonttools/fonttools/issues/236
        0: {  # Macintosh, platEncID==0, keyed by langID
            15: "mac_iceland",
            17: "mac_turkish",
            18: "mac_croatian",
            24: "mac_latin2",
            25: "mac_latin2",
            26: "mac_latin2",
            27: "mac_latin2",
            28: "mac_latin2",
            36: "mac_latin2",
            37: "mac_romanian",
            38: "mac_latin2",
            39: "mac_latin2",
            40: "mac_latin2",
            Ellipsis: "mac_roman",  # Other
        },
        1: "x_mac_japanese_ttx",
        2: "x_mac_trad_chinese_ttx",
        3: "x_mac_korean_ttx",
        6: "mac_greek",
        7: "mac_cyrillic",
        25: "x_mac_simp_chinese_ttx",
        29: "mac_latin2",
        35: "mac_turkish",
        37: "mac_iceland",
    },
    2: {  # ISO
        0: "ascii",
        1: "utf_16_be",
        2: "latin1",
    },
    3: {  # Microsoft
        0: "utf_16_be",
        1: "utf_16_be",
        2: "shift_jis",
        3: "gb2312",
        4: "big5",
        5: "euc_kr",
        6: "johab",
        10: "utf_16_be",
    },
}


def getEncoding(platformID, platEncID, langID, default=None):
    """Return the Python encoding name for an OpenType
    platformID/platEncID/langID triplet.

    If no encoding is known for the given combination, *default*
    (``None`` unless overridden) is returned instead.
    """
    per_platform = _encodingMap.get(platformID, {})
    result = per_platform.get(platEncID, default)
    if isinstance(result, dict):
        # Language-dependent sub-table; Ellipsis is its catch-all entry.
        result = result.get(langID, result[Ellipsis])
    return result
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/etree.py
ADDED
|
@@ -0,0 +1,479 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Shim module exporting the same ElementTree API for lxml and
|
| 2 |
+
xml.etree backends.
|
| 3 |
+
|
| 4 |
+
When lxml is installed, it is automatically preferred over the built-in
|
| 5 |
+
xml.etree module.
|
| 6 |
+
On Python 2.7, the cElementTree module is preferred over the pure-python
|
| 7 |
+
ElementTree module.
|
| 8 |
+
|
| 9 |
+
Besides exporting a unified interface, this also defines extra functions
|
| 10 |
+
or subclasses built-in ElementTree classes to add features that are
|
| 11 |
+
only availble in lxml, like OrderedDict for attributes, pretty_print and
|
| 12 |
+
iterwalk.
|
| 13 |
+
"""
|
| 14 |
+
|
| 15 |
+
from fontTools.misc.textTools import tostr
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
XML_DECLARATION = """<?xml version='1.0' encoding='%s'?>"""
|
| 19 |
+
|
| 20 |
+
__all__ = [
|
| 21 |
+
# public symbols
|
| 22 |
+
"Comment",
|
| 23 |
+
"dump",
|
| 24 |
+
"Element",
|
| 25 |
+
"ElementTree",
|
| 26 |
+
"fromstring",
|
| 27 |
+
"fromstringlist",
|
| 28 |
+
"iselement",
|
| 29 |
+
"iterparse",
|
| 30 |
+
"parse",
|
| 31 |
+
"ParseError",
|
| 32 |
+
"PI",
|
| 33 |
+
"ProcessingInstruction",
|
| 34 |
+
"QName",
|
| 35 |
+
"SubElement",
|
| 36 |
+
"tostring",
|
| 37 |
+
"tostringlist",
|
| 38 |
+
"TreeBuilder",
|
| 39 |
+
"XML",
|
| 40 |
+
"XMLParser",
|
| 41 |
+
"register_namespace",
|
| 42 |
+
]
|
| 43 |
+
|
| 44 |
+
try:
|
| 45 |
+
from lxml.etree import *
|
| 46 |
+
|
| 47 |
+
_have_lxml = True
|
| 48 |
+
except ImportError:
|
| 49 |
+
try:
|
| 50 |
+
from xml.etree.cElementTree import *
|
| 51 |
+
|
| 52 |
+
# the cElementTree version of XML function doesn't support
|
| 53 |
+
# the optional 'parser' keyword argument
|
| 54 |
+
from xml.etree.ElementTree import XML
|
| 55 |
+
except ImportError: # pragma: no cover
|
| 56 |
+
from xml.etree.ElementTree import *
|
| 57 |
+
_have_lxml = False
|
| 58 |
+
|
| 59 |
+
import sys
|
| 60 |
+
|
| 61 |
+
# dict is always ordered in python >= 3.6 and on pypy
|
| 62 |
+
PY36 = sys.version_info >= (3, 6)
|
| 63 |
+
try:
|
| 64 |
+
import __pypy__
|
| 65 |
+
except ImportError:
|
| 66 |
+
__pypy__ = None
|
| 67 |
+
_dict_is_ordered = bool(PY36 or __pypy__)
|
| 68 |
+
del PY36, __pypy__
|
| 69 |
+
|
| 70 |
+
if _dict_is_ordered:
|
| 71 |
+
_Attrib = dict
|
| 72 |
+
else:
|
| 73 |
+
from collections import OrderedDict as _Attrib
|
| 74 |
+
|
| 75 |
+
if isinstance(Element, type):
|
| 76 |
+
_Element = Element
|
| 77 |
+
else:
|
| 78 |
+
# in py27, cElementTree.Element cannot be subclassed, so
|
| 79 |
+
# we need to import the pure-python class
|
| 80 |
+
from xml.etree.ElementTree import Element as _Element
|
| 81 |
+
|
| 82 |
+
class Element(_Element):
    """Element subclass that keeps the order of attributes.

    Attributes are stored in an ordered mapping (``_Attrib``) so that
    serialization preserves insertion order, mirroring lxml's behaviour.
    """

    def __init__(self, tag, attrib=None, **extra):
        super(Element, self).__init__(tag)
        # Always allocate a fresh mapping per instance; the previous
        # ``attrib=_Attrib()`` default was one shared mutable object.
        self.attrib = _Attrib()
        if attrib:
            self.attrib.update(attrib)
        if extra:
            self.attrib.update(extra)
|
| 92 |
+
|
| 93 |
+
def SubElement(parent, tag, attrib=None, **extra):
    """Must override SubElement as well otherwise _elementtree.SubElement
    fails if 'parent' is a subclass of Element object.

    Creates a child element of *parent* with the given *tag* and
    attributes, appends it to *parent*, and returns it.
    """
    # Build a fresh empty mapping when no attributes were given, instead
    # of sharing one mutable default dict across all calls.
    if attrib is None:
        attrib = _Attrib()
    element = parent.__class__(tag, attrib, **extra)
    parent.append(element)
    return element
|
| 100 |
+
|
| 101 |
+
def _iterwalk(element, events, tag):
|
| 102 |
+
include = tag is None or element.tag == tag
|
| 103 |
+
if include and "start" in events:
|
| 104 |
+
yield ("start", element)
|
| 105 |
+
for e in element:
|
| 106 |
+
for item in _iterwalk(e, events, tag):
|
| 107 |
+
yield item
|
| 108 |
+
if include:
|
| 109 |
+
yield ("end", element)
|
| 110 |
+
|
| 111 |
+
def iterwalk(element_or_tree, events=("end",), tag=None):
    """A tree walker that generates events from an existing tree as
    if it was parsing XML data with iterparse().
    Drop-in replacement for lxml.etree.iterwalk.
    """
    # accept either a bare element or a tree-like object with a root
    if iselement(element_or_tree):
        root = element_or_tree
    else:
        root = element_or_tree.getroot()
    # "*" means "match any tag", i.e. no filtering at all
    yield from _iterwalk(root, events, None if tag == "*" else tag)
|
| 124 |
+
|
| 125 |
+
_ElementTree = ElementTree
|
| 126 |
+
|
| 127 |
+
class ElementTree(_ElementTree):
    """ElementTree subclass that adds 'pretty_print' and 'doctype'
    arguments to the 'write' method.
    Currently these are only supported for the default XML serialization
    'method', and not also for "html" or "text", for these are delegated
    to the base class.
    """

    def write(
        self,
        file_or_filename,
        encoding=None,
        xml_declaration=False,
        method=None,
        doctype=None,
        pretty_print=False,
    ):
        if method and method != "xml":
            # delegate to super-class; 'doctype' and 'pretty_print' are
            # only supported for the default "xml" method
            super(ElementTree, self).write(
                file_or_filename,
                encoding=encoding,
                xml_declaration=xml_declaration,
                method=method,
            )
            return

        if encoding is not None and encoding.lower() == "unicode":
            # text (str) output: an XML declaration makes no sense here
            if xml_declaration:
                raise ValueError(
                    "Serialisation to unicode must not request an XML declaration"
                )
            write_declaration = False
            encoding = "unicode"
        elif xml_declaration is None:
            # by default, write an XML declaration only for non-standard encodings
            write_declaration = encoding is not None and encoding.upper() not in (
                "ASCII",
                "UTF-8",
                "UTF8",
                "US-ASCII",
            )
        else:
            write_declaration = xml_declaration

        if encoding is None:
            encoding = "ASCII"

        if pretty_print:
            # NOTE this will modify the tree in-place
            _indent(self._root)

        with _get_writer(file_or_filename, encoding) as write:
            if write_declaration:
                write(XML_DECLARATION % encoding.upper())
                if pretty_print:
                    write("\n")
            if doctype:
                write(_tounicode(doctype))
                if pretty_print:
                    write("\n")

            # compute the qualified-name/namespace tables once for the
            # whole tree, then serialize recursively
            qnames, namespaces = _namespaces(self._root)
            _serialize_xml(write, self._root, qnames, namespaces)
|
| 191 |
+
|
| 192 |
+
import io
|
| 193 |
+
|
| 194 |
+
def tostring(
    element,
    encoding=None,
    xml_declaration=None,
    method=None,
    doctype=None,
    pretty_print=False,
):
    """Custom 'tostring' function that uses our ElementTree subclass, with
    pretty_print support.

    Returns ``str`` when *encoding* is "unicode", ``bytes`` otherwise.
    """
    buffer = io.StringIO() if encoding == "unicode" else io.BytesIO()
    tree = ElementTree(element)
    tree.write(
        buffer,
        encoding=encoding,
        xml_declaration=xml_declaration,
        method=method,
        doctype=doctype,
        pretty_print=pretty_print,
    )
    return buffer.getvalue()
|
| 215 |
+
|
| 216 |
+
# serialization support
|
| 217 |
+
|
| 218 |
+
import re
|
| 219 |
+
|
| 220 |
+
# Valid XML strings can include any Unicode character, excluding control
|
| 221 |
+
# characters, the surrogate blocks, FFFE, and FFFF:
|
| 222 |
+
# Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
|
| 223 |
+
# Here we reversed the pattern to match only the invalid characters.
|
| 224 |
+
# For the 'narrow' python builds supporting only UCS-2, which represent
|
| 225 |
+
# characters beyond BMP as UTF-16 surrogate pairs, we need to pass through
|
| 226 |
+
# the surrogate block. I haven't found a more elegant solution...
|
| 227 |
+
UCS2 = sys.maxunicode < 0x10FFFF
|
| 228 |
+
if UCS2:
|
| 229 |
+
_invalid_xml_string = re.compile(
|
| 230 |
+
"[\u0000-\u0008\u000B-\u000C\u000E-\u001F\uFFFE-\uFFFF]"
|
| 231 |
+
)
|
| 232 |
+
else:
|
| 233 |
+
_invalid_xml_string = re.compile(
|
| 234 |
+
"[\u0000-\u0008\u000B-\u000C\u000E-\u001F\uD800-\uDFFF\uFFFE-\uFFFF]"
|
| 235 |
+
)
|
| 236 |
+
|
| 237 |
+
def _tounicode(s):
    """Test if a string is valid user input and decode it to unicode string
    using ASCII encoding if it's a bytes string.
    Reject all bytes/unicode input that contains non-XML characters.
    Reject all bytes input that contains non-ASCII characters.
    """
    try:
        decoded = tostr(s, encoding="ascii", errors="strict")
    except UnicodeDecodeError:
        raise ValueError(
            "Bytes strings can only contain ASCII characters. "
            "Use unicode strings for non-ASCII characters."
        )
    except AttributeError:
        # not a str/bytes-like object at all
        _raise_serialization_error(s)
    # reject control characters and other code points invalid in XML
    if decoded and _invalid_xml_string.search(decoded):
        raise ValueError(
            "All strings must be XML compatible: Unicode or ASCII, "
            "no NULL bytes or control characters"
        )
    return decoded
|
| 258 |
+
|
| 259 |
+
import contextlib
|
| 260 |
+
|
| 261 |
+
@contextlib.contextmanager
def _get_writer(file_or_filename, encoding):
    """Context manager yielding a text ``write`` callable for the target.

    Accepts a filename or a file-like object; releases any wrapper
    resources on exit while leaving caller-owned files open.
    """
    # returns text write method and release all resources after using
    try:
        write = file_or_filename.write
    except AttributeError:
        # file_or_filename is a file name
        f = open(
            file_or_filename,
            "w",
            encoding="utf-8" if encoding == "unicode" else encoding,
            errors="xmlcharrefreplace",
        )
        with f:
            yield f.write
    else:
        # file_or_filename is a file-like object
        # encoding determines if it is a text or binary writer
        if encoding == "unicode":
            # use a text writer as is
            yield write
        else:
            # wrap a binary writer with TextIOWrapper
            detach_buffer = False
            if isinstance(file_or_filename, io.BufferedIOBase):
                buf = file_or_filename
            elif isinstance(file_or_filename, io.RawIOBase):
                buf = io.BufferedWriter(file_or_filename)
                detach_buffer = True
            else:
                # This is to handle passed objects that aren't in the
                # IOBase hierarchy, but just have a write method
                buf = io.BufferedIOBase()
                buf.writable = lambda: True
                buf.write = write
                try:
                    # TextIOWrapper uses this methods to determine
                    # if BOM (for UTF-16, etc) should be added
                    buf.seekable = file_or_filename.seekable
                    buf.tell = file_or_filename.tell
                except AttributeError:
                    pass
            wrapper = io.TextIOWrapper(
                buf,
                encoding=encoding,
                errors="xmlcharrefreplace",
                newline="\n",
            )
            try:
                yield wrapper.write
            finally:
                # Keep the original file open when the TextIOWrapper and
                # the BufferedWriter are destroyed
                wrapper.detach()
                if detach_buffer:
                    buf.detach()
|
| 317 |
+
|
| 318 |
+
from xml.etree.ElementTree import _namespace_map
|
| 319 |
+
|
| 320 |
+
def _namespaces(elem):
    """Collect qualified names and namespace URIs used in *elem*'s tree.

    Returns a ``(qnames, namespaces)`` pair: ``qnames`` maps each
    qualified name encountered (e.g. "{uri}tag") to its serialized
    "prefix:local" form, and ``namespaces`` maps each URI to its prefix.
    """
    # identify namespaces used in this tree

    # maps qnames to *encoded* prefix:local names
    qnames = {None: None}

    # maps uri:s to prefixes
    namespaces = {}

    def add_qname(qname):
        # calculate serialized qname representation
        try:
            qname = _tounicode(qname)
            if qname[:1] == "{":
                uri, tag = qname[1:].rsplit("}", 1)
                prefix = namespaces.get(uri)
                if prefix is None:
                    # fall back to well-known prefixes, else invent
                    # "ns0", "ns1", ... in encounter order
                    prefix = _namespace_map.get(uri)
                    if prefix is None:
                        prefix = "ns%d" % len(namespaces)
                    else:
                        prefix = _tounicode(prefix)
                    if prefix != "xml":
                        namespaces[uri] = prefix
                if prefix:
                    qnames[qname] = "%s:%s" % (prefix, tag)
                else:
                    qnames[qname] = tag  # default element
            else:
                qnames[qname] = qname
        except TypeError:
            _raise_serialization_error(qname)

    # populate qname and namespaces table
    for elem in elem.iter():
        tag = elem.tag
        if isinstance(tag, QName):
            if tag.text not in qnames:
                add_qname(tag.text)
        elif isinstance(tag, str):
            if tag not in qnames:
                add_qname(tag)
        elif tag is not None and tag is not Comment and tag is not PI:
            _raise_serialization_error(tag)
        for key, value in elem.items():
            if isinstance(key, QName):
                key = key.text
            if key not in qnames:
                add_qname(key)
            if isinstance(value, QName) and value.text not in qnames:
                add_qname(value.text)
        text = elem.text
        if isinstance(text, QName) and text.text not in qnames:
            add_qname(text.text)
    return qnames, namespaces
|
| 375 |
+
|
| 376 |
+
def _serialize_xml(write, elem, qnames, namespaces, **kwargs):
    """Recursively write *elem* as XML through the *write* callable.

    ``qnames``/``namespaces`` come from :func:`_namespaces`; namespace
    declarations are emitted only on the call that receives a non-empty
    ``namespaces`` mapping (recursive calls pass None).
    """
    tag = elem.tag
    text = elem.text
    if tag is Comment:
        write("<!--%s-->" % _tounicode(text))
    elif tag is ProcessingInstruction:
        write("<?%s?>" % _tounicode(text))
    else:
        tag = qnames[_tounicode(tag) if tag is not None else None]
        if tag is None:
            # tag-less element: serialize only its text and children
            if text:
                write(_escape_cdata(text))
            for e in elem:
                _serialize_xml(write, e, qnames, None)
        else:
            write("<" + tag)
            if namespaces:
                for uri, prefix in sorted(
                    namespaces.items(), key=lambda x: x[1]
                ):  # sort on prefix
                    if prefix:
                        prefix = ":" + prefix
                    write(' xmlns%s="%s"' % (prefix, _escape_attrib(uri)))
            attrs = elem.attrib
            if attrs:
                # try to keep existing attrib order
                if len(attrs) <= 1 or type(attrs) is _Attrib:
                    items = attrs.items()
                else:
                    # if plain dict, use lexical order
                    items = sorted(attrs.items())
                for k, v in items:
                    if isinstance(k, QName):
                        k = _tounicode(k.text)
                    else:
                        k = _tounicode(k)
                    if isinstance(v, QName):
                        v = qnames[_tounicode(v.text)]
                    else:
                        v = _escape_attrib(v)
                    write(' %s="%s"' % (qnames[k], v))
            if text is not None or len(elem):
                write(">")
                if text:
                    write(_escape_cdata(text))
                for e in elem:
                    _serialize_xml(write, e, qnames, None)
                write("</" + tag + ">")
            else:
                # no text and no children: self-closing form
                write("/>")
    if elem.tail:
        write(_escape_cdata(elem.tail))
|
| 428 |
+
|
| 429 |
+
def _raise_serialization_error(text):
|
| 430 |
+
raise TypeError("cannot serialize %r (type %s)" % (text, type(text).__name__))
|
| 431 |
+
|
| 432 |
+
def _escape_cdata(text):
    """Escape '&', '<' and '>' in character data."""
    try:
        text = _tounicode(text)
        # '&' must be escaped first; membership test avoids do-nothing
        # replace() calls for short strings
        for raw, entity in (("&", "&amp;"), ("<", "&lt;"), (">", "&gt;")):
            if raw in text:
                text = text.replace(raw, entity)
        return text
    except (TypeError, AttributeError):
        _raise_serialization_error(text)
|
| 446 |
+
|
| 447 |
+
def _escape_attrib(text):
    """Escape '&', '<', '>', '"' and newlines in an attribute value."""
    try:
        text = _tounicode(text)
        # '&' must be escaped first so it doesn't re-escape entities
        for raw, entity in (
            ("&", "&amp;"),
            ("<", "&lt;"),
            (">", "&gt;"),
            ('"', "&quot;"),
            ("\n", "&#10;"),
        ):
            if raw in text:
                text = text.replace(raw, entity)
        return text
    except (TypeError, AttributeError):
        _raise_serialization_error(text)
|
| 464 |
+
|
| 465 |
+
def _indent(elem, level=0):
    # Pretty-print helper: inserts newline + indent whitespace into the
    # .text/.tail of every element, modifying the tree in place.
    # From http://effbot.org/zone/element-lib.htm#prettyprint
    i = "\n" + level * "  "
    if len(elem):
        if not elem.text or not elem.text.strip():
            elem.text = i + "  "
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
        for elem in elem:
            _indent(elem, level + 1)
        # after the loop 'elem' is the *last* child: dedent its tail so
        # the parent's closing tag lines up with its opening tag
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
    else:
        if level and (not elem.tail or not elem.tail.strip()):
            elem.tail = i
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/filenames.py
ADDED
|
@@ -0,0 +1,245 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
This module implements the algorithm for converting between a "user name" -
|
| 3 |
+
something that a user can choose arbitrarily inside a font editor - and a file
|
| 4 |
+
name suitable for use in a wide range of operating systems and filesystems.
|
| 5 |
+
|
| 6 |
+
The `UFO 3 specification <http://unifiedfontobject.org/versions/ufo3/conventions/>`_
|
| 7 |
+
provides an example of an algorithm for such conversion, which avoids illegal
|
| 8 |
+
characters, reserved file names, ambiguity between upper- and lower-case
|
| 9 |
+
characters, and clashes with existing files.
|
| 10 |
+
|
| 11 |
+
This code was originally copied from
|
| 12 |
+
`ufoLib <https://github.com/unified-font-object/ufoLib/blob/8747da7/Lib/ufoLib/filenames.py>`_
|
| 13 |
+
by Tal Leming and is copyright (c) 2005-2016, The RoboFab Developers:
|
| 14 |
+
|
| 15 |
+
- Erik van Blokland
|
| 16 |
+
- Tal Leming
|
| 17 |
+
- Just van Rossum
|
| 18 |
+
"""
|
| 19 |
+
|
| 20 |
+
illegalCharacters = r"\" * + / : < > ? [ \ ] | \0".split(" ")
|
| 21 |
+
illegalCharacters += [chr(i) for i in range(1, 32)]
|
| 22 |
+
illegalCharacters += [chr(0x7F)]
|
| 23 |
+
reservedFileNames = "CON PRN AUX CLOCK$ NUL A:-Z: COM1".lower().split(" ")
|
| 24 |
+
reservedFileNames += "LPT1 LPT2 LPT3 COM2 COM3 COM4".lower().split(" ")
|
| 25 |
+
maxFileNameLength = 255
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class NameTranslationError(Exception):
    """Raised when a user name cannot be translated to a unique file name."""

    pass
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def userNameToFileName(userName, existing=None, prefix="", suffix=""):
    """Converts from a user name to a file name.

    Takes care to avoid illegal characters, reserved file names, ambiguity between
    upper- and lower-case characters, and clashes with existing files.

    Args:
        userName (str): The input file name.
        existing: A case-insensitive list of all existing file names.
        prefix: Prefix to be prepended to the file name.
        suffix: Suffix to be appended to the file name.

    Returns:
        A suitable filename.

    Raises:
        NameTranslationError: If no suitable name could be generated.

    Examples::

        >>> userNameToFileName("a") == "a"
        True
        >>> userNameToFileName("A") == "A_"
        True
        >>> userNameToFileName("AE") == "A_E_"
        True
        >>> userNameToFileName("Ae") == "A_e"
        True
        >>> userNameToFileName("ae") == "ae"
        True
        >>> userNameToFileName("aE") == "aE_"
        True
        >>> userNameToFileName("a.alt") == "a.alt"
        True
        >>> userNameToFileName("A.alt") == "A_.alt"
        True
        >>> userNameToFileName("A.Alt") == "A_.A_lt"
        True
        >>> userNameToFileName("A.aLt") == "A_.aL_t"
        True
        >>> userNameToFileName(u"A.alT") == "A_.alT_"
        True
        >>> userNameToFileName("T_H") == "T__H_"
        True
        >>> userNameToFileName("T_h") == "T__h"
        True
        >>> userNameToFileName("t_h") == "t_h"
        True
        >>> userNameToFileName("F_F_I") == "F__F__I_"
        True
        >>> userNameToFileName("f_f_i") == "f_f_i"
        True
        >>> userNameToFileName("Aacute_V.swash") == "A_acute_V_.swash"
        True
        >>> userNameToFileName(".notdef") == "_notdef"
        True
        >>> userNameToFileName("con") == "_con"
        True
        >>> userNameToFileName("CON") == "C_O_N_"
        True
        >>> userNameToFileName("con.alt") == "_con.alt"
        True
        >>> userNameToFileName("alt.con") == "alt._con"
        True
    """
    # the incoming name must be a str
    if not isinstance(userName, str):
        raise ValueError("The value for userName must be a string.")
    # avoid a shared mutable default list across calls
    if existing is None:
        existing = []
    # establish the prefix and suffix lengths
    prefixLength = len(prefix)
    suffixLength = len(suffix)
    # replace an initial period with an _
    # if no prefix is to be added ([:1] also tolerates the empty string,
    # which userName[0] would raise IndexError on)
    if not prefix and userName[:1] == ".":
        userName = "_" + userName[1:]
    # filter the user name
    filteredUserName = []
    for character in userName:
        # replace illegal characters with _
        if character in illegalCharacters:
            character = "_"
        # add _ to all non-lower characters
        elif character != character.lower():
            character += "_"
        filteredUserName.append(character)
    userName = "".join(filteredUserName)
    # clip to 255
    sliceLength = maxFileNameLength - prefixLength - suffixLength
    userName = userName[:sliceLength]
    # test for illegal files names
    parts = []
    for part in userName.split("."):
        if part.lower() in reservedFileNames:
            part = "_" + part
        parts.append(part)
    userName = ".".join(parts)
    # test for clash
    fullName = prefix + userName + suffix
    if fullName.lower() in existing:
        fullName = handleClash1(userName, existing, prefix, suffix)
    # finished
    return fullName
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def handleClash1(userName, existing=[], prefix="", suffix=""):
    """
    existing should be a case-insensitive list
    of all existing file names.

    >>> prefix = ("0" * 5) + "."
    >>> suffix = "." + ("0" * 10)
    >>> existing = ["a" * 5]

    >>> e = list(existing)
    >>> handleClash1(userName="A" * 5, existing=e,
    ...     prefix=prefix, suffix=suffix) == (
    ...     '00000.AAAAA000000000000001.0000000000')
    True

    >>> e = list(existing)
    >>> e.append(prefix + "aaaaa" + "1".zfill(15) + suffix)
    >>> handleClash1(userName="A" * 5, existing=e,
    ...     prefix=prefix, suffix=suffix) == (
    ...     '00000.AAAAA000000000000002.0000000000')
    True

    >>> e = list(existing)
    >>> e.append(prefix + "AAAAA" + "2".zfill(15) + suffix)
    >>> handleClash1(userName="A" * 5, existing=e,
    ...     prefix=prefix, suffix=suffix) == (
    ...     '00000.AAAAA000000000000001.0000000000')
    True
    """
    # When prefix + name + 15-digit counter + suffix would exceed the
    # maximum file name length, trim the user name to make room.
    fullLength = len(prefix) + len(userName) + len(suffix) + 15
    if fullLength > maxFileNameLength:
        userName = userName[: maxFileNameLength - fullLength]
    # Append zero-padded counters until an unused candidate is found.
    finalName = None
    for counter in range(1, 999999999999999):
        candidate = prefix + userName + str(counter).zfill(15) + suffix
        if candidate.lower() not in existing:
            finalName = candidate
            break
    # if there is a clash for every counter, go to the next fallback
    if finalName is None:
        finalName = handleClash2(existing, prefix, suffix)
    # finished
    return finalName
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
def handleClash2(existing=[], prefix="", suffix=""):
    """
    existing should be a case-insensitive list
    of all existing file names.

    >>> prefix = ("0" * 5) + "."
    >>> suffix = "." + ("0" * 10)
    >>> existing = [prefix + str(i) + suffix for i in range(100)]

    >>> e = list(existing)
    >>> handleClash2(existing=e, prefix=prefix, suffix=suffix) == (
    ...     '00000.100.0000000000')
    True

    >>> e = list(existing)
    >>> e.remove(prefix + "1" + suffix)
    >>> handleClash2(existing=e, prefix=prefix, suffix=suffix) == (
    ...     '00000.1.0000000000')
    True

    >>> e = list(existing)
    >>> e.remove(prefix + "2" + suffix)
    >>> handleClash2(existing=e, prefix=prefix, suffix=suffix) == (
    ...     '00000.2.0000000000')
    True
    """
    # the longest counter that still fits between prefix and suffix
    maxLength = maxFileNameLength - len(prefix) - len(suffix)
    maxValue = int("9" * maxLength)
    # try plain (unpadded) counters until an unused candidate is found
    finalName = None
    for counter in range(1, maxValue):
        candidate = prefix + str(counter) + suffix
        if candidate.lower() not in existing:
            finalName = candidate
            break
    # raise an error if nothing has been found
    if finalName is None:
        raise NameTranslationError("No unique name could be found.")
    # finished
    return finalName
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
# Run this module's doctests when executed directly; the exit status is
# the number of failed examples (0 on success).
if __name__ == "__main__":
    import doctest
    import sys

    sys.exit(doctest.testmod().failed)
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/intTools.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
__all__ = ["popCount", "bit_count", "bit_indices"]


# Use the native int.bit_count (Python >= 3.10) when available; otherwise
# fall back to a pure-Python population count.
try:
    bit_count = int.bit_count
except AttributeError:

    def bit_count(v):
        """Return number of 1 bits (population count) of the absolute value of an integer.

        See https://docs.python.org/3.10/library/stdtypes.html#int.bit_count
        """
        return bin(v).count("1")


popCount = bit_count  # alias


def bit_indices(v):
    """Return list of indices where bits are set, 0 being the index of the least significant bit.

    >>> bit_indices(0b101)
    [0, 2]
    """
    return [i for i, b in enumerate(bin(v)[::-1]) if b == "1"]
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/lazyTools.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from collections import UserDict, UserList
|
| 2 |
+
|
| 3 |
+
__all__ = ["LazyDict", "LazyList"]
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class LazyDict(UserDict):
    """Dict whose callable values are evaluated on first access.

    A callable value is invoked with its key; the result replaces the
    callable, so every subsequent lookup returns the cached value.
    """

    def __init__(self, data):
        super().__init__()
        self.data = data

    def __getitem__(self, key):
        value = self.data[key]
        if not callable(value):
            return value
        resolved = value(key)
        self.data[key] = resolved
        return resolved
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class LazyList(UserList):
    """List whose callable items are evaluated on first access.

    A callable item is invoked with its own index; the result replaces
    the callable so subsequent accesses return the cached value.
    """

    def __getitem__(self, index):
        if isinstance(index, slice):
            return [self[i] for i in range(*index.indices(len(self)))]
        item = self.data[index]
        if callable(item):
            item = item(index)
            self.data[index] = item
        return item

    def __add__(self, other):
        if isinstance(other, LazyList):
            return list(self) + list(other)
        if isinstance(other, list):
            return list(self) + other
        return NotImplemented

    def __radd__(self, other):
        if isinstance(other, list):
            return other + list(self)
        return NotImplemented
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/psOperators.py
ADDED
|
@@ -0,0 +1,572 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
_accessstrings = {0: "", 1: "readonly", 2: "executeonly", 3: "noaccess"}
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
class ps_object(object):
    """Base class wrapping a PostScript value.

    Subclasses are named ``ps_<typename>``; the ``type`` attribute is
    derived from the class name by stripping the ``ps_`` prefix and
    appending ``"type"`` (e.g. ``ps_integer`` -> ``"integertype"``).
    """

    # Class-level defaults; instances may override.
    literal = 1
    access = 0
    value = None

    def __init__(self, value):
        self.value = value
        self.type = self.__class__.__name__[3:] + "type"

    def __repr__(self):
        kind = self.__class__.__name__[3:]
        return "<%s %s>" % (kind, repr(self.value))
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class ps_operator(ps_object):
    """A built-in PostScript operator: wraps a Python callable, not a value."""

    # Operators are executable, never literal.
    literal = 0

    def __init__(self, name, function):
        # Note: deliberately does NOT call ps_object.__init__; an operator
        # stores a name and a callable instead of a plain value.
        self.name = name
        self.function = function
        self.type = self.__class__.__name__[3:] + "type"

    def __repr__(self):
        return "<operator %s>" % self.name
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class ps_procedure(ps_object):
    """A PostScript procedure: an executable sequence of objects."""

    literal = 0

    def __repr__(self):
        return "<procedure>"

    def __str__(self):
        # Space-separated element sources, wrapped in braces.
        body = " ".join(str(element) for element in self.value)
        return "{" + body + "}"
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class ps_name(ps_object):
    """A PostScript name; executable by default."""

    literal = 0

    def __str__(self):
        # Literal names print with a leading slash, executable names bare.
        prefix = "/" if self.literal else ""
        return prefix + self.value
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class ps_literal(ps_object):
    """A literal name; always prints with a leading slash."""

    def __str__(self):
        return "/%s" % self.value
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class ps_array(ps_object):
    """A PostScript array of ps_object elements."""

    def __str__(self):
        # Each element is rendered followed by its access keyword (if any),
        # elements separated by single spaces, the whole wrapped in brackets.
        parts = []
        for element in self.value:
            access = _accessstrings[element.access]
            if access:
                access = " " + access
            parts.append(str(element) + access)
        return "[" + " ".join(parts) + "]"

    def __repr__(self):
        return "<array>"
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
# Order in which the cleartext (pre-eexec) font dict entries are written
# by ps_font.__str__; remaining keys follow in sorted order.
_type1_pre_eexec_order = [
    "FontInfo",
    "FontName",
    "Encoding",
    "PaintType",
    "FontType",
    "FontMatrix",
    "FontBBox",
    "UniqueID",
    "Metrics",
    "StrokeWidth",
]
|
| 90 |
+
|
| 91 |
+
# Conventional key order for a Type 1 FontInfo sub-dictionary.
# (Not referenced by the code visible in this module chunk.)
_type1_fontinfo_order = [
    "version",
    "Notice",
    "FullName",
    "FamilyName",
    "Weight",
    "ItalicAngle",
    "isFixedPitch",
    "UnderlinePosition",
    "UnderlineThickness",
]
|
| 102 |
+
|
| 103 |
+
# Keys written after the "currentfile eexec" line by ps_font.__str__.
_type1_post_eexec_order = ["Private", "CharStrings", "FID"]
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
def _type1_item_repr(key, value):
    """Render a single top-level Type 1 dict entry as PostScript source."""
    access = _accessstrings[value.access]
    if access:
        access = access + " "
    # CharStrings and Encoding have dedicated serializers; everything
    # else is a generic "/key value [access ]def" line.
    if key == "CharStrings":
        return "/%s %s def\n" % (key, _type1_CharString_repr(value.value))
    if key == "Encoding":
        return _type1_Encoding_repr(value, access)
    return "/%s %s %sdef\n" % (str(key), str(value), access)
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
def _type1_Encoding_repr(encoding, access):
|
| 124 |
+
encoding = encoding.value
|
| 125 |
+
psstring = "/Encoding 256 array\n0 1 255 {1 index exch /.notdef put} for\n"
|
| 126 |
+
for i in range(256):
|
| 127 |
+
name = encoding[i].value
|
| 128 |
+
if name != ".notdef":
|
| 129 |
+
psstring = psstring + "dup %d /%s put\n" % (i, name)
|
| 130 |
+
return psstring + access + "def\n"
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
def _type1_CharString_repr(charstrings):
|
| 134 |
+
items = sorted(charstrings.items())
|
| 135 |
+
return "xxx"
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
class ps_font(ps_object):
    """A Type 1 font dictionary.

    __str__ renders the font as PostScript source: the keys listed in
    _type1_pre_eexec_order first, then all remaining keys in sorted
    order, then a "currentfile eexec" section containing the keys in
    _type1_post_eexec_order, followed by the standard trailer of 512
    zeros and "cleartomark".
    """

    def __str__(self):
        psstring = "%d dict dup begin\n" % len(self.value)
        for key in _type1_pre_eexec_order:
            try:
                value = self.value[key]
            except KeyError:
                pass
            else:
                psstring = psstring + _type1_item_repr(key, value)
        items = sorted(self.value.items())
        # Fix: build the exclusion set once.  The original evaluated
        # `_type1_pre_eexec_order + _type1_post_eexec_order` (a fresh list
        # concatenation plus a linear scan) on every loop iteration.
        ordered_keys = set(_type1_pre_eexec_order).union(_type1_post_eexec_order)
        for key, value in items:
            if key not in ordered_keys:
                psstring = psstring + _type1_item_repr(key, value)
        psstring = psstring + "currentdict end\ncurrentfile eexec\ndup "
        for key in _type1_post_eexec_order:
            try:
                value = self.value[key]
            except KeyError:
                pass
            else:
                psstring = psstring + _type1_item_repr(key, value)
        return (
            psstring
            + "dup/FontName get exch definefont pop\nmark currentfile closefile\n"
            + 8 * (64 * "0" + "\n")
            + "cleartomark"
            + "\n"
        )

    def __repr__(self):
        return "<font>"
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
class ps_file(ps_object):
    # Wraps the interpreter's tokenizer/file object (see
    # PSOperators.ps_currentfile); adds no behavior of its own.
    pass
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
class ps_dict(ps_object):
    """A PostScript dictionary wrapping a plain Python dict."""

    def __str__(self):
        # "N dict dup begin" header, one "/key value [access ]def" line
        # per entry in sorted key order, then "end ".
        pieces = ["%d dict dup begin\n" % len(self.value)]
        for key, value in sorted(self.value.items()):
            access = _accessstrings[value.access]
            if access:
                access = access + " "
            pieces.append("/%s %s %sdef\n" % (str(key), str(value), access))
        pieces.append("end ")
        return "".join(pieces)

    def __repr__(self):
        return "<dict>"
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
class ps_mark(ps_object):
    """A stack mark (see PSOperators.ps_cleartomark)."""

    def __init__(self):
        # A mark has no real payload; delegate to the base initializer,
        # which also derives self.type ("marktype") from the class name.
        super().__init__("mark")
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
class ps_procmark(ps_object):
    """The mark pushed when a procedure body is opened."""

    def __init__(self):
        # Delegate to the base initializer, which also derives
        # self.type ("procmarktype") from the class name.
        super().__init__("procmark")
|
| 201 |
+
|
| 202 |
+
|
| 203 |
+
class ps_null(ps_object):
    # A null object has no payload: ``value`` stays the class-level
    # default (None); only the type name is set.
    def __init__(self):
        self.type = self.__class__.__name__[3:] + "type"
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
class ps_boolean(ps_object):
    """A PostScript boolean; prints as the keyword true/false."""

    def __str__(self):
        return "true" if self.value else "false"
|
| 214 |
+
|
| 215 |
+
|
| 216 |
+
class ps_string(ps_object):
    """A PostScript string; prints parenthesized."""

    def __str__(self):
        # repr() of the payload minus the surrounding quotes, so special
        # characters come out escaped, wrapped in parentheses.
        body = repr(self.value)[1:-1]
        return "(" + body + ")"
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
class ps_integer(ps_object):
    """A PostScript integer."""

    def __str__(self):
        # str() and repr() coincide for ints.
        return str(self.value)
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
class ps_real(ps_object):
    """A PostScript real (float)."""

    def __str__(self):
        # str() and repr() coincide for floats on Python 3.
        return str(self.value)
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
class PSOperators(object):
    """Mixin implementing PostScript operators.

    Each ``ps_*`` method implements the PostScript operator of the same
    name (minus the prefix).  The methods rely on the host interpreter
    (a subclass) to provide: the operand stack (``self.stack``,
    ``self.push``, ``self.pop`` -- where ``pop`` accepts expected type
    names and presumably raises on mismatch), the dictionary stack
    (``self.dictstack``), name resolution (``self.resolve_name``), the
    current tokenizer (``self.tokenizer``), the mark singleton
    (``self.mark``), and execution hooks (``self.call_procedure``,
    ``self.handle_object``).  None of those are defined here.
    """

    def ps_def(self):
        # def: store key/value in the current (top) dictionary.
        obj = self.pop()
        name = self.pop()
        self.dictstack[-1][name.value] = obj

    def ps_bind(self):
        # bind: replace executable names inside a procedure by the
        # operators they resolve to, then push the procedure back.
        proc = self.pop("proceduretype")
        self.proc_bind(proc)
        self.push(proc)

    def proc_bind(self, proc):
        # Recursive helper for ps_bind; mutates proc.value in place.
        for i in range(len(proc.value)):
            item = proc.value[i]
            if item.type == "proceduretype":
                self.proc_bind(item)
            else:
                if not item.literal:
                    try:
                        obj = self.resolve_name(item.value)
                    except:  # noqa: E722 -- unresolvable names stay as-is
                        pass
                    else:
                        if obj.type == "operatortype":
                            proc.value[i] = obj

    def ps_exch(self):
        # exch: swap the top two operands.
        if len(self.stack) < 2:
            raise RuntimeError("stack underflow")
        obj1 = self.pop()
        obj2 = self.pop()
        self.push(obj1)
        self.push(obj2)

    def ps_dup(self):
        # dup: duplicate the top operand (by reference, not a copy).
        if not self.stack:
            raise RuntimeError("stack underflow")
        self.push(self.stack[-1])

    def ps_exec(self):
        # exec: execute the top operand.
        obj = self.pop()
        if obj.type == "proceduretype":
            self.call_procedure(obj)
        else:
            self.handle_object(obj)

    def ps_count(self):
        # count: push the current operand-stack depth.
        self.push(ps_integer(len(self.stack)))

    def ps_eq(self):
        # eq: compare the *values* of the two top operands.
        any1 = self.pop()
        any2 = self.pop()
        self.push(ps_boolean(any1.value == any2.value))

    def ps_ne(self):
        # ne: complement of eq.
        any1 = self.pop()
        any2 = self.pop()
        self.push(ps_boolean(any1.value != any2.value))

    def ps_cvx(self):
        # cvx: mark the top operand executable (clears the literal flag).
        obj = self.pop()
        obj.literal = 0
        self.push(obj)

    def ps_matrix(self):
        # matrix: push a new identity transformation matrix.
        matrix = [
            ps_real(1.0),
            ps_integer(0),
            ps_integer(0),
            ps_real(1.0),
            ps_integer(0),
            ps_integer(0),
        ]
        self.push(ps_array(matrix))

    def ps_string(self):
        # string: push a new string of the requested length, NUL-filled.
        num = self.pop("integertype").value
        self.push(ps_string("\0" * num))

    def ps_type(self):
        # type: push the operand's type name.  NOTE(review): pushed as a
        # ps_string; PostScript's `type` returns a name object -- confirm.
        obj = self.pop()
        self.push(ps_string(obj.type))

    def ps_store(self):
        # store: update the topmost existing definition of the key.
        # NOTE(review): after updating an existing entry this *also*
        # writes the value into the current dict; PostScript's `store`
        # only defines in the current dict when the key was not found.
        # Confirm whether the unconditional final write is intentional.
        value = self.pop()
        key = self.pop()
        name = key.value
        for i in range(len(self.dictstack) - 1, -1, -1):
            if name in self.dictstack[i]:
                self.dictstack[i][name] = value
                break
        self.dictstack[-1][name] = value

    def ps_where(self):
        # where: stubbed out -- always reports "not found".
        name = self.pop()
        # XXX
        self.push(ps_boolean(0))

    def ps_systemdict(self):
        # systemdict: bottom of the dictionary stack.
        self.push(ps_dict(self.dictstack[0]))

    def ps_userdict(self):
        # userdict: second dictionary on the stack.
        self.push(ps_dict(self.dictstack[1]))

    def ps_currentdict(self):
        # currentdict: top of the dictionary stack.
        self.push(ps_dict(self.dictstack[-1]))

    def ps_currentfile(self):
        # currentfile: push the tokenizer wrapped as a file object.
        self.push(ps_file(self.tokenizer))

    def ps_eexec(self):
        # eexec: switch the input file into eexec-decryption mode.
        f = self.pop("filetype").value
        f.starteexec()

    def ps_closefile(self):
        f = self.pop("filetype").value
        f.skipwhite()
        f.stopeexec()

    def ps_cleartomark(self):
        # cleartomark: pop operands up to and including the mark.
        obj = self.pop()
        while obj != self.mark:
            obj = self.pop()

    def ps_readstring(self, ps_boolean=ps_boolean, len=len):
        # readstring: fill the string operand from the file.  The default
        # arguments bind globals as fast locals (old speed idiom).
        s = self.pop("stringtype")
        oldstr = s.value
        f = self.pop("filetype")
        # pad = file.value.read(1)
        # for StringIO, this is faster
        f.value.pos = f.value.pos + 1
        newstr = f.value.read(len(oldstr))
        s.value = newstr
        self.push(s)
        # Success flag: true if a full-length read was possible.
        self.push(ps_boolean(len(oldstr) == len(newstr)))

    def ps_known(self):
        # known: whether the dict (or font dict) defines the key.
        key = self.pop()
        d = self.pop("dicttype", "fonttype")
        self.push(ps_boolean(key.value in d.value))

    def ps_if(self):
        proc = self.pop("proceduretype")
        if self.pop("booleantype").value:
            self.call_procedure(proc)

    def ps_ifelse(self):
        proc2 = self.pop("proceduretype")
        proc1 = self.pop("proceduretype")
        if self.pop("booleantype").value:
            self.call_procedure(proc1)
        else:
            self.call_procedure(proc2)

    def ps_readonly(self):
        # readonly: raise the access level to at least 1 (never lowers).
        obj = self.pop()
        if obj.access < 1:
            obj.access = 1
        self.push(obj)

    def ps_executeonly(self):
        # executeonly: raise the access level to at least 2.
        obj = self.pop()
        if obj.access < 2:
            obj.access = 2
        self.push(obj)

    def ps_noaccess(self):
        # noaccess: raise the access level to at least 3.
        obj = self.pop()
        if obj.access < 3:
            obj.access = 3
        self.push(obj)

    def ps_not(self):
        # not: logical not for booleans, bitwise complement for ints.
        obj = self.pop("booleantype", "integertype")
        if obj.type == "booleantype":
            self.push(ps_boolean(not obj.value))
        else:
            self.push(ps_integer(~obj.value))

    def ps_print(self):
        str = self.pop("stringtype")
        print("PS output --->", str.value)

    def ps_anchorsearch(self):
        # anchorsearch: match `seek` against the start of the string;
        # on success push (remainder, match, true), else (string, false).
        seek = self.pop("stringtype")
        s = self.pop("stringtype")
        seeklen = len(seek.value)
        if s.value[:seeklen] == seek.value:
            self.push(ps_string(s.value[seeklen:]))
            self.push(seek)
            self.push(ps_boolean(1))
        else:
            self.push(s)
            self.push(ps_boolean(0))

    def ps_array(self):
        # array: push a new array of the requested length (None-filled).
        num = self.pop("integertype")
        array = ps_array([None] * num.value)
        self.push(array)

    def ps_astore(self):
        # astore: fill the array from the stack; topmost becomes last.
        array = self.pop("arraytype")
        for i in range(len(array.value) - 1, -1, -1):
            array.value[i] = self.pop()
        self.push(array)

    def ps_load(self):
        # load: push the value the name resolves to.
        name = self.pop()
        self.push(self.resolve_name(name.value))

    def ps_put(self):
        # put: store obj1 at key/index obj2 in container obj3.
        obj1 = self.pop()
        obj2 = self.pop()
        obj3 = self.pop("arraytype", "dicttype", "stringtype", "proceduretype")
        tp = obj3.type
        if tp == "arraytype" or tp == "proceduretype":
            obj3.value[obj2.value] = obj1
        elif tp == "dicttype":
            obj3.value[obj2.value] = obj1
        elif tp == "stringtype":
            # Python strings are immutable: rebuild with one char replaced.
            index = obj2.value
            obj3.value = obj3.value[:index] + chr(obj1.value) + obj3.value[index + 1 :]

    def ps_get(self):
        # get: fetch the element at key/index obj1 from container obj2.
        obj1 = self.pop()
        if obj1.value == "Encoding":
            # Leftover debugging hook; deliberately a no-op.
            pass
        obj2 = self.pop(
            "arraytype", "dicttype", "stringtype", "proceduretype", "fonttype"
        )
        tp = obj2.type
        if tp in ("arraytype", "proceduretype"):
            self.push(obj2.value[obj1.value])
        elif tp in ("dicttype", "fonttype"):
            self.push(obj2.value[obj1.value])
        elif tp == "stringtype":
            # String element comes back as its integer character code.
            self.push(ps_integer(ord(obj2.value[obj1.value])))
        else:
            assert False, "shouldn't get here"

    def ps_getinterval(self):
        # getinterval: push a sub-array/substring copy.
        obj1 = self.pop("integertype")  # count
        obj2 = self.pop("integertype")  # start index
        obj3 = self.pop("arraytype", "stringtype")
        tp = obj3.type
        if tp == "arraytype":
            self.push(ps_array(obj3.value[obj2.value : obj2.value + obj1.value]))
        elif tp == "stringtype":
            self.push(ps_string(obj3.value[obj2.value : obj2.value + obj1.value]))

    def ps_putinterval(self):
        # putinterval: copy obj1's contents into obj3 starting at obj2.
        obj1 = self.pop("arraytype", "stringtype")  # source
        obj2 = self.pop("integertype")  # start index
        obj3 = self.pop("arraytype", "stringtype")  # destination
        tp = obj3.type
        if tp == "arraytype":
            obj3.value[obj2.value : obj2.value + len(obj1.value)] = obj1.value
        elif tp == "stringtype":
            # Immutable string: splice a new one together.
            newstr = obj3.value[: obj2.value]
            newstr = newstr + obj1.value
            newstr = newstr + obj3.value[obj2.value + len(obj1.value) :]
            obj3.value = newstr

    def ps_cvn(self):
        # cvn: convert a string to a name.
        self.push(ps_name(self.pop("stringtype").value))

    def ps_index(self):
        # index: push a copy of the n-th element down the stack.
        n = self.pop("integertype").value
        if n < 0:
            raise RuntimeError("index may not be negative")
        self.push(self.stack[-1 - n])

    def ps_for(self):
        # for: counted loop; pushes the control variable before each call.
        proc = self.pop("proceduretype")
        limit = self.pop("integertype", "realtype").value
        increment = self.pop("integertype", "realtype").value
        i = self.pop("integertype", "realtype").value
        while 1:
            if increment > 0:
                if i > limit:
                    break
            else:
                if i < limit:
                    break
            # Preserve the numeric flavour of the control variable.
            if type(i) == type(0.0):
                self.push(ps_real(i))
            else:
                self.push(ps_integer(i))
            self.call_procedure(proc)
            i = i + increment

    def ps_forall(self):
        # forall: run proc for each element (array), char code (string),
        # or key/value pair (dict).
        proc = self.pop("proceduretype")
        obj = self.pop("arraytype", "stringtype", "dicttype")
        tp = obj.type
        if tp == "arraytype":
            for item in obj.value:
                self.push(item)
                self.call_procedure(proc)
        elif tp == "stringtype":
            for item in obj.value:
                self.push(ps_integer(ord(item)))
                self.call_procedure(proc)
        elif tp == "dicttype":
            for key, value in obj.value.items():
                self.push(ps_name(key))
                self.push(value)
                self.call_procedure(proc)

    def ps_definefont(self):
        # definefont: wrap the dict as a font, register it in
        # FontDirectory, and push the font.
        font = self.pop("dicttype")
        name = self.pop()
        font = ps_font(font.value)
        self.dictstack[0]["FontDirectory"].value[name.value] = font
        self.push(font)

    def ps_findfont(self):
        # findfont: look the font up in FontDirectory and push it.
        name = self.pop()
        font = self.dictstack[0]["FontDirectory"].value[name.value]
        self.push(font)

    def ps_pop(self):
        self.pop()

    def ps_dict(self):
        # dict: the requested size is popped and ignored -- Python dicts
        # grow dynamically.
        self.pop("integertype")
        self.push(ps_dict({}))

    def ps_begin(self):
        # begin: push the dict onto the dictionary stack.
        self.dictstack.append(self.pop("dicttype").value)

    def ps_end(self):
        # end: pop the dictionary stack, but never below systemdict and
        # userdict (the bottom two entries).
        if len(self.dictstack) > 2:
            del self.dictstack[-1]
        else:
            raise RuntimeError("dictstack underflow")
|
| 567 |
+
|
| 568 |
+
|
| 569 |
+
notdef = ".notdef"

from fontTools.encodings.StandardEncoding import StandardEncoding

# Adobe StandardEncoding as 256 ps_name objects, ready for use as a
# PostScript Encoding array.
ps_StandardEncoding = list(map(ps_name, StandardEncoding))
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/sstruct.py
ADDED
|
@@ -0,0 +1,231 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""sstruct.py -- SuperStruct
|
| 2 |
+
|
| 3 |
+
Higher level layer on top of the struct module, enabling to
|
| 4 |
+
bind names to struct elements. The interface is similar to
|
| 5 |
+
struct, except the objects passed and returned are not tuples
|
| 6 |
+
(or argument lists), but dictionaries or instances.
|
| 7 |
+
|
| 8 |
+
Just like struct, we use fmt strings to describe a data
|
| 9 |
+
structure, except we use one line per element. Lines are
|
| 10 |
+
separated by newlines or semi-colons. Each line contains
|
| 11 |
+
either one of the special struct characters ('@', '=', '<',
|
| 12 |
+
'>' or '!') or a 'name:formatchar' combo (eg. 'myFloat:f').
|
| 13 |
+
Repetitions, like the struct module offers them are not useful
|
| 14 |
+
in this context, except for fixed length strings (eg. 'myInt:5h'
|
| 15 |
+
is not allowed but 'myString:5s' is). The 'x' fmt character
|
| 16 |
+
(pad byte) is treated as 'special', since it is by definition
|
| 17 |
+
anonymous. Extra whitespace is allowed everywhere.
|
| 18 |
+
|
| 19 |
+
The sstruct module offers one feature that the "normal" struct
|
| 20 |
+
module doesn't: support for fixed point numbers. These are spelled
|
| 21 |
+
as "n.mF", where n is the number of bits before the point, and m
|
| 22 |
+
the number of bits after the point. Fixed point numbers get
|
| 23 |
+
converted to floats.
|
| 24 |
+
|
| 25 |
+
pack(fmt, object):
|
| 26 |
+
'object' is either a dictionary or an instance (or actually
|
| 27 |
+
anything that has a __dict__ attribute). If it is a dictionary,
|
| 28 |
+
its keys are used for names. If it is an instance, its
|
| 29 |
+
attributes are used to grab struct elements from. Returns
|
| 30 |
+
a string containing the data.
|
| 31 |
+
|
| 32 |
+
unpack(fmt, data, object=None)
|
| 33 |
+
If 'object' is omitted (or None), a new dictionary will be
|
| 34 |
+
returned. If 'object' is a dictionary, it will be used to add
|
| 35 |
+
struct elements to. If it is an instance (or in fact anything
|
| 36 |
+
that has a __dict__ attribute), an attribute will be added for
|
| 37 |
+
each struct element. In the latter two cases, 'object' itself
|
| 38 |
+
is returned.
|
| 39 |
+
|
| 40 |
+
unpack2(fmt, data, object=None)
|
| 41 |
+
Convenience function. Same as unpack, except data may be longer
|
| 42 |
+
than needed. The returned value is a tuple: (object, leftoverdata).
|
| 43 |
+
|
| 44 |
+
calcsize(fmt)
|
| 45 |
+
like struct.calcsize(), but uses our own fmt strings:
|
| 46 |
+
it returns the size of the data in bytes.
|
| 47 |
+
"""
|
| 48 |
+
|
| 49 |
+
from fontTools.misc.fixedTools import fixedToFloat as fi2fl, floatToFixed as fl2fi
|
| 50 |
+
from fontTools.misc.textTools import tobytes, tostr
|
| 51 |
+
import struct
|
| 52 |
+
import re
|
| 53 |
+
|
| 54 |
+
__version__ = "1.2"
|
| 55 |
+
__copyright__ = "Copyright 1998, Just van Rossum <just@letterror.com>"
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
class Error(Exception):
    """Raised by getformat() for malformed sstruct fmt strings."""

    pass
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def pack(fmt, obj):
    """Pack the named values of *obj* into binary data according to *fmt*.

    *obj* is either a dict (keys are element names) or any object with a
    __dict__ (attributes are element names).  Fixed-point elements are
    converted from float; str values are encoded to bytes.  Returns the
    packed bytes.

    Raises ValueError (chained from the struct error) when a value does
    not fit its format element.
    """
    formatstring, names, fixes = getformat(fmt, keep_pad_byte=True)
    elements = []
    if not isinstance(obj, dict):
        obj = obj.__dict__
    # Fix: removed dead code -- the original computed a `string_index`
    # variant of formatstring and an enumerate() index, neither of which
    # was ever used.
    for name in names.keys():
        value = obj[name]
        if name in fixes:
            # fixed point conversion
            value = fl2fi(value, fixes[name])
        elif isinstance(value, str):
            value = tobytes(value)
        elements.append(value)
        # Check that the value fits its single element; struct's own
        # message for the combined pack below would not name the field.
        try:
            struct.pack(names[name], value)
        except Exception as e:
            raise ValueError(
                "Value %s does not fit in format %s for %s" % (value, names[name], name)
            ) from e
    data = struct.pack(*(formatstring,) + tuple(elements))
    return data
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
def unpack(fmt, data, obj=None):
    """Unpack binary *data* according to *fmt* into *obj*.

    If *obj* is None a new dict is returned; if it is a dict the elements
    are added to it; otherwise they are set as attributes.  In the latter
    two cases *obj* itself is returned.  Fixed-point elements are
    converted to float; bytes values are decoded to str when possible.
    """
    if obj is None:
        obj = {}
    data = tobytes(data)
    formatstring, names, fixes = getformat(fmt)
    if isinstance(obj, dict):
        d = obj
    else:
        d = obj.__dict__
    elements = struct.unpack(formatstring, data)
    # Fix: pair names with elements directly.  The original rebuilt
    # list(names.keys()) inside the loop on every iteration (quadratic).
    # `names` is a dict, so iteration order matches insertion order.
    for name, value in zip(names, elements):
        if name in fixes:
            # fixed point conversion
            value = fi2fl(value, fixes[name])
        elif isinstance(value, bytes):
            try:
                value = tostr(value)
            except UnicodeDecodeError:
                # Leave undecodable padding/binary fields as bytes.
                pass
        d[name] = value
    return obj
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def unpack2(fmt, data, obj=None):
    """Like unpack(), but *data* may be longer than needed.

    Returns a (object, leftoverdata) tuple.
    """
    needed = calcsize(fmt)
    head, tail = data[:needed], data[needed:]
    return unpack(fmt, head, obj), tail
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def calcsize(fmt):
    """Return the size in bytes of the data described by *fmt*."""
    # Only the struct-compatible format string is needed here.
    return struct.calcsize(getformat(fmt)[0])
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
# matches "name:formatchar" (whitespace is allowed)
_elementRE = re.compile(
    r"\s*"  # whitespace
    r"([A-Za-z_][A-Za-z_0-9]*)"  # name (python identifier)
    r"\s*:\s*"  # whitespace : whitespace
    r"([xcbB?hHiIlLqQfd]|"  # formatchar...
    r"[0-9]+[ps]|"  # ...formatchar...
    r"([0-9]+)\.([0-9]+)(F))"  # ...formatchar (fixed point "n.mF")
    r"\s*"  # whitespace
    r"(#.*)?$"  # [comment] + end of string
)

# matches the special struct fmt chars and 'x' (pad byte)
_extraRE = re.compile(r"\s*([x@=<>!])\s*(#.*)?$")

# matches an "empty" string, possibly containing whitespace and/or a comment
_emptyRE = re.compile(r"\s*(#.*)?$")

# total bit width of a fixed point element -> signed struct format char
_fixedpointmappings = {8: "b", 16: "h", 32: "l"}

# memo of getformat() results, keyed by the raw fmt string
_formatcache = {}
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
def getformat(fmt, keep_pad_byte=False):
    """Parse an sstruct *fmt* string into struct-module terms.

    Returns a (formatstring, names, fixes) triple:
      formatstring -- the equivalent struct.pack/unpack format string;
      names        -- dict mapping element name -> struct format char,
                      in declaration order (pad bytes 'x' are only kept
                      when *keep_pad_byte* is true);
      fixes        -- dict mapping fixed-point element names to their
                      number of fraction bits.

    Results are memoized in _formatcache.  Raises Error on syntax
    problems or when a special byte-order char is not first.
    """
    fmt = tostr(fmt, encoding="ascii")
    try:
        formatstring, names, fixes = _formatcache[fmt]
    except KeyError:
        # Cache miss: parse line by line ('\n' or ';' separated).
        lines = re.split("[\n;]", fmt)
        formatstring = ""
        names = {}
        fixes = {}
        for line in lines:
            if _emptyRE.match(line):
                continue
            m = _extraRE.match(line)
            if m:
                formatchar = m.group(1)
                # Byte-order/alignment chars are only legal up front;
                # a bare 'x' (pad byte) may appear anywhere.
                if formatchar != "x" and formatstring:
                    raise Error("a special fmt char must be first")
            else:
                m = _elementRE.match(line)
                if not m:
                    raise Error("syntax error in fmt: '%s'" % line)
                name = m.group(1)
                formatchar = m.group(2)
                if keep_pad_byte or formatchar != "x":
                    names[name] = formatchar
                if m.group(3):
                    # fixed point: "n.mF" -> signed int of n+m bits
                    before = int(m.group(3))
                    after = int(m.group(4))
                    bits = before + after
                    if bits not in [8, 16, 32]:
                        raise Error("fixed point must be 8, 16 or 32 bits long")
                    formatchar = _fixedpointmappings[bits]
                    names[name] = formatchar
                    assert m.group(5) == "F"
                    fixes[name] = after
            formatstring += formatchar
        _formatcache[fmt] = formatstring, names, fixes
    return formatstring, names, fixes
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
def _test():
    # Smoke test: round-trip a fmt exercising every feature (comments,
    # byte order, fixed point, pad byte, multiple statements per line).
    fmt = """
        # comments are allowed
        > # big endian (see documentation for struct)
        # empty lines are allowed:

        ashort: h
        along: l
        abyte: b # a byte
        achar: c
        astr: 5s
        afloat: f; adouble: d # multiple "statements" are allowed
        afixed: 16.16F
        abool: ?
        apad: x
    """

    print("size:", calcsize(fmt))

    class foo(object):
        pass

    i = foo()

    i.ashort = 0x7FFF
    i.along = 0x7FFFFFFF
    i.abyte = 0x7F
    i.achar = "a"
    i.astr = "12345"
    i.afloat = 0.5
    i.adouble = 0.5
    i.afixed = 1.5
    i.abool = True

    data = pack(fmt, i)
    print("data:", repr(data))
    print(unpack(fmt, data))
    i2 = foo()
    unpack(fmt, data, i2)
    print(vars(i2))
| 228 |
+
|
| 229 |
+
|
| 230 |
+
# Run the module's self-test when executed directly.
if __name__ == "__main__":
    _test()
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/testTools.py
ADDED
|
@@ -0,0 +1,229 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Helpers for writing unit tests."""
|
| 2 |
+
|
| 3 |
+
from collections.abc import Iterable
|
| 4 |
+
from io import BytesIO
|
| 5 |
+
import os
|
| 6 |
+
import re
|
| 7 |
+
import shutil
|
| 8 |
+
import sys
|
| 9 |
+
import tempfile
|
| 10 |
+
from unittest import TestCase as _TestCase
|
| 11 |
+
from fontTools.config import Config
|
| 12 |
+
from fontTools.misc.textTools import tobytes
|
| 13 |
+
from fontTools.misc.xmlWriter import XMLWriter
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def parseXML(xmlSnippet):
    """Parses a snippet of XML.

    Input can be either a single string (unicode or UTF-8 bytes), or a
    sequence of strings.

    The result is in the same format that would be returned by
    XMLReader, but the parser imposes no constraints on the root
    element so it can be called on small snippets of TTX files.
    """
    # To support snippets with multiple elements, we add a fake root.
    reader = TestXMLReader_()
    if isinstance(xmlSnippet, bytes):
        body = xmlSnippet
    elif isinstance(xmlSnippet, str):
        body = tobytes(xmlSnippet, "utf-8")
    elif isinstance(xmlSnippet, Iterable):
        body = b"".join(tobytes(s, "utf-8") for s in xmlSnippet)
    else:
        raise TypeError(
            "expected string or sequence of strings; found %r"
            % type(xmlSnippet).__name__
        )
    reader.parser.Parse(b"<root>" + body + b"</root>", 0)
    # Return the fake root's content, i.e. the parsed snippet itself.
    return reader.root[2]
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def parseXmlInto(font, parseInto, xmlSnippet):
|
| 46 |
+
parsed_xml = [e for e in parseXML(xmlSnippet.strip()) if not isinstance(e, str)]
|
| 47 |
+
for name, attrs, content in parsed_xml:
|
| 48 |
+
parseInto.fromXML(name, attrs, content, font)
|
| 49 |
+
parseInto.populateDefaults()
|
| 50 |
+
return parseInto
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class FakeFont:
|
| 54 |
+
def __init__(self, glyphs):
|
| 55 |
+
self.glyphOrder_ = glyphs
|
| 56 |
+
self.reverseGlyphOrderDict_ = {g: i for i, g in enumerate(glyphs)}
|
| 57 |
+
self.lazy = False
|
| 58 |
+
self.tables = {}
|
| 59 |
+
self.cfg = Config()
|
| 60 |
+
|
| 61 |
+
def __getitem__(self, tag):
|
| 62 |
+
return self.tables[tag]
|
| 63 |
+
|
| 64 |
+
def __setitem__(self, tag, table):
|
| 65 |
+
self.tables[tag] = table
|
| 66 |
+
|
| 67 |
+
def get(self, tag, default=None):
|
| 68 |
+
return self.tables.get(tag, default)
|
| 69 |
+
|
| 70 |
+
def getGlyphID(self, name):
|
| 71 |
+
return self.reverseGlyphOrderDict_[name]
|
| 72 |
+
|
| 73 |
+
def getGlyphIDMany(self, lst):
|
| 74 |
+
return [self.getGlyphID(gid) for gid in lst]
|
| 75 |
+
|
| 76 |
+
def getGlyphName(self, glyphID):
|
| 77 |
+
if glyphID < len(self.glyphOrder_):
|
| 78 |
+
return self.glyphOrder_[glyphID]
|
| 79 |
+
else:
|
| 80 |
+
return "glyph%.5d" % glyphID
|
| 81 |
+
|
| 82 |
+
def getGlyphNameMany(self, lst):
|
| 83 |
+
return [self.getGlyphName(gid) for gid in lst]
|
| 84 |
+
|
| 85 |
+
def getGlyphOrder(self):
|
| 86 |
+
return self.glyphOrder_
|
| 87 |
+
|
| 88 |
+
def getReverseGlyphMap(self):
|
| 89 |
+
return self.reverseGlyphOrderDict_
|
| 90 |
+
|
| 91 |
+
def getGlyphNames(self):
|
| 92 |
+
return sorted(self.getGlyphOrder())
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
class TestXMLReader_(object):
|
| 96 |
+
def __init__(self):
|
| 97 |
+
from xml.parsers.expat import ParserCreate
|
| 98 |
+
|
| 99 |
+
self.parser = ParserCreate()
|
| 100 |
+
self.parser.StartElementHandler = self.startElement_
|
| 101 |
+
self.parser.EndElementHandler = self.endElement_
|
| 102 |
+
self.parser.CharacterDataHandler = self.addCharacterData_
|
| 103 |
+
self.root = None
|
| 104 |
+
self.stack = []
|
| 105 |
+
|
| 106 |
+
def startElement_(self, name, attrs):
|
| 107 |
+
element = (name, attrs, [])
|
| 108 |
+
if self.stack:
|
| 109 |
+
self.stack[-1][2].append(element)
|
| 110 |
+
else:
|
| 111 |
+
self.root = element
|
| 112 |
+
self.stack.append(element)
|
| 113 |
+
|
| 114 |
+
def endElement_(self, name):
|
| 115 |
+
self.stack.pop()
|
| 116 |
+
|
| 117 |
+
def addCharacterData_(self, data):
|
| 118 |
+
self.stack[-1][2].append(data)
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def makeXMLWriter(newlinestr="\n"):
|
| 122 |
+
# don't write OS-specific new lines
|
| 123 |
+
writer = XMLWriter(BytesIO(), newlinestr=newlinestr)
|
| 124 |
+
# erase XML declaration
|
| 125 |
+
writer.file.seek(0)
|
| 126 |
+
writer.file.truncate()
|
| 127 |
+
return writer
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def getXML(func, ttFont=None):
|
| 131 |
+
"""Call the passed toXML function and return the written content as a
|
| 132 |
+
list of lines (unicode strings).
|
| 133 |
+
Result is stripped of XML declaration and OS-specific newline characters.
|
| 134 |
+
"""
|
| 135 |
+
writer = makeXMLWriter()
|
| 136 |
+
func(writer, ttFont)
|
| 137 |
+
xml = writer.file.getvalue().decode("utf-8")
|
| 138 |
+
# toXML methods must always end with a writer.newline()
|
| 139 |
+
assert xml.endswith("\n")
|
| 140 |
+
return xml.splitlines()
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
def stripVariableItemsFromTTX(
|
| 144 |
+
string: str,
|
| 145 |
+
ttLibVersion: bool = True,
|
| 146 |
+
checkSumAdjustment: bool = True,
|
| 147 |
+
modified: bool = True,
|
| 148 |
+
created: bool = True,
|
| 149 |
+
sfntVersion: bool = False, # opt-in only
|
| 150 |
+
) -> str:
|
| 151 |
+
"""Strip stuff like ttLibVersion, checksums, timestamps, etc. from TTX dumps."""
|
| 152 |
+
# ttlib changes with the fontTools version
|
| 153 |
+
if ttLibVersion:
|
| 154 |
+
string = re.sub(' ttLibVersion="[^"]+"', "", string)
|
| 155 |
+
# sometimes (e.g. some subsetter tests) we don't care whether it's OTF or TTF
|
| 156 |
+
if sfntVersion:
|
| 157 |
+
string = re.sub(' sfntVersion="[^"]+"', "", string)
|
| 158 |
+
# head table checksum and creation and mod date changes with each save.
|
| 159 |
+
if checkSumAdjustment:
|
| 160 |
+
string = re.sub('<checkSumAdjustment value="[^"]+"/>', "", string)
|
| 161 |
+
if modified:
|
| 162 |
+
string = re.sub('<modified value="[^"]+"/>', "", string)
|
| 163 |
+
if created:
|
| 164 |
+
string = re.sub('<created value="[^"]+"/>', "", string)
|
| 165 |
+
return string
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
class MockFont(object):
|
| 169 |
+
"""A font-like object that automatically adds any looked up glyphname
|
| 170 |
+
to its glyphOrder."""
|
| 171 |
+
|
| 172 |
+
def __init__(self):
|
| 173 |
+
self._glyphOrder = [".notdef"]
|
| 174 |
+
|
| 175 |
+
class AllocatingDict(dict):
|
| 176 |
+
def __missing__(reverseDict, key):
|
| 177 |
+
self._glyphOrder.append(key)
|
| 178 |
+
gid = len(reverseDict)
|
| 179 |
+
reverseDict[key] = gid
|
| 180 |
+
return gid
|
| 181 |
+
|
| 182 |
+
self._reverseGlyphOrder = AllocatingDict({".notdef": 0})
|
| 183 |
+
self.lazy = False
|
| 184 |
+
|
| 185 |
+
def getGlyphID(self, glyph):
|
| 186 |
+
gid = self._reverseGlyphOrder[glyph]
|
| 187 |
+
return gid
|
| 188 |
+
|
| 189 |
+
def getReverseGlyphMap(self):
|
| 190 |
+
return self._reverseGlyphOrder
|
| 191 |
+
|
| 192 |
+
def getGlyphName(self, gid):
|
| 193 |
+
return self._glyphOrder[gid]
|
| 194 |
+
|
| 195 |
+
def getGlyphOrder(self):
|
| 196 |
+
return self._glyphOrder
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
class TestCase(_TestCase):
|
| 200 |
+
def __init__(self, methodName):
|
| 201 |
+
_TestCase.__init__(self, methodName)
|
| 202 |
+
# Python 3 renamed assertRaisesRegexp to assertRaisesRegex,
|
| 203 |
+
# and fires deprecation warnings if a program uses the old name.
|
| 204 |
+
if not hasattr(self, "assertRaisesRegex"):
|
| 205 |
+
self.assertRaisesRegex = self.assertRaisesRegexp
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
class DataFilesHandler(TestCase):
|
| 209 |
+
def setUp(self):
|
| 210 |
+
self.tempdir = None
|
| 211 |
+
self.num_tempfiles = 0
|
| 212 |
+
|
| 213 |
+
def tearDown(self):
|
| 214 |
+
if self.tempdir:
|
| 215 |
+
shutil.rmtree(self.tempdir)
|
| 216 |
+
|
| 217 |
+
def getpath(self, testfile):
|
| 218 |
+
folder = os.path.dirname(sys.modules[self.__module__].__file__)
|
| 219 |
+
return os.path.join(folder, "data", testfile)
|
| 220 |
+
|
| 221 |
+
def temp_dir(self):
|
| 222 |
+
if not self.tempdir:
|
| 223 |
+
self.tempdir = tempfile.mkdtemp()
|
| 224 |
+
|
| 225 |
+
def temp_font(self, font_path, file_name):
|
| 226 |
+
self.temp_dir()
|
| 227 |
+
temppath = os.path.join(self.tempdir, file_name)
|
| 228 |
+
shutil.copy2(font_path, temppath)
|
| 229 |
+
return temppath
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/misc/transform.py
ADDED
|
@@ -0,0 +1,507 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Affine 2D transformation matrix class.
|
| 2 |
+
|
| 3 |
+
The Transform class implements various transformation matrix operations,
|
| 4 |
+
both on the matrix itself, as well as on 2D coordinates.
|
| 5 |
+
|
| 6 |
+
Transform instances are effectively immutable: all methods that operate on the
|
| 7 |
+
transformation itself always return a new instance. This has as the
|
| 8 |
+
interesting side effect that Transform instances are hashable, ie. they can be
|
| 9 |
+
used as dictionary keys.
|
| 10 |
+
|
| 11 |
+
This module exports the following symbols:
|
| 12 |
+
|
| 13 |
+
Transform
|
| 14 |
+
this is the main class
|
| 15 |
+
Identity
|
| 16 |
+
Transform instance set to the identity transformation
|
| 17 |
+
Offset
|
| 18 |
+
Convenience function that returns a translating transformation
|
| 19 |
+
Scale
|
| 20 |
+
Convenience function that returns a scaling transformation
|
| 21 |
+
|
| 22 |
+
The DecomposedTransform class implements a transformation with separate
|
| 23 |
+
translate, rotation, scale, skew, and transformation-center components.
|
| 24 |
+
|
| 25 |
+
:Example:
|
| 26 |
+
|
| 27 |
+
>>> t = Transform(2, 0, 0, 3, 0, 0)
|
| 28 |
+
>>> t.transformPoint((100, 100))
|
| 29 |
+
(200, 300)
|
| 30 |
+
>>> t = Scale(2, 3)
|
| 31 |
+
>>> t.transformPoint((100, 100))
|
| 32 |
+
(200, 300)
|
| 33 |
+
>>> t.transformPoint((0, 0))
|
| 34 |
+
(0, 0)
|
| 35 |
+
>>> t = Offset(2, 3)
|
| 36 |
+
>>> t.transformPoint((100, 100))
|
| 37 |
+
(102, 103)
|
| 38 |
+
>>> t.transformPoint((0, 0))
|
| 39 |
+
(2, 3)
|
| 40 |
+
>>> t2 = t.scale(0.5)
|
| 41 |
+
>>> t2.transformPoint((100, 100))
|
| 42 |
+
(52.0, 53.0)
|
| 43 |
+
>>> import math
|
| 44 |
+
>>> t3 = t2.rotate(math.pi / 2)
|
| 45 |
+
>>> t3.transformPoint((0, 0))
|
| 46 |
+
(2.0, 3.0)
|
| 47 |
+
>>> t3.transformPoint((100, 100))
|
| 48 |
+
(-48.0, 53.0)
|
| 49 |
+
>>> t = Identity.scale(0.5).translate(100, 200).skew(0.1, 0.2)
|
| 50 |
+
>>> t.transformPoints([(0, 0), (1, 1), (100, 100)])
|
| 51 |
+
[(50.0, 100.0), (50.550167336042726, 100.60135501775433), (105.01673360427253, 160.13550177543362)]
|
| 52 |
+
>>>
|
| 53 |
+
"""
|
| 54 |
+
|
| 55 |
+
import math
|
| 56 |
+
from typing import NamedTuple
|
| 57 |
+
from dataclasses import dataclass
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
__all__ = ["Transform", "Identity", "Offset", "Scale", "DecomposedTransform"]
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
_EPSILON = 1e-15
|
| 64 |
+
_ONE_EPSILON = 1 - _EPSILON
|
| 65 |
+
_MINUS_ONE_EPSILON = -1 + _EPSILON
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def _normSinCos(v):
|
| 69 |
+
if abs(v) < _EPSILON:
|
| 70 |
+
v = 0
|
| 71 |
+
elif v > _ONE_EPSILON:
|
| 72 |
+
v = 1
|
| 73 |
+
elif v < _MINUS_ONE_EPSILON:
|
| 74 |
+
v = -1
|
| 75 |
+
return v
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
class Transform(NamedTuple):
|
| 79 |
+
"""2x2 transformation matrix plus offset, a.k.a. Affine transform.
|
| 80 |
+
Transform instances are immutable: all transforming methods, eg.
|
| 81 |
+
rotate(), return a new Transform instance.
|
| 82 |
+
|
| 83 |
+
:Example:
|
| 84 |
+
|
| 85 |
+
>>> t = Transform()
|
| 86 |
+
>>> t
|
| 87 |
+
<Transform [1 0 0 1 0 0]>
|
| 88 |
+
>>> t.scale(2)
|
| 89 |
+
<Transform [2 0 0 2 0 0]>
|
| 90 |
+
>>> t.scale(2.5, 5.5)
|
| 91 |
+
<Transform [2.5 0 0 5.5 0 0]>
|
| 92 |
+
>>>
|
| 93 |
+
>>> t.scale(2, 3).transformPoint((100, 100))
|
| 94 |
+
(200, 300)
|
| 95 |
+
|
| 96 |
+
Transform's constructor takes six arguments, all of which are
|
| 97 |
+
optional, and can be used as keyword arguments::
|
| 98 |
+
|
| 99 |
+
>>> Transform(12)
|
| 100 |
+
<Transform [12 0 0 1 0 0]>
|
| 101 |
+
>>> Transform(dx=12)
|
| 102 |
+
<Transform [1 0 0 1 12 0]>
|
| 103 |
+
>>> Transform(yx=12)
|
| 104 |
+
<Transform [1 0 12 1 0 0]>
|
| 105 |
+
|
| 106 |
+
Transform instances also behave like sequences of length 6::
|
| 107 |
+
|
| 108 |
+
>>> len(Identity)
|
| 109 |
+
6
|
| 110 |
+
>>> list(Identity)
|
| 111 |
+
[1, 0, 0, 1, 0, 0]
|
| 112 |
+
>>> tuple(Identity)
|
| 113 |
+
(1, 0, 0, 1, 0, 0)
|
| 114 |
+
|
| 115 |
+
Transform instances are comparable::
|
| 116 |
+
|
| 117 |
+
>>> t1 = Identity.scale(2, 3).translate(4, 6)
|
| 118 |
+
>>> t2 = Identity.translate(8, 18).scale(2, 3)
|
| 119 |
+
>>> t1 == t2
|
| 120 |
+
1
|
| 121 |
+
|
| 122 |
+
But beware of floating point rounding errors::
|
| 123 |
+
|
| 124 |
+
>>> t1 = Identity.scale(0.2, 0.3).translate(0.4, 0.6)
|
| 125 |
+
>>> t2 = Identity.translate(0.08, 0.18).scale(0.2, 0.3)
|
| 126 |
+
>>> t1
|
| 127 |
+
<Transform [0.2 0 0 0.3 0.08 0.18]>
|
| 128 |
+
>>> t2
|
| 129 |
+
<Transform [0.2 0 0 0.3 0.08 0.18]>
|
| 130 |
+
>>> t1 == t2
|
| 131 |
+
0
|
| 132 |
+
|
| 133 |
+
Transform instances are hashable, meaning you can use them as
|
| 134 |
+
keys in dictionaries::
|
| 135 |
+
|
| 136 |
+
>>> d = {Scale(12, 13): None}
|
| 137 |
+
>>> d
|
| 138 |
+
{<Transform [12 0 0 13 0 0]>: None}
|
| 139 |
+
|
| 140 |
+
But again, beware of floating point rounding errors::
|
| 141 |
+
|
| 142 |
+
>>> t1 = Identity.scale(0.2, 0.3).translate(0.4, 0.6)
|
| 143 |
+
>>> t2 = Identity.translate(0.08, 0.18).scale(0.2, 0.3)
|
| 144 |
+
>>> t1
|
| 145 |
+
<Transform [0.2 0 0 0.3 0.08 0.18]>
|
| 146 |
+
>>> t2
|
| 147 |
+
<Transform [0.2 0 0 0.3 0.08 0.18]>
|
| 148 |
+
>>> d = {t1: None}
|
| 149 |
+
>>> d
|
| 150 |
+
{<Transform [0.2 0 0 0.3 0.08 0.18]>: None}
|
| 151 |
+
>>> d[t2]
|
| 152 |
+
Traceback (most recent call last):
|
| 153 |
+
File "<stdin>", line 1, in ?
|
| 154 |
+
KeyError: <Transform [0.2 0 0 0.3 0.08 0.18]>
|
| 155 |
+
"""
|
| 156 |
+
|
| 157 |
+
xx: float = 1
|
| 158 |
+
xy: float = 0
|
| 159 |
+
yx: float = 0
|
| 160 |
+
yy: float = 1
|
| 161 |
+
dx: float = 0
|
| 162 |
+
dy: float = 0
|
| 163 |
+
|
| 164 |
+
def transformPoint(self, p):
|
| 165 |
+
"""Transform a point.
|
| 166 |
+
|
| 167 |
+
:Example:
|
| 168 |
+
|
| 169 |
+
>>> t = Transform()
|
| 170 |
+
>>> t = t.scale(2.5, 5.5)
|
| 171 |
+
>>> t.transformPoint((100, 100))
|
| 172 |
+
(250.0, 550.0)
|
| 173 |
+
"""
|
| 174 |
+
(x, y) = p
|
| 175 |
+
xx, xy, yx, yy, dx, dy = self
|
| 176 |
+
return (xx * x + yx * y + dx, xy * x + yy * y + dy)
|
| 177 |
+
|
| 178 |
+
def transformPoints(self, points):
|
| 179 |
+
"""Transform a list of points.
|
| 180 |
+
|
| 181 |
+
:Example:
|
| 182 |
+
|
| 183 |
+
>>> t = Scale(2, 3)
|
| 184 |
+
>>> t.transformPoints([(0, 0), (0, 100), (100, 100), (100, 0)])
|
| 185 |
+
[(0, 0), (0, 300), (200, 300), (200, 0)]
|
| 186 |
+
>>>
|
| 187 |
+
"""
|
| 188 |
+
xx, xy, yx, yy, dx, dy = self
|
| 189 |
+
return [(xx * x + yx * y + dx, xy * x + yy * y + dy) for x, y in points]
|
| 190 |
+
|
| 191 |
+
def transformVector(self, v):
|
| 192 |
+
"""Transform an (dx, dy) vector, treating translation as zero.
|
| 193 |
+
|
| 194 |
+
:Example:
|
| 195 |
+
|
| 196 |
+
>>> t = Transform(2, 0, 0, 2, 10, 20)
|
| 197 |
+
>>> t.transformVector((3, -4))
|
| 198 |
+
(6, -8)
|
| 199 |
+
>>>
|
| 200 |
+
"""
|
| 201 |
+
(dx, dy) = v
|
| 202 |
+
xx, xy, yx, yy = self[:4]
|
| 203 |
+
return (xx * dx + yx * dy, xy * dx + yy * dy)
|
| 204 |
+
|
| 205 |
+
def transformVectors(self, vectors):
|
| 206 |
+
"""Transform a list of (dx, dy) vector, treating translation as zero.
|
| 207 |
+
|
| 208 |
+
:Example:
|
| 209 |
+
>>> t = Transform(2, 0, 0, 2, 10, 20)
|
| 210 |
+
>>> t.transformVectors([(3, -4), (5, -6)])
|
| 211 |
+
[(6, -8), (10, -12)]
|
| 212 |
+
>>>
|
| 213 |
+
"""
|
| 214 |
+
xx, xy, yx, yy = self[:4]
|
| 215 |
+
return [(xx * dx + yx * dy, xy * dx + yy * dy) for dx, dy in vectors]
|
| 216 |
+
|
| 217 |
+
def translate(self, x=0, y=0):
|
| 218 |
+
"""Return a new transformation, translated (offset) by x, y.
|
| 219 |
+
|
| 220 |
+
:Example:
|
| 221 |
+
>>> t = Transform()
|
| 222 |
+
>>> t.translate(20, 30)
|
| 223 |
+
<Transform [1 0 0 1 20 30]>
|
| 224 |
+
>>>
|
| 225 |
+
"""
|
| 226 |
+
return self.transform((1, 0, 0, 1, x, y))
|
| 227 |
+
|
| 228 |
+
def scale(self, x=1, y=None):
|
| 229 |
+
"""Return a new transformation, scaled by x, y. The 'y' argument
|
| 230 |
+
may be None, which implies to use the x value for y as well.
|
| 231 |
+
|
| 232 |
+
:Example:
|
| 233 |
+
>>> t = Transform()
|
| 234 |
+
>>> t.scale(5)
|
| 235 |
+
<Transform [5 0 0 5 0 0]>
|
| 236 |
+
>>> t.scale(5, 6)
|
| 237 |
+
<Transform [5 0 0 6 0 0]>
|
| 238 |
+
>>>
|
| 239 |
+
"""
|
| 240 |
+
if y is None:
|
| 241 |
+
y = x
|
| 242 |
+
return self.transform((x, 0, 0, y, 0, 0))
|
| 243 |
+
|
| 244 |
+
def rotate(self, angle):
|
| 245 |
+
"""Return a new transformation, rotated by 'angle' (radians).
|
| 246 |
+
|
| 247 |
+
:Example:
|
| 248 |
+
>>> import math
|
| 249 |
+
>>> t = Transform()
|
| 250 |
+
>>> t.rotate(math.pi / 2)
|
| 251 |
+
<Transform [0 1 -1 0 0 0]>
|
| 252 |
+
>>>
|
| 253 |
+
"""
|
| 254 |
+
import math
|
| 255 |
+
|
| 256 |
+
c = _normSinCos(math.cos(angle))
|
| 257 |
+
s = _normSinCos(math.sin(angle))
|
| 258 |
+
return self.transform((c, s, -s, c, 0, 0))
|
| 259 |
+
|
| 260 |
+
def skew(self, x=0, y=0):
|
| 261 |
+
"""Return a new transformation, skewed by x and y.
|
| 262 |
+
|
| 263 |
+
:Example:
|
| 264 |
+
>>> import math
|
| 265 |
+
>>> t = Transform()
|
| 266 |
+
>>> t.skew(math.pi / 4)
|
| 267 |
+
<Transform [1 0 1 1 0 0]>
|
| 268 |
+
>>>
|
| 269 |
+
"""
|
| 270 |
+
import math
|
| 271 |
+
|
| 272 |
+
return self.transform((1, math.tan(y), math.tan(x), 1, 0, 0))
|
| 273 |
+
|
| 274 |
+
def transform(self, other):
|
| 275 |
+
"""Return a new transformation, transformed by another
|
| 276 |
+
transformation.
|
| 277 |
+
|
| 278 |
+
:Example:
|
| 279 |
+
>>> t = Transform(2, 0, 0, 3, 1, 6)
|
| 280 |
+
>>> t.transform((4, 3, 2, 1, 5, 6))
|
| 281 |
+
<Transform [8 9 4 3 11 24]>
|
| 282 |
+
>>>
|
| 283 |
+
"""
|
| 284 |
+
xx1, xy1, yx1, yy1, dx1, dy1 = other
|
| 285 |
+
xx2, xy2, yx2, yy2, dx2, dy2 = self
|
| 286 |
+
return self.__class__(
|
| 287 |
+
xx1 * xx2 + xy1 * yx2,
|
| 288 |
+
xx1 * xy2 + xy1 * yy2,
|
| 289 |
+
yx1 * xx2 + yy1 * yx2,
|
| 290 |
+
yx1 * xy2 + yy1 * yy2,
|
| 291 |
+
xx2 * dx1 + yx2 * dy1 + dx2,
|
| 292 |
+
xy2 * dx1 + yy2 * dy1 + dy2,
|
| 293 |
+
)
|
| 294 |
+
|
| 295 |
+
def reverseTransform(self, other):
|
| 296 |
+
"""Return a new transformation, which is the other transformation
|
| 297 |
+
transformed by self. self.reverseTransform(other) is equivalent to
|
| 298 |
+
other.transform(self).
|
| 299 |
+
|
| 300 |
+
:Example:
|
| 301 |
+
>>> t = Transform(2, 0, 0, 3, 1, 6)
|
| 302 |
+
>>> t.reverseTransform((4, 3, 2, 1, 5, 6))
|
| 303 |
+
<Transform [8 6 6 3 21 15]>
|
| 304 |
+
>>> Transform(4, 3, 2, 1, 5, 6).transform((2, 0, 0, 3, 1, 6))
|
| 305 |
+
<Transform [8 6 6 3 21 15]>
|
| 306 |
+
>>>
|
| 307 |
+
"""
|
| 308 |
+
xx1, xy1, yx1, yy1, dx1, dy1 = self
|
| 309 |
+
xx2, xy2, yx2, yy2, dx2, dy2 = other
|
| 310 |
+
return self.__class__(
|
| 311 |
+
xx1 * xx2 + xy1 * yx2,
|
| 312 |
+
xx1 * xy2 + xy1 * yy2,
|
| 313 |
+
yx1 * xx2 + yy1 * yx2,
|
| 314 |
+
yx1 * xy2 + yy1 * yy2,
|
| 315 |
+
xx2 * dx1 + yx2 * dy1 + dx2,
|
| 316 |
+
xy2 * dx1 + yy2 * dy1 + dy2,
|
| 317 |
+
)
|
| 318 |
+
|
| 319 |
+
def inverse(self):
|
| 320 |
+
"""Return the inverse transformation.
|
| 321 |
+
|
| 322 |
+
:Example:
|
| 323 |
+
>>> t = Identity.translate(2, 3).scale(4, 5)
|
| 324 |
+
>>> t.transformPoint((10, 20))
|
| 325 |
+
(42, 103)
|
| 326 |
+
>>> it = t.inverse()
|
| 327 |
+
>>> it.transformPoint((42, 103))
|
| 328 |
+
(10.0, 20.0)
|
| 329 |
+
>>>
|
| 330 |
+
"""
|
| 331 |
+
if self == Identity:
|
| 332 |
+
return self
|
| 333 |
+
xx, xy, yx, yy, dx, dy = self
|
| 334 |
+
det = xx * yy - yx * xy
|
| 335 |
+
xx, xy, yx, yy = yy / det, -xy / det, -yx / det, xx / det
|
| 336 |
+
dx, dy = -xx * dx - yx * dy, -xy * dx - yy * dy
|
| 337 |
+
return self.__class__(xx, xy, yx, yy, dx, dy)
|
| 338 |
+
|
| 339 |
+
def toPS(self):
|
| 340 |
+
"""Return a PostScript representation
|
| 341 |
+
|
| 342 |
+
:Example:
|
| 343 |
+
|
| 344 |
+
>>> t = Identity.scale(2, 3).translate(4, 5)
|
| 345 |
+
>>> t.toPS()
|
| 346 |
+
'[2 0 0 3 8 15]'
|
| 347 |
+
>>>
|
| 348 |
+
"""
|
| 349 |
+
return "[%s %s %s %s %s %s]" % self
|
| 350 |
+
|
| 351 |
+
def toDecomposed(self) -> "DecomposedTransform":
|
| 352 |
+
"""Decompose into a DecomposedTransform."""
|
| 353 |
+
return DecomposedTransform.fromTransform(self)
|
| 354 |
+
|
| 355 |
+
def __bool__(self):
|
| 356 |
+
"""Returns True if transform is not identity, False otherwise.
|
| 357 |
+
|
| 358 |
+
:Example:
|
| 359 |
+
|
| 360 |
+
>>> bool(Identity)
|
| 361 |
+
False
|
| 362 |
+
>>> bool(Transform())
|
| 363 |
+
False
|
| 364 |
+
>>> bool(Scale(1.))
|
| 365 |
+
False
|
| 366 |
+
>>> bool(Scale(2))
|
| 367 |
+
True
|
| 368 |
+
>>> bool(Offset())
|
| 369 |
+
False
|
| 370 |
+
>>> bool(Offset(0))
|
| 371 |
+
False
|
| 372 |
+
>>> bool(Offset(2))
|
| 373 |
+
True
|
| 374 |
+
"""
|
| 375 |
+
return self != Identity
|
| 376 |
+
|
| 377 |
+
def __repr__(self):
|
| 378 |
+
return "<%s [%g %g %g %g %g %g]>" % ((self.__class__.__name__,) + self)
|
| 379 |
+
|
| 380 |
+
|
| 381 |
+
Identity = Transform()
|
| 382 |
+
|
| 383 |
+
|
| 384 |
+
def Offset(x=0, y=0):
|
| 385 |
+
"""Return the identity transformation offset by x, y.
|
| 386 |
+
|
| 387 |
+
:Example:
|
| 388 |
+
>>> Offset(2, 3)
|
| 389 |
+
<Transform [1 0 0 1 2 3]>
|
| 390 |
+
>>>
|
| 391 |
+
"""
|
| 392 |
+
return Transform(1, 0, 0, 1, x, y)
|
| 393 |
+
|
| 394 |
+
|
| 395 |
+
def Scale(x, y=None):
|
| 396 |
+
"""Return the identity transformation scaled by x, y. The 'y' argument
|
| 397 |
+
may be None, which implies to use the x value for y as well.
|
| 398 |
+
|
| 399 |
+
:Example:
|
| 400 |
+
>>> Scale(2, 3)
|
| 401 |
+
<Transform [2 0 0 3 0 0]>
|
| 402 |
+
>>>
|
| 403 |
+
"""
|
| 404 |
+
if y is None:
|
| 405 |
+
y = x
|
| 406 |
+
return Transform(x, 0, 0, y, 0, 0)
|
| 407 |
+
|
| 408 |
+
|
| 409 |
+
@dataclass
|
| 410 |
+
class DecomposedTransform:
|
| 411 |
+
"""The DecomposedTransform class implements a transformation with separate
|
| 412 |
+
translate, rotation, scale, skew, and transformation-center components.
|
| 413 |
+
"""
|
| 414 |
+
|
| 415 |
+
translateX: float = 0
|
| 416 |
+
translateY: float = 0
|
| 417 |
+
rotation: float = 0 # in degrees, counter-clockwise
|
| 418 |
+
scaleX: float = 1
|
| 419 |
+
scaleY: float = 1
|
| 420 |
+
skewX: float = 0 # in degrees, clockwise
|
| 421 |
+
skewY: float = 0 # in degrees, counter-clockwise
|
| 422 |
+
tCenterX: float = 0
|
| 423 |
+
tCenterY: float = 0
|
| 424 |
+
|
| 425 |
+
def __bool__(self):
|
| 426 |
+
return (
|
| 427 |
+
self.translateX != 0
|
| 428 |
+
or self.translateY != 0
|
| 429 |
+
or self.rotation != 0
|
| 430 |
+
or self.scaleX != 1
|
| 431 |
+
or self.scaleY != 1
|
| 432 |
+
or self.skewX != 0
|
| 433 |
+
or self.skewY != 0
|
| 434 |
+
or self.tCenterX != 0
|
| 435 |
+
or self.tCenterY != 0
|
| 436 |
+
)
|
| 437 |
+
|
| 438 |
+
@classmethod
|
| 439 |
+
def fromTransform(self, transform):
|
| 440 |
+
# Adapted from an answer on
|
| 441 |
+
# https://math.stackexchange.com/questions/13150/extracting-rotation-scale-values-from-2d-transformation-matrix
|
| 442 |
+
a, b, c, d, x, y = transform
|
| 443 |
+
|
| 444 |
+
sx = math.copysign(1, a)
|
| 445 |
+
if sx < 0:
|
| 446 |
+
a *= sx
|
| 447 |
+
b *= sx
|
| 448 |
+
|
| 449 |
+
delta = a * d - b * c
|
| 450 |
+
|
| 451 |
+
rotation = 0
|
| 452 |
+
scaleX = scaleY = 0
|
| 453 |
+
skewX = skewY = 0
|
| 454 |
+
|
| 455 |
+
# Apply the QR-like decomposition.
|
| 456 |
+
if a != 0 or b != 0:
|
| 457 |
+
r = math.sqrt(a * a + b * b)
|
| 458 |
+
rotation = math.acos(a / r) if b >= 0 else -math.acos(a / r)
|
| 459 |
+
scaleX, scaleY = (r, delta / r)
|
| 460 |
+
skewX, skewY = (math.atan((a * c + b * d) / (r * r)), 0)
|
| 461 |
+
elif c != 0 or d != 0:
|
| 462 |
+
s = math.sqrt(c * c + d * d)
|
| 463 |
+
rotation = math.pi / 2 - (
|
| 464 |
+
math.acos(-c / s) if d >= 0 else -math.acos(c / s)
|
| 465 |
+
)
|
| 466 |
+
scaleX, scaleY = (delta / s, s)
|
| 467 |
+
skewX, skewY = (0, math.atan((a * c + b * d) / (s * s)))
|
| 468 |
+
else:
|
| 469 |
+
# a = b = c = d = 0
|
| 470 |
+
pass
|
| 471 |
+
|
| 472 |
+
return DecomposedTransform(
|
| 473 |
+
x,
|
| 474 |
+
y,
|
| 475 |
+
math.degrees(rotation),
|
| 476 |
+
scaleX * sx,
|
| 477 |
+
scaleY,
|
| 478 |
+
math.degrees(skewX) * sx,
|
| 479 |
+
math.degrees(skewY),
|
| 480 |
+
0,
|
| 481 |
+
0,
|
| 482 |
+
)
|
| 483 |
+
|
| 484 |
+
def toTransform(self):
|
| 485 |
+
"""Return the Transform() equivalent of this transformation.
|
| 486 |
+
|
| 487 |
+
:Example:
|
| 488 |
+
>>> DecomposedTransform(scaleX=2, scaleY=2).toTransform()
|
| 489 |
+
<Transform [2 0 0 2 0 0]>
|
| 490 |
+
>>>
|
| 491 |
+
"""
|
| 492 |
+
t = Transform()
|
| 493 |
+
t = t.translate(
|
| 494 |
+
self.translateX + self.tCenterX, self.translateY + self.tCenterY
|
| 495 |
+
)
|
| 496 |
+
t = t.rotate(math.radians(self.rotation))
|
| 497 |
+
t = t.scale(self.scaleX, self.scaleY)
|
| 498 |
+
t = t.skew(math.radians(self.skewX), math.radians(self.skewY))
|
| 499 |
+
t = t.translate(-self.tCenterX, -self.tCenterY)
|
| 500 |
+
return t
|
| 501 |
+
|
| 502 |
+
|
| 503 |
+
if __name__ == "__main__":
|
| 504 |
+
import sys
|
| 505 |
+
import doctest
|
| 506 |
+
|
| 507 |
+
sys.exit(doctest.testmod().failed)
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/mtiLib/__init__.py
ADDED
|
@@ -0,0 +1,1402 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/python
|
| 2 |
+
|
| 3 |
+
# FontDame-to-FontTools for OpenType Layout tables
|
| 4 |
+
#
|
| 5 |
+
# Source language spec is available at:
|
| 6 |
+
# http://monotype.github.io/OpenType_Table_Source/otl_source.html
|
| 7 |
+
# https://github.com/Monotype/OpenType_Table_Source/
|
| 8 |
+
|
| 9 |
+
from fontTools import ttLib
|
| 10 |
+
from fontTools.ttLib.tables._c_m_a_p import cmap_classes
|
| 11 |
+
from fontTools.ttLib.tables import otTables as ot
|
| 12 |
+
from fontTools.ttLib.tables.otBase import ValueRecord, valueRecordFormatDict
|
| 13 |
+
from fontTools.otlLib import builder as otl
|
| 14 |
+
from contextlib import contextmanager
|
| 15 |
+
from fontTools.ttLib import newTable
|
| 16 |
+
from fontTools.feaLib.lookupDebugInfo import LOOKUP_DEBUG_ENV_VAR, LOOKUP_DEBUG_INFO_KEY
|
| 17 |
+
from operator import setitem
|
| 18 |
+
import os
|
| 19 |
+
import logging
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class MtiLibError(Exception):
    """Base exception for the mtiLib FontDame-source parser."""
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class ReferenceNotFoundError(MtiLibError):
    """A symbolic reference could not be resolved to an index."""
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class FeatureNotFoundError(ReferenceNotFoundError):
    """A feature reference could not be resolved."""
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class LookupNotFoundError(ReferenceNotFoundError):
    """A lookup reference could not be resolved."""
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
log = logging.getLogger("fontTools.mtiLib")
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def makeGlyph(s):
    """Convert a FontDame glyph reference to a glyph name.

    'U xxxx'/'u xxxx' is a Unicode codepoint in hex, '# n' a glyph ID in
    decimal; anything else is taken as a literal glyph name (which must be
    non-empty and contain no spaces).
    """
    prefix = s[:2]
    if prefix in ("U ", "u "):
        return ttLib.TTFont._makeGlyphName(int(s[2:], 16))
    if prefix == "# ":
        return "glyph%.5d" % int(s[2:])
    assert s.find(" ") < 0, "Space found in glyph name: %s" % s
    assert s, "Glyph name is empty"
    return s
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def makeGlyphs(l):
    """Convert a sequence of FontDame glyph references to glyph names."""
    return list(map(makeGlyph, l))
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def mapLookup(sym, mapping):
    """Resolve a lookup reference to an index.

    Lookups are addressed by name, resolved through ``mapping`` when one is
    provided; without a map the symbol is parsed as a plain integer index.
    Raises LookupNotFoundError when the name is absent from the map.
    """
    if mapping is None:
        return int(sym)
    try:
        return mapping[sym]
    except KeyError:
        raise LookupNotFoundError(sym)
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def mapFeature(sym, mapping):
    """Resolve a feature reference to an index.

    Per the source-language spec features are referenced by index, so an
    integer symbol is used directly; otherwise it is looked up in
    ``mapping``. Raises FeatureNotFoundError when the name is unknown.
    """
    try:
        resolved = int(sym)
    except ValueError:
        # Not numeric: fall back to the name map.
        try:
            resolved = mapping[sym]
        except KeyError:
            raise FeatureNotFoundError(sym)
    return resolved
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def setReference(mapper, mapping, sym, setter, collection, key):
    """Resolve ``sym`` with ``mapper`` and store it via ``setter``.

    If resolution fails and ``mapping`` supports deferred mappings (i.e. has
    an ``addDeferredMapping`` method), the store is postponed until
    ``applyDeferredMappings`` runs; otherwise the resolution error
    propagates.
    """
    try:
        resolved = mapper(sym, mapping)
    except ReferenceNotFoundError as err:
        try:
            if mapping is not None:
                mapping.addDeferredMapping(
                    lambda ref: setter(collection, key, ref), sym, err
                )
                return
        except AttributeError:
            # Plain dict mapping: cannot defer, fall through and re-raise.
            pass
        raise
    setter(collection, key, resolved)
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
class DeferredMapping(dict):
    """A name->index map that can queue unresolved references.

    Stores (setter, symbol, original-error) triples via addDeferredMapping;
    applyDeferredMappings replays them once the map has been populated,
    re-raising the recorded error for symbols still missing.
    """

    def __init__(self):
        self._deferredMappings = []

    def addDeferredMapping(self, setter, sym, e):
        log.debug("Adding deferred mapping for symbol '%s' %s", sym, type(e).__name__)
        self._deferredMappings.append((setter, sym, e))

    def applyDeferredMappings(self):
        for callback, sym, err in self._deferredMappings:
            log.debug(
                "Applying deferred mapping for symbol '%s' %s", sym, type(err).__name__
            )
            try:
                resolved = self[sym]
            except KeyError:
                # Still unresolved: surface the error recorded at defer time.
                raise err
            callback(resolved)
            log.debug("Set to %s", resolved)
        self._deferredMappings = []
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def parseScriptList(lines, featureMap=None):
    """Parse a FontDame 'script table' into an ot.ScriptList.

    Each line carries (scriptTag, langSysTag, defaultFeature, features);
    short lines are padded with empty fields. Feature references are
    resolved through ``featureMap`` via setReference, which may defer
    resolution when the map supports it.
    """
    self = ot.ScriptList()
    records = []
    with lines.between("script table"):
        for line in lines:
            # Pad so the 4-way unpack below always succeeds.
            while len(line) < 4:
                line.append("")
            scriptTag, langSysTag, defaultFeature, features = line
            log.debug("Adding script %s language-system %s", scriptTag, langSysTag)

            langSys = ot.LangSys()
            langSys.LookupOrder = None
            if defaultFeature:
                setReference(
                    mapFeature,
                    featureMap,
                    defaultFeature,
                    setattr,
                    langSys,
                    "ReqFeatureIndex",
                )
            else:
                # 0xFFFF marks "no required feature".
                langSys.ReqFeatureIndex = 0xFFFF
            syms = stripSplitComma(features)
            # Placeholder values; every slot is overwritten by setReference
            # below, either immediately or via a deferred mapping.
            langSys.FeatureIndex = theList = [3] * len(syms)
            for i, sym in enumerate(syms):
                setReference(mapFeature, featureMap, sym, setitem, theList, i)
            langSys.FeatureCount = len(langSys.FeatureIndex)

            # Reuse the ScriptRecord for this tag if one already exists.
            script = [s for s in records if s.ScriptTag == scriptTag]
            if script:
                script = script[0].Script
            else:
                scriptRec = ot.ScriptRecord()
                # Tags are space-padded to 4 characters.
                scriptRec.ScriptTag = scriptTag + " " * (4 - len(scriptTag))
                scriptRec.Script = ot.Script()
                records.append(scriptRec)
                script = scriptRec.Script
                script.DefaultLangSys = None
                script.LangSysRecord = []
                script.LangSysCount = 0

            if langSysTag == "default":
                script.DefaultLangSys = langSys
            else:
                langSysRec = ot.LangSysRecord()
                langSysRec.LangSysTag = langSysTag + " " * (4 - len(langSysTag))
                langSysRec.LangSys = langSys
                script.LangSysRecord.append(langSysRec)
                script.LangSysCount = len(script.LangSysRecord)

    # Keep records ordered by tag.
    for script in records:
        script.Script.LangSysRecord = sorted(
            script.Script.LangSysRecord, key=lambda rec: rec.LangSysTag
        )
    self.ScriptRecord = sorted(records, key=lambda rec: rec.ScriptTag)
    self.ScriptCount = len(self.ScriptRecord)
    return self
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
def parseFeatureList(lines, lookupMap=None, featureMap=None):
    """Parse a FontDame 'feature table' into an ot.FeatureList.

    Each line carries (name, featureTag, comma-separated lookup refs).
    When ``featureMap`` is given, feature names are recorded in it as
    name -> feature index so later references can resolve them. Lookup
    references resolve through ``lookupMap`` via setReference, which may
    defer resolution.
    """
    self = ot.FeatureList()
    self.FeatureRecord = []
    with lines.between("feature table"):
        for line in lines:
            name, featureTag, lookups = line
            if featureMap is not None:
                assert name not in featureMap, "Duplicate feature name: %s" % name
                featureMap[name] = len(self.FeatureRecord)
            # If the feature name is an integer, it must match its index.
            # BUGFIX: the message used "%d" for `name`, which is a str here,
            # so a failing assertion raised TypeError instead of
            # AssertionError; use "%s" for the name.
            try:
                assert int(name) == len(self.FeatureRecord), "%s %d" % (
                    name,
                    len(self.FeatureRecord),
                )
            except ValueError:
                pass
            featureRec = ot.FeatureRecord()
            featureRec.FeatureTag = featureTag
            featureRec.Feature = ot.Feature()
            self.FeatureRecord.append(featureRec)
            feature = featureRec.Feature
            feature.FeatureParams = None
            syms = stripSplitComma(lookups)
            # Slots filled by setReference (immediately or deferred).
            feature.LookupListIndex = theList = [None] * len(syms)
            for i, sym in enumerate(syms):
                setReference(mapLookup, lookupMap, sym, setitem, theList, i)
            feature.LookupCount = len(feature.LookupListIndex)

    self.FeatureCount = len(self.FeatureRecord)
    return self
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
def parseLookupFlags(lines):
    """Consume leading lookup-flag lines; return (flags, markFilteringSet).

    Recognized (case-insensitive) keywords are the four simple boolean
    flags plus 'markattachmenttype' (value stored in the high byte) and
    'markfiltertype' (sets UseMarkFilteringSet, 0x10, and returns the set
    index). markFilteringSet is None when no filter type line was seen.
    """
    SIMPLE_FLAGS = {
        "righttoleft": 0x0001,
        "ignorebaseglyphs": 0x0002,
        "ignoreligatures": 0x0004,
        "ignoremarks": 0x0008,
    }
    KNOWN = [
        "righttoleft",
        "ignorebaseglyphs",
        "ignoreligatures",
        "ignoremarks",
        "markattachmenttype",
        "markfiltertype",
    ]
    flags = 0
    filterset = None
    while lines.peeks()[0].lower() in KNOWN:
        line = next(lines)
        keyword = line[0].lower()
        bit = SIMPLE_FLAGS.get(keyword)
        if bit:
            assert line[1].lower() in ["yes", "no"], line[1]
            if line[1].lower() == "yes":
                flags |= bit
            continue
        if keyword == "markattachmenttype":
            flags |= int(line[1]) << 8
            continue
        if keyword == "markfiltertype":
            flags |= 0x10
            filterset = int(line[1])
    return flags, filterset
|
| 243 |
+
|
| 244 |
+
|
| 245 |
+
def parseSingleSubst(lines, font, _lookupMap=None):
    """Parse a single-substitution subtable (one source, one target glyph)."""
    mapping = {}
    for line in lines:
        assert len(line) == 2, line
        src, dst = makeGlyphs(line)
        mapping[src] = dst
    return otl.buildSingleSubstSubtable(mapping)
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
def parseMultiple(lines, font, _lookupMap=None):
    """Parse a multiple-substitution subtable (one glyph to a sequence)."""
    substitutions = {}
    for line in lines:
        glyphs = makeGlyphs(line)
        substitutions[glyphs[0]] = glyphs[1:]
    return otl.buildMultipleSubstSubtable(substitutions)
|
| 260 |
+
|
| 261 |
+
|
| 262 |
+
def parseAlternate(lines, font, _lookupMap=None):
    """Parse an alternate-substitution subtable (one glyph to alternates)."""
    alternates = {}
    for line in lines:
        glyphs = makeGlyphs(line)
        alternates[glyphs[0]] = glyphs[1:]
    return otl.buildAlternateSubstSubtable(alternates)
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
def parseLigature(lines, font, _lookupMap=None):
    """Parse a ligature-substitution subtable (component tuple to ligature)."""
    ligatures = {}
    for line in lines:
        assert len(line) >= 2, line
        glyphs = makeGlyphs(line)
        # First field is the resulting ligature, the rest its components.
        ligatures[tuple(glyphs[1:])] = glyphs[0]
    return otl.buildLigatureSubstSubtable(ligatures)
|
| 277 |
+
|
| 278 |
+
|
| 279 |
+
def parseSinglePos(lines, font, _lookupMap=None):
    """Parse a single-positioning subtable.

    Each line is (value-record member, glyph, amount); multiple lines may
    contribute different members to the same glyph's ValueRecord, but a
    member may be set only once per glyph.
    """
    values = {}
    for line in lines:
        assert len(line) == 3, line
        # e.g. "x advance" -> "XAdvance".
        member = line[0].title().replace(" ", "")
        assert member in valueRecordFormatDict
        glyph = makeGlyph(line[1])
        amount = int(line[2])
        if glyph not in values:
            values[glyph] = ValueRecord()
        record = values[glyph]
        assert not hasattr(record, member), (glyph, member)
        setattr(record, member, amount)
    return otl.buildSinglePosSubtable(values, font.getReverseGlyphMap())
|
| 292 |
+
|
| 293 |
+
|
| 294 |
+
def parsePair(lines, font, _lookupMap=None):
    """Parse a FontDame pair-positioning subtable into an ot.PairPos.

    Produces Format 1 (explicit glyph pairs) when the first field starts
    with 'left'/'right', or Format 2 (class-based) when it ends in 'class'.
    """
    self = ot.PairPos()
    self.ValueFormat1 = self.ValueFormat2 = 0
    typ = lines.peeks()[0].split()[0].lower()
    if typ in ("left", "right"):
        # Format 1: explicit glyph pairs.
        self.Format = 1
        values = {}
        for line in lines:
            assert len(line) == 4, line
            side = line[0].split()[0].lower()
            assert side in ("left", "right"), side
            # Remainder of the first field names the value-record member,
            # e.g. "left x advance" -> "XAdvance".
            what = line[0][len(side) :].title().replace(" ", "")
            mask = valueRecordFormatDict[what][0]
            glyph1, glyph2 = makeGlyphs(line[1:3])
            value = int(line[3])
            if not glyph1 in values:
                values[glyph1] = {}
            if not glyph2 in values[glyph1]:
                # (left value record, right value record)
                values[glyph1][glyph2] = (ValueRecord(), ValueRecord())
            rec2 = values[glyph1][glyph2]
            if side == "left":
                self.ValueFormat1 |= mask
                vr = rec2[0]
            else:
                self.ValueFormat2 |= mask
                vr = rec2[1]
            assert not hasattr(vr, what), (vr, what)
            setattr(vr, what, value)
        self.Coverage = makeCoverage(set(values.keys()), font)
        self.PairSet = []
        for glyph1 in self.Coverage.glyphs:
            values1 = values[glyph1]
            pairset = ot.PairSet()
            records = pairset.PairValueRecord = []
            # Second glyphs are ordered by glyph ID.
            for glyph2 in sorted(values1.keys(), key=font.getGlyphID):
                values2 = values1[glyph2]
                pair = ot.PairValueRecord()
                pair.SecondGlyph = glyph2
                pair.Value1 = values2[0]
                # Omit Value2 entirely when no right-side member was seen.
                pair.Value2 = values2[1] if self.ValueFormat2 else None
                records.append(pair)
            pairset.PairValueCount = len(pairset.PairValueRecord)
            self.PairSet.append(pairset)
        self.PairSetCount = len(self.PairSet)
    elif typ.endswith("class"):
        # Format 2: class-based pairs, preceded by two class definitions.
        self.Format = 2
        classDefs = [None, None]
        while lines.peeks()[0].endswith("class definition begin"):
            typ = lines.peek()[0][: -len("class definition begin")].lower()
            idx, klass = {
                "first": (0, ot.ClassDef1),
                "second": (1, ot.ClassDef2),
            }[typ]
            assert classDefs[idx] is None
            classDefs[idx] = parseClassDef(lines, font, klass=klass)
        self.ClassDef1, self.ClassDef2 = classDefs
        self.Class1Count, self.Class2Count = (
            1 + max(c.classDefs.values()) for c in classDefs
        )
        self.Class1Record = [ot.Class1Record() for i in range(self.Class1Count)]
        for rec1 in self.Class1Record:
            rec1.Class2Record = [ot.Class2Record() for j in range(self.Class2Count)]
            for rec2 in rec1.Class2Record:
                rec2.Value1 = ValueRecord()
                rec2.Value2 = ValueRecord()
        for line in lines:
            assert len(line) == 4, line
            side = line[0].split()[0].lower()
            assert side in ("left", "right"), side
            what = line[0][len(side) :].title().replace(" ", "")
            mask = valueRecordFormatDict[what][0]
            class1, class2, value = (int(x) for x in line[1:4])
            rec2 = self.Class1Record[class1].Class2Record[class2]
            if side == "left":
                self.ValueFormat1 |= mask
                vr = rec2.Value1
            else:
                self.ValueFormat2 |= mask
                vr = rec2.Value2
            assert not hasattr(vr, what), (vr, what)
            setattr(vr, what, value)
        # Re-wrap value records with the final formats; drop Value2 when no
        # right-side members were ever seen.
        for rec1 in self.Class1Record:
            for rec2 in rec1.Class2Record:
                rec2.Value1 = ValueRecord(self.ValueFormat1, rec2.Value1)
                rec2.Value2 = (
                    ValueRecord(self.ValueFormat2, rec2.Value2)
                    if self.ValueFormat2
                    else None
                )

        self.Coverage = makeCoverage(set(self.ClassDef1.classDefs.keys()), font)
    else:
        assert 0, typ
    return self
|
| 388 |
+
|
| 389 |
+
|
| 390 |
+
def parseKernset(lines, font, _lookupMap=None):
    """Parse a kernset as a pair-positioning subtable.

    When the data starts with per-glyph 'left'/'right' lines, parsing is
    bounded at the first class-definition header so a following class-based
    section is not consumed.
    """
    firstWord = lines.peeks()[0].split()[0].lower()
    if firstWord in ("left", "right"):
        stopMarkers = ("firstclass definition begin", "secondclass definition begin")
        with lines.until(stopMarkers):
            return parsePair(lines, font)
    return parsePair(lines, font)
|
| 398 |
+
|
| 399 |
+
|
| 400 |
+
def makeAnchor(data, klass=ot.Anchor):
    """Build an anchor table from ('x,y'[, contour-point]) fields.

    A second, non-empty field upgrades the anchor to Format 2 with that
    field as the AnchorPoint (contour-point index).
    """
    assert len(data) <= 2
    anchor = klass()
    anchor.Format = 1
    anchor.XCoordinate, anchor.YCoordinate = intSplitComma(data[0])
    if len(data) > 1 and data[1] != "":
        anchor.Format = 2
        anchor.AnchorPoint = int(data[1])
    return anchor
|
| 409 |
+
|
| 410 |
+
|
| 411 |
+
def parseCursive(lines, font, _lookupMap=None):
    """Parse a cursive-attachment subtable.

    Each line is ('entry'|'exit', glyph, 'x,y'[, contour-point]); a glyph
    may define each of its entry/exit anchors at most once.
    """
    # Slot index and anchor class per keyword; hoisted out of the loop.
    slots = {
        "entry": (0, ot.EntryAnchor),
        "exit": (1, ot.ExitAnchor),
    }
    attachments = {}
    for line in lines:
        assert len(line) in [3, 4], line
        slot, anchorClass = slots[line[0]]
        glyph = makeGlyph(line[1])
        if glyph not in attachments:
            attachments[glyph] = [None, None]
        assert attachments[glyph][slot] is None, (glyph, slot)
        attachments[glyph][slot] = makeAnchor(line[2:], anchorClass)
    return otl.buildCursivePosSubtable(attachments, font.getReverseGlyphMap())
|
| 425 |
+
|
| 426 |
+
|
| 427 |
+
def makeMarkRecords(data, coverage, c):
    """Build mark records, in coverage order, from {glyph: (class, anchor)}.

    ``c`` supplies the record class (c.MarkRecordClass) and the anchor
    attribute name (c.MarkAnchor) for the lookup type being built.
    """
    out = []
    for glyphName in coverage.glyphs:
        markClass, anchor = data[glyphName]
        rec = c.MarkRecordClass()
        rec.Class = markClass
        setattr(rec, c.MarkAnchor, anchor)
        out.append(rec)
    return out
|
| 436 |
+
|
| 437 |
+
|
| 438 |
+
def makeBaseRecords(data, coverage, c, classCount):
    """Build base records, in coverage order, from {(glyph, class): anchor}.

    Every record gets one anchor slot per mark class; a (glyph, class) pair
    may be assigned at most once.
    """
    records = []
    slotOf = {}
    for glyphName in coverage.glyphs:
        slotOf[glyphName] = len(records)
        rec = c.BaseRecordClass()
        setattr(rec, c.BaseAnchor, [None] * classCount)
        records.append(rec)
    for (glyphName, klass), anchor in data.items():
        anchors = getattr(records[slotOf[glyphName]], c.BaseAnchor)
        assert anchors[klass] is None, (glyphName, klass)
        anchors[klass] = anchor
    return records
|
| 453 |
+
|
| 454 |
+
|
| 455 |
+
def makeLigatureRecords(data, coverage, c, classCount):
    """Build LigatureAttach records from {(glyph, class, compIdx, compCount): anchor}.

    One LigatureAttach per covered glyph, created lazily on first use; all
    entries for a glyph must agree on its component count, and each
    (component, class) anchor slot may be filled at most once.
    """
    records = [None] * len(coverage.glyphs)
    slotOf = {g: i for i, g in enumerate(coverage.glyphs)}

    for (glyph, klass, compIdx, compCount), anchor in data.items():
        attach = records[slotOf[glyph]]
        if attach is None:
            attach = records[slotOf[glyph]] = ot.LigatureAttach()
            attach.ComponentCount = compCount
            attach.ComponentRecord = [ot.ComponentRecord() for _ in range(compCount)]
            for comp in attach.ComponentRecord:
                comp.LigatureAnchor = [None] * classCount
        assert attach.ComponentCount == compCount, (
            glyph,
            attach.ComponentCount,
            compCount,
        )

        # Component indices in the source are 1-based.
        anchors = attach.ComponentRecord[compIdx - 1].LigatureAnchor
        assert anchors[klass] is None, (glyph, compIdx, klass)
        anchors[klass] = anchor
    return records
|
| 477 |
+
|
| 478 |
+
|
| 479 |
+
def parseMarkToSomething(lines, font, c):
    """Parse a mark-to-base/mark/ligature attachment subtable.

    ``c`` is a MarkHelper instance supplying the subtable type and all
    attribute/class names for the specific lookup type. Each line is
    ('mark'|'base'|'ligature', glyph, [component-index fields for
    ligatures,] class, anchor...).
    """
    self = c.Type()
    self.Format = 1
    markData = {}
    baseData = {}
    # 'base' and 'ligature' lines share one accumulation dict.
    Data = {
        "mark": (markData, c.MarkAnchorClass),
        "base": (baseData, c.BaseAnchorClass),
        "ligature": (baseData, c.BaseAnchorClass),
    }
    maxKlass = 0
    for line in lines:
        typ = line[0]
        assert typ in ("mark", "base", "ligature")
        glyph = makeGlyph(line[1])
        data, anchorClass = Data[typ]
        # Ligature lines carry two extra fields (component index and count).
        extraItems = 2 if typ == "ligature" else 0
        extras = tuple(int(i) for i in line[2 : 2 + extraItems])
        klass = int(line[2 + extraItems])
        anchor = makeAnchor(line[3 + extraItems :], anchorClass)
        if typ == "mark":
            key, value = glyph, (klass, anchor)
        else:
            key, value = ((glyph, klass) + extras), anchor
        assert key not in data, key
        data[key] = value
        maxKlass = max(maxKlass, klass)

    # Mark
    markCoverage = makeCoverage(set(markData.keys()), font, c.MarkCoverageClass)
    markArray = c.MarkArrayClass()
    markRecords = makeMarkRecords(markData, markCoverage, c)
    setattr(markArray, c.MarkRecord, markRecords)
    setattr(markArray, c.MarkCount, len(markRecords))
    setattr(self, c.MarkCoverage, markCoverage)
    setattr(self, c.MarkArray, markArray)
    self.ClassCount = maxKlass + 1

    # Base
    # NOTE(review): lowercase `classCount` (computed from base-side data)
    # is distinct from the `ClassCount` table field set above — confirm
    # this difference is intentional before renaming either.
    self.classCount = 0 if not baseData else 1 + max(k[1] for k, v in baseData.items())
    baseCoverage = makeCoverage(
        set([k[0] for k in baseData.keys()]), font, c.BaseCoverageClass
    )
    baseArray = c.BaseArrayClass()
    if c.Base == "Ligature":
        baseRecords = makeLigatureRecords(baseData, baseCoverage, c, self.classCount)
    else:
        baseRecords = makeBaseRecords(baseData, baseCoverage, c, self.classCount)
    setattr(baseArray, c.BaseRecord, baseRecords)
    setattr(baseArray, c.BaseCount, len(baseRecords))
    setattr(self, c.BaseCoverage, baseCoverage)
    setattr(self, c.BaseArray, baseArray)

    return self
|
| 533 |
+
|
| 534 |
+
|
| 535 |
+
class MarkHelper(object):
    """Precompute the ot attribute/class names used by parseMarkToSomething.

    Subclasses define ``Mark``, ``Base`` and ``Type``; __init__ derives,
    for each side, the names of the Coverage/Array/Count/Record/Anchor
    fields and (except for counts) the corresponding otTables classes,
    stored under "<key>Class".
    """

    def __init__(self):
        for Which in ("Mark", "Base"):
            for What in ("Coverage", "Array", "Count", "Record", "Anchor"):
                key = Which + What
                if Which == "Mark" and What in ("Count", "Record", "Anchor"):
                    # Mark-side count/record/anchor names are unprefixed.
                    value = key
                else:
                    value = getattr(self, Which) + What
                if value == "LigatureRecord":
                    # otTables names the ligature record "LigatureAttach".
                    value = "LigatureAttach"
                setattr(self, key, value)
                if What != "Count":
                    klass = getattr(ot, value)
                    setattr(self, key + "Class", klass)
|
| 550 |
+
|
| 551 |
+
|
| 552 |
+
class MarkToBaseHelper(MarkHelper):
    """Name configuration for parsing MarkBasePos subtables."""

    Mark = "Mark"
    Base = "Base"
    Type = ot.MarkBasePos
|
| 556 |
+
|
| 557 |
+
|
| 558 |
+
class MarkToMarkHelper(MarkHelper):
    """Name configuration for parsing MarkMarkPos subtables."""

    Mark = "Mark1"
    Base = "Mark2"
    Type = ot.MarkMarkPos
|
| 562 |
+
|
| 563 |
+
|
| 564 |
+
class MarkToLigatureHelper(MarkHelper):
    """Name configuration for parsing MarkLigPos subtables."""

    Mark = "Mark"
    Base = "Ligature"
    Type = ot.MarkLigPos
|
| 568 |
+
|
| 569 |
+
|
| 570 |
+
def parseMarkToBase(lines, font, _lookupMap=None):
    """Parse a mark-to-base attachment subtable."""
    helper = MarkToBaseHelper()
    return parseMarkToSomething(lines, font, helper)
|
| 572 |
+
|
| 573 |
+
|
| 574 |
+
def parseMarkToMark(lines, font, _lookupMap=None):
    """Parse a mark-to-mark attachment subtable."""
    helper = MarkToMarkHelper()
    return parseMarkToSomething(lines, font, helper)
|
| 576 |
+
|
| 577 |
+
|
| 578 |
+
def parseMarkToLigature(lines, font, _lookupMap=None):
    """Parse a mark-to-ligature attachment subtable."""
    helper = MarkToLigatureHelper()
    return parseMarkToSomething(lines, font, helper)
|
| 580 |
+
|
| 581 |
+
|
| 582 |
+
def stripSplitComma(line):
    """Split a comma-separated field into stripped tokens; "" yields []."""
    if not line:
        return []
    return [token.strip() for token in line.split(",")]
|
| 584 |
+
|
| 585 |
+
|
| 586 |
+
def intSplitComma(line):
    """Split a comma-separated field into ints; "" yields []."""
    if not line:
        return []
    return [int(token) for token in line.split(",")]
|
| 588 |
+
|
| 589 |
+
|
| 590 |
+
# Copied from fontTools.subset
|
| 591 |
+
# Copied from fontTools.subset
class ContextHelper(object):
    """Accessor bundle for (Chain)Context Subst/Pos subtables of a format.

    Given the subtable class name and format (1, 2 or 3), precomputes the
    attribute names and getter/setter callables needed to read and write
    that subtable's coverage, context data and rules generically.
    """

    def __init__(self, klassName, Format):
        # Sub vs Pos naming of the lookup-record and rule attributes.
        if klassName.endswith("Subst"):
            Typ = "Sub"
            Type = "Subst"
        else:
            Typ = "Pos"
            Type = "Pos"
        # Chain variants carry backtrack/input/lookahead triples.
        if klassName.startswith("Chain"):
            Chain = "Chain"
            InputIdx = 1
            DataLen = 3
        else:
            Chain = ""
            InputIdx = 0
            DataLen = 1
        ChainTyp = Chain + Typ

        self.Typ = Typ
        self.Type = Type
        self.Chain = Chain
        self.ChainTyp = ChainTyp
        self.InputIdx = InputIdx
        self.DataLen = DataLen

        self.LookupRecord = Type + "LookupRecord"

        if Format == 1:
            # Glyph-based contexts.
            Coverage = lambda r: r.Coverage
            ChainCoverage = lambda r: r.Coverage
            ContextData = lambda r: (None,)
            ChainContextData = lambda r: (None, None, None)
            SetContextData = None
            SetChainContextData = None
            RuleData = lambda r: (r.Input,)
            ChainRuleData = lambda r: (r.Backtrack, r.Input, r.LookAhead)

            def SetRuleData(r, d):
                (r.Input,) = d
                # Input omits the first glyph, hence the +1.
                (r.GlyphCount,) = (len(x) + 1 for x in d)

            def ChainSetRuleData(r, d):
                (r.Backtrack, r.Input, r.LookAhead) = d
                (
                    r.BacktrackGlyphCount,
                    r.InputGlyphCount,
                    r.LookAheadGlyphCount,
                ) = (len(d[0]), len(d[1]) + 1, len(d[2]))

        elif Format == 2:
            # Class-based contexts.
            Coverage = lambda r: r.Coverage
            ChainCoverage = lambda r: r.Coverage
            ContextData = lambda r: (r.ClassDef,)
            ChainContextData = lambda r: (
                r.BacktrackClassDef,
                r.InputClassDef,
                r.LookAheadClassDef,
            )

            def SetContextData(r, d):
                (r.ClassDef,) = d

            def SetChainContextData(r, d):
                (r.BacktrackClassDef, r.InputClassDef, r.LookAheadClassDef) = d

            RuleData = lambda r: (r.Class,)
            ChainRuleData = lambda r: (r.Backtrack, r.Input, r.LookAhead)

            def SetRuleData(r, d):
                (r.Class,) = d
                (r.GlyphCount,) = (len(x) + 1 for x in d)

            def ChainSetRuleData(r, d):
                (r.Backtrack, r.Input, r.LookAhead) = d
                (
                    r.BacktrackGlyphCount,
                    r.InputGlyphCount,
                    r.LookAheadGlyphCount,
                ) = (len(d[0]), len(d[1]) + 1, len(d[2]))

        elif Format == 3:
            # Coverage-based contexts: one coverage table per position.
            Coverage = lambda r: r.Coverage[0]
            ChainCoverage = lambda r: r.InputCoverage[0]
            ContextData = None
            ChainContextData = None
            SetContextData = None
            SetChainContextData = None
            RuleData = lambda r: r.Coverage
            ChainRuleData = lambda r: (
                r.BacktrackCoverage + r.InputCoverage + r.LookAheadCoverage
            )

            def SetRuleData(r, d):
                (r.Coverage,) = d
                (r.GlyphCount,) = (len(x) for x in d)

            def ChainSetRuleData(r, d):
                (r.BacktrackCoverage, r.InputCoverage, r.LookAheadCoverage) = d
                (
                    r.BacktrackGlyphCount,
                    r.InputGlyphCount,
                    r.LookAheadGlyphCount,
                ) = (len(x) for x in d)

        else:
            assert 0, "unknown format: %s" % Format

        # Publish the chain or non-chain variants under uniform names.
        if Chain:
            self.Coverage = ChainCoverage
            self.ContextData = ChainContextData
            self.SetContextData = SetChainContextData
            self.RuleData = ChainRuleData
            self.SetRuleData = ChainSetRuleData
        else:
            self.Coverage = Coverage
            self.ContextData = ContextData
            self.SetContextData = SetContextData
            self.RuleData = RuleData
            self.SetRuleData = SetRuleData

        if Format == 1:
            self.Rule = ChainTyp + "Rule"
            self.RuleCount = ChainTyp + "RuleCount"
            self.RuleSet = ChainTyp + "RuleSet"
            self.RuleSetCount = ChainTyp + "RuleSetCount"
            self.Intersect = lambda glyphs, c, r: [r] if r in glyphs else []
        elif Format == 2:
            self.Rule = ChainTyp + "ClassRule"
            self.RuleCount = ChainTyp + "ClassRuleCount"
            self.RuleSet = ChainTyp + "ClassSet"
            self.RuleSetCount = ChainTyp + "ClassSetCount"
            # Without a real class definition, only class 0 (everything not
            # otherwise classed) intersects.
            self.Intersect = lambda glyphs, c, r: (
                c.intersect_class(glyphs, r)
                if c
                else (set(glyphs) if r == 0 else set())
            )

            self.ClassDef = "InputClassDef" if Chain else "ClassDef"
            self.ClassDefIndex = 1 if Chain else 0
            self.Input = "Input" if Chain else "Class"
|
| 731 |
+
|
| 732 |
+
|
| 733 |
+
def parseLookupRecords(items, klassName, lookupMap=None):
    """Parse '<1-based position>, <lookup ref>' items into lookup records.

    ``klassName`` names the ot record class (e.g. 'SubstLookupRecord');
    lookup references are resolved through ``lookupMap`` via setReference
    and may be deferred.
    """
    recordClass = getattr(ot, klassName)
    records = []
    for item in items:
        rec = recordClass()
        fields = stripSplitComma(item)
        assert len(fields) == 2, fields
        position = int(fields[0])
        assert position > 0, position
        # Source positions are 1-based; SequenceIndex is 0-based.
        rec.SequenceIndex = position - 1
        setReference(mapLookup, lookupMap, fields[1], setattr, rec, "LookupListIndex")
        records.append(rec)
    return records
|
| 746 |
+
|
| 747 |
+
|
| 748 |
+
def makeClassDef(classDefs, font, klass=ot.Coverage):
    """Wrap a {glyph: class} dict in a table of type ``klass``; None if empty.

    NOTE(review): the default ``klass=ot.Coverage`` looks odd for a
    ClassDef factory — in-file callers always pass ``klass`` explicitly;
    confirm before relying on the default.
    """
    if not classDefs:
        return None
    table = klass()
    table.classDefs = dict(classDefs)
    return table
|
| 754 |
+
|
| 755 |
+
|
| 756 |
+
def parseClassDef(lines, font, klass=ot.ClassDef):
    """Parse a "class definition begin/end" section into a ClassDef table.

    Each line is "<glyph> <class index>"; duplicate glyphs are rejected.
    """
    mapping = {}
    with lines.between("class definition"):
        for line in lines:
            glyph = makeGlyph(line[0])
            assert glyph not in mapping, glyph
            mapping[glyph] = int(line[1])
    return makeClassDef(mapping, font, klass)
|
| 764 |
+
|
| 765 |
+
|
| 766 |
+
def makeCoverage(glyphs, font, klass=ot.Coverage):
    """Build a Coverage table for *glyphs*, ordered by the font's glyph IDs.

    Returns None for an empty glyph collection.  Sets are pre-sorted so the
    final glyph-ID sort is deterministic regardless of set iteration order.
    """
    if not glyphs:
        return None
    if isinstance(glyphs, set):
        glyphs = sorted(glyphs)
    coverage = klass()
    coverage.glyphs = sorted(set(glyphs), key=font.getGlyphID)
    return coverage
|
| 774 |
+
|
| 775 |
+
|
| 776 |
+
def parseCoverage(lines, font, klass=ot.Coverage):
    """Parse a "coverage definition begin/end" section into a Coverage table."""
    glyphs = []
    with lines.between("coverage definition"):
        glyphs = [makeGlyph(line[0]) for line in lines]
    return makeCoverage(glyphs, font, klass)
|
| 782 |
+
|
| 783 |
+
|
| 784 |
+
def bucketizeRules(self, c, rules, bucketKeys):
    """Group context rules by their first input glyph/class and attach the
    resulting rule sets to *self* in *bucketKeys* order.

    Keys with no rules get a None placeholder so rule-set indices stay
    aligned with the coverage/class order.
    """
    buckets = {}
    for seq, recs in rules:
        key = seq[c.InputIdx][0]
        # The first input element is implied by the bucket key, so drop it;
        # backtrack/lookahead sequences are kept whole.
        trimmed = tuple(
            s[1:] if i == c.InputIdx else s[0:] for i, s in enumerate(seq)
        )
        buckets.setdefault(key, []).append((trimmed, recs))

    rulesets = []
    for firstGlyph in bucketKeys:
        bucket = buckets.get(firstGlyph)
        if bucket is None:
            rulesets.append(None)
            continue
        ruleList = []
        for trimmed, recs in bucket:
            rule = getattr(ot, c.Rule)()
            c.SetRuleData(rule, trimmed)
            setattr(rule, c.Type + "Count", len(recs))
            setattr(rule, c.LookupRecord, recs)
            ruleList.append(rule)

        ruleset = getattr(ot, c.RuleSet)()
        setattr(ruleset, c.Rule, ruleList)
        setattr(ruleset, c.RuleCount, len(ruleList))
        rulesets.append(ruleset)

    setattr(self, c.RuleSet, rulesets)
    setattr(self, c.RuleSetCount, len(rulesets))
|
| 811 |
+
|
| 812 |
+
|
| 813 |
+
def parseContext(lines, font, Type, lookupMap=None):
    """Parse a contextual (or chained-contextual) subtable of the given
    *Type* ("ContextSubst", "ChainContextPos", ...).

    The subtable format is sniffed from the first keyword of the next line:
    "glyph" -> format 1, "...class" -> format 2, "...coverage" -> format 3.
    """
    self = getattr(ot, Type)()
    keyword = lines.peeks()[0].split()[0].lower()
    if keyword == "glyph":
        self.Format = 1
        log.debug("Parsing %s format %s", Type, self.Format)
        helper = ContextHelper(Type, self.Format)
        rules = []
        for line in lines:
            assert line[0].lower() == "glyph", line[0]
            # Pad optional trailing columns so the slices below are stable.
            while len(line) < 1 + helper.DataLen:
                line.append("")
            seq = tuple(
                makeGlyphs(stripSplitComma(i)) for i in line[1 : 1 + helper.DataLen]
            )
            recs = parseLookupRecords(
                line[1 + helper.DataLen :], helper.LookupRecord, lookupMap
            )
            rules.append((seq, recs))

        firstGlyphs = set(seq[helper.InputIdx][0] for seq, recs in rules)
        self.Coverage = makeCoverage(firstGlyphs, font)
        bucketizeRules(self, helper, rules, self.Coverage.glyphs)
    elif keyword.endswith("class"):
        self.Format = 2
        log.debug("Parsing %s format %s", Type, self.Format)
        helper = ContextHelper(Type, self.Format)
        classDefs = [None] * helper.DataLen
        while lines.peeks()[0].endswith("class definition begin"):
            typ = lines.peek()[0][: -len("class definition begin")].lower()
            idx, klass = {
                1: {
                    "": (0, ot.ClassDef),
                },
                3: {
                    "backtrack": (0, ot.BacktrackClassDef),
                    "": (1, ot.InputClassDef),
                    "lookahead": (2, ot.LookAheadClassDef),
                },
            }[helper.DataLen][typ]
            assert classDefs[idx] is None, idx
            classDefs[idx] = parseClassDef(lines, font, klass=klass)
        helper.SetContextData(self, classDefs)
        rules = []
        for line in lines:
            assert line[0].lower().startswith("class"), line[0]
            while len(line) < 1 + helper.DataLen:
                line.append("")
            seq = tuple(intSplitComma(i) for i in line[1 : 1 + helper.DataLen])
            recs = parseLookupRecords(
                line[1 + helper.DataLen :], helper.LookupRecord, lookupMap
            )
            rules.append((seq, recs))
        firstClasses = set(seq[helper.InputIdx][0] for seq, recs in rules)
        # Coverage lists every glyph whose input class starts some rule.
        firstGlyphs = set(
            g
            for g, cls in classDefs[helper.InputIdx].classDefs.items()
            if cls in firstClasses
        )
        self.Coverage = makeCoverage(firstGlyphs, font)
        bucketizeRules(self, helper, rules, range(max(firstClasses) + 1))
    elif keyword.endswith("coverage"):
        self.Format = 3
        log.debug("Parsing %s format %s", Type, self.Format)
        helper = ContextHelper(Type, self.Format)
        coverages = tuple([] for _ in range(helper.DataLen))
        while lines.peeks()[0].endswith("coverage definition begin"):
            typ = lines.peek()[0][: -len("coverage definition begin")].lower()
            idx, klass = {
                1: {
                    "": (0, ot.Coverage),
                },
                3: {
                    "backtrack": (0, ot.BacktrackCoverage),
                    "input": (1, ot.InputCoverage),
                    "lookahead": (2, ot.LookAheadCoverage),
                },
            }[helper.DataLen][typ]
            coverages[idx].append(parseCoverage(lines, font, klass=klass))
        helper.SetRuleData(self, coverages)
        # Format 3 has exactly one rule line left.
        lines = list(lines)
        assert len(lines) == 1
        line = lines[0]
        assert line[0].lower() == "coverage", line[0]
        recs = parseLookupRecords(line[1:], helper.LookupRecord, lookupMap)
        setattr(self, helper.Type + "Count", len(recs))
        setattr(self, helper.LookupRecord, recs)
    else:
        assert 0, keyword
    return self
|
| 895 |
+
|
| 896 |
+
|
| 897 |
+
def parseContextSubst(lines, font, lookupMap=None):
    """Parse a GSUB type 5 (contextual substitution) subtable."""
    return parseContext(lines, font, "ContextSubst", lookupMap=lookupMap)


def parseContextPos(lines, font, lookupMap=None):
    """Parse a GPOS type 7 (contextual positioning) subtable."""
    return parseContext(lines, font, "ContextPos", lookupMap=lookupMap)


def parseChainedSubst(lines, font, lookupMap=None):
    """Parse a GSUB type 6 (chained contextual substitution) subtable."""
    return parseContext(lines, font, "ChainContextSubst", lookupMap=lookupMap)


def parseChainedPos(lines, font, lookupMap=None):
    """Parse a GPOS type 8 (chained contextual positioning) subtable."""
    return parseContext(lines, font, "ChainContextPos", lookupMap=lookupMap)
|
| 911 |
+
|
| 912 |
+
|
| 913 |
+
def parseReverseChainedSubst(lines, font, _lookupMap=None):
    """Parse a GSUB type 8 (reverse chaining single substitution) subtable."""
    self = ot.ReverseChainSingleSubst()
    self.Format = 1
    coverages = ([], [])
    while lines.peeks()[0].endswith("coverage definition begin"):
        typ = lines.peek()[0][: -len("coverage definition begin")].lower()
        idx, klass = {
            "backtrack": (0, ot.BacktrackCoverage),
            "lookahead": (1, ot.LookAheadCoverage),
        }[typ]
        coverages[idx].append(parseCoverage(lines, font, klass=klass))
    self.BacktrackCoverage = coverages[0]
    self.BacktrackGlyphCount = len(self.BacktrackCoverage)
    self.LookAheadCoverage = coverages[1]
    self.LookAheadGlyphCount = len(self.LookAheadCoverage)
    # Remaining lines map each input glyph to its substitute.
    mapping = {}
    for line in lines:
        assert len(line) == 2, line
        src, dst = makeGlyphs(line)
        mapping[src] = dst
    self.Coverage = makeCoverage(set(mapping.keys()), font)
    self.Substitute = [mapping[g] for g in self.Coverage.glyphs]
    self.GlyphCount = len(self.Substitute)
    return self
|
| 937 |
+
|
| 938 |
+
|
| 939 |
+
def parseLookup(lines, tableTag, font, lookupMap=None):
    """Parse one "lookup <name> <type>" block into an ot.Lookup.

    Returns None for a lookup that ends up with no subtables (see
    https://github.com/fonttools/fonttools/issues/789).
    """
    line = lines.expect("lookup")
    _, name, typ = line
    log.debug("Parsing lookup type %s %s", typ, name)
    lookup = ot.Lookup()
    lookup.LookupFlag, filterset = parseLookupFlags(lines)
    if filterset is not None:
        lookup.MarkFilteringSet = filterset
    # Map the textual lookup type to its numeric type and subtable parser.
    dispatch = {
        "GSUB": {
            "single": (1, parseSingleSubst),
            "multiple": (2, parseMultiple),
            "alternate": (3, parseAlternate),
            "ligature": (4, parseLigature),
            "context": (5, parseContextSubst),
            "chained": (6, parseChainedSubst),
            "reversechained": (8, parseReverseChainedSubst),
        },
        "GPOS": {
            "single": (1, parseSinglePos),
            "pair": (2, parsePair),
            "kernset": (2, parseKernset),
            "cursive": (3, parseCursive),
            "mark to base": (4, parseMarkToBase),
            "mark to ligature": (5, parseMarkToLigature),
            "mark to mark": (6, parseMarkToMark),
            "context": (7, parseContextPos),
            "chained": (8, parseChainedPos),
        },
    }
    lookup.LookupType, parseLookupSubTable = dispatch[tableTag][typ]

    subtables = []
    with lines.until("lookup end"):
        while lines.peek():
            with lines.until(("% subtable", "subtable end")):
                while lines.peek():
                    subtable = parseLookupSubTable(lines, font, lookupMap)
                    assert lookup.LookupType == subtable.LookupType
                    subtables.append(subtable)
            if lines.peeks()[0] in ("% subtable", "subtable end"):
                next(lines)
    lines.expect("lookup end")

    lookup.SubTable = subtables
    lookup.SubTableCount = len(subtables)
    if not subtables:
        # Remove this return when the following is fixed:
        # https://github.com/fonttools/fonttools/issues/789
        return None
    return lookup
|
| 990 |
+
|
| 991 |
+
|
| 992 |
+
def parseGSUBGPOS(lines, font, tableTag):
    """Parse a full GSUB or GPOS table definition.

    Returns the corresponding ttLib table object.  Lookup and feature names
    may be referenced before they are defined, so references are resolved
    via DeferredMapping at the end of parsing.
    """
    assert tableTag in ("GSUB", "GPOS")
    log.debug("Parsing %s", tableTag)
    container = ttLib.getTableClass(tableTag)()
    lookupMap = DeferredMapping()
    featureMap = DeferredMapping()
    self = getattr(ot, tableTag)()
    self.Version = 0x00010000
    # Section keyword -> (attribute on self, parser).  The "lookup" entry is
    # special-cased below because lookups accumulate into a list.
    fields = {
        "script table begin": (
            "ScriptList",
            lambda lines: parseScriptList(lines, featureMap),
        ),
        "feature table begin": (
            "FeatureList",
            lambda lines: parseFeatureList(lines, lookupMap, featureMap),
        ),
        "lookup": ("LookupList", None),
    }
    for attr, parser in fields.values():
        setattr(self, attr, None)
    while lines.peek() is not None:
        typ = lines.peek()[0].lower()
        if typ not in fields:
            log.debug("Skipping %s", lines.peek())
            next(lines)
            continue
        attr, parser = fields[typ]
        if typ == "lookup":
            if self.LookupList is None:
                self.LookupList = ot.LookupList()
                self.LookupList.Lookup = []
            _, name, _ = lines.peek()
            lookup = parseLookup(lines, tableTag, font, lookupMap)
            if lookupMap is not None:
                assert name not in lookupMap, "Duplicate lookup name: %s" % name
                lookupMap[name] = len(self.LookupList.Lookup)
            else:
                # Nameless lookups must appear in index order.
                # BUGFIX: the old message used %d on the string name and read
                # the nonexistent self.Lookup, so a failing assert raised
                # TypeError/AttributeError instead of reporting the mismatch.
                assert int(name) == len(self.LookupList.Lookup), "%s %d" % (
                    name,
                    len(self.LookupList.Lookup),
                )
            self.LookupList.Lookup.append(lookup)
        else:
            assert getattr(self, attr) is None, attr
            setattr(self, attr, parser(lines))
    if self.LookupList:
        self.LookupList.LookupCount = len(self.LookupList.Lookup)
    if lookupMap is not None:
        lookupMap.applyDeferredMappings()
        if os.environ.get(LOOKUP_DEBUG_ENV_VAR):
            # Record lookup names in the Debg table for debugging tools.
            if "Debg" not in font:
                font["Debg"] = newTable("Debg")
                font["Debg"].data = {}
            debug = (
                font["Debg"]
                .data.setdefault(LOOKUP_DEBUG_INFO_KEY, {})
                .setdefault(tableTag, {})
            )
            for name, lookup in lookupMap.items():
                debug[str(lookup)] = ["", name, ""]

    featureMap.applyDeferredMappings()
    container.table = self
    return container
|
| 1057 |
+
|
| 1058 |
+
|
| 1059 |
+
def parseGSUB(lines, font):
    """Parse a GSUB table definition (thin wrapper over parseGSUBGPOS)."""
    return parseGSUBGPOS(lines, font, "GSUB")


def parseGPOS(lines, font):
    """Parse a GPOS table definition (thin wrapper over parseGSUBGPOS)."""
    return parseGSUBGPOS(lines, font, "GPOS")
|
| 1065 |
+
|
| 1066 |
+
|
| 1067 |
+
def parseAttachList(lines, font):
    """Parse a GDEF "attachment list" section into an AttachList table.

    Each line is "<glyph> <point index>..."; duplicate glyphs are rejected.
    """
    points = {}
    with lines.between("attachment list"):
        for line in lines:
            glyph = makeGlyph(line[0])
            assert glyph not in points, glyph
            points[glyph] = [int(v) for v in line[1:]]
    return otl.buildAttachList(points, font.getReverseGlyphMap())
|
| 1075 |
+
|
| 1076 |
+
|
| 1077 |
+
def parseCaretList(lines, font):
    """Parse a GDEF "carets" section into a LigCaretList table.

    Each line is "<glyph> <count> <caret>..."; the declared count must
    match the number of caret values given.
    """
    carets = {}
    with lines.between("carets"):
        for line in lines:
            glyph = makeGlyph(line[0])
            assert glyph not in carets, glyph
            declared = int(line[1])
            values = [int(v) for v in line[2:]]
            assert declared == len(values), line
            carets[glyph] = values
    return otl.buildLigCaretList(carets, {}, font.getReverseGlyphMap())
|
| 1088 |
+
|
| 1089 |
+
|
| 1090 |
+
def makeMarkFilteringSets(sets, font):
    """Build a MarkGlyphSetsDef table from {set index: [glyph, ...]}.

    Unused set indices below the maximum get a None coverage placeholder.
    """
    self = ot.MarkGlyphSetsDef()
    self.MarkSetTableFormat = 1
    self.MarkSetCount = 1 + max(sets.keys())
    self.Coverage = [None] * self.MarkSetCount
    for index, glyphs in sorted(sets.items()):
        self.Coverage[index] = makeCoverage(set(glyphs), font)
    return self
|
| 1098 |
+
|
| 1099 |
+
|
| 1100 |
+
def parseMarkFilteringSets(lines, font):
    """Parse a "set definition" section into a MarkGlyphSetsDef table.

    Each line is "<glyph> <set index>".
    """
    sets = {}
    with lines.between("set definition"):
        for line in lines:
            assert len(line) == 2, line
            glyph = makeGlyph(line[0])
            # TODO accept set names
            setIndex = int(line[1])
            sets.setdefault(setIndex, []).append(glyph)
    return makeMarkFilteringSets(sets, font)
|
| 1112 |
+
|
| 1113 |
+
|
| 1114 |
+
def parseGDEF(lines, font):
    """Parse a GDEF table definition into a ttLib GDEF table object."""
    log.debug("Parsing GDEF")
    container = ttLib.getTableClass("GDEF")()
    self = ot.GDEF()
    # Section keyword -> (attribute on self, parser).
    fields = {
        "class definition begin": (
            "GlyphClassDef",
            lambda lines, font: parseClassDef(lines, font, klass=ot.GlyphClassDef),
        ),
        "attachment list begin": ("AttachList", parseAttachList),
        "carets begin": ("LigCaretList", parseCaretList),
        "mark attachment class definition begin": (
            "MarkAttachClassDef",
            lambda lines, font: parseClassDef(lines, font, klass=ot.MarkAttachClassDef),
        ),
        "markfilter set definition begin": ("MarkGlyphSetsDef", parseMarkFilteringSets),
    }
    for attr, parser in fields.values():
        setattr(self, attr, None)
    while lines.peek() is not None:
        typ = lines.peek()[0].lower()
        if typ not in fields:
            log.debug("Skipping %s", typ)
            next(lines)
            continue
        attr, parser = fields[typ]
        assert getattr(self, attr) is None, attr
        setattr(self, attr, parser(lines, font))
    # Version 1.2 is required when mark glyph sets are present.
    self.Version = 0x00010000 if self.MarkGlyphSetsDef is None else 0x00010002
    container.table = self
    return container
|
| 1145 |
+
|
| 1146 |
+
|
| 1147 |
+
def parseCmap(lines, font):
    """Parse a cmap table definition into a ttLib cmap table object.

    Subtables must be numbered sequentially ("cmap subtable 0", ...), and
    each maps hexadecimal code points to glyph names until "end subtable".
    """
    container = ttLib.getTableClass("cmap")()
    log.debug("Parsing cmap")
    tables = []
    while lines.peek() is not None:
        lines.expect("cmap subtable %d" % len(tables))
        platId, encId, fmt, lang = (
            parseCmapId(lines, field)
            for field in ("platformID", "encodingID", "format", "language")
        )
        subtable = cmap_classes[fmt](fmt)
        subtable.platformID = platId
        subtable.platEncID = encId
        subtable.language = lang
        subtable.cmap = {}
        line = next(lines)
        while line[0] != "end subtable":
            # Code points are written in hex in the source file.
            subtable.cmap[int(line[0], 16)] = line[1]
            line = next(lines)
        tables.append(subtable)
    container.tableVersion = 0
    container.tables = tables
    return container
|
| 1170 |
+
|
| 1171 |
+
|
| 1172 |
+
def parseCmapId(lines, field):
    """Consume one "<field> <value>" line and return the value as an int."""
    record = next(lines)
    assert field == record[0]
    return int(record[1])
|
| 1176 |
+
|
| 1177 |
+
|
| 1178 |
+
def parseTable(lines, font, tableTag=None):
    """Sniff the table tag from the file header ("FontDame <tag>" or
    "Font Chef Table <tag>") and dispatch to the matching table parser.

    If *tableTag* is given it must agree with the sniffed tag.
    """
    log.debug("Parsing table")
    header = lines.peeks()
    words = header[0].split()
    tag = None
    if words[0] == "FontDame":
        tag = words[1]
    elif words[:3] == ["Font", "Chef", "Table"]:
        tag = words[3]
    if tag is not None:
        next(lines)
        tag = tag.ljust(4)
        if tableTag is None:
            tableTag = tag
        else:
            assert tableTag == tag, (tableTag, tag)

    assert (
        tableTag is not None
    ), "Don't know what table to parse and data doesn't specify"

    return {
        "GSUB": parseGSUB,
        "GPOS": parseGPOS,
        "GDEF": parseGDEF,
        "cmap": parseCmap,
    }[tableTag](lines, font)
|
| 1204 |
+
|
| 1205 |
+
|
| 1206 |
+
class Tokenizer(object):
    """Iterator over the logical lines of a FontDame layout text file.

    Each yielded line is a list of tab-separated, whitespace-stripped
    fields; blank lines and ``%`` comments (except the significant
    ``% subtable`` separator) are skipped.  A stack of "stopper" tags lets
    callers parse nested begin/end sections: iteration raises StopIteration,
    without consuming the line, when the next line starts with a stopper.
    """

    def __init__(self, f):
        # TODO BytesIO / StringIO as needed? also, figure out whether we work on bytes or unicode
        lines = iter(f)
        try:
            self.filename = f.name
        except AttributeError:
            # BUGFIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt etc.; only the missing attribute (e.g. on
            # StringIO objects) should be tolerated here.
            self.filename = None
        self.lines = iter(lines)
        self.line = ""
        self.lineno = 0
        self.stoppers = []
        self.buffer = None

    def __iter__(self):
        return self

    def _next_line(self):
        """Read one raw line: split on tabs, strip fields, drop trailing blanks."""
        self.lineno += 1
        line = self.line = next(self.lines)
        line = [s.strip() for s in line.split("\t")]
        if len(line) == 1 and not line[0]:
            del line[0]
        if line and not line[-1]:
            log.warning("trailing tab found on line %d: %s" % (self.lineno, self.line))
        while line and not line[-1]:
            del line[-1]
        return line

    def _next_nonempty(self):
        while True:
            line = self._next_line()
            # Skip comments and empty lines; "% subtable" is significant.
            if line and line[0] and (line[0][0] != "%" or line[0] == "% subtable"):
                return line

    def _next_buffered(self):
        if self.buffer:
            ret = self.buffer
            self.buffer = None
            return ret
        else:
            return self._next_nonempty()

    def __next__(self):
        line = self._next_buffered()
        if line[0].lower() in self.stoppers:
            # Push the stopper back so the section's owner can consume it.
            self.buffer = line
            raise StopIteration
        return line

    def next(self):
        # Python 2 compatibility alias.
        return self.__next__()

    def peek(self):
        """Return the next line without consuming it, or None at a stopper/EOF."""
        if not self.buffer:
            try:
                self.buffer = self._next_nonempty()
            except StopIteration:
                return None
        if self.buffer[0].lower() in self.stoppers:
            return None
        return self.buffer

    def peeks(self):
        """Like peek(), but returns ("",) instead of None for easy indexing."""
        ret = self.peek()
        return ret if ret is not None else ("",)

    @contextmanager
    def between(self, tag):
        """Scope iteration to a "<tag> begin" / "<tag> end" section."""
        start = tag + " begin"
        end = tag + " end"
        self.expectendswith(start)
        self.stoppers.append(end)
        yield
        del self.stoppers[-1]
        self.expect(tag + " end")

    @contextmanager
    def until(self, tags):
        """Scope iteration until any of *tags* is seen (tag is not consumed)."""
        if type(tags) is not tuple:
            tags = (tags,)
        self.stoppers.extend(tags)
        yield
        del self.stoppers[-len(tags) :]

    def expect(self, s):
        """Consume the next line, asserting its first field equals *s*."""
        line = next(self)
        tag = line[0].lower()
        assert tag == s, "Expected '%s', got '%s'" % (s, tag)
        return line

    def expectendswith(self, s):
        """Consume the next line, asserting its first field ends with *s*."""
        line = next(self)
        tag = line[0].lower()
        assert tag.endswith(s), "Expected '*%s', got '%s'" % (s, tag)
        return line
|
| 1303 |
+
|
| 1304 |
+
|
| 1305 |
+
def build(f, font, tableTag=None):
    """Convert a Monotype font layout file to an OpenType layout object

    A font object must be passed, but this may be a "dummy" font; it is only
    used for sorting glyph sets when making coverage tables and to hold the
    OpenType layout table while it is being built.

    Args:
        f: A file object.
        font (TTFont): A font object.
        tableTag (string): If provided, asserts that the file contains data for the
            given OpenType table.

    Returns:
        An object representing the table. (e.g. ``table_G_S_U_B_``)
    """
    return parseTable(Tokenizer(f), font, tableTag=tableTag)
|
| 1323 |
+
|
| 1324 |
+
|
| 1325 |
+
def main(args=None, font=None):
    """Convert a FontDame OTL file to TTX XML

    Writes XML output to stdout.

    Args:
        args: Command line arguments (``--font``, ``--table``, input files).
    """
    import sys
    from fontTools import configLogger
    from fontTools.misc.testTools import MockFont

    if args is None:
        args = sys.argv[1:]

    # configure the library logger (for >= WARNING)
    configLogger()
    # comment this out to enable debug messages from mtiLib's logger
    # log.setLevel(logging.DEBUG)

    import argparse

    parser = argparse.ArgumentParser(
        "fonttools mtiLib",
        description=main.__doc__,
    )

    parser.add_argument(
        "--font",
        "-f",
        metavar="FILE",
        dest="font",
        help="Input TTF files (used for glyph classes and sorting coverage tables)",
    )
    parser.add_argument(
        "--table",
        "-t",
        metavar="TABLE",
        dest="tableTag",
        help="Table to fill (sniffed from input file if not provided)",
    )
    parser.add_argument(
        "inputs", metavar="FILE", type=str, nargs="+", help="Input FontDame .txt files"
    )

    options = parser.parse_args(args)

    if font is None:
        font = ttLib.TTFont(options.font) if options.font else MockFont()

    for path in options.inputs:
        log.debug("Processing %s", path)
        with open(path, "rt", encoding="utf-8") as stream:
            table = build(stream, font, tableTag=options.tableTag)
        blob = table.compile(font)  # Make sure it compiles
        decompiled = table.__class__()
        decompiled.decompile(blob, font)  # Make sure it decompiles!

        # continue
        from fontTools.misc import xmlWriter

        tag = table.tableTag
        writer = xmlWriter.XMLWriter(sys.stdout)
        writer.begintag(tag)
        writer.newline()
        # table.toXML(writer, font)
        decompiled.toXML(writer, font)
        writer.endtag(tag)
        writer.newline()
| 1398 |
+
|
| 1399 |
+
# Script entry point: exit status comes from main().
if __name__ == "__main__":
    import sys

    sys.exit(main())
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/otlLib/__pycache__/error.cpython-310.pyc
ADDED
|
Binary file (706 Bytes). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/otlLib/error.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
class OpenTypeLibError(Exception):
    """Error raised by otlLib, optionally carrying a source location.

    When a location is given, str() prepends it to the message as
    "<location>: <message>".
    """

    def __init__(self, message, location):
        Exception.__init__(self, message)
        self.location = location

    def __str__(self):
        message = Exception.__str__(self)
        return f"{self.location}: {message}" if self.location else message
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/otlLib/maxContextCalc.py
ADDED
|
@@ -0,0 +1,96 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
__all__ = ["maxCtxFont"]
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def maxCtxFont(font):
    """Calculate the usMaxContext value for an entire font."""
    maxCtx = 0
    for tag in ("GSUB", "GPOS"):
        if tag not in font:
            continue
        table = font[tag].table
        if not table.LookupList:
            continue
        for lookup in table.LookupList.Lookup:
            for subtable in lookup.SubTable:
                maxCtx = maxCtxSubtable(maxCtx, tag, lookup.LookupType, subtable)
    return maxCtx
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def maxCtxSubtable(maxCtx, tag, lookupType, st):
    """Calculate usMaxContext based on a single lookup table (and an existing
    max value).
    """
    kind = (tag, lookupType)

    # single positioning, single / multiple / alternate substitution
    if kind == ("GPOS", 1) or (tag == "GSUB" and lookupType in (1, 2, 3)):
        return max(maxCtx, 1)

    # pair positioning
    if kind == ("GPOS", 2):
        return max(maxCtx, 2)

    # ligatures: context is the longest component sequence
    if kind == ("GSUB", 4):
        for ligatures in st.ligatures.values():
            for ligature in ligatures:
                maxCtx = max(maxCtx, ligature.CompCount)
        return maxCtx

    # context
    if kind in (("GPOS", 7), ("GSUB", 5)):
        return maxCtxContextualSubtable(maxCtx, st, "Pos" if tag == "GPOS" else "Sub")

    # chained context
    if kind in (("GPOS", 8), ("GSUB", 6)):
        return maxCtxContextualSubtable(
            maxCtx, st, "Pos" if tag == "GPOS" else "Sub", "Chain"
        )

    # extensions: recurse into the wrapped subtable
    if kind in (("GPOS", 9), ("GSUB", 7)):
        return maxCtxSubtable(maxCtx, tag, st.ExtensionLookupType, st.ExtSubTable)

    # reverse-chained context
    if kind == ("GSUB", 8):
        return maxCtxContextualRule(maxCtx, st, "Reverse")

    return maxCtx
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def maxCtxContextualSubtable(maxCtx, st, ruleType, chain=""):
    """Calculate usMaxContext based on a contextual feature subtable."""
    prefix = chain + ruleType
    if st.Format == 1:
        for ruleset in getattr(st, prefix + "RuleSet"):
            if ruleset is None:
                continue
            for rule in getattr(ruleset, prefix + "Rule"):
                if rule is not None:
                    maxCtx = maxCtxContextualRule(maxCtx, rule, chain)
    elif st.Format == 2:
        for ruleset in getattr(st, prefix + "ClassSet"):
            if ruleset is None:
                continue
            for rule in getattr(ruleset, prefix + "ClassRule"):
                if rule is not None:
                    maxCtx = maxCtxContextualRule(maxCtx, rule, chain)
    elif st.Format == 3:
        # Format 3 stores the context directly on the subtable.
        maxCtx = maxCtxContextualRule(maxCtx, st, chain)
    return maxCtx
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
def maxCtxContextualRule(maxCtx, st, chain):
    """Calculate usMaxContext based on a contextual feature rule."""
    if not chain:
        length = st.GlyphCount
    elif chain == "Reverse":
        # The single input glyph plus the lookahead sequence.
        length = 1 + st.LookAheadGlyphCount
    else:
        length = st.InputGlyphCount + st.LookAheadGlyphCount
    return max(maxCtx, length)
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/otlLib/optimize/__init__.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from argparse import RawTextHelpFormatter
|
| 2 |
+
from fontTools.otlLib.optimize.gpos import COMPRESSION_LEVEL, compact
|
| 3 |
+
from fontTools.ttLib import TTFont
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def main(args=None):
    """Optimize the layout tables of an existing font"""
    from argparse import ArgumentParser

    from fontTools import configLogger

    parser = ArgumentParser(
        prog="otlLib.optimize",
        description=main.__doc__,
        formatter_class=RawTextHelpFormatter,
    )
    parser.add_argument("font")
    parser.add_argument(
        "-o", metavar="OUTPUTFILE", dest="outfile", default=None, help="output file"
    )
    parser.add_argument(
        "--gpos-compression-level",
        help=COMPRESSION_LEVEL.help,
        default=COMPRESSION_LEVEL.default,
        choices=list(range(10)),
        type=int,
    )
    verbosity = parser.add_mutually_exclusive_group(required=False)
    verbosity.add_argument(
        "-v", "--verbose", action="store_true", help="Run more verbosely."
    )
    verbosity.add_argument(
        "-q", "--quiet", action="store_true", help="Turn verbosity off."
    )
    options = parser.parse_args(args)

    # Verbosity flags map to DEBUG / ERROR; the default is INFO.
    if options.verbose:
        level = "DEBUG"
    elif options.quiet:
        level = "ERROR"
    else:
        level = "INFO"
    configLogger(level=level)

    font = TTFont(options.font)
    compact(font, options.gpos_compression_level)
    font.save(options.outfile or options.font)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
# Script entry point: with arguments run the optimizer, otherwise run the
# module doctests.
if __name__ == "__main__":
    import sys

    if len(sys.argv) > 1:
        sys.exit(main())
    import doctest

    sys.exit(doctest.testmod().failed)
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/otlLib/optimize/__main__.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
from fontTools.otlLib.optimize import main


# Allow running as "python -m fontTools.otlLib.optimize".
if __name__ == "__main__":
    sys.exit(main())
|
evalkit_tf437/lib/python3.10/site-packages/fontTools/otlLib/optimize/__pycache__/__main__.cpython-310.pyc
ADDED
|
Binary file (309 Bytes). View file
|
|
|